"""
********************************************************************************
post_processing
********************************************************************************
Polyline simplification
=======================
.. autosummary::
:toctree: generated/
:nosignatures:
simplify_paths_rdp
Sorting
=======
.. autosummary::
:toctree: generated/
:nosignatures:
seams_align
seams_smooth
sort_into_vertical_layers
reorder_vertical_layers
sort_paths_minimum_travel_time
zig_zag_open_paths
Orienting
=========
.. autosummary::
:toctree: generated/
:nosignatures:
unify_paths_orientation
Additional
==========
.. autosummary::
:toctree: generated/
:nosignatures:
generate_brim
generate_raft
spiralize_contours
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Polyline simplification
from .simplify_paths_rdp import * # noqa: F401 E402 F403
# Sorting
from .seams_align import * # noqa: F401 E402 F403
from .seams_smooth import * # noqa: F401 E402 F403
from .sort_into_vertical_layers import * # noqa: F401 E402 F403
from .reorder_vertical_layers import * # noqa: F401 E402 F403
from .sort_paths_minimum_travel_time import * # noqa: F401 E402 F403
# Orienting
from .unify_paths_orientation import * # noqa: F401 E402 F403
# Additional
from .generate_brim import * # noqa: F401 E402 F403
from .generate_raft import * # noqa: F401 E402 F403
from .spiralize_contours import * # noqa: F401 E402 F403
from .zig_zag_open_paths import * # noqa: F401 E402 F403
__all__ = [name for name in dir() if not name.startswith('_')]
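# A minimal post-processing sketch (illustrative only; check the
# compas_slicer docs for the exact signatures before relying on this):
#
#   from compas_slicer.post_processing import simplify_paths_rdp, seams_smooth
#   simplify_paths_rdp(slicer, threshold=1.0)   # fewer points per polyline
#   seams_smooth(slicer, smooth_distance=10)    # clean up seams on closed paths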
|
python
|
#!/usr/bin/env python2.7
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import json
import logging
import os
import pprint
import sys
import time
LOG = logging.getLogger()
LOG.setLevel(logging.DEBUG) # For now, let Handlers control the levels.
def system(call, checked=False):
LOG.info(call)
rc = os.system(call)
msg = '{} <- {!r}'.format(rc, call)
LOG.debug(msg)
if checked and rc:
raise Exception(msg)
return rc
def touch(fname):
with open(fname, 'a'):
os.utime(fname, None)
def foo(srtc):
LOG.info('In foo')
outputs = srtc['output_files']
options = srtc['options']
    print('options:{}'.format(pprint.pformat(options)))
uows = options['snafu.task_options.uows']
with open(outputs[0], 'w') as stream:
data = ['FOO{}'.format(i) for i in range(uows)]
json_txt = json.dumps(data, indent=2)
stream.write(json_txt)
def bar(srtc):
LOG.info('In bar')
outputs = srtc['output_files']
touch(outputs[0])
def fubar(srtc):
LOG.info('In fubar')
inputs = srtc['input_files']
outputs = srtc['output_files']
with open(inputs[0]) as stream:
data = json.loads(stream.read())
with open(outputs[0], 'w') as stream:
stream.write(json.dumps(data))
def scatter_fubar(srtc):
LOG.info('In scatter_fubar')
inputs = srtc['input_files']
outputs = srtc['output_files']
max_nchunks = srtc['max_nchunks']
#chunk_keys = srtc['chunk_keys']
from . import scatter_json_list as mod
mod.run('scatter_fubar', max_nchunks, inputs[0], outputs[0])
def scatter_json_list_plus_txt(srtc):
LOG.info('In scatter_json_list_plus_txt: {}'.format(repr(srtc)))
inputs = srtc['input_files']
outputs = srtc['output_files']
max_nchunks = srtc['max_nchunks']
tcid = srtc['tool_contract_id']
basename = os.path.splitext(tcid)[1][1:]
#chunk_keys = srtc['chunk_keys']
from . import scatter_json_list_plus_txt as mod
mod.run(basename, max_nchunks, inputs[0], inputs[1], outputs[0])
def gather_json_list(srtc):
LOG.info('In gather_json')
inputs = srtc['input_files']
outputs = srtc['output_files']
chunk_key = srtc['chunk_key']
chunk_input_json_fn = inputs[0]
output_fn = outputs[0]
from . import gather_json_list as mod
mod.run(chunk_key, chunk_input_json_fn, output_fn)
def run_rtc(args):
setup_logging(args)
LOG.info('sys.executable={!r}'.format(sys.executable))
LOG.info('Parsed args (after logging setup): {!r}'.format(vars(args)))
LOG.info('rtc_path: {!r}'.format(args.rtc_path))
rtc_path = args.rtc_path
    with open(args.rtc_path) as stream:
        rtc = json.load(stream)
LOG.info('rtc: {!s}'.format(pprint.pformat(rtc)))
srtc = rtc['resolved_tool_contract']
tcid = srtc['tool_contract_id']
options = srtc['options']
log_level = srtc['log_level']
input_files = srtc['input_files']
output_files = srtc['output_files']
nproc = srtc['nproc']
#resources = srtc['resources']
task_func = {
'foo': foo,
'bar': bar,
'task_run_fubar_jobs': fubar,
'scatter_fubar': scatter_fubar,
'gather_fubar': gather_json_list,
'task_falcon0_dazzler_tan_apply_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_dazzler_daligner_apply_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_dazzler_lamerge_apply_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_run_daligner_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_run_las_merge_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_run_cns_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon1_run_daligner_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon1_run_las_merge_jobs_scatter': scatter_json_list_plus_txt,
'task_falcon0_dazzler_tan_apply_jobs_gather': gather_json_list,
'task_falcon0_dazzler_daligner_apply_jobs_gather': gather_json_list,
'task_falcon0_dazzler_lamerge_apply_jobs_gather': gather_json_list,
'task_falcon0_run_daligner_jobs_gather': gather_json_list,
'task_falcon0_run_las_merge_jobs_gather': gather_json_list,
'task_falcon0_run_cns_jobs_gather': gather_json_list,
'task_falcon1_run_daligner_jobs_gather': gather_json_list,
'task_falcon1_run_las_merge_jobs_gather': gather_json_list,
}
func_name = os.path.splitext(tcid)[1][1:]
func = task_func[func_name]
func(srtc)
def emit_one(args):
pass
def emit_all(args):
pass
def setup_logging(args):
handler = get_logging_handler(args)
LOG.addHandler(handler)
try:
import logging_tree
print('logging_tree:')
logging_tree.printout()
except ImportError:
pass
del_logging_flags(args)
def get_logging_handler(args):
"""Return new logging Handler.
Also, remove related flags from argparse args.
"""
fmt = '[%(levelname)s]%(message)s'
    log_level = args.log_level
if args.verbose:
log_level = 'INFO'
if args.quiet:
log_level = 'CRITICAL'
if args.debug:
log_level = 'DEBUG'
fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(fmt=fmt)
logging.Formatter.converter = time.gmtime
if args.log_file:
        handler = logging.FileHandler(args.log_file, mode='a')
else:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
handler.setLevel(log_level)
return handler
def add_logging_flags(parser):
"""
--log-file LOG_FILE Write the log to file. Default(None) will write to
stdout.
--log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
Set log level (default: INFO)
"""
parser.add_argument('--log-file',
help='Write the log to file. By default, write to stdout.')
parser.add_argument('--log-level',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='INFO',
help='Set log level.')
    parser.add_argument('--verbose', '-v', action='store_true',
            help='Set the verbosity level. (Only partially supported for now.)')
    parser.add_argument('--quiet', action='store_true',
            help='Alias for setting log level to CRITICAL')
    parser.add_argument('--debug', action='store_true',
            help='Alias for setting log level to DEBUG')
def del_logging_flags(args):
delattr(args, 'log_file')
delattr(args, 'log_level')
delattr(args, 'verbose')
delattr(args, 'quiet')
delattr(args, 'debug')
class HelpF(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
pass
def main(argv=sys.argv):
description = 'Multi-quick-tool-runner for pbsmrtpipe tasks'
epilog = 'Real tool should be inferred from the resolved_tool_contract->tool_contract_id field.'
parser = argparse.ArgumentParser(
description=description, epilog=epilog,
formatter_class=HelpF,
)
parser.add_argument('--version',
action='version', version='0.0.0',
help="show program's version number and exit"
)
subparsers = parser.add_subparsers(
help='sub-command help')
parser_run = subparsers.add_parser('run-rtc',
formatter_class=HelpF)
parser_emit_one = subparsers.add_parser('emit-tool-contract',
formatter_class=HelpF)
parser_emit_all = subparsers.add_parser('emit-tool-contracts',
formatter_class=HelpF)
parser_run.set_defaults(func=run_rtc)
parser_emit_one.set_defaults(func=emit_one)
parser_emit_all.set_defaults(func=emit_all)
parser_run.add_argument('rtc_path',
help='Path to resolved tool contract')
parser_emit_one.add_argument('tc_id',
help='Tool Contract Id')
parser_emit_all.add_argument('--output-dir', '-o',
default=os.getcwd(),
help='Emit all Tool Contracts to output directory')
add_logging_flags(parser_run)
args = parser.parse_args(argv[1:])
args.func(args)
if __name__ == "__main__":
main()
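# For reference, a hypothetical minimal RTC file for the 'foo' task, showing
# only the fields run_rtc() actually reads (a real pbsmrtpipe RTC carries
# more metadata):
#
# {
#   "resolved_tool_contract": {
#     "tool_contract_id": "pbsmrtpipe.tasks.foo",
#     "options": {"snafu.task_options.uows": 3},
#     "log_level": "INFO",
#     "input_files": [],
#     "output_files": ["foo_out.json"],
#     "nproc": 1
#   }
# }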
|
python
|
import json
import asyncio
from os import environ
from functools import partial
from aiohttp import ClientSession, ClientConnectionError
from pyee import AsyncIOEventEmitter
from aiohttp_sse_client.client import EventSource
DEFAULT_STREAM_URL = 'https://stream.flowdock.com/flows'
__all__ = ["EventStream"]
class EventStream(AsyncIOEventEmitter):
def __init__(self, auth, flows, url=None, session=None, params=None, loop=None):
super().__init__(loop or asyncio.get_event_loop())
self._evt = None
self.auth = auth
self.flows = flows
self.params = params or dict()
self.session = session or ClientSession()
self.url = url or environ.get("FLOWDOCK_STREAM_URL", DEFAULT_STREAM_URL)
async def connect(self, retry=3):
if self._evt is not None:
return
self._evt = EventSource(self.url, session=self.session,
timeout=-1,
on_open=partial(self.emit, 'connected'),
on_error=partial(self.emit, 'error'),
**self._options())
retry = 0 if retry < 0 else retry
await self._evt.connect(retry)
async def _process_data(event_source, emit, loop):
try:
async for evt in event_source:
emit("rawdata", evt)
msg = await loop.run_in_executor(None, json.loads, evt.data)
emit("message", msg)
except ClientConnectionError as e:
emit("disconnected", e)
except Exception as e:
emit("clientError", e)
coro = _process_data(self._evt, self.emit, self._loop)
self._loop.create_task(coro)
async def end(self):
if self._evt is not None:
await self._evt.close()
self._evt = None
def _options(self):
qs = dict(filter=",".join(self.flows))
qs.update(self.params)
options = {
"params": qs,
"headers": {
"Authorization": self.auth
}
}
return options
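# A rough usage sketch (flow name and token are hypothetical); EventStream
# re-emits each SSE event as "rawdata" and its JSON-decoded body as "message".
if __name__ == "__main__":
    async def _demo():
        stream = EventStream("Basic dXNlcjpwYXNz", ["acme/main"])
        stream.on("message", lambda msg: print("message:", msg))
        stream.on("disconnected", lambda exc: print("disconnected:", exc))
        await stream.connect(retry=3)
        await asyncio.sleep(30)
        await stream.end()
    asyncio.run(_demo())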
|
python
|
"""
File: My_drawing.py
Name:Elsa
----------------------
TODO:
"""
from campy.graphics.gobjects import GOval, GRect, GPolygon, GLabel
from campy.graphics.gwindow import GWindow
def main():
"""
TODO:
This figure uses campy module to demonstrate personality.
A lot of faiths hold by people, just like the shape of circles or triangles,
while eventually others can only see the polygon.
"""
window=GWindow(600,600)
# color of background
rect=GRect(800,800)
rect.filled=True
rect.fill_color='lightgrey'
window.add(rect)
# polygon, circle ,rect and triangle with different colors
polygon1=GPolygon()
polygon1.add_vertex((550, 590))
polygon1.add_vertex((570, 360))
polygon1.add_vertex((100, 60))
polygon1.filled=True
polygon1.fill_color='greenyellow'
window.add(polygon1)
rect1=GRect(335,335,x=135,y=150)
rect1.filled=True
rect1.fill_color='sage'
rect2=GRect(370,370,x=120,y=135)
rect2.filled=True
rect2.fill_color='magenta'
rect3=GRect(400,400,x=105,y=120)
rect3.filled=True
rect3.fill_color='purple'
rect4=GRect(440,440,x=85,y=100)
rect4.filled=True
rect4.fill_color='peachpuff'
window.add(rect4)
window.add(rect3)
window.add(rect2)
window.add(rect1)
circle5=GOval(265,265,x=170,y=185)
circle5.filled=True
circle5.fill_color='lightsage'
circle6=GOval(285,285,x=160,y=175)
circle6.filled=True
circle6.fill_color='tan'
circle7=GOval(305,305,x=150,y=165)
circle7.filled=True
circle7.fill_color='midnightblue'
circle8=GOval(325,325,x=140,y=155)
circle8.filled=True
circle8.fill_color='powderblue'
window.add(circle8)
window.add(circle7)
window.add(circle6)
window.add(circle5)
triangle1=GPolygon()
triangle1.add_vertex((300,230))
triangle1.add_vertex((225,340))
triangle1.add_vertex((375,340))
triangle2=GPolygon()
triangle2.add_vertex((300,215))
triangle2.add_vertex((210,350))
triangle2.add_vertex((390,350))
triangle1.filled=True
triangle1.fill_color='pink'
triangle2.filled=True
triangle2.fill_color='lightgrey'
triangle3=GPolygon()
triangle3.add_vertex((300,200))
triangle3.add_vertex((195,360))
triangle3.add_vertex((405,360))
triangle4=GPolygon()
triangle4.add_vertex((300,185))
triangle4.add_vertex((180,370))
triangle4.add_vertex((420,370))
triangle3.filled=True
triangle3.fill_color='linen'
triangle4.filled=True
triangle4.fill_color='yellow'
window.add(triangle4)
window.add(triangle3)
window.add(triangle2)
window.add(triangle1)
circle1=GOval(20,20,x=290,y=290)
circle1.filled=True
circle1.fill_color='aquamarine'
circle2=GOval(40,40,x=280,y=280)
circle2.filled=True
circle2.fill_color='aqua'
circle3=GOval(60,60,x=270,y=270)
circle3.filled=True
circle3.fill_color='darkblue'
circle4=GOval(80,80,x=260,y=260)
circle4.filled=True
circle4.fill_color='blueviolet'
window.add(circle4)
window.add(circle3)
window.add(circle2)
window.add(circle1)
polygon=GPolygon()
polygon.add_vertex((100, 60))
polygon.add_vertex((50,100))
polygon.add_vertex((40,180))
polygon.add_vertex((20,400))
polygon.add_vertex((30,550))
polygon.add_vertex((180,580))
polygon.add_vertex((400, 550))
polygon.add_vertex((550, 590))
polygon.filled=True
polygon.fill_color='salmon'
window.add(polygon)
# logo
sc101=GLabel('SC101-2020.Nov')
sc101.font='Courier-15-bold-italic'
window.add(sc101,0,window.height-sc101.height+20)
if __name__ == '__main__':
main()
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import json
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
SCRAMBLED = u'да'
class ReferenceBase(object):
def __init__(self, url):
self.url = url
self.data = {}
self.session = requests.session()
raw_data = self.request(self.url)
for c in raw_data:
mcast, sid, pids, crypt = c["SCR_VYHODNAYA_GRUPPA"], c["SID_TRSC"], c["REQUIRED_PIDS"], c["SHIFROVANIE"]
#print mcast, sid, crypt, crypt.strip() == SCRAMBLED
if mcast not in self.data:
self.data[mcast] = {sid: {"pids": pids.split(",") if pids else [],
"crypt": crypt.strip() == SCRAMBLED}}
else:
if sid not in self.data[mcast]:
self.data[mcast].update({sid: {"pids": pids.split(",") if pids else [],
"crypt": crypt.strip() == SCRAMBLED}})
def request(self, url):
request = self.session.get(url, verify=False)
        data = request.text.encode("utf-8")
        # the endpoint returns bracket-wrapped, comma-separated JSON objects;
        # strip the brackets, then parse one object per '},' boundary below
        data = data[1:-1]
result = []
for _ in range(data.count('}')):
index = data.find('}')
if index == -1:
break
part = data[:index+1]
result += [json.loads(part)]
data = data[index+2:]
return result
def check(self, sencore_tss):
for ts_name in sencore_tss:
ts_data = sencore_tss[ts_name]
try:
ts_mcast = ts_data["dst_addr"].split(":")[0]
except Exception as why:
logging.exception(why)
continue
for sid in ts_data["services"]:
if sid == "count":
continue
reference = self.data.get(
ts_mcast, {}
).get(
str(sid), {}
)
if reference:
reference_sid_s = set(reference["pids"])
sencore_sid_s = set(map(lambda o: str(o), ts_data["services"][sid]["pids"].keys()))
diff = list(reference_sid_s.difference(sencore_sid_s))
sencore_tss[ts_name]["services"][sid]["pids_ok"] = ",".join(diff) if diff else "OK"
crypt_ok = reference["crypt"] == sencore_tss[ts_name]["services"][sid]["scrambled"]
if crypt_ok:
sencore_tss[ts_name]["services"][sid]["scrambled_ok"] = 0
else:
sencore_tss[ts_name]["services"][sid]["scrambled_ok"] = 1
else:
sencore_tss[ts_name]["services"][sid]["pids_ok"] = "REFERENCE_DOES_NOT_EXIST"
sencore_tss[ts_name]["services"][sid]["scrambled_ok"] = "REFERENCE_DOES_NOT_EXIST"
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import configparser # needed only for the exception types ?!
from ConfigParserExtended import ConfigParserExtended
import codecs
import subprocess # needed for aplay call
import os,sys
from time import sleep
from mpd import MPDClient
# get absolute path of this script
dir_path = os.path.dirname(os.path.realpath(__file__))
defaultconfigFilePath = os.path.join(dir_path,'./phoniebox.conf')
# TODO: externalize helper functions for the package. How?
def is_int(s):
""" return True if string is an int """
try:
int(s)
return True
except ValueError:
return False
def str2bool(s):
""" convert string to a python boolean """
return s.lower() in ("yes", "true", "t", "1")
def str2num(s):
""" convert string to an int or a float """
try:
return int(s)
except ValueError:
return float(s)
def find_modified_files(path,since):
modified_files = []
for root, dirs, files in os.walk(path):
for basename in files:
filename = os.path.join(path, basename)
status = os.stat(filename)
if status.st_mtime > since:
modified_files.append(filename)
return modified_files
def file_modified(filename,since):
if os.stat(filename).st_mtime > since:
return True
else:
return False
class Phoniebox(object):
def __init__(self,configFilePath=defaultconfigFilePath):
print("Using configuration file {}".format(configFilePath))
self.read_config(configFilePath)
# read cardAssignments from given card assignments file
        self.cardAssignments = self.read_cardAssignments()
        if self.get_setting("phoniebox", "translate_legacy_cardassignments", "bool"):
self.log("Translating legacy cardAssignment config from folder.conf files.",3)
legacy_cardAssignments = self.translate_legacy_cardAssignments()
self.update_cardAssignments(legacy_cardAssignments)
def log(self,msg,level=3):
""" level based logging to stdout """
log_level_map = {0:None,1:"error",2:"warning",3:"info",4:"extended",5:"debug"}
log_level = int(self.get_setting("phoniebox","log_level"))
if log_level >= level and log_level != -1:
print("{}: {}".format(log_level_map[level].upper(),msg))
def mpd_init_connection(self):
""" connect to mpd """
host = self.get_setting("mpd","host")
if host == -1:
host = "localhost"
port = self.get_setting("mpd","port")
if port == -1:
port = 6600
timeout = self.get_setting("mpd","timeout")
if timeout == -1:
timeout = 3
self.client = MPDClient()
self.client.host = host
self.client.port = port
self.client.timeout = timeout
#ret = self.mpd_connect_timeout()
if self.mpd_connect_timeout() != 0:
sys.exit()
else:
self.log("connected to MPD with settings host = {}, port = {}, timeout = {}".format(host,port,timeout),3)
def mpd_connect_timeout(self):
""" establishes the connection to MPD when disconnected """
        success = False
        runtime = 0
        try:
            self.client.disconnect()
        except Exception:
            pass
        while not success and runtime <= self.client.timeout:
            try:
                self.client.connect(self.client.host, self.client.port)
                success = True
                self.log("Connected to MPD at {} on port {}.".format(self.client.host, self.client.port), 5)
                return 0
            except Exception:
                self.log("Could not connect to MPD, retrying.", 5)
                sleep(0.2)
                runtime += 0.2
if runtime >= self.client.timeout:
self.log("Could not connect to MPD for {}s, giving up.".format(self.client.timeout),2)
return 1
def do_second_swipe(self):
""" react to the second swipe of the same card according to settings"""
second_swipe_map = { 'default': self.do_restart_playlist,
'restart': self.do_restart_playlist,
'restart_track':self.do_restart_track,
'stop': self.do_stop,
'pause': self.do_toggle,
'noaudioplay': self.do_pass,
'skipnext': self.do_next,
}
setting_key = "second_swipe"
map_key = self.config.get("phoniebox",setting_key)
try:
second_swipe_map[map_key]()
except KeyError as e:
self.log("Unknown setting \"{} = {}\", using \"{} = default\".".format(setting_key,map_key,setting_key),5)
second_swipe_map['default']()
def do_restart_playlist(self):
""" restart the same playlist from the beginning """
# TODO: Any reason not to just start the first item in the current playlist?
self.mpd_connect_timeout()
self.set_mpd_playmode(self.lastplayedID)
self.play_mpd(self.get_cardsetting(self.lastplayedID,"uri"))
def do_restart_track(self):
""" restart currently playing track """
self.mpd_connect_timeout()
mpd_status = self.client.status()
self.set_mpd_playmode(self.lastplayedID)
# restart current track
self.client.play(mpd_status['song'])
def do_start_playlist(self,cardid):
""" restart the same playlist, eventually resume """
if self.get_cardsetting(self.lastplayedID,"resume"):
self.resume(self.lastplayedID,"save")
self.mpd_connect_timeout()
self.set_mpd_playmode(cardid)
self.play_mpd(self.get_cardsetting(cardid,"uri"))
if self.get_cardsetting(cardid,"resume"):
self.resume(cardid,"resume")
self.lastplayedID = cardid
def do_toggle(self):
""" toggle play/pause """
self.mpd_connect_timeout()
status = self.client.status()
if status['state'] == "play":
self.client.pause()
else:
self.client.play()
def do_pass(self):
""" do nothing (on second swipe with noaudioplay) """
pass
def do_next(self):
""" skip to next track or restart playlist if stopped (on second swipe with noaudioplay) """
self.mpd_connect_timeout()
status = self.client.status()
# start playlist if in stop state or there is only one song in the playlist (virtually loop)
if (status["state"] == "stop") or (status["playlistlength"] == "1"):
self.do_restart_playlist()
else:
self.client.next()
def do_stop(self):
""" do nothing (on second swipe with noaudioplay) """
self.mpd_connect_timeout()
self.client.stop()
def play_alsa(self,audiofile):
""" pause mpd and play file on alsa player """
self.mpd_connect_timeout()
self.client.pause()
        # TODO: use the standard audio device or set it via phoniebox.conf
        # (the JustBoom-specific call below is kept commented out; running both
        # calls back to back would play the file twice)
        # subprocess.call(["aplay -q -Dsysdefault:CARD=sndrpijustboomd " + audiofile], shell=True)
        subprocess.call(["aplay -q -Dsysdefault " + audiofile], shell=True)
def play_mpd(self,uri):
""" play uri in mpd """
self.mpd_connect_timeout()
self.client.clear()
self.client.add(uri)
self.client.play()
self.log("phoniebox: playing {}".format(uri.encode('utf-8')),3)
# TODO: is there a better way to check for "value not present" than to return -1?
def get_setting(self,section,key,opt_type="string"):
""" get a setting from configFile file or cardAssignmentsFile
if not present, return -1
"""
try:
num = str2num(section)
parser = self.cardAssignments
except ValueError:
parser = self.config
try:
opt = parser.get(section,key)
except configparser.NoOptionError:
print("No option {} in section {}".format(key,section))
return -1
except configparser.NoSectionError:
print("No section {}".format(section))
return -1
if "bool" in opt_type.lower():
return str2bool(opt)
else:
try:
return str2num(opt)
except ValueError:
return opt
def get_cardsetting(self,cardid,key,opt_type="string"):
""" catches Errors """
return self.get_setting(cardid,key,opt_type)
def mpd_init_settings(self):
""" set initial mpd state:
max_volume
initial_volume """
mpd_status = self.client.status()
max_volume = self.get_setting("phoniebox","max_volume")
init_volume = self.get_setting("phoniebox","init_volume")
if max_volume == -1:
max_volume = 100 # the absolute max_volume is 100%
if init_volume == -1:
init_volume = 0 # to be able to compare
if max_volume < init_volume:
self.log("init_volume cannot exceed max_volume.",2)
init_volume = max_volume # do not exceed max_volume
        # MPD reports volume as a string
        if int(mpd_status["volume"]) > max_volume:
self.client.setvol(init_volume)
def set_mpd_playmode(self,cardid):
""" set playmode in mpd according to card settings """
playmode_defaults_map = {"repeat":0,"random":0,"single":0,"consume":0}
set_playmode_map = { "repeat":self.client.repeat,
"random":self.client.random,
"single":self.client.single,
"consume":self.client.consume }
for key in set_playmode_map.keys():
# option is set if config file contains "option = 1" or just "option" without value.
playmode_setting = self.get_cardsetting(cardid,key)
if playmode_setting == -1 or playmode_setting == 1:
playmode_setting = 1
else:
playmode_setting = playmode_defaults_map[key]
# set value
set_playmode_map[key](playmode_setting)
self.log("setting mpd {} = {}".format(key,playmode_setting),5)
def resume(self,cardid,action="resume"):
""" seek to saved position if resume is activated """
self.mpd_connect_timeout()
mpd_status = self.client.status()
print(mpd_status)
if action in ["resume","restore"]:
opt_resume = self.get_cardsetting(cardid,"resume")
if opt_resume == -1 or opt_resume == 1:
resume_elapsed = self.get_cardsetting(cardid,"resume_elapsed")
resume_song = self.get_cardsetting(cardid,"resume_song")
if resume_song == -1:
resume_song = 0
if resume_elapsed != -1 and resume_elapsed != 0:
self.log("{}: resume song {} at time {}s".format(cardid,
self.get_cardsetting(cardid,"resume_song"),
self.get_cardsetting(cardid,"resume_elapsed")),5)
self.client.seek(resume_song,resume_elapsed)
elif action in ["save","store"]:
try:
self.log("{}: save state, song {} at time {}s".format(cardid,
mpd_status["song"],mpd_status["elapsed"]),5)
self.cardAssignments.set(cardid,"resume_elapsed",
mpd_status["elapsed"])
self.cardAssignments.set(cardid,"resume_song",
mpd_status["song"])
except KeyError as e:
print("KeyError: {}".format(e))
except ValueError as e:
print("ValueError: {}".format(e))
def read_cardAssignments(self):
card_assignments_file = self.config.get("phoniebox","card_assignments_file")
parser = ConfigParserExtended(allow_no_value=True)
dataset = parser.read(card_assignments_file)
if len(dataset) != 1:
raise ValueError("Config file {} not found!".format(card_assignments_file))
return parser
def update_cardAssignments(self,static_cardAssignments):
"""card_assignments_file = self.config.get("phoniebox","card_assignments_file")
parser = ConfigParserExtended(allow_no_value=True)
dataset = parser.read(card_assignments_file)
if len(dataset) != 1:
raise ValueError("Config file {} not found!".format(card_assignments_file))
# if cardAssignments is still empty, store new cardAssignments directly
# otherwise compare new values with old values and update only certain values
if hasattr(self, 'cardAssignments'):
self.debug("cardAssignments already set, updating data in memory with new data from file {}".format(card_assignments_file))
static_cardAssignments = parser"""
self.log("Updating changes in cardAssignments from disk.",3)
keep_cardsettings = ["resume_song","resume_elapsed"]
common_sections = list(set(static_cardAssignments.sections()).intersection(self.cardAssignments.sections()))
for section in common_sections:
for option in keep_cardsettings:
if self.cardAssignments.has_option(section,option):
value = self.cardAssignments.get(section,option)
static_cardAssignments.set(section,option,value)
self.log("Updating cardid {} with \"{} = {}\".".format(section,option,value),5)
# finally assign new values
self.cardAssignments = static_cardAssignments
def read_config(self,configFilePath=defaultconfigFilePath):
""" read config variables from file """
configParser = ConfigParserExtended(allow_no_value=True,interpolation=configparser.BasicInterpolation())
dataset = configParser.read(configFilePath)
if len(dataset) != 1:
raise ValueError("Config file {} not found!".format(configFilePath))
self.config = configParser
def translate_legacy_cardAssignments(self,last_translate_legacy_cardAssignments=0):
""" reads the card settings data from the old scheme an translates them """
shortcuts_path = self.get_setting("phoniebox","shortcuts_path")
audiofolders_path = self.get_setting("phoniebox","audiofolders_path")
if shortcuts_path != -1:
configParser = ConfigParserExtended()
shortcut_files = [f for f in os.listdir(shortcuts_path) if os.path.isfile(os.path.join(shortcuts_path,f)) and is_int(f)]
# filename is the cardid
for filename in shortcut_files:
            with codecs.open(os.path.join(shortcuts_path, filename), encoding='utf-8') as f:
                uri = f.readline().strip()
# add default settings
if not filename in configParser.sections():
self.log("Adding section {} to cardAssignments".format(filename),5)
configParser.add_section(filename)
configParser[filename] = self.config["default_cardsettings"]
configParser.set(filename,"cardid",filename)
configParser.set(filename,"uri",uri)
# translate and add folder.conf settings if they contradict default_cardsettings
cardsettings_map = {"CURRENTFILENAME":None,
"ELAPSED":"resume_elapsed",
"PLAYSTATUS":None,
"RESUME":"resume",
"SHUFFLE":"random",
"LOOP":"repeat"}
folderconf = os.path.join(audiofolders_path,uri,"folder.conf")
if os.path.isfile(folderconf) and file_modified(folderconf,last_translate_legacy_cardAssignments):
with open(folderconf) as f:
lines = f.readlines()
cardsettings_old = dict([l.strip().replace('"','').split("=") for l in lines])
for key in cardsettings_old.keys():
                    if cardsettings_map.get(key) is not None:
# ignore 0 and OFF values, drop settings that have None in cardsettings_map
if key != "ELAPSED":
if cardsettings_old[key] != "0" and cardsettings_old[key] != "OFF":
configParser.set(filename,cardsettings_map[key],"1")
else:
configParser.set(filename,cardsettings_map[key],"0")
else:
                        try:
                            elapsed_val = float(cardsettings_old[key])
                        except ValueError:
                            elapsed_val = 0
                        configParser.set(filename, cardsettings_map[key], str(elapsed_val))
return configParser
def write_new_cardAssignments(self):
""" updates the cardsettings with according to playstate """
card_assignments_file = self.config.get("phoniebox","card_assignments_file")
self.log("Write new card assignments to file {}.".format(card_assignments_file),3)
with codecs.open(card_assignments_file,'w','utf-8') as f:
self.cardAssignments.write(f)
def print_to_file(self,filename,string):
""" simple function to write a string to a file """
with codecs.open(filename,'w','utf-8') as f:
f.write(string)
if __name__ == "__main__":
print("This module is not to be run! Use \"from Phoniebox import Phoniebox\" instead!")
else:
print("Phoniebox imported. Use \"box = Phoniebox(configFile)\" to get it working.")
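# A minimal quick-start sketch, following the hint above (kept in comments so
# importing stays side-effect free; the config path and card id are examples):
#
#   from Phoniebox import Phoniebox
#   box = Phoniebox("phoniebox.conf")       # defaults to ./phoniebox.conf
#   box.mpd_init_connection()               # connect to MPD or exit
#   box.do_start_playlist("0123456789")     # act as if this card was swiped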
|
python
|
from datetime import timedelta
from django.db import models
from django.utils import timezone
import time
from .config import YEKPAY_SIMULATION
class TransactionManager(models.Manager):
""" Manager for :class:`Transaction` """
def create_transaction(self, transaction_data):
transaction_data["status"] = "PENDING"
transaction_data["simulation"] = YEKPAY_SIMULATION
created_transaction = self.create(**transaction_data)
created_transaction.order_number = self.generate_uniq_order_number()
created_transaction.save(update_fields=["order_number"])
return created_transaction
def generate_uniq_order_number(self):
order_number = self._generate_order_number()
while self.filter(order_number=order_number).exists():
order_number += 1
return order_number
def _generate_order_number(self):
return int(round(time.time()))
def get_old_pending_transactions(self):
return self.filter(
created_at__lt=timezone.now() - timedelta(minutes=30),
status="PENDING",
)
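# Hypothetical usage, assuming a Transaction model declares
# `objects = TransactionManager()`:
#
#   tx = Transaction.objects.create_transaction({"amount": 1000})
#   tx.status        # "PENDING"
#   tx.order_number  # unix-timestamp based, incremented until unique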
|
python
|
from Utility.Types.Reconstruction import Reconstruction
class Background_Reconstruction(Reconstruction):
def __init__(self, cams, points, image_folder_path, sparse_reconstruction_type):
super(Background_Reconstruction, self).__init__(
cams,
points,
image_folder_path,
sparse_reconstruction_type)
self.ground_mesh = None
def add_ground_mesh(self, mesh):
self.ground_mesh = mesh
def get_ground_mesh(self):
return self.ground_mesh
|
python
|
from .approach import Approach
from .challenge import Challenge
from .review_history import ReviewHistory
from .submission import Submission
from .task import Task
from .team import Team
from .team_invitation import TeamInvitation
__all__ = ['Approach', 'Challenge', 'ReviewHistory', 'Submission', 'Task', 'Team', 'TeamInvitation']
|
python
|
# TELEGRAM
import telegram
from telegram import ReplyKeyboardMarkup
from telegram.error import NetworkError, Unauthorized
# DATA ACCESS ON SERVERS (used by telegram)
import json
import requests
import config
import emailUtil
import Datos
# mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
# TELEGRAM FUNCTIONS
# mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
# Telegram API URL
URL = "https://api.telegram.org/bot{}/".format(config.TOKEN)
chat_id = 0
update_id = None
user_keyboard = [['/info','/fig'],['/email', '/txt'],['/save','/ayuda'],['/deleteOld','/deleteNew']]
user_keyboard_markup = ReplyKeyboardMarkup(user_keyboard, one_time_keyboard=True)
""" poner en marcha el bot """
telegram_bot_experimento_bio = telegram.Bot(config.TOKEN)
#commands listed when '/ayuda' (help) is requested
listaComandos = ["/ayuda - Mostrar esta Ayuda", \
"/email - envia datos completos por email",\
"/info - Mostrar datos actuales", \
"/txt - envia datos completos a telegram", \
"/fig - Grafico de Evolucion",\
"/deleteOld - Borra los 15 primeros datos",\
"/deleteNew - Borra los 15 ultimos datos",\
"/save - Realiza una copia de seguridad","\n"]
FLAG_enviar_PNG = False #controls sending the chart to the user
FLAG_enviar_TXT = False #controls sending the data file to the user
FLAG_delete_old = False #controls deletion of the earliest recorded data
FLAG_delete_new = False #controls deletion of the latest recorded data
FLAG_pruebas = False #for telegram testing (unused)
FLAG_enviar_INFO = False
FLAG_save_DATA = False
FLAG_send_DATA = False
#loop building the help text by chaining all the help commands,
#used for the message sent over telegram when '/ayuda' is requested
listaComandosTxt = ""
for comando in listaComandos:
listaComandosTxt += comando+"\n"
def get_url(url):
    '''
    Helper for receiving telegrams:
    fetches the content from the telegram URL
    '''
response = requests.get(url)
content = response.content.decode("utf8")
return content
def send_picture(picture):
    url = URL + "sendPhoto"
    with open(picture, 'rb') as photo:
        requests.post(url, files={'photo': photo}, data={'chat_id': chat_id})
def send_document(doc):
    url = URL + "sendDocument"
    with open(doc, 'rb') as document:
        requests.post(url, files={'document': document}, data={'chat_id': chat_id})
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
def send_message(text):
    '''
    Send telegrams through the API
    '''
try:
url = URL + "sendMessage?text={}&chat_id={}".format(text, chat_id)
#print("url >> ",url)
get_url(url)
except:
        print("ERROR sending message")
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
def atenderTelegramas():
    '''
    Main telegram-handling function.
    Receives and processes telegrams, executing those that are direct orders.
    Relies on helper functions for complex commands
    that carry parameters
    '''
global text, chat_id, chat_time, comando, chat_user_name
global FLAG_enviar_PNG, FLAG_pruebas, FLAG_enviar_TXT, FLAG_delete_old, FLAG_delete_new, FLAG_enviar_INFO,FLAG_save_DATA,FLAG_send_DATA
global update_id
try:
# Request updates after the last update_id
        for update in telegram_bot_experimento_bio.get_updates(offset=update_id, timeout=0): #timeout=5 if a slow connection gives trouble
update_id = update.update_id +1
            if update.message: # updates without a message can also arrive...
                comando = update.message.text # received message
chat_time = update.message.date
user = update.message.from_user #USER_FULL
chat_id = int(update.message.from_user.id)
chat_user_name = user.first_name #USER_REAL_NAME
usuario = chat_user_name
try:
                    # for DEBUG, print whatever arrives
                    print(str(chat_time) + " >>> " + str(chat_id) + ": " + usuario + " --> " + comando)
if update.message.entities[0].type == "bot_command" and update.message.text == "/start":
update.message.reply_text("Bienvenido a Experimento Bio v1.1", reply_markup=user_keyboard_markup)
                    # =============== INTERPRET INCOMING COMMANDS AND ACT ACCORDINGLY ===============
                    if comando == "/send" and (chat_id == config.ADMIN_USER or config.ADMIN_USER == None): #decide who may send emails
FLAG_send_DATA = True
return
                    if comando == "/save" and (chat_id == config.ADMIN_USER or config.ADMIN_USER == None): #only the administrator may force an unscheduled data save
FLAG_save_DATA = True
return
                    # Commands available to basic users (clients)
if comando == "/ayuda":
send_message (listaComandosTxt)
return
if comando == "/info":
FLAG_enviar_INFO = True
return
if comando == "/fig":
FLAG_enviar_PNG = True
return
if comando == "/txt":
FLAG_enviar_TXT = True
return
if comando == "/deleteOld" and (chat_id == config.ADMIN_USER or config.ADMIN_USER == None):
FLAG_delete_old = True
return
if comando == "/deleteNew" and (chat_id == config.ADMIN_USER or config.ADMIN_USER == None):
FLAG_delete_new = True
return
except:
                    print("----- ERROR WHILE HANDLING TELEGRAMS ----------------------")
if chat_id != 0:
                #reply 'OK' to any unknown command, to throw off anyone 'trying their luck'
send_message ("OK")
except:
pass
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
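# Hypothetical consumer loop for the flags set above (the real one lives in
# the experiment's main script; the filename is an example):
#
#   while True:
#       atenderTelegramas()
#       if FLAG_enviar_PNG:
#           send_picture("evolucion.png")
#           FLAG_enviar_PNG = False
#       time.sleep(2)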
|
python
|
# Andrew Riker
# CS1400 - LW2 XL
# Assignment #04
import math
# user enters length of sides
length = eval(input("Enter length of the polygon sides: "))
# user enters number of sides
numOfSides = eval(input("Enter the number of sides the polygon has: "))
# calculate the area of the polygon
area = (numOfSides * math.pow(length, 2)) / (4 * (math.tan(math.pi / numOfSides)))
# print the area of polygon
print("The area of the polygon is:", str(round(area, 5)))
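# Sanity check: a regular hexagon (6 sides of length 10) gives
# 6 * 10**2 / (4 * tan(pi / 6)) = 600 / 2.30940... ≈ 259.80762, i.e.
#   round((6 * math.pow(10, 2)) / (4 * math.tan(math.pi / 6)), 5) == 259.80762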
|
python
|
# flake8: noqa
import geonomics as gnx
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
from mpl_toolkits.axes_grid1 import make_axes_locatable
# define number of individuals to plot tracks for, and number of timesteps for
# tracks
n_individs = 20
n_timesteps = 5000
# make figure
fig = plt.figure(figsize=(9.25, 4.5))
gs = gridspec.GridSpec(1, 2, width_ratios=[1, 1.065])
# make model
mod = gnx.make_model(gnx.read_parameters_file(('/home/drew/Desktop/stuff/berk/'
'research/projects/sim/'
'methods_paper/make_movesurf_img/'
'movesurf_img_params.py')))
# plot the movement_surface
ax1 = plt.subplot(gs[0])
mod.plot_movement_surface(0, 'chist', ticks=False)
ax1.set_title('mixture histograms', fontsize=20)
# plot tracks
ax2 = plt.subplot(gs[1])
im = plt.pcolormesh(np.linspace(0, 7, 8), np.linspace(0, 7, 8),
mod.land[0].rast, cmap='plasma')
#gnx.help.plot_movement(mod.comm[0], mod.land, n_timesteps,
# 0, mod.params, subset_spp=n_individs-1,
# ticks=False, color='gray', color_by_individ=False,
# increasing_linewidth=False, alpha=0.5,
# include_start_points=False)
gnx.help.plot_movement(mod.comm[0], mod.land, n_timesteps,
0, mod.params, subset_spp=1, ticks=False,
increasing_linewidth=False, alpha=0.7, color='black',
include_start_points=False)
divider = make_axes_locatable(ax2)
cax = divider.append_axes("right", size="5%", pad=0.05)
cbar = plt.colorbar(im, cax=cax)
cbar.set_label('conductance', rotation=270, labelpad=25, y=0.5, fontsize=18)
cbar.ax.tick_params(labelsize=15)
#ax2.set_title('Sample movement tracks\nfor %i individuals' % n_individs)
ax2.set_title('movement tracks', fontsize=20)
fig.tight_layout()
plt.show()
#fig.savefig(('/home/drew/Desktop/stuff/berk/research/projects/sim/'
# 'methods_paper/img/final/move_surf.pdf'),
# format='pdf', dpi=1000)
|
python
|
#!/usr/bin/env python
import paramiko
import sys
hostname = sys.argv[1]
port = 22
usr = 'user'
pwd = 'pass'
client = paramiko.SSHClient()
try:
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    client.connect(hostname, port=port, username=usr, password=pwd)
finally:
    client.close()
|
python
|
#!/usr/bin/env python3
# Imports
import prometheus_client
import traceback
import speedtest
import threading
import argparse
import time
# Arguments
parser = argparse.ArgumentParser(description='Prometheus exporter where it reports speedtest statistics based on user\'s preference.')
parser.add_argument('--web.listen-address', action='store', dest='listen_addr', help='Specify host and port for Prometheus to use to display metrics for scraping.')
parser.add_argument('--servers', action='store', dest='servers', help='Specific a or list of server ID(s) by comma to perform speedtests with.')
parser.add_argument('--source', action='store', dest='source', help='Specify source IP for speedtest to use to perform test.')
parser.add_argument('--interval', action='store', dest='interval', help='How often in seconds the tests should be performed.')
# Attributes
metrics = {
'speedtest_ping': prometheus_client.Gauge('speedtest_ping', 'Ping time in milliseconds.', ['server_name', 'server_loc', 'server_id']),
'speedtest_download': prometheus_client.Gauge('speedtest_download', 'Network download speed in Mbps.', ['server_name', 'server_loc', 'server_id']),
'speedtest_upload': prometheus_client.Gauge('speedtest_upload', 'Network upload speed in Mbps.', ['server_name', 'server_loc', 'server_id'])
}
# Classes
class UpdateMetrics(threading.Thread):
    def __init__(self, _servers, _source, _interval):
        threading.Thread.__init__(self)
        self.servers = _servers
        self.source = _source
        self.interval = _interval
def run(self):
while True:
try:
print('INFO: Updating metrics...', flush=True)
# Perform test
                tester = speedtest.Speedtest(source_address=self.source)
tester.get_servers(self.servers)
tester.get_best_server()
tester.download()
tester.upload()
result = tester.results.dict()
                # Convert bit/s to Mbit/s
download_speed = result['download'] / 1000000.0
upload_speed = result['upload'] / 1000000.0
# Update metrics
metrics['speedtest_ping'].labels(server_name=result['server']['name'], server_loc=result['server']['country'], server_id=result['server']['id']).set(result['ping'])
metrics['speedtest_download'].labels(server_name=result['server']['name'], server_loc=result['server']['country'], server_id=result['server']['id']).set(download_speed)
metrics['speedtest_upload'].labels(server_name=result['server']['name'], server_loc=result['server']['country'], server_id=result['server']['id']).set(upload_speed)
print('INFO: Metrics updated!', flush=True)
except Exception:
# Set metrics to -1
metrics['speedtest_ping'].labels(server_name='', server_loc='', server_id=0).set(-1)
metrics['speedtest_download'].labels(server_name='', server_loc='', server_id=0).set(-1)
metrics['speedtest_upload'].labels(server_name='', server_loc='', server_id=0).set(-1)
                print('ERROR: Unable to update metrics! Reason:\n{}'.format(traceback.format_exc()))
# Wait
time.sleep(self.interval)
# Main
if __name__ == '__main__':
print('INFO: Loading exporter...')
options = parser.parse_args()
host = '0.0.0.0'
port = 9100
servers = []
source = None
interval = 900
try:
if options.listen_addr:
host = options.listen_addr.split(':')[0]
port = int(options.listen_addr.split(':')[-1])
        if options.servers:
            # split(',') also handles a single server ID with no comma
            for server in options.servers.split(','):
                servers.append(int(server))
if options.source:
source = options.source
if options.interval:
interval = int(options.interval)
except Exception:
        print('ERROR: Invalid argument input! Reason:\n{}'.format(traceback.format_exc()))
print('INFO: Exporter ready!')
UpdateMetrics(_servers=servers, _source=source, _interval=interval).start()
prometheus_client.start_http_server(port, host)
|
python
|
import logging
import os
import signal
import socket
import time
from contextlib import contextmanager
from subprocess import Popen
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
class UserObject:
def predict(self, X, features_names):
logging.info("Predict called")
return X
class MicroserviceWrapper:
    def __init__(self, app_location, envs=None, tracing=False):
        self.app_location = app_location
        self.env_vars = self._env_vars(envs or {})
self.cmd = self._get_cmd(tracing)
def _env_vars(self, envs):
env_vars = dict(os.environ)
s2i_env_file = os.path.join(self.app_location, ".s2i", "environment")
with open(s2i_env_file) as fh:
for line in fh.readlines():
line = line.strip()
if line:
key, value = line.split("=", 1)
key, value = key.strip(), value.strip()
if key and value:
env_vars[key] = value
env_vars.update(envs)
env_vars.update(
{
"PYTHONUNBUFFERED": "x",
"PYTHONPATH": self.app_location,
"APP_HOST": "127.0.0.1",
"PREDICTIVE_UNIT_HTTP_SERVICE_PORT": "9000",
"PREDICTIVE_UNIT_GRPC_SERVICE_PORT": "5000",
"PREDICTIVE_UNIT_METRICS_SERVICE_PORT": "6005",
"PREDICTIVE_UNIT_METRICS_ENDPOINT": "/metrics-endpoint",
}
)
return env_vars
def _get_cmd(self, tracing):
cmd = (
"seldon-core-microservice",
self.env_vars["MODEL_NAME"],
"--service-type",
self.env_vars["SERVICE_TYPE"],
)
if "PERSISTENCE" in self.env_vars:
cmd += ("--persistence", self.env_vars["PERSISTENCE"])
if tracing:
cmd += ("--tracing",)
return cmd
def __enter__(self):
try:
logging.info(f"starting: {' '.join(self.cmd)}")
self.p = Popen(
self.cmd, cwd=self.app_location, env=self.env_vars, preexec_fn=os.setsid
)
time.sleep(1)
self._wait_until_ready()
return self.p
except Exception:
logging.error("microservice failed to start")
raise RuntimeError("Server did not bind to 127.0.0.1:5000")
@retry(wait=wait_fixed(4), stop=stop_after_attempt(10))
def _wait_until_ready(self):
logging.debug("=== trying again")
s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
r1 = s1.connect_ex(("127.0.0.1", 9000))
s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
r2 = s2.connect_ex(("127.0.0.1", 6005))
s3 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
r3 = s3.connect_ex(("127.0.0.1", 5000))
if r1 != 0 or r2 != 0 or r3 != 0:
raise EOFError("Server not ready yet")
logging.info("microservice ready")
def _get_return_code(self):
self.p.poll()
return self.p.returncode
def __exit__(self, exc_type, exc_val, exc_tb):
if self.p:
group_id = os.getpgid(self.p.pid)
# Kill the entire process groups (including subprocesses of self.p)
os.killpg(group_id, signal.SIGKILL)
|
python
|
__author__ = 'alex'
import os
import subprocess
import logging
from mountn.utils import lsblk, SubprocessException
from mountn.gui import gui
from locale import gettext as _
class TcplayDevice(object):
class Item(object):
def __init__(self, plugin, **kwargs):
self.plugin = plugin
self.active = kwargs.get("active", False)
self.device = kwargs.get("device", None)
self.name = kwargs.get("name", None)
self.uuid = kwargs.get("uuid", "")
def __str__(self):
return os.path.basename(self.device)
@property
def saved(self):
conf = self.plugin.settings.setdefault("items",[])
return self.uuid in conf
@property
def actions(self):
actions = []
if self.active:
actions.append((self.deactivate, _("Unmount")))
else:
actions.append((self.activate, _("Mount")))
if self.saved:
actions.append((self.unsave, _("Remove favourite")))
else:
actions.append((self.save, _("Add favourite")))
return actions
def activate(self):
cmd = [TcplayDevice.PKEXEC_BIN, TcplayDevice.TCPLAY_BIN, "--map="+self.name, "--device="+self.device]
password = gui.get_password(None, _("Enter password for %s:") % self.name, save_id="tcplay:%s" % self.uuid)
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(password+"\r")
if proc.returncode != 0:
logging.error(stderr)
raise SubprocessException("Process terminated with status %d" % proc.returncode, command=" ".join(cmd), retcode=proc.returncode, errout=stderr, stdout=stdout)
self.active = True
return True
def deactivate(self):
cmd = [TcplayDevice.PKEXEC_BIN, TcplayDevice.DMSETUP_BIN, "remove", self.name]
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
logging.error(stderr)
raise SubprocessException("Process terminated with status %d" % proc.returncode, command=" ".join(cmd), retcode=proc.returncode, errout=stderr, stdout=stdout)
self.active = False
return True
def save(self):
conf = self.plugin.settings.setdefault("items",[])
if self.uuid not in conf:
conf.append(self.uuid)
def unsave(self):
conf = self.plugin.settings.setdefault("items",[])
conf.remove(self.uuid)
PKEXEC_BIN = "pkexec"
TCPLAY_BIN = "tcplay"
DMSETUP_BIN = "dmsetup"
name = "TCPlay-Devices"
def __init__(self, settings):
self.settings = settings
@property
def items(self):
items = {}
for device in lsblk():
fname = os.path.basename(device["NAME"])
uuid = self._get_uuid(device)
if device["TYPE"] == "crypt" and fname.startswith("tc_"):
items[uuid] = TcplayDevice.Item(self, device=device["NAME"], name=os.path.basename(fname), uuid=uuid, active=True)
elif device["TYPE"] == "part" and device["MOUNTPOINT"] == "":
items[uuid] = TcplayDevice.Item(self, device=device["NAME"], name="tc_%s"%fname, uuid=uuid, active=False)
return items.values()
def _get_uuid(self, device):
ATTRS = ("PARTUUID", "WSN")
uuid = ""
for attr in ATTRS:
uuid = device.get(attr)
if uuid:
return uuid
if "PARENT" in device:
return self._get_uuid(device["PARENT"])
else:
return None
|
python
|
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from numpy import pi
qreg_q = QuantumRegister(3, 'q')
creg_c = ClassicalRegister(3, 'c')
circuit = QuantumCircuit(qreg_q, creg_c)
# entangle q1 and q2 into a Bell pair (the shared resource)
circuit.h(qreg_q[1])
circuit.cx(qreg_q[1], qreg_q[2])
circuit.barrier(qreg_q[1], qreg_q[2], qreg_q[0])
# Bell-basis measurement of q0 (the state to teleport) and q1
circuit.cx(qreg_q[0], qreg_q[1])
circuit.h(qreg_q[0])
circuit.barrier(qreg_q[2], qreg_q[0], qreg_q[1])
circuit.measure(qreg_q[0], creg_c[0])
circuit.measure(qreg_q[1], creg_c[1])
circuit.barrier(qreg_q[2], qreg_q[0], qreg_q[1])
# corrections on q2, applied coherently (deferred-measurement style)
circuit.cx(qreg_q[1], qreg_q[2])
circuit.cz(qreg_q[0], qreg_q[2])
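# A minimal sketch for running the circuit (assumes the qiskit-aer package);
# only c0 and c1 are written, so counts land on bitstrings of the form '0xy'.
from qiskit import transpile
from qiskit_aer import AerSimulator

backend = AerSimulator()
job = backend.run(transpile(circuit, backend), shots=1024)
print(job.result().get_counts())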
|
python
|
from mmdet.models.necks.fpn import FPN
from .second_fpn import SECONDFPN
from .imvoxelnet import ImVoxelNeck, KittiImVoxelNeck, NuScenesImVoxelNeck
__all__ = ['FPN', 'SECONDFPN', 'ImVoxelNeck', 'KittiImVoxelNeck', 'NuScenesImVoxelNeck']
|
python
|
import cv2
face_cascade = cv2.CascadeClassifier("./haarcascade_frontalface_default.xml")
img = cv2.imread("face1.jpg")
gray_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
faces= face_cascade.detectMultiScale(gray_img, scaleFactor = 1.15, minNeighbors=5)
print(type(faces))
print(faces)
# for x,y,w,h in faces:
# print("x:",x)
# print("y:",y)
# print("w:",w)
# print("h:",h)
# img = cv2.rectangle(img,(x,y),(x+w,y+h),(0,0,255),3)
# draw a rectangle around the first detected face (assumes at least one match)
x, y, w, h = faces[0]
img = cv2.rectangle(img, (x, y), (x+w, y+h), (255, 0, 0), 3)
cv2.imshow("Face",img)
cv2.waitKey(0) # 0 : Closes as soon as we press any key
cv2.destroyAllWindows()
|
python
|
# Copyright 2018 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
PATH = '.messaging.internetofthings.ibmcloud.com:1883/api/v0002/device/types/'
def main(params):
    iot_org_id = params['iot_org_id']
    device_id = params['device_id']
    device_type = params['device_type']
    api_token = params['api_token']
    requests.post('http://' + iot_org_id + PATH + device_type +
                  '/devices/' + device_id + '/events/query',
                  headers={'Content-Type': 'application/json'},
                  json={
                      'payload': params['payload'],
                      'client': params['client'],
                      'language': params['language']},
                  auth=('use-token-auth', api_token))
    return {'msg': params['msg']['text']}
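# Hypothetical local smoke test; in production IBM Cloud Functions invokes
# main() with the action parameters:
if __name__ == '__main__':
    print(main({
        'iot_org_id': 'abc123',
        'device_type': 'sensor',
        'device_id': 'sensor-01',
        'api_token': 'my-api-token',
        'payload': 'hello',
        'client': 'browser',
        'language': 'en',
        'msg': {'text': 'hello'},
    }))  # -> {'msg': 'hello'}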
|
python
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pytorch_custom',
version='0.0dev',
author='Alexander Soare',
packages=['pytorch_custom'],
url='https://github.com/alexander-soare/PyTorch-Custom',
license='Apache 2.0',
description='My own miscellaneous helpers for pytorch',
install_requires=[
'pandas',
'matplotlib',
'tqdm',
'numpy',
'scikit-learn',
],
)
|
python
|
'''
Copyright Hackers' Club, University Of Peradeniya
Author : E/13/181 (Samurdhi Karunarathne)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at *
http://www.apache.org/licenses/LICENSE-2.0 *
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
s=raw_input()
a=s.count('A')
d=s.count('D')
x=s.count('X')
y=s.count('Y')
p=s.count('P')
r=s.count('R')
value=min(a,d,x,y,p,r)
print value
|
python
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hustle Cloudlab Repeatable Experiment Profile
Default behavior:
By default, this uses a c220g5 with 100GB of storage and runs experiments at scale factor 1.
Numbered experiments will not be run unless provided with one or more arguments to use.
A common argument can be provided that will precede all per-experiment arguments.
Storage size may need to be increased for larger scale factors.
Instructions:
No additional instructions needed. Remember to access experiment results at: /mydata/results
"""
import geni.portal as portal
import geni.rspec.pg as pg
import json
try:
import urllib.parse as url_parser
except ImportError:
import urllib as url_parser
pc = portal.Context()
pc.defineParameter("hardware", "Hardware (Default: c220g5)", portal.ParameterType.STRING, "c220g5")
pc.defineParameter("storage", "Storage Size (Default: 100GB)", portal.ParameterType.STRING, "100GB")
pc.defineParameter("scale_factor", "SSB Scale Factor (Default: 1)", portal.ParameterType.INTEGER, 1)
pc.defineParameter("common_args",
"Common Experiment Args (Default: \"ssb hash-aggregate\", replace with \"skip\" if not in use.)",
portal.ParameterType.STRING, "ssb hash-aggregate")
pc.defineParameter("experiment_1_args", "Experiment 1 Args (Default: \"skip\")", portal.ParameterType.STRING, "skip")
pc.defineParameter("experiment_2_args", "Experiment 2 Args (Default: \"skip\")", portal.ParameterType.STRING, "skip")
pc.defineParameter("experiment_3_args", "Experiment 3 Args (Default: \"skip\")", portal.ParameterType.STRING, "skip")
pc.defineParameter("experiment_4_args", "Experiment 4 Args (Default: \"skip\")", portal.ParameterType.STRING, "skip")
pc.defineParameter("experiment_5_args", "Experiment 5 Args (Default: \"skip\")", portal.ParameterType.STRING, "skip")
params = pc.bindParameters()
'''
c220g5 224 nodes (Intel Skylake, 20 core, 2 disks)
CPU Two Intel Xeon Silver 4114 10-core CPUs at 2.20 GHz
RAM 192GB ECC DDR4-2666 Memory
Disk One 1 TB 7200 RPM 6G SAS HDs
Disk One Intel DC S3500 480 GB 6G SATA SSD
NIC Dual-port Intel X520-DA2 10Gb NIC (PCIe v3.0, 8 lanes)
NIC Onboard Intel i350 1Gb
Note that the sysvol is the SSD, while the nonsysvol is the 7200 RPM HD.
We almost always want to use the sysvol.
'''
rspec = pg.Request()
node = pg.RawPC("node")
node.hardware_type = params.hardware
bs = node.Blockstore("bs", "/mydata")
bs.size = params.storage
bs.placement = "sysvol"
# explicitly copy the needed params for better readability
out_params = {
"hardware": params.hardware,
"storage": params.storage,
"scale_factor": params.scale_factor,
"common_args": params.common_args,
"experiment_1_args": params.experiment_1_args,
"experiment_2_args": params.experiment_2_args,
"experiment_3_args": params.experiment_3_args,
"experiment_4_args": params.experiment_4_args,
"experiment_5_args": params.experiment_5_args,
}
enc_str = url_parser.quote_plus((json.dumps(out_params, separators=(',', ':'))))
execute_str = \
"sudo touch /mydata/params.json;" + \
"sudo chmod +777 /mydata/params.json;" + \
"echo " + enc_str + " > /mydata/params.json;" + \
"sudo chmod +777 /local/repository/scripts/cloudlab/cloudlab_setup.sh;" + \
"/local/repository/scripts/cloudlab/cloudlab_setup.sh " + str(params.scale_factor) + ";" + \
"sudo chmod +777 /mydata/repo/scripts/cloudlab/cloudlab.py;" + \
"python3 /mydata/repo/scripts/cloudlab/cloudlab.py >> /mydata/report.txt 2>&1;"
node.addService(pg.Execute(shell="bash", command=execute_str))
rspec.addResource(node)
pc.printRequestRSpec(rspec)
|
python
|
import datetime
import logging
import random
from GameParent import Game
from GameParent import SetupFailure, SetupSuccess
logger = logging.getLogger(__name__)
handler = logging.FileHandler('../logs/{}.log'.format(str(datetime.datetime.now()).replace(' ', '_').replace(':', 'h', 1).replace(':', 'm').split('.')[0][:-2]))
formatter = logging.Formatter('%(asctime)s::%(levelname)s::%(name)s::%(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
class GameObject(Game):
"""
Implements Russian Roulette
"""
@staticmethod
def get_game_name():
return "RussianRoulette"
@staticmethod
def how_to_play():
return "RussianRoulette is a game played where on each turn the users spins a virtual chamber where 1 in 6 bullets are filled. The user fires and lives or dies. If they live, then they pass the gun to the next user. The process continues until the gun is discharged. If you invoke this game with an integer argument, that integer will determine the amount of chambers present in the gun. If you invoke this with a boolean that toggles whether or not last man standing is enabled or not."
@staticmethod
def get_game_short_name():
return "RR"
async def setup(self, args):
self.__shots = 0
self.__gun = 6
self.__last_man_standing = True
self.__current_turn_index = 0
logger.info('Setting up a RussianRoulette game...')
if len(args) == 2:
if (type(args[0]) == bool and type(args[1]) == int) or (type(args[0]) == int and type(args[1]) == bool):
if type(args[0]) == bool:
self.__last_man_standing = args[0]
else:
self.__gun = args[0]
if type(args[1]) == bool:
self.__last_man_standing = args[1]
else:
self.__gun = args[1]
else:
logger.debug('Could not setup game, invalid arguments')
return SetupFailure(f'**Command \'play {self.get_game_short_name()}\' Usage: **`>play {self.get_game_short_name()} [users-to-play, ...] (number_of_empty_chambers=5 (int)) (last_man_standing=false (boolean))`')
        elif len(args) == 1:
            if type(args[0]) == bool:
                self.__last_man_standing = args[0]
            elif type(args[0]) == int:
                self.__gun = args[0]
            else:
                # Covers 'help' and any other invalid single argument.
                logger.debug('Could not setup game, invalid arguments or user requested help')
                return SetupFailure(f'**Command \'play {self.get_game_short_name()}\' Usage: **`>play {self.get_game_short_name()} [users-to-play, ...] (number_of_empty_chambers=5 (int)) (last_man_standing=false (boolean))`')
        if len(self.players) < 2:
            logger.debug('Could not setup game, user provided too few users to play')
            return SetupFailure('You can\'t play RussianRoulette by yourself.')
if self.__gun < 1 or self.__gun > 1000:
            logger.debug('Could not setup game, user provided an invalid gun size')
return SetupFailure('Invalid gun size.')
logger.debug('Passed standard checks setting up turn...')
random.shuffle(self.players)
self.__current_turn_index = 0
await self.channel.send("Playing with a gun with {} chambers, {}.".format(self.__gun, "last man standing" if self.__last_man_standing else "one bullet"))
pidx = 0
for player in self.players:
if pidx == self.__current_turn_index:
await self.channel.send("<@{0}>, you go first! Good luck!".format(player.id))
else:
await self.channel.send("<@{0}>, let\'s see what happens...".format(player.id))
pidx += 1
await self.show()
return SetupSuccess(self)
async def move(self, args, player):
logger.debug('Checking turn...')
if player != self.players[self.__current_turn_index]:
await self.channel.send('It is not your turn currently.')
return
self.__shots += 1
logger.debug("Getting number...")
        if random.randint(1, self.__gun) == 1:  # exactly one loaded chamber out of self.__gun
logger.debug("Will be a kill shot, sending message")
# Oh no!
await self.channel.send("**{0}** :skull::boom::gun:".format(self.get_current_player().name))
if not self.__last_man_standing:
logger.debug("Clearing game...")
await self.end_game()
logger.debug("Sending meta-data...")
await self.channel.send("**{0}** looses! It took {1} shots!".format(self.get_current_player().name, self.__shots))
else:
logger.debug("removing player and updating index")
self.players.remove(player)
if len(self.players) == 1:
logger.debug("Clearing game...")
await self.end_game()
logger.debug("Sending meta-data...")
await self.channel.send("**{0}** wins! It took {1} shots!".format(self.players[0].name, self.__shots))
else:
self.__current_turn_index = (self.__current_turn_index - 1) % len(self.players)
logger.debug("Calling next turn...")
self.next_turn()
logger.debug("Showing board...")
await self.show()
else:
logger.debug("Shot not lethal, click! Sending message")
await self.channel.send("**{0}** :sunglasses::gun: *click*".format(self.get_current_player().name))
logger.debug("Calling next turn...")
self.next_turn()
logger.debug("Showing board...")
await self.show()
def next_turn(self):
self.__current_turn_index = (self.__current_turn_index + 1) % len(self.players)
def get_current_player(self):
return self.players[self.__current_turn_index]
async def show(self):
board = "**{0}** :triumph::gun:".format(self.get_current_player().name)
await self.channel.send(board)
|
python
|
import torch.nn as nn
import torch
from .initModel import initModel
import torch.nn.functional as F
from torch.autograd import Variable
import codecs
import os
import json
class simplE(initModel):
def __init__(self, config):
super(simplE, self).__init__(config)
self.entHeadEmbedding = nn.Embedding(self.config.entTotal, self.config.embedding_dim)
self.entTailEmbedding = nn.Embedding(self.config.entTotal, self.config.embedding_dim)
self.relEmbedding = nn.Embedding(self.config.relTotal, self.config.embedding_dim)
self.relInverseEmbedding = nn.Embedding(self.config.relTotal, self.config.embedding_dim)
self.criterion = nn.Softplus()
self.batchSize = self.config.batchSize
self.init()
def init(self):
nn.init.xavier_uniform_(self.entHeadEmbedding.weight.data)
nn.init.xavier_uniform_(self.entTailEmbedding.weight.data)
nn.init.xavier_uniform_(self.relEmbedding.weight.data)
nn.init.xavier_uniform_(self.relInverseEmbedding.weight.data)
def loss(self, score_pos, score_neg):
        # Softplus-based logistic loss: push positive scores up, negative scores down.
        loss1 = torch.sum(self.criterion(-score_pos) + self.criterion(score_neg))
        return loss1
def pos_neg_score(self,score):
pos_score = score[:self.batchSize]
neg_score = score[self.batchSize:].view(self.batchSize, -1)
neg_score = torch.mean(neg_score,dim=1)
pos_score = torch.clamp(pos_score, min=-20, max=20)
neg_score = torch.clamp(neg_score, min=-20, max=20)
return pos_score, neg_score
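    # SimplE scores a triple (h, r, t) as the average of two bilinear products,
    #   score = 1/2 * (<h_head, r, t_tail> + <t_head, r_inv, h_tail>),
    # using separate head/tail embeddings per entity and a forward/inverse
    # embedding per relation (see forward() below).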
def forward(self, batch):
self.batchSize = batch.shape[0]//(1 + self.config.negativeSize * 2)
h = batch[:, 0]
t = batch[:, 1]
r = batch[:, 2]
emb_h_as_h = self.entHeadEmbedding(h)
emb_t_as_t = self.entTailEmbedding(t)
emb_r = self.relEmbedding(r)
emb_h_as_t = self.entTailEmbedding(h)
emb_t_as_h = self.entHeadEmbedding(t)
emb_r_inv = self.relInverseEmbedding(r)
score = torch.sum((emb_h_as_h * emb_r * emb_t_as_t + emb_h_as_t * emb_r_inv * emb_t_as_h)/2, -1)
score = self.pos_neg_score(score)
return score
def predict(self, h, r, t):
emb_h_as_h = self.entHeadEmbedding(h)
        emb_t_as_t = self.entTailEmbedding(t)
emb_r = self.relEmbedding(r)
emb_h_as_t = self.entTailEmbedding(h)
emb_t_as_h = self.entHeadEmbedding(t)
emb_r_inv = self.relInverseEmbedding(r)
score = torch.sum(1/2 * (emb_h_as_h * emb_r * emb_t_as_t + emb_h_as_t * emb_r_inv * emb_t_as_h), -1)
score = torch.clamp(score, min=-20, max=20)
return score
def save_embedding(self, emb_path, prefix):
ent_head_path = os.path.join(emb_path, "simplE_head_entity{}.embedding".format(prefix))
ent_tail_path = os.path.join(emb_path, "simplE_tail_entity{}.embedding".format(prefix))
rel_path = os.path.join(emb_path, "simplE_rel{}.embedding".format(prefix))
rel_rev_path = os.path.join(emb_path, "simplE_rel_rev{}.embedding".format(prefix))
with codecs.open(ent_head_path, "w") as f:
json.dump(self.entHeadEmbedding.cpu().weight.data.numpy().tolist(), f)
with codecs.open(ent_tail_path, "w") as f:
json.dump(self.entTailEmbedding.cpu().weight.data.numpy().tolist(), f)
with codecs.open(rel_path, "w") as f:
json.dump(self.relEmbedding.cpu().weight.data.numpy().tolist(), f)
with codecs.open(rel_rev_path, "w") as f:
json.dump(self.relInverseEmbedding.cpu().weight.data.numpy().tolist(), f)
|
python
|
__author__ = 'jonnyfunfun'
|
python
|
# A few convenient math functions for the bicorr project
import matplotlib
#matplotlib.use('agg') # for flux
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(style='ticks')
import sys
import os
import os.path
import scipy.io as sio
from scipy.optimize import curve_fit
import time
import numpy as np
np.set_printoptions(threshold=sys.maxsize) # print entire matrices
import pandas as pd
from tqdm import *
# Don't import any bicorr modules here
# Other modules will import bicorr_math, but not the other way around
def prop_err_division(num,num_err,denom,denom_err):
    """Propagate uncertainty through the ratio A = num/denom, assuming independent errors."""
A = num/denom
A_err = A*np.sqrt((num_err/num)**2+(denom_err/denom)**2)
return A, A_err
def calc_centers(edges):
"""
    Return bin centers from an array of bin edges; each center is the midpoint
    of its two containing edges.
    Example: plt.plot(calc_centers(edges), counts, '.k')
Parameters
----------
edges : ndarray
Array of bin edges
Returns
-------
    centers : ndarray
        Array of bin centers
"""
return (edges[:-1]+edges[1:])/2
def calc_histogram_mean(bin_edges, counts, print_flag = False, bin_centers_flag = False):
"""
Calculate mean of a count rate distribution, counts vs. x.
Errors are calculated under the assumption that you are working
with counting statistics. (C_err = sqrt(C) in each bin)
Parameters
----------
bin_edges : ndarray
Bin edges for x
counts : ndarray
Bin counts
print_flag : bool
Option to print intermediate values
bin_centers_flag : bool
Option to provide bin centers instead of bin edges (useful for 2d histograms)
Returns
-------
x_mean : float
x_mean_err : float
"""
if bin_centers_flag == True:
bin_centers = bin_edges
else:
bin_centers = calc_centers(bin_edges)
num = np.sum(np.multiply(bin_centers,counts))
num_err = np.sqrt(np.sum(np.multiply(bin_centers**2,counts)))
denom = np.sum(counts)
denom_err = np.sqrt(denom)
if print_flag:
print('num: ',num)
print('num_err: ',num_err)
print('denom: ',denom)
print('denom_err: ',denom_err)
x_mean = num/denom
x_mean_err = x_mean * np.sqrt((num_err/num)**2+(denom_err/denom)**2)
if print_flag:
print('x_mean: ',x_mean)
print('x_mean_err:',x_mean_err)
return x_mean, x_mean_err
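# Illustrative usage of calc_histogram_mean (hypothetical numbers):
#   edges  = np.array([0., 1., 2., 3.])
#   counts = np.array([10, 20, 5])
#   x_mean, x_mean_err = calc_histogram_mean(edges, counts)
# yields the counts-weighted mean of the bin centers (~1.36 here), with a
# counting-statistics error propagated through the num/denom ratio.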
def convert_energy_to_time(energy, distance = 1.05522):
'''
Convert energy in MeV to time in ns for neutrons that travel 1 m. From Matthew's `reldist.m` script.
6/5/18 Changing default to 105.522 cm, which is mean distance.
Parameters
----------
energy : float
Neutron energy in MeV
distance : float, optional
Neutron flight distance in meters
Returns
-------
time : float
Time of flight of neutron
'''
# Constants
m_n = 939.565 # MeV/c2
c = 2.99e8 # m/s
# Calculations
v = c*np.sqrt(2*energy/m_n)
time = np.divide(distance/v,1e-9)
return time
def convert_time_to_energy(time, distance = 1.05522):
'''
Convert time in ns to energy in MeV for neutrons that travel 1 m. From Matthew's `reldist.m` script.
6/5/18 Changing default to 105.522 cm, which is mean distance.
If an array of times, use energy_bin_edges = np.asarray(np.insert([bicorr.convert_time_to_energy(t) for t in dt_bin_edges[1:]],0,10000))
Parameters
----------
time : float
Time of flight of neutron in ns
distance : float, optional
Neutron flight distance in meters
Returns
-------
energy : float
Neutron energy in MeV
'''
# Constants
m_n = 939.565 # MeV/c2
c = 2.99e8 # m/s
v = distance * 1e9 / time # ns -> s
energy = (m_n/2)*(v/c)**2
return energy
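# Sanity check (illustrative): for a fixed distance the two conversions are
# exact inverses, e.g. convert_time_to_energy(convert_energy_to_time(2.0))
# returns 2.0 MeV up to floating-point rounding.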
def f_line(x, m, b):
"""
Line fit with equation y = mx + b
Parameters
----------
x : array
x values
m : float
slope
b : float
y-intercept
Returns
-------
y : array
y values
"""
y = m*x + b
return y
def fit_f_line(x, y, y_err=None, p0=None, bounds=(-np.inf,np.inf)):
"""
Fit a straight line with equation y = mx + b
Parameters
----------
x : ndarray
y : ndarray
y_err : ndarray, optional
    p0 : ndarray, optional
        Initial guess of coefficients
bounds : ndarray
Boundaries for searching for coefficients
Returns
-------
m, m_err : float
b, b_err : float
"""
if y_err is None:
y_err = np.ones(x.size)
    # Only use data points with non-zero error
w = np.where(y_err != 0)
popt, pcov = curve_fit(f_line, x[w], y[w], sigma=y_err[w], p0=p0, absolute_sigma = True, bounds = bounds)
errors = np.sqrt(np.diag(pcov))
[m, b] = popt
[m_err, b_err] = errors
return m, m_err, b, b_err
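# Illustrative fit on synthetic data (hypothetical numbers): recovers a slope
# near 2 and an intercept near 1.
#   x = np.linspace(0, 10, 20)
#   y = f_line(x, 2.0, 1.0) + np.random.normal(0, 0.1, x.size)
#   m, m_err, b, b_err = fit_f_line(x, y, y_err=0.1*np.ones(x.size))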
|
python
|
import sys
import os
project = u'Pelikan'
description = u"Unified cache backend. http://go/pelikan"
copyright = u'Twitter'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
]
exclude_patterns = ['_build']
html_static_path = ['_static']
source_suffix = '.rst'
master_doc = 'index'
language = u'C'
today_fmt = '%Y/%m/%d'
pygments_style = 'sphinx'
html_theme = "default"
html_logo = u'_static/img/white_pelican.jpg'
intersphinx_mapping = {'http://docs.python.org/': None}
|
python
|
from random import (randrange, shuffle)
from copy import deepcopy
from forest_calculations import (get_forest_dimensions, get_tree_counts)
from forest_transpormations import (flatten_forest, deflatten_forest)
from forest_constants import (LEAFY, CONIFEROUS)
def get_random_position(rows, cols):
return randrange(rows), randrange(cols)
def randomize_forest_1(forest):
forest_cpy = deepcopy(forest)
rows_num, cols_num = get_forest_dimensions(forest_cpy)
leafy_count, coniferous_count = get_tree_counts(forest_cpy)
if leafy_count > coniferous_count:
more_trees = LEAFY
less_trees = CONIFEROUS
less_trees_count = coniferous_count
else:
more_trees = CONIFEROUS
less_trees = LEAFY
less_trees_count = leafy_count
for row_index, row in enumerate(forest_cpy):
for col_index, _ in enumerate(row):
forest_cpy[row_index][col_index] = more_trees
for _ in range(less_trees_count):
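        # Rejection sampling: redraw random cells until one that does not
        # already hold the minority species is found, then convert it.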
while True:
random_row, random_col = get_random_position(rows_num, cols_num)
if forest_cpy[random_row][random_col] != less_trees:
forest_cpy[random_row][random_col] = less_trees
break
return forest_cpy
def randomize_forest_2(forest):
rows, _ = get_forest_dimensions(forest)
flat_forest = flatten_forest(forest)
shuffle(flat_forest)
return deflatten_forest(flat_forest, rows)
|
python
|
from img_utils import img_utils as _lib
from .utils import u8
def darken_pixels(src_path: str, dst_path: str, amount: int, cutoff: int):
""" Darken Pixels
    Darkens all pixels in the image by a percentage, specified by `amount`. Any pixel
    that doesn't have a subpixel below the `cutoff` will be ignored.
    `amount` and `cutoff` are clamped to the inclusive range 0-255.
```python
import img_utils
img_utils.darken_pixels(
src_path="in_file.jpg",
dst_path="out_file.jpg",
amount=80,
cutoff=200,
)
```
will take the `in_file.jpg` and lower each subpixel of the image by 80%, unless
all the subpixels are above 200.
The RGB pixel `100, 220, 220` will be turned into `20, 44, 44` while `210, 220,
230` will be left alone.
"""
_lib._darken_pixels(src_path, dst_path, u8(amount), u8(cutoff))
|
python
|
import asyncio
import logging
import os
import socket
import uuid
import pika
import pika.adapters.asyncio_connection
from .subscription import QueueSubscriptionObject, ExchangeSubscriptionObject
from ..broker import Broker
#
L = logging.getLogger(__name__)
#
class AMQPBroker(Broker):
'''
The broker that uses Advanced Message Queuing Protocol (AMQP) and it can be used with e.g. RabbitMQ as a message queue.
'''
ConfigDefaults = {
'url': 'amqp://username:password@localhost/virtualhost',
'appname': 'asab.mom',
'reconnect_delay': 10.0,
'prefetch_count': 5,
'exchange': 'amq.fanout',
'reply_exchange': '',
}
def __init__(self, app, accept_replies=False, task_service=None, config_section_name="asab:mom:amqp", config=None):
super().__init__(app, accept_replies, task_service, config_section_name, config)
self.Origin = '{}#{}'.format(socket.gethostname(), os.getpid())
self.Connection = None
self.SubscriptionObjects = {}
self.ReplyTo = None
self.InboundQueue = asyncio.Queue(loop=app.Loop)
self.OutboundQueue = asyncio.Queue(loop=app.Loop)
self.SenderFuture = None
self.Exchange = self.Config['exchange']
self.ReplyExchange = self.Config['reply_exchange']
async def finalize(self, app):
await super().finalize(app)
if self.SenderFuture is not None:
self.SenderFuture.cancel()
self.SenderFuture = None
def _reconnect(self):
if self.Connection is not None:
if not (self.Connection.is_closing or self.Connection.is_closed):
self.Connection.close()
self.Connection = None
if self.SenderFuture is not None:
self.SenderFuture.cancel()
self.SenderFuture = None
parameters = pika.URLParameters(self.Config['url'])
if parameters.client_properties is None:
parameters.client_properties = dict()
parameters.client_properties['application'] = self.Config['appname']
self.SubscriptionObjects.clear()
self.ReplyTo = None
self.Connection = pika.adapters.asyncio_connection.AsyncioConnection(
parameters=parameters,
on_open_callback=self._on_connection_open,
on_open_error_callback=self._on_connection_open_error,
on_close_callback=self._on_connection_close
)
# Connection callbacks
def _on_connection_open(self, connection):
L.info("AMQP connected")
asyncio.ensure_future(self.ensure_subscriptions(), loop=self.Loop)
self.Connection.channel(on_open_callback=self._on_sending_channel_open)
def _on_connection_close(self, connection, *args):
try:
code, reason = args
L.warning("AMQP disconnected ({}): {}".format(code, reason))
except ValueError:
error, = args
L.warning("AMQP disconnected: {}".format(error))
self.Loop.call_later(float(self.Config['reconnect_delay']), self._reconnect)
def _on_connection_open_error(self, connection, error_message=None):
L.error("AMQP error: {}".format(error_message if error_message is not None else 'Generic error'))
self.Loop.call_later(float(self.Config['reconnect_delay']), self._reconnect)
def _on_sending_channel_open(self, channel):
self.SenderFuture = asyncio.ensure_future(self._sender_future(channel), loop=self.Loop)
async def ensure_subscriptions(self):
if self.Connection is None:
return
if not self.Connection.is_open:
return
for s, pkwargs in self.Subscriptions.items():
if s in self.SubscriptionObjects:
continue
if pkwargs.get('exchange', False):
self.SubscriptionObjects[s] = ExchangeSubscriptionObject(self, s, **pkwargs)
else:
self.SubscriptionObjects[s] = QueueSubscriptionObject(self, s, **pkwargs)
async def main(self):
self._reconnect()
while True:
channel, method, properties, body = await self.InboundQueue.get()
try:
if self.AcceptReplies and (method.routing_key == self.ReplyTo):
await self.dispatch("reply", properties, body)
else:
await self.dispatch(method.routing_key, properties, body)
except BaseException:
L.exception("Error when processing inbound message")
channel.basic_nack(method.delivery_tag, requeue=False)
else:
channel.basic_ack(method.delivery_tag)
async def publish(
self,
body,
target: str = '',
content_type: str = None,
content_encoding: str = None,
correlation_id: str = None,
reply_to: str = None,
exchange: str = None
):
await self.OutboundQueue.put((
exchange if exchange is not None else self.Exchange, # Where to publish
target, # Routing key
body,
pika.BasicProperties(
content_type=content_type,
content_encoding=content_encoding,
delivery_mode=1,
correlation_id=correlation_id,
reply_to=self.ReplyTo,
message_id=uuid.uuid4().urn, # id
app_id=self.Origin, # origin
# headers = { }
)
))
async def reply(
self,
body,
reply_to: str,
content_type: str = None,
content_encoding: str = None,
correlation_id: str = None,
):
await self.OutboundQueue.put((
self.ReplyExchange, # Where to publish
reply_to, # Routing key
body,
pika.BasicProperties(
content_type=content_type,
content_encoding=content_encoding,
delivery_mode=1,
correlation_id=correlation_id,
message_id=uuid.uuid4().urn, # id
app_id=self.Origin, # origin
# headers = { }
)
))
async def _sender_future(self, channel):
if self.AcceptReplies:
self.ReplyTo = await self._create_exclusive_queue(channel, "~R@" + self.Origin)
while True:
exchange, routing_key, body, properties = await self.OutboundQueue.get()
channel.basic_publish(exchange, routing_key, body, properties)
async def _create_exclusive_queue(self, channel, queue_name):
        # The event starts unset; the declare callback sets it, so the wait()
        # below blocks until the queue has actually been declared.
        lock = asyncio.Event()
        def on_queue_declared(method):
            assert(method.method.queue == queue_name)
            self.SubscriptionObjects[queue_name] = QueueSubscriptionObject(self, queue_name)
            lock.set()
channel.queue_declare(
queue=queue_name,
exclusive=True,
auto_delete=True,
callback=on_queue_declared,
)
await lock.wait()
return queue_name
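# Illustrative use of the publish API above (hypothetical application code):
#   broker = AMQPBroker(app)
#   await broker.publish(b'{"task": "ping"}', target='some.routing.key',
#                        content_type='application/json')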
|
python
|
from urllib import urlencode
from django import forms
from django.conf import settings
from django.contrib import admin
from django.core import validators
from django.core.urlresolvers import resolve
from django.utils.html import format_html
from django.utils.translation import ugettext
from olympia import amo
from olympia.access import acl
from olympia.amo.urlresolvers import reverse
from . import models
class AddonAdmin(admin.ModelAdmin):
class Media:
css = {
'all': ('css/admin/l10n.css',)
}
js = ('js/admin/l10n.js',)
exclude = ('authors',)
list_display = ('__unicode__', 'type', 'status', 'average_rating')
list_filter = ('type', 'status')
fieldsets = (
(None, {
'fields': ('name', 'guid', 'default_locale', 'type', 'status'),
}),
('Details', {
'fields': ('summary', 'description', 'homepage', 'eula',
'privacy_policy', 'developer_comments', 'icon_type',
),
}),
('Support', {
'fields': ('support_url', 'support_email'),
}),
('Stats', {
'fields': ('average_rating', 'bayesian_rating', 'total_ratings',
'text_ratings_count',
'weekly_downloads', 'total_downloads',
'average_daily_users'),
}),
('Truthiness', {
'fields': ('disabled_by_user', 'view_source', 'requires_payment',
'public_stats', 'is_experimental',
'external_software', 'dev_agreement'),
}),
('Dictionaries', {
'fields': ('target_locale', 'locale_disambiguation'),
}))
def queryset(self, request):
return models.Addon.unfiltered
class FeatureAdmin(admin.ModelAdmin):
raw_id_fields = ('addon',)
list_filter = ('application', 'locale')
list_display = ('addon', 'application', 'locale')
class FrozenAddonAdmin(admin.ModelAdmin):
raw_id_fields = ('addon',)
class CompatOverrideRangeInline(admin.TabularInline):
model = models.CompatOverrideRange
# Exclude type since firefox only supports blocking right now.
exclude = ('type',)
class CompatOverrideAdminForm(forms.ModelForm):
def clean(self):
if '_confirm' in self.data:
raise forms.ValidationError('Click "Save" to confirm changes.')
return self.cleaned_data
class CompatOverrideAdmin(admin.ModelAdmin):
raw_id_fields = ('addon',)
inlines = [CompatOverrideRangeInline]
form = CompatOverrideAdminForm
class ReplacementAddonForm(forms.ModelForm):
def clean_path(self):
path = None
try:
path = self.data.get('path')
site = settings.SITE_URL
if models.ReplacementAddon.path_is_external(path):
if path.startswith(site):
raise forms.ValidationError(
'Paths for [%s] should be relative, not full URLs '
'including the domain name' % site)
validators.URLValidator()(path)
else:
path = ('/' if not path.startswith('/') else '') + path
resolve(path)
except forms.ValidationError as validation_error:
# Re-raise the ValidationError about full paths for SITE_URL.
raise validation_error
except Exception:
raise forms.ValidationError('Path [%s] is not valid' % path)
return path
class ReplacementAddonAdmin(admin.ModelAdmin):
list_display = ('guid', 'path', 'guid_slug', '_url')
form = ReplacementAddonForm
def _url(self, obj):
guid_param = urlencode({'guid': obj.guid})
return format_html(
'<a href="{}">Test</a>',
reverse('addons.find_replacement') + '?%s' % guid_param)
def guid_slug(self, obj):
try:
slug = models.Addon.objects.get(guid=obj.guid).slug
except models.Addon.DoesNotExist:
slug = ugettext(u'- Add-on not on AMO -')
return slug
def has_module_permission(self, request):
# If one can see the changelist, then they have access to the module.
return self.has_change_permission(request)
def has_change_permission(self, request, obj=None):
# If an obj is passed, then we're looking at the individual change page
# for a replacement addon, otherwise we're looking at the list. When
# looking at the list, we also allow users with Addons:Edit - they
# won't be able to make any changes but they can see the list.
if obj is not None:
return super(ReplacementAddonAdmin, self).has_change_permission(
request, obj=obj)
else:
return (
acl.action_allowed(request, amo.permissions.ADDONS_EDIT) or
super(ReplacementAddonAdmin, self).has_change_permission(
request, obj=obj))
admin.site.register(models.DeniedGuid)
admin.site.register(models.Addon, AddonAdmin)
admin.site.register(models.FrozenAddon, FrozenAddonAdmin)
admin.site.register(models.CompatOverride, CompatOverrideAdmin)
admin.site.register(models.ReplacementAddon, ReplacementAddonAdmin)
|
python
|
def ext_gcd(p, q):
if p == 0:
return q, 0, 1
else:
# gcd, s_i, t_i
gcd, u, v = ext_gcd(q % p, p)
return gcd, v - (q // p) * u, u
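# Sanity check (illustrative): ext_gcd(p, q) returns (g, u, v) satisfying the
# Bezout identity u*p + v*q == g, so for p=240, q=46 below: u*240 + v*46 == 2.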
p = 240
q = 46
gcd, u, v = ext_gcd(p, q)
print("[+] GCD: {}".format(gcd))
print("[+] u,v: {},{}".format(u,v))
print(f"\n[*] FLAG: crypto{{{u},{v}}}")
|
python
|
# Generated by Django 2.2.6 on 2019-11-21 17:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('selections', '0009_auto_20190529_0937'),
]
operations = [
migrations.AlterField(
model_name='selection',
name='is_no_target',
field=models.BooleanField(default=False, verbose_name='This fragment does not contain a target'),
),
migrations.AlterField(
model_name='selection',
name='tense',
field=models.CharField(blank=True, max_length=200, verbose_name='Tense'),
),
]
|
python
|
import urllib
import time
def main(request, response):
index = request.request_path.index("?")
args = request.request_path[index+1:].split("&")
headersSent = 0
for arg in args:
if arg.startswith("ignored"):
continue
elif arg.endswith("ms"):
            time.sleep(float(arg[0:-2]) / 1E3)
elif arg.startswith("redirect:"):
return (302, "WEBPERF MARKETING"), [("Location", urllib.unquote(arg[9:]))], "TEST"
elif arg.startswith("mime:"):
response.headers.set("Content-Type", urllib.unquote(arg[5:]))
elif arg.startswith("send:"):
text = urllib.unquote(arg[5:])
if headersSent == 0:
response.write_status_headers()
headersSent = 1
response.writer.write_content(text)
# else:
# error " INVALID ARGUMENT %s" % arg
|
python
|
import dotenv
from pathlib import Path
from .exceptions import EnvKeyNotFoundError, EnvNotFoundError
BASE_PATH = Path(__file__).resolve().parent.parent
if not (ENV := dotenv.dotenv_values(BASE_PATH / '.env')):
raise EnvNotFoundError()
if not (BOT_CLIENT_TOKEN := ENV.get((key := 'BOT_CLIENT_TOKEN'))):
raise EnvKeyNotFoundError(key)
DISCORD_API_ROOT = 'https://discord.com/api/v8/'
DISCORD_OAUTH_ROOT = DISCORD_API_ROOT + 'oauth2/'
DATABASE_PATH = BASE_PATH / 'db.sqlite3'
|
python
|
from django.db import models
class Customer(models.Model):
id = models.AutoField(primary_key=True, null=False)
name = models.CharField(max_length=200, null=False)
keyAPI = models.CharField(max_length=200, null=False)
pathTrainingDataSet = models.CharField(max_length=1000, null=True)
    status = models.BooleanField(default=True, null=False)
class Meta:
db_table = "Customer"
class User(models.Model):
id = models.AutoField(primary_key=True, null=False)
identificationProfileId = models.CharField(max_length=200, null=False)
pathNN = models.CharField(max_length=1000, null=True)
    status = models.BooleanField(default=True, null=False)
idCostumer = models.ForeignKey(Customer, on_delete=models.DO_NOTHING)
class Meta:
db_table = "User"
# class Choice(models.Model):
# question = models.ForeignKey(Question, on_delete=models.CASCADE)
# choice_text = models.CharField(max_length=200)
# votes = models.IntegerField(default=0)
# rating = models.CharField(max_length=400, default='some string')
# def __str__(self):
# return self.choice_text
# Create your models here.
|
python
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import configparser
import nbformat
from .static_text import Common, EvasionAttack
# Type of printing.
OK = 'ok' # [*]
NOTE = 'note' # [+]
FAIL = 'fail' # [-]
WARNING = 'warn' # [!]
NONE = 'none' # No label.
# Create report.
class IpynbReport:
def __init__(self, utility):
        self.utility = utility
        self.report_util = None
        # Report language: 'en' or 'ja'. Assumed default; expected to be set by
        # the caller before create_report() is invoked (create_common reads it).
        self.lang = 'en'
# Read config file.
config = configparser.ConfigParser()
self.file_name = os.path.basename(__file__)
self.full_path = os.path.dirname(os.path.abspath(__file__))
self.root_path = os.path.join(self.full_path, '../')
config.read(os.path.join(self.root_path, 'config.ini'))
# model/dataset path.
self.model_path = ''
self.dataset_path = ''
self.label_path = ''
self.dataset_num = 0
# Create common part.
def create_common(self, nb):
self.utility.print_message(OK, 'Creating common part...')
# Introduction.
if self.lang == 'en':
nb['cells'] = [
nbformat.v4.new_markdown_cell(Common.md_report_title.value),
nbformat.v4.new_markdown_cell(Common.md_1_1_title.value),
nbformat.v4.new_markdown_cell(Common.md_1_1_text.value),
nbformat.v4.new_markdown_cell(Common.md_1_2_title.value),
nbformat.v4.new_markdown_cell(Common.md_1_2_text.value)
]
else:
nb['cells'] = [
nbformat.v4.new_markdown_cell(Common.md_report_title.value),
nbformat.v4.new_markdown_cell(Common.md_1_1_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_1_1_text_ja.value),
nbformat.v4.new_markdown_cell(Common.md_1_2_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_1_2_text_ja.value)
]
# Preparation
if self.lang == 'en':
nb['cells'].extend([nbformat.v4.new_markdown_cell(Common.md_2_title.value),
nbformat.v4.new_markdown_cell(Common.md_2_text.value),
nbformat.v4.new_markdown_cell(Common.md_2_1_title.value),
nbformat.v4.new_markdown_cell(Common.md_2_1_text.value),
nbformat.v4.new_code_cell(Common.cd_2_1_code.value),
nbformat.v4.new_markdown_cell(Common.md_2_2_title.value),
nbformat.v4.new_markdown_cell(Common.md_2_2_text.value),
nbformat.v4.new_code_cell(Common.cd_2_2_code.value.format(self.dataset_path,
self.dataset_num,
self.label_path)),
nbformat.v4.new_markdown_cell(Common.md_2_3_title.value),
nbformat.v4.new_markdown_cell(Common.md_2_3_text.value),
nbformat.v4.new_code_cell(Common.cd_2_3_code.value.format(self.model_path)),
nbformat.v4.new_markdown_cell(Common.md_2_4_title.value),
nbformat.v4.new_markdown_cell(Common.md_2_4_text.value),
nbformat.v4.new_code_cell(Common.cd_2_4_code.value),
])
else:
nb['cells'].extend([nbformat.v4.new_markdown_cell(Common.md_2_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_text_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_1_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_1_text_ja.value),
nbformat.v4.new_code_cell(Common.cd_2_1_code_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_2_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_2_text_ja.value),
nbformat.v4.new_code_cell(Common.cd_2_2_code_ja.value.format(self.dataset_path,
self.dataset_num,
self.label_path)),
nbformat.v4.new_markdown_cell(Common.md_2_3_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_3_text_ja.value),
nbformat.v4.new_code_cell(Common.cd_2_3_code_ja.value.format(self.model_path)),
nbformat.v4.new_markdown_cell(Common.md_2_4_title_ja.value),
nbformat.v4.new_markdown_cell(Common.md_2_4_text_ja.value),
nbformat.v4.new_code_cell(Common.cd_2_4_code_ja.value),
])
self.utility.print_message(OK, 'Done creating common part.')
return nb
# Create evasion (FGSM) part.
def create_evasion_fgsm(self, nb, aes_path):
self.utility.print_message(OK, 'Creating Evasion (FGSM) part...')
# FGSM.
if self.lang == 'en':
nb['cells'].extend([nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_title.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_text.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_1_title.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_1_text.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_2_title.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_2_code.value.format(aes_path)),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_3_title.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_3_code.value.format(self.dataset_num)),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_4_title.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_4_code.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_5_title.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_5_code.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_6_title.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_6_code.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_7_title.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_7_text.value),
])
else:
nb['cells'].extend([nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_title_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_text_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_1_title_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_1_text_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_2_title_ja.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_2_code_ja.value.format(aes_path)),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_3_title_ja.value),
nbformat.v4.new_code_cell(
EvasionAttack.cd_ae_fgsm_3_code_ja.value.format(self.dataset_num)),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_4_title_ja.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_4_code_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_5_title_ja.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_5_code_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_6_title_ja.value),
nbformat.v4.new_code_cell(EvasionAttack.cd_ae_fgsm_6_code_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_7_title_ja.value),
nbformat.v4.new_markdown_cell(EvasionAttack.md_ae_fgsm_7_text_ja.value),
])
        self.utility.print_message(OK, 'Done creating Evasion (FGSM) part.')
return nb
# Create report.
def create_report(self):
self.utility.print_message(NOTE, 'Creating report...')
nb = nbformat.v4.new_notebook()
# Report Setting.
self.model_path = self.report_util.template_target['model_path']
self.dataset_path = self.report_util.template_target['dataset_path']
self.label_path = self.report_util.template_target['label_path']
self.dataset_num = self.report_util.template_target['dataset_num']
# Create common part.
nb = self.create_common(nb)
# Create replay part.
report_name = ''
report_full_path = ''
if self.report_util.template_data_poisoning['exist']:
            self.utility.print_message(WARNING, 'Not implemented.')
elif self.report_util.template_model_poisoning['exist']:
            self.utility.print_message(WARNING, 'Not implemented.')
elif self.report_util.template_evasion['exist']:
if self.report_util.template_evasion['fgsm']['exist']:
# Create FGSM.
report_name = 'evasion_fgsm.ipynb'
nb = self.create_evasion_fgsm(nb, self.report_util.template_evasion['fgsm']['aes_path'])
report_full_path = os.path.join(self.report_util.report_path, report_name)
with open(report_full_path, 'w') as fout:
nbformat.write(nb, fout)
self.report_util.template_evasion['fgsm']['ipynb_path'] = report_full_path
if self.report_util.template_evasion['cnw']['exist']:
# Create C&W.
                self.utility.print_message(WARNING, 'Not implemented.')
if self.report_util.template_evasion['jsma']['exist']:
# Create JSMA.
                self.utility.print_message(WARNING, 'Not implemented.')
elif self.report_util.template_exfiltration['exist']:
            self.utility.print_message(WARNING, 'Not implemented.')
self.utility.print_message(NOTE, 'Done creating report.')
return self.report_util, report_name
|
python
|
"""
pygments.lexers.email
~~~~~~~~~~~~~~~~~~~~~
Lexer for the raw E-mail.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
from pygments.lexers.mime import MIMELexer
from pygments.token import Text, Keyword, Name, String, Number, Comment
from pygments.util import get_bool_opt
__all__ = ["EmailLexer"]
class EmailHeaderLexer(RegexLexer):
"""
Sub-lexer for raw E-mail. This lexer only process header part of e-mail.
.. versionadded:: 2.5
"""
def __init__(self, **options):
super().__init__(**options)
self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
def get_x_header_tokens(self, match):
if self.highlight_x:
# field
yield match.start(1), Name.Tag, match.group(1)
# content
default_actions = self.get_tokens_unprocessed(
match.group(2), stack=("root", "header"))
yield from default_actions
else:
# lowlight
yield match.start(1), Comment.Special, match.group(1)
yield match.start(2), Comment.Multiline, match.group(2)
tokens = {
"root": [
(r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
(r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
],
"header": [
# folding
(r"\n[ \t]", Text.Whitespace),
(r"\n(?![ \t])", Text.Whitespace, "#pop"),
# keywords
(r"\bE?SMTPS?\b", Keyword),
(r"\b(?:HE|EH)LO\b", Keyword),
# mailbox
(r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
(r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
# domain
(r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
# IPv4
(
r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
Number.Integer,
),
# IPv6
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
(r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
(r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
(r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
Number.Hex),
(r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
Number.Hex),
(
r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
r"[0-9])(?=\b)",
Number.Hex,
),
(
r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
r"9])(?=\b)",
Number.Hex,
),
# Date time
(
r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
Name.Decorator,
),
# RFC-2047 encoded string
(
r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
r"\]^_`{|}~]+)(\?=)",
bygroups(
String.Affix,
Name.Constant,
String.Affix,
Keyword.Constant,
String.Affix,
Number.Hex,
String.Affix
)
),
# others
(r'[\s]+', Text.Whitespace),
(r'[\S]', Text),
],
}
class EmailLexer(DelegatingLexer):
"""
Lexer for raw E-mail.
Additional options accepted:
`highlight-X-header`
Highlight the fields of ``X-`` user-defined email header. (default:
``False``).
.. versionadded:: 2.5
"""
name = "E-mail"
aliases = ["email", "eml"]
filenames = ["*.eml"]
mimetypes = ["message/rfc822"]
def __init__(self, **options):
super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
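# Illustrative usage (not part of the original module):
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   with open("sample.eml") as f:  # hypothetical input file
#       print(highlight(f.read(), EmailLexer(**{"highlight-X-header": True}),
#                       TerminalFormatter()))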
|
python
|
# import dota_utils as util
import os
# import cv2
import json
# from PIL import Image
import xmltodict
import xml.etree.ElementTree as ET
# from ShipRSImageNet_devkit import ShipRSImageNet_utils as util
# from collections import OrderedDict
wordname_50 = ['Other Ship', 'Other Warship', 'Submarine', 'Other Aircraft Carrier', 'Enterprise', 'Nimitz', 'Midway',
'Ticonderoga',
'Other Destroyer', 'Atago DD', 'Arleigh Burke DD', 'Hatsuyuki DD', 'Hyuga DD', 'Asagiri DD', 'Other Frigate',
'Perry FF',
'Patrol', 'Other Landing', 'YuTing LL', 'YuDeng LL', 'YuDao LL', 'YuZhao LL', 'Austin LL', 'Osumi LL',
'Wasp LL', 'LSD 41 LL', 'LHA LL', 'Commander', 'Other Auxiliary Ship', 'Medical Ship', 'Test Ship',
'Training Ship',
'AOE', 'Masyuu AS', 'Sanantonio AS', 'EPF', 'Other Merchant', 'Container Ship', 'RoRo', 'Cargo',
'Barge', 'Tugboat', 'Ferry', 'Yacht', 'Sailboat', 'Fishing Vessel', 'Oil Tanker', 'Hovercraft',
'Motorboat', 'Dock']
# wordname_50 = ['Other Ship', 'Other Warship', 'Submarine', 'Other Aircraft Carrier', 'Enterprise', 'Nimitz', 'Midway',
# 'Ticonderoga',
# 'Other Destroyer', 'Atago DD', 'Arleigh Burke DD', 'Hatsuyuki DD', 'Hyuga DD', 'Asagiri DD', 'Frigate',
# 'Perry FF',
# 'Patrol', 'Other Landing', 'YuTing LL', 'YuDeng LL', 'YuDao LL', 'YuZhao LL', 'Austin LL', 'Osumi LL',
# 'Wasp LL', 'LSD 41 LL', 'LHA LL', 'Commander', 'Other Auxiliary Ship', 'Medical Ship', 'Test Ship',
# 'Training Ship',
# 'AOE', 'Masyuu AS', 'Sanantonio AS', 'EPF', 'Other Merchant', 'Container Ship', 'RoRo', 'Cargo',
# 'Barge', 'Tugboat', 'Ferry', 'Yacht', 'Sailboat', 'Fishing Vessel', 'Oil Tanker', 'Hovercraft',
# 'Motorboat', 'Dock']
def ShipImageNet2COCOTrain(filenames, destfile, cls_names, level_num):
# set difficult to filter '2', '1', or do not filter, set '-1'
# imageparent = os.path.join(srcpath, 'JPEGImages')
# labelparent = .path.join(srcpath, 'Annotations_v2')
if level_num == 3:
level_class = 'level_3'
elif level_num == 2:
level_class = 'level_2'
elif level_num == 1:
level_class = 'level_1'
else:
level_class = 'level_0'
data_dict = {}
data_dict['images'] = []
data_dict['categories'] = []
data_dict['annotations'] = []
for idex, name in enumerate(cls_names):
single_cat = {'id': idex + 1, 'name': name, 'supercategory': name}
data_dict['categories'].append(single_cat)
inst_count = 1
image_id = 1
with open(destfile, 'w') as f_out:
# filenames = util.GetFileFromThisRootDir(labelparent)
for file in filenames:
doc = xmltodict.parse(open(file).read())
tree = ET.parse(file)
root = tree.getroot()
single_image = {}
single_image['file_name'] = str(doc['annotation']['filename'])
single_image['id'] = image_id
single_image['width'] = int(doc['annotation']['size']['width'])
single_image['height'] = int(doc['annotation']['size']['height'])
# print(single_image)
data_dict['images'].append(single_image)
# annotations
for obj in root.iter('object'):
single_obj = {}
single_obj['area'] = float(obj.find('Ship_area').text)
single_obj['category_id'] = int(obj.find(level_class).text)
single_obj['segmentation'] = []
x1 = float(obj.find('polygon').find("x1").text)
y1 = float(obj.find('polygon').find("y1").text)
x2 = float(obj.find('polygon').find("x2").text)
y2 = float(obj.find('polygon').find("y2").text)
x3 = float(obj.find('polygon').find("x3").text)
y3 = float(obj.find('polygon').find("y3").text)
x4 = float(obj.find('polygon').find("x4").text)
y4 = float(obj.find('polygon').find("y4").text)
single_obj['segmentation'] = x1, y1, x2, y2, x3, y3, x4, y4
single_obj['iscrowd'] = 0
xmin = int(obj.find('bndbox').find("xmin").text)
ymin = int(obj.find('bndbox').find("ymin").text)
xmax = int(obj.find('bndbox').find("xmax").text)
ymax = int(obj.find('bndbox').find("ymax").text)
width, height = xmax - xmin, ymax - ymin
                # Compute the rotation angle of the rotated bounding box
                # roted_box = util.polygonToRotRectangle([x1,y1,x2,y2,x3,y3,x4,y4])
                # xcenter,ycenter,width,height,angle = roted_box
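                # COCO 'bbox' convention is [x_min, y_min, width, height].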
single_obj['bbox'] = xmin,ymin,width,height
single_obj['image_id'] = image_id
data_dict['annotations'].append(single_obj)
single_obj['id'] = inst_count
inst_count = inst_count + 1
image_id = image_id + 1
json.dump(data_dict, f_out)
    print('Total Images:', image_id - 1, 'Total Instances:', inst_count - 1)
def ShipImageNet2COCOTest(filenames, destfile, cls_names):
# imageparent = os.path.join(srcpath, 'JPEGImages')
data_dict = {}
data_dict['images'] = []
data_dict['categories'] = []
for idex, name in enumerate(cls_names):
single_cat = {'id': idex + 1, 'name': name, 'supercategory': name}
data_dict['categories'].append(single_cat)
image_id = 1
with open(destfile, 'w') as f_out:
# filenames = util.GetFileFromThisRootDir(labelparent)
for file in filenames:
doc = xmltodict.parse(open(file).read())
single_image = {}
single_image['file_name'] = str(doc['annotation']['filename'])
single_image['id'] = image_id
single_image['width'] = int(doc['annotation']['size']['width'])
single_image['height'] = int(doc['annotation']['size']['height'])
data_dict['images'].append(single_image)
image_id = image_id + 1
json.dump(data_dict, f_out)
def get_filenames(rootdir, file_dir, set_name):
    dataset_name = set_name + '.txt'
    File = os.path.join(file_dir, dataset_name)
    filenames = list()
with open(File, "rb") as f:
for line in f:
fileName = str(line.strip(), encoding="utf-8")
# print(fileName)
fle_xml = fileName.replace('.bmp', '.xml')
annotation_path = os.path.join(rootdir, fle_xml)
filenames.append(annotation_path)
return filenames
if __name__ == '__main__':
rootdir = '/home/ssd/dataset/ShipRSImageNet/VOC_Format/Annotations/'
text_dir = '/home/ssd/dataset/ShipRSImageNet/VOC_Format/ImageSets/'
out_dir = '/home/zzn/Documents/zhangzhn_workspace/pycharm/ship_dataset/COCO_Format/'
level_num = 0
if not os.path.exists(out_dir):
os.makedirs(out_dir)
train_filenames = get_filenames(rootdir, text_dir, 'train')
val_filenames = get_filenames(rootdir, text_dir, 'val')
test_filenames = get_filenames(rootdir, text_dir, 'test')
# print(train_filenames)
# print('\n')
train_json_file_name = "{}ShipRSImageNet_bbox_train_level_{}.json".format(out_dir, level_num)
val_json_file_name = "{}ShipRSImageNet_bbox_val_level_{}.json".format(out_dir, level_num)
test_json_file_name = "{}ShipRSImageNet_bbox_test_level_{}.json".format(out_dir, level_num)
ShipImageNet2COCOTrain(train_filenames, train_json_file_name, wordname_50, level_num)
ShipImageNet2COCOTrain(val_filenames, val_json_file_name, wordname_50, level_num)
ShipImageNet2COCOTest(test_filenames, test_json_file_name, wordname_50)
print('Finished')
|
python
|
from draw2d import Viewer, Text, Line, Rectangle, Frame, Point, Circle
import math, time, random
viewer = Viewer(600,600)
W = 1.0
F = viewer.frame(0., W, 0., W)
F.add(Text("North", anchor_x="center", anchor_y="top", color=(0.2,0.2,1.0)).move_to(0.5,0.9))
F.add(Text("South", anchor_x="center", anchor_y="bottom", color=(1.0,1.0,0.1)).move_to(0.5,0.1))
F.add(Text("East", anchor_x="right", anchor_y="center", color=(0.2,1.0,1.0)).move_to(0.9,0.5))
F.add(Text("West", anchor_x="left", anchor_y="center", color=(1.0,0.2,0.1)).move_to(0.1,0.5))
fly = Frame()
fly.add(Circle(radius=0.01).color(1,1,1))
label = Text("").move_to(0.01, 0.01)
vlabel = Text("", rotation=0.0, anchor_x="left", anchor_y="center").move_to(0.02, 0.0)
fly.add(label)
fly.add(vlabel)
F.add(fly, "fly")
x, y = random.random(), random.random()
vx, vy = 0.0, 0.0
vmax = 0.5
r = random.random()
omega = 0.0
max_omega = 0.1
tau = 0.1
while True:
x += vx * tau
y += vy * tau
r += omega * tau
if x < 0.0 or x > W: vx = -vx*0.8
if y < 0.0 or y > W: vy = -vy*0.8
x = max(0.0, min(W, x))
y = max(0.0, min(W, y))
    ax, ay = (2*random.random()-1)*vmax/10, (2*random.random()-1)*vmax/10
    vx += ax * tau
    vy += ay * tau
vx = max(-vmax, min(vmax, vx))
vy = max(-vmax, min(vmax, vy))
omega += (2*random.random()-1)*max_omega/10
    omega = max(-max_omega, min(max_omega, omega))
fly.move_to(x, y).rotate_to(r)
label.Text = "[xy: %.3f:%.3f]" % (x,y)
vlabel.Text = "[vxy: %.3f:%.3f]" % (vx,vy)
viewer.render()
time.sleep(tau)
|
python
|
class Student():
    # Class variables
    # name = ''
sum = 0
age = 0
def __init__(self, name, age):
        # Instance variables
self.name = name
self.age = age
self.__score = 0
# print(name) # xiaoming
# print(age) # 18
print(Student.age)
print(self.__class__.age)
self.__class__.sum += 1
        print('Current number of students: ' + str(self.__class__.sum))
def say(self):
        print('my name is ' + self.name + ', my age is ' + str(self.age))
self.__score = 10
self.__dohomework()
    # Class method (cls) -- callable from both the class and instances
@classmethod
def plus_sum(cls):
print(cls.sum)
    # Static method -- callable from both the class and instances
@staticmethod
def add(x, y):
print(Student.sum + x + y)
def __dohomework(self):
print('homework')
# public: accessible from outside the class
# private: prefix a name with __ to make it private
# Calling a private method from outside raises an error, but assigning to a
# "private" attribute does not. Because of Python's name mangling, the external
# assignment simply creates a brand-new attribute instead of touching the
# private one.
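# Illustrative demo of the name-mangling behavior described above (hypothetical):
#   s = Student('xiaoming', 18)
#   s.__score = 99            # creates a brand-new attribute, no error raised
#   print(s._Student__score)  # the real mangled private attribute, still 0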
|
python
|
import os
import pytest
from ci_framework import FlopyTestSetup, base_test_dir
import flopy
base_dir = base_test_dir(__file__, rel_path="temp", verbose=True)
pthtest = os.path.join("..", "examples", "data", "swtv4_test")
swtv4_exe = "swtv4"
isswtv4 = flopy.which(swtv4_exe)
runmodel = False
verbose = False
swtdir = [
"1_box",
"1_box",
"2_henry",
"2_henry",
"2_henry",
"2_henry",
"2_henry",
"2_henry",
"3_elder",
"4_hydrocoin",
"5_saltlake",
"6_rotation",
"6_rotation",
"7_swtv4_ex",
"7_swtv4_ex",
"7_swtv4_ex",
"7_swtv4_ex",
"7_swtv4_ex",
"7_swtv4_ex",
"7_swtv4_ex",
]
subds = [
"case1",
"case2",
"1_classic_case1",
"2_classic_case2",
"3_VDF_no_Trans",
"4_VDF_uncpl_Trans",
"5_VDF_DualD_Trans",
"6_age_simulation",
"",
"",
"",
"1_symmetric",
"2_asymmetric",
"case1",
"case2",
"case3",
"case4",
"case5",
"case6",
"case7",
]
def test_seawat_array_format():
test_setup = FlopyTestSetup(verbose=True)
d = "2_henry"
subds = ["1_classic_case1"]
for subd in subds:
pth = os.path.join(pthtest, d, subd)
model_ws = os.path.join(
f"{base_dir}_test_seawat_array_format_{d}-{subd}"
)
test_setup.add_test_dir(model_ws)
namfile = "seawat.nam"
if subd == "6_age_simulation":
namfile = "henry_mod.nam"
m = flopy.seawat.Seawat.load(namfile, model_ws=pth, verbose=verbose)
m.change_model_ws(model_ws, reset_external=True)
m.bcf6.hy[0].fmtin = "(BINARY)"
m.btn.prsity[0].fmtin = "(BINARY)"
m.write_input()
if isswtv4 is not None and runmodel:
success, buff = m.run_model(silent=False)
assert success, f"{m.name} did not run"
return
@pytest.mark.parametrize(
"d, subd",
zip(swtdir, subds),
)
def test_swtv4(d, subd):
run_swtv4(d, subd)
return
def run_swtv4(d, subd):
test_setup = FlopyTestSetup(verbose=True)
model_ws = os.path.join(f"{base_dir}_test_swtv4_{d}-{subd}")
test_setup.add_test_dir(model_ws)
# set up paths
pth = os.path.join(pthtest, d, subd)
namfile = "seawat.nam"
if subd == "6_age_simulation":
namfile = "henry_mod.nam"
# load the existing model
m = flopy.seawat.swt.Seawat.load(namfile, model_ws=pth, verbose=verbose)
# change working directory
m.change_model_ws(model_ws)
# write input files
m.write_input()
# run the model
if isswtv4 is not None and runmodel:
success, buff = m.run_model(silent=False)
assert success, f"{m.name} did not run"
if __name__ == "__main__":
for d, subd in zip(swtdir, subds):
run_swtv4(d, subd)
test_seawat_array_format()
|
python
|
from ursina import *
from model.pion import PionBlanc, PionNoir
class VuePion(Entity):
def __init__(self, position, qubic, *args, **kwargs):
self.qubic = qubic
super().__init__(
position=position,
*args, **kwargs
)
class VuePionFactory:
def __init__(self, qubic, pion='Classic'):
"""
Args:
pion: le type de pion (le skin)
qubic: le qubic
"""
super().__init__()
pion_types = {'Classic': self.create_classic}
self.create_pion = pion_types.get(pion)
self.qubic = qubic
def create_classic(self, position, **kwargs):
vp = VuePion(position, self.qubic,
model='classic',
origin=(0, -0.5),
# texture='classic',
**kwargs)
vp.scale = 0.5
vp.y = vp.y * vp.scale[1]
if self.qubic.get_pion(position) == PionBlanc:
vp.color = color.white
elif self.qubic.get_pion(position) == PionNoir:
vp.color = color.dark_gray
else:
vp.color = color.black50
return vp
|
python
|
import db_handler
ZONE_MAPPING = {
27721: 3,
27767: 9,
-2: 7,
45041: 8,
27723: 3,
-6: 5,
27724: 5,
115_092: 5,
33130: 5,
27770: 2,
27726: 5,
61204: 4,
117_928: 4,
30754: 9,
35673: 8,
27774: 8,
27775: 8,
110_924: 8,
130_226: 12,
27779: 12,
33401: 12,
27730: 3,
-7: 3,
27781: 7,
30407: 12,
27783: 12,
27784: 11,
104_884: 1,
27746: 3,
57333: 10,
29349: 7,
29192: 7,
122_767: 3,
27790: 2,
27791: 12,
30913: 9,
27745: 2,
27792: 9,
29725: 12,
27788: 11,
27747: 1,
27796: 7,
27748: 6,
27750: 2,
39796: 6,
48149: 4,
27753: 6,
116_362: 7,
27814: 8,
27754: 6,
111_197: 11,
29439: 11,
27804: 11,
27805: 11,
27807: 10,
97579: 2,
105_249: 4,
52963: 4,
27757: 4,
45984: 10,
46497: 7,
109_971: 1,
27759: 1,
27760: 1,
29586: 10,
102_613: 10,
29241: 5,
27764: 2,
27742: 3,
27812: 11,
128_919: 7,
27766: 3,
27816: 7,
44342: 1,
27818: 9,
46134: 1,
}
def get_zones(struct, year=2020):
data = db_handler.get_data_object_from_db(year, struct)
clubs = [
c for c in data.get_district_clubs(include_officers=False) if not c.is_closed
]
clubs.sort(key=lambda x: x.name)
d = {}
for club in clubs:
z_id = int(input(f"{club.name}: "))
d[club.id] = z_id
insert_zone_mapping(d, struct, year)
def insert_zone_mapping(mapping, struct, year=2020):
data = db_handler.get_data_object_from_db(year, struct)
tc = data.db.tables["clubzone"]
vals = [{"year": 2020, "club_id": k, "zone_id": 45 + v} for (k, v) in mapping.items()]
data.db.conn.execute(tc.insert(vals))
get_zones("410W")
# insert_zone_mapping("410W")
|
python
|
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from types import SimpleNamespace
from typing import Dict
from playwright.connection import ChannelOwner
class BrowserServer(ChannelOwner):
Events = SimpleNamespace(Close="close",)
def __init__(
self, parent: ChannelOwner, type: str, guid: str, initializer: Dict
) -> None:
super().__init__(parent, type, guid, initializer)
self._channel.on("close", lambda _: self.emit(BrowserServer.Events.Close))
@property
def pid(self) -> str:
return self._initializer["pid"]
@property
def wsEndpoint(self) -> str:
return self._initializer["wsEndpoint"]
async def kill(self) -> None:
await self._channel.send("kill")
async def close(self) -> None:
await self._channel.send("close")
|
python
|
# -*- test-case-name: mimic.test.test_cinder -*-
"""
Defines a mock for Cinder
"""
import json
from uuid import uuid4
from six import text_type
from zope.interface import implementer
from twisted.plugin import IPlugin
from mimic.rest.mimicapp import MimicApp
from mimic.catalog import Entry
from mimic.catalog import Endpoint
from mimic.imimic import IAPIMock
@implementer(IAPIMock, IPlugin)
class CinderApi(object):
"""
Rest endpoints for mocked Cinder Api.
"""
def __init__(self, regions=["DFW", "ORD", "IAD"]):
"""
Create a CinderApi.
"""
self._regions = regions
def catalog_entries(self, tenant_id):
"""
List catalog entries for the Cinder API.
"""
return [
Entry(
tenant_id, "volume", "cloudBlockStorage",
[
Endpoint(tenant_id, region, text_type(uuid4()), prefix="v2")
for region in self._regions
]
)
]
def resource_for_region(self, region, uri_prefix, session_store):
"""
Get an :obj:`twisted.web.iweb.IResource` for the given URI prefix;
implement :obj:`IAPIMock`.
"""
return CinderMock(self, uri_prefix, session_store, region).app.resource()
class CinderMock(object):
"""
DNS Mock
"""
def __init__(self, api_mock, uri_prefix, session_store, name):
"""
Create a Cinder region with a given URI prefix
"""
self.uri_prefix = uri_prefix
self._api_mock = api_mock
self._session_store = session_store
self._name = name
app = MimicApp()
@app.route('/v2/<string:tenant_id>/volumes', methods=['GET'])
def get_volumes(self, request, tenant_id):
"""
Lists summary information for all Block Storage volumes that the tenant can access.
http://developer.openstack.org/api-ref-blockstorage-v2.html#getVolumesSimple
"""
request.setResponseCode(200)
return json.dumps({'volumes': []})
|
python
|
from sklearn.metrics import classification_report
import pandas as pd
import tests.test_utils as t
import unittest
from nlu import *
class SentimentTrainingTests(unittest.TestCase):
def test_sentiment_training(self):
        # sentiment dataset
df_train = self.load_sentiment_dl_dataset()#'/home/loan/Documents/freelancework/jsl/nlu/4realnlugit/tests/datasets/sentiment_dl/AllProductReviews.csv'
print(df_train.columns)
#convert int to str labels so our model predicts strings not numbers
# the text data to use for classification should be in a column named 'text'
df_train['text'] = df_train['text_data']
        # the label column must be named 'y' and be of type str
        df_train['y'] = df_train['Sentiment'].astype(str)
df_train.y = df_train.y.str.replace('-1','negative')
df_train.y = df_train.y.str.replace('1','positive')
df_train=df_train.iloc[0:100]
pipe = nlu.load('train.sentiment',verbose=True)
pipe = pipe.fit(df_train)
df = pipe.predict(df_train)
print(df)
print(df.columns)
for c in df.columns : print (df[c])
# print(df[['sentiment','sentiment_confidence']])
# print(df.sentiment.value_counts())
# print(df.sentiment_confidence.value_counts())
def test_sentiment_training_with_custom_embeds_document_level(self):
        # sentiment dataset
df_train = self.load_sentiment_dl_dataset()
# the text data to use for classification should be in a column named 'text'
df_train['text'] = df_train['text_data']
        # the label column must be named 'y' and be of type str
df_train['Sentiment'] = df_train['Sentiment']
df_train['y'] = df_train['Sentiment'].astype(str)
df_train.y = df_train.y.str.replace('-1','negative')
df_train.y = df_train.y.str.replace('1','positive')
# df_train=df_train.iloc[0:4000]
pipe = nlu.load('use train.sentiment',verbose=True, )
pipe = pipe.fit(df_train)
# df = fitted_pipe.predict(' I love NLU!')
df = pipe.predict(df_train.iloc[0:500],output_level='document')
for c in df.columns : print (df[c])
# print(df)
# print(df.columns)
# print(df[['sentiment','sentiment_confidence']])
# print(df.sentiment.value_counts())
# print(df.sentiment_confidence.value_counts())
    # TODO test if bad performance persists in Spark NLP with non USE sentence embeddings
def test_sentiment_training_with_custom_embeds_sentence_level(self):
        # sentiment dataset
df_train = self.load_sentiment_dl_dataset()
# the text data to use for classification should be in a column named 'text'
df_train['text'] = df_train['text_data']
        # the label column must be named 'y' and be of type str
df_train['Sentiment'] = df_train['Sentiment']
df_train['y'] = df_train['Sentiment'].astype(str)
df_train.y = df_train.y.str.replace('-1','negative')
df_train.y = df_train.y.str.replace('1','positive')
# df_train=df_train.iloc[0:4000]
pipe = nlu.load('en.embed_sentence.small_bert_L12_768 train.sentiment',verbose=True, )
pipe.print_info()
pipe['sentiment_dl'].setMaxEpochs(1)
pipe = pipe.fit(df_train)
# df = fitted_pipe.predict(' I love NLU!')
df = pipe.predict(df_train.iloc[0:50],output_level='sentence')
s_path = 'saved_models/training_custom_embeds'
pipe.save(s_path)
hdd_pipe = nlu.load(path=s_path)
print(hdd_pipe.predict("YESSSSSSSSSSSSSSSSSSSSSSSSSSSSSsss"))
for c in df.columns : print (df[c])
# print(df.columns)
# print(df)
# print(df.columns)
# print(df[['sentiment','sentiment_confidence']])
# print(df.sentiment.value_counts())
# print(df.sentiment_confidence.value_counts())
def load_sentiment_dl_dataset(self):
output_file_name = 'stock.csv'
output_folder = 'sentiment/'
data_url = 'http://ckl-it.de/wp-content/uploads/2020/12/stock_data.csv'
return pd.read_csv(t.download_dataset(data_url,output_file_name,output_folder),error_bad_lines=False).iloc[0:100]
if __name__ == '__main__':
unittest.main()
|
python
|
import ntpath
import os
import sys
import tempfile
import unittest
from itertools import count
try:
from unittest.mock import Mock, patch, call, mock_open
except ImportError:
from mock import Mock, patch, call, mock_open
from flask import Flask, render_template_string, Blueprint
import six
import flask_s3
from flask_s3 import FlaskS3
class FlaskStaticTest(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.testing = True
@self.app.route('/<url_for_string>')
def a(url_for_string):
return render_template_string(url_for_string)
def test_jinja_url_for(self):
""" Tests that the jinja global gets assigned correctly. """
self.assertNotEqual(self.app.jinja_env.globals['url_for'],
flask_s3.url_for)
# then we initialise the extension
FlaskS3(self.app)
self.assertEquals(self.app.jinja_env.globals['url_for'],
flask_s3.url_for)
# Temporarily commented out
"""
def test_config(self):
"" Tests configuration vars exist. ""
FlaskS3(self.app)
defaults = ('S3_USE_HTTP', 'USE_S3', 'USE_S3_DEBUG',
'S3_BUCKET_DOMAIN', 'S3_CDN_DOMAIN',
'S3_USE_CACHE_CONTROL', 'S3_HEADERS',
'S3_URL_STYLE')
for default in defaults:
self.assertIn(default, self.app.config)
"""
class UrlTests(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.testing = True
self.app.config['FLASKS3_BUCKET_NAME'] = 'foo'
self.app.config['FLASKS3_USE_HTTPS'] = True
self.app.config['FLASKS3_BUCKET_DOMAIN'] = 's3.amazonaws.com'
self.app.config['FLASKS3_CDN_DOMAIN'] = ''
self.app.config['FLASKS3_OVERRIDE_TESTING'] = True
@self.app.route('/<url_for_string>')
def a(url_for_string):
return render_template_string(url_for_string)
@self.app.route('/')
def b():
return render_template_string("{{url_for('b')}}")
bp = Blueprint('admin', __name__, static_folder='admin-static')
@bp.route('/<url_for_string>')
        def c(url_for_string):
return render_template_string("{{url_for('b')}}")
self.app.register_blueprint(bp)
def client_get(self, ufs):
FlaskS3(self.app)
client = self.app.test_client()
        return client.get('/%s' % ufs)
def test_required_config(self):
"""
Tests that ValueError raised if bucket address not provided.
"""
raises = False
del self.app.config['FLASKS3_BUCKET_NAME']
try:
ufs = "{{url_for('static', filename='bah.js')}}"
self.client_get(ufs)
except ValueError:
raises = True
self.assertTrue(raises)
def test_url_for(self):
"""
Tests that correct url formed for static asset in self.app.
"""
# non static endpoint url_for in template
self.assertEquals(self.client_get('').data, six.b('/'))
# static endpoint url_for in template
ufs = "{{url_for('static', filename='bah.js')}}"
exp = 'https://foo.s3.amazonaws.com/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_per_url_scheme(self):
"""
Tests that if _scheme is passed in the url_for arguments, that
scheme is used instead of configuration scheme.
"""
        # check _scheme overridden per url
ufs = "{{url_for('static', filename='bah.js', _scheme='http')}}"
exp = 'http://foo.s3.amazonaws.com/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_handles_special_args(self):
"""
Tests that if any special arguments are passed, they are ignored, and
removed from generated url. As of this writing these are the special
args: _external, _anchor, _method (from flask's url_for)
"""
# check _external, _anchor, and _method are ignored, and not added
# to the url
ufs = "{{url_for('static', filename='bah.js',\
_external=True, _anchor='foobar', _method='GET')}}"
exp = 'https://foo.s3.amazonaws.com/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_debug(self):
"""Tests Flask-S3 behaviour in debug mode."""
self.app.debug = True
# static endpoint url_for in template
ufs = "{{url_for('static', filename='bah.js')}}"
exp = '/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_debug_override(self):
"""Tests Flask-S3 behavior in debug mode with USE_S3_DEBUG turned on."""
self.app.debug = True
self.app.config['FLASKS3_DEBUG'] = True
ufs = "{{url_for('static', filename='bah.js')}}"
exp = 'https://foo.s3.amazonaws.com/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_blueprint(self):
"""
Tests that correct url formed for static asset in blueprint.
"""
# static endpoint url_for in template
ufs = "{{url_for('admin.static', filename='bah.js')}}"
exp = 'https://foo.s3.amazonaws.com/admin-static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_cdn_domain(self):
self.app.config['FLASKS3_CDN_DOMAIN'] = 'foo.cloudfront.net'
ufs = "{{url_for('static', filename='bah.js')}}"
exp = 'https://foo.cloudfront.net/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_url_style_path(self):
"""Tests that the URL returned uses the path style."""
self.app.config['FLASKS3_URL_STYLE'] = 'path'
ufs = "{{url_for('static', filename='bah.js')}}"
exp = 'https://s3.amazonaws.com/foo/static/bah.js'
self.assertEquals(self.client_get(ufs).data, six.b(exp))
def test_url_for_url_style_invalid(self):
"""Tests that an exception is raised for invalid URL styles."""
self.app.config['FLASKS3_URL_STYLE'] = 'balderdash'
ufs = "{{url_for('static', filename='bah.js')}}"
self.assertRaises(ValueError, self.client_get, six.b(ufs))
class S3TestsWithCustomEndpoint(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.testing = True
self.app.config['FLASKS3_BUCKET_NAME'] = 'thebucket'
self.app.config['FLASKS3_REGION'] = 'theregion'
self.app.config['AWS_ACCESS_KEY_ID'] = 'thekeyid'
self.app.config['AWS_SECRET_ACCESS_KEY'] = 'thesecretkey'
self.app.config['FLASKS3_ENDPOINT_URL'] = 'https://minio.local:9000/'
@patch('flask_s3.boto3')
def test__custom_endpoint_is_passed_to_boto(self, mock_boto3):
flask_s3.create_all(self.app)
mock_boto3.client.assert_called_once_with("s3",
region_name='theregion',
aws_access_key_id='thekeyid',
aws_secret_access_key='thesecretkey',
endpoint_url='https://minio.local:9000/')
class S3Tests(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.testing = True
self.app.config['FLASKS3_BUCKET_NAME'] = 'foo'
self.app.config['FLASKS3_USE_CACHE_CONTROL'] = True
self.app.config['FLASKS3_CACHE_CONTROL'] = 'cache instruction'
self.app.config['FLASKS3_CACHE_CONTROL'] = '3600'
self.app.config['FLASKS3_HEADERS'] = {
'Expires': 'Thu, 31 Dec 2037 23:59:59 GMT',
'Content-Encoding': 'gzip',
}
self.app.config['FLASKS3_ONLY_MODIFIED'] = False
def test__bp_static_url(self):
""" Tests test__bp_static_url """
bps = [Mock(static_url_path='/foo', url_prefix=None),
Mock(static_url_path=None, url_prefix='/pref'),
Mock(static_url_path='/b/bar', url_prefix='/pref'),
Mock(static_url_path=None, url_prefix=None)]
expected = [six.u('/foo'), six.u('/pref'), six.u('/pref/b/bar'), six.u('')]
self.assertEquals(expected, [flask_s3._bp_static_url(x) for x in bps])
def test__cache_config(self):
""" Test that cache headers are set correctly. """
new_app = Flask("test_cache_param")
new_app.config['FLASKS3_USE_CACHE_CONTROL'] = True
new_app.config['FLASKS3_CACHE_CONTROL'] = '3600'
flask_s3.FlaskS3(new_app)
expected = {'Cache-Control': '3600'}
self.assertEqual(expected, new_app.config['FLASKS3_HEADERS'])
@patch('os.walk')
@patch('os.path.isdir')
def test__gather_files(self, path_mock, os_mock):
""" Tests the _gather_files function """
self.app.static_folder = '/home'
self.app.static_url_path = '/static'
bp_a = Mock(static_folder='/home/bar', static_url_path='/a/bar',
url_prefix=None)
bp_b = Mock(static_folder='/home/zoo', static_url_path='/b/bar',
url_prefix=None)
bp_c = Mock(static_folder=None)
self.app.blueprints = {'a': bp_a, 'b': bp_b, 'c': bp_c}
dirs = {'/home': [('/home', None, ['.a'])],
'/home/bar': [('/home/bar', None, ['b'])],
'/home/zoo': [('/home/zoo', None, ['c']),
('/home/zoo/foo', None, ['d', 'e'])]}
os_mock.side_effect = dirs.get
path_mock.return_value = True
expected = {('/home/bar', six.u('/a/bar')): ['/home/bar/b'],
('/home/zoo', six.u('/b/bar')): ['/home/zoo/c',
'/home/zoo/foo/d',
'/home/zoo/foo/e']}
actual = flask_s3._gather_files(self.app, False)
self.assertEqual(expected, actual)
expected[('/home', six.u('/static'))] = ['/home/.a']
actual = flask_s3._gather_files(self.app, True)
self.assertEqual(expected, actual)
@patch('os.walk')
@patch('os.path.isdir')
def test__gather_files_no_blueprints_no_files(self, path_mock, os_mock):
"""
Tests that _gather_files works when there are no blueprints and
no files available in the static folder
"""
self.app.static_folder = '/foo'
dirs = {'/foo': [('/foo', None, [])]}
os_mock.side_effect = dirs.get
path_mock.return_value = True
actual = flask_s3._gather_files(self.app, False)
self.assertEqual({}, actual)
@patch('os.walk')
@patch('os.path.isdir')
def test__gather_files_bad_folder(self, path_mock, os_mock):
"""
Tests that _gather_files when static folder is not valid folder
"""
self.app.static_folder = '/bad'
dirs = {'/bad': []}
os_mock.side_effect = dirs.get
path_mock.return_value = False
actual = flask_s3._gather_files(self.app, False)
self.assertEqual({}, actual)
@patch('os.path.splitdrive', side_effect=ntpath.splitdrive)
@patch('os.path.join', side_effect=ntpath.join)
def test__path_to_relative_url_win(self, join_mock, split_mock):
""" Tests _path_to_relative_url on Windows system """
input_ = [r'C:\foo\bar\baz.css', r'C:\foo\bar.css',
r'\foo\bar.css']
expected = ['/foo/bar/baz.css', '/foo/bar.css', '/foo/bar.css']
for in_, exp in zip(input_, expected):
actual = flask_s3._path_to_relative_url(in_)
self.assertEquals(exp, actual)
@unittest.skipIf(sys.version_info < (3, 0),
"not supported in this version")
@patch('flask_s3.boto3')
@patch("{}.open".format("builtins"), mock_open(read_data='test'))
def test__write_files(self, key_mock):
""" Tests _write_files """
static_url_loc = '/foo/static'
static_folder = '/home/z'
assets = ['/home/z/bar.css', '/home/z/foo.css']
exclude = ['/foo/static/foo.css', '/foo/static/foo/bar.css']
# we expect foo.css to be excluded and not uploaded
expected = [call(bucket=None, name=six.u('/foo/static/bar.css')),
call().set_metadata('Cache-Control', 'cache instruction'),
call().set_metadata('Expires', 'Thu, 31 Dec 2037 23:59:59 GMT'),
call().set_metadata('Content-Encoding', 'gzip'),
call().set_contents_from_filename('/home/z/bar.css')]
flask_s3._write_files(key_mock, self.app, static_url_loc, static_folder, assets,
None, exclude)
self.assertLessEqual(expected, key_mock.mock_calls)
@patch('flask_s3.boto3')
def test__write_only_modified(self, key_mock):
""" Test that we only upload files that have changed """
self.app.config['FLASKS3_ONLY_MODIFIED'] = True
static_folder = tempfile.mkdtemp()
static_url_loc = static_folder
filenames = [os.path.join(static_folder, f) for f in ['foo.css', 'bar.css']]
expected = []
data_iter = count()
for filename in filenames:
# Write random data into files
with open(filename, 'wb') as f:
if six.PY3:
                    data = str(next(data_iter))
f.write(data.encode())
else:
data = str(data_iter.next())
f.write(data)
# We expect each file to be uploaded
expected.append(call.put_object(ACL='public-read',
Bucket=None,
Key=filename.lstrip("/"),
Body=data,
Metadata={},
Expires='Thu, 31 Dec 2037 23:59:59 GMT',
ContentEncoding='gzip'))
files = {(static_url_loc, static_folder): filenames}
hashes = flask_s3._upload_files(key_mock, self.app, files, None)
# All files are uploaded and hashes are returned
self.assertLessEqual(len(expected), len(key_mock.mock_calls))
self.assertEquals(len(hashes), len(filenames))
# We now modify the second file
with open(filenames[1], 'wb') as f:
data = str(next(data_iter))
if six.PY2:
f.write(data)
else:
f.write(data.encode())
# We expect only this file to be uploaded
expected.append(call.put_object(ACL='public-read',
Bucket=None,
Key=filenames[1].lstrip("/"),
Body=data,
Metadata={},
Expires='Thu, 31 Dec 2037 23:59:59 GMT',
ContentEncoding='gzip'))
new_hashes = flask_s3._upload_files(key_mock, self.app, files, None,
hashes=dict(hashes))
#import pprint
#pprint.pprint(zip(expected, key_mock.mock_calls))
self.assertEquals(len(expected), len(key_mock.mock_calls))
@patch('flask_s3.boto3')
def test_write_binary_file(self, key_mock):
""" Tests _write_files """
self.app.config['FLASKS3_ONLY_MODIFIED'] = True
static_folder = tempfile.mkdtemp()
static_url_loc = static_folder
filenames = [os.path.join(static_folder, 'favicon.ico')]
for filename in filenames:
# Write random data into files
with open(filename, 'wb') as f:
f.write(bytearray([120, 3, 255, 0, 100]))
flask_s3._write_files(key_mock, self.app, static_url_loc, static_folder, filenames, None)
expected = {
'ACL': 'public-read',
'Bucket': None,
'Metadata': {},
'ContentEncoding': 'gzip',
'Body': b'x\x03\xff\x00d',
'Key': filenames[0][1:],
'Expires': 'Thu, 31 Dec 2037 23:59:59 GMT'}
name, args, kwargs = key_mock.mock_calls[0]
self.assertEquals(expected, kwargs)
def test_static_folder_path(self):
""" Tests _static_folder_path """
inputs = [('/static', '/home/static', '/home/static/foo.css'),
('/foo/static', '/home/foo/s', '/home/foo/s/a/b.css'),
('/bar/', '/bar/', '/bar/s/a/b.css')]
expected = [six.u('/static/foo.css'), six.u('/foo/static/a/b.css'),
six.u('/bar/s/a/b.css')]
for i, e in zip(inputs, expected):
self.assertEquals(e, flask_s3._static_folder_path(*i))
@patch('flask_s3.boto3')
def test__bucket_acl_not_set(self, mock_boto3):
flask_s3.create_all(self.app, put_bucket_acl=False)
self.assertFalse(mock_boto3.client().put_bucket_acl.called,
"put_bucket_acl was called!")
@patch('flask_s3._write_files')
def test__upload_uses_prefix(self, mock_write_files):
s3_mock = Mock()
local_path = '/local_path/static'
file_paths = ['/local_path/static/file1', '/local_path/static/file2']
files = {(local_path, '/static'): file_paths}
flask_s3._upload_files(s3_mock, self.app, files, 's3_bucket')
expected_call = call(
s3_mock, self.app, '/static', local_path, file_paths, 's3_bucket', hashes=None)
self.assertEquals(mock_write_files.call_args_list, [expected_call])
for supported_prefix in ['foo', '/foo', 'foo/', '/foo/']:
mock_write_files.reset_mock()
self.app.config['FLASKS3_PREFIX'] = supported_prefix
flask_s3._upload_files(s3_mock, self.app, files, 's3_bucket')
expected_call = call(s3_mock, self.app, '/foo/static',
local_path, file_paths, 's3_bucket', hashes=None)
self.assertEquals(mock_write_files.call_args_list, [expected_call])
@patch('flask_s3.current_app')
def test__url_for_uses_prefix(self, mock_current_app):
bucket_path = 'foo.s3.amazonaws.com'
flask_s3.FlaskS3(self.app)
mock_current_app.config = self.app.config
mock_bind = mock_current_app.url_map.bind
flask_s3.url_for('static', **{'filename': 'test_file.txt'})
self.assertEqual(mock_bind.call_args_list, [call(bucket_path, url_scheme='https')])
for supported_prefix in ['bar', '/bar', 'bar/', '/bar/']:
mock_bind.reset_mock()
self.app.config['FLASKS3_PREFIX'] = supported_prefix
flask_s3.url_for('static', **{'filename': 'test_file.txt'})
expected_path = '%s/%s' % (bucket_path, 'bar')
self.assertEqual(mock_bind.call_args_list,
[call(expected_path, url_scheme='https')])
if __name__ == '__main__':
unittest.main()
|
python
|
# Copyright (c) 2018, Ioannis Tziakos
# All rights reserved.
#
# Plugin hooks are inspired by the current implementations found in
# the tox.venv module and adapted to support edm.
import subprocess
import os
import re
import sys
from tox import hookimpl, exception
from tox.venv import VirtualEnv
COMMAND_FAILED = (
"command failed but result from testenv is ignored\ncmd: {}")
def env_exists(edm, envname):
try:
subprocess.check_call([str(edm), 'envs', 'exists', envname])
except subprocess.CalledProcessError:
return False
else:
return True
@hookimpl
def tox_testenv_create(venv, action):
name = venv.envconfig.basepython
m = re.match(r"python(\d)\.(\d)", name)
if m:
version = "%s.%s" % m.groups()
else:
        raise exception.UnsupportedInterpreter(
'TOX-EDM cannot infer version from {!r}'.format(name))
edm = venv.getcommandpath('edm', venv=False)
action.venv.envconfig.whitelist_externals.append(
os.path.dirname(edm))
if action.activity == 'recreate':
action.popen([
edm, 'envs', 'create', action.venvname,
'--force', '--version', version])
elif not env_exists(edm, action.venvname):
action.popen([
edm, 'envs', 'create', action.venvname,
'--version', version])
prefix = action.popen(
[edm, 'prefix', '-e', action.venvname],
redirect=False, returnout=True)
prefix = prefix.strip()
# The envbindir will be used to find the environment python
# So we have to make sure that it has the right value.
action.venv.envconfig.envbindir = prefix
action.venv.envconfig.whitelist_externals.append(prefix)
return True
@hookimpl
def tox_testenv_install_deps(venv, action):
deps = venv._getresolvedeps()
name = action.venvname
if len(deps) > 0:
edm = venv.getcommandpath('edm', venv=False)
depinfo = " ".join(map(str, deps))
action.setactivity("installdeps", "%s" % depinfo)
        args = [edm, 'install', '-e', name, '-y'] + list(map(str, deps))
action.popen(args)
return True
@hookimpl
def tox_runenvreport(venv, action):
edm = venv.getcommandpath('edm', venv=True)
output = action.popen([
edm, 'run', '-e', action.venvname, '--',
'pip', 'freeze'])
output = output.split("\n\n")[-1]
return output.strip().splitlines()
@hookimpl
def tox_runtest_pre(venv):
return True
@hookimpl
def tox_runtest_post(venv):
return True
@hookimpl
def tox_runtest(venv, redirect):
session = venv.session
envconfig = venv.envconfig
action = session.newaction(venv, "runtests")
with action:
venv.status = 0
session.make_emptydir(envconfig.envtmpdir)
envconfig.envtmpdir.ensure(dir=1)
env = venv._getenv(testcommand=True)
cwd = envconfig.changedir
edm = venv.getcommandpath('edm', venv=True)
action.setactivity(
"runtests", "PYTHONHASHSEED={!r}".format(
env.get("PYTHONHASHSEED")))
for i, argv in enumerate(envconfig.commands):
message = "commands[%s] | %s" % (
i, ' '.join([str(x) for x in argv]))
action.setactivity("runtests", message)
ignore_return = argv[0].startswith("-")
if ignore_return:
if argv[0] == "-":
del argv[0]
else:
argv[0] = argv[0].lstrip("-")
argv = [edm, 'run', '-e', action.venvname, '--'] + argv
try:
action.popen(
argv, cwd=cwd, env=env, redirect=redirect,
ignore_ret=ignore_return)
except exception.InvocationError as error:
if envconfig.ignore_outcome:
session.report.warning(COMMAND_FAILED.format(error))
venv.status = "ignored failed command"
continue # keep processing commands
session.report.error(str(error))
venv.status = "commands failed"
if not envconfig.ignore_errors:
break # Don't process remaining commands
except KeyboardInterrupt:
venv.status = "keyboardinterrupt"
session.report.error(venv.status)
raise
return True
@hookimpl
def tox_get_python_executable(envconfig):
venv = VirtualEnv(envconfig=envconfig)
edm = venv.getcommandpath('edm', venv=False)
if env_exists(edm, envconfig.envname):
executable = subprocess.check_output([
str(edm), 'run', '-e', envconfig.envname, '--',
'python', '-c',
"import sys; sys.stdout.write(sys.executable)"])
executable = executable.strip()
if sys.platform.startswith('win'):
# Make sure that we always have the right bin directory
envconfig.envbindir = os.path.join(
os.path.dirname(executable), 'Scripts')
return os.path.abspath(executable)
else:
return None
|
python
|
# Generated by Django 2.1.1 on 2018-09-23 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('backend', '0002_song'),
]
operations = [
migrations.AlterModelOptions(
name='song',
options={'ordering': ['position']},
),
migrations.AddField(
model_name='song',
name='position',
field=models.IntegerField(default=0),
),
]
|
python
|
#!/usr/bin/env python
import dfl.dynamic_system
import dfl.dynamic_model as dm
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal
m = 1.0
k11 = 0.2
k13 = 2.0
b1 = 3.0
class Plant1(dfl.dynamic_system.DFLDynamicPlant):
def __init__(self):
self.n_x = 2
self.n_eta = 2
self.n_u = 1
self.n = self.n_x + self.n_eta
# User defined matrices for DFL
self.A_cont_x = np.array([[0.0, 1.0],
[0.0, 0.0]])
self.A_cont_eta = np.array([[0.0, 0.0],
[-1/m,-1/m]])
self.B_cont_x = np.array([[0.0],[1.0]])
# Limits for inputs and states
self.x_min = np.array([-2.0,-2.0])
self.x_max = np.array([2.0 ,2.0])
self.u_min = np.array([-2.5])
self.u_max = np.array([ 2.5])
# Hybrid model
self.P = np.array([[1, 1]])
self.A_cont_eta_hybrid = self.A_cont_eta.dot(np.linalg.pinv(self.P))
    # functions defining constitutive relations for this particular system
@staticmethod
def phi_c1(q):
e = k11*q + k13*q**3
return e
@staticmethod
def phi_r1(f):
# e = b1*np.sign(f)*np.abs(f)*np.abs(f)
e = b1*np.sign(f)*f**2
return e
@staticmethod
def phi_rc(q,v):
return 5*v*np.abs(q)
# nonlinear state equations
def f(self,t,x,u):
x_dot = np.zeros(x.shape)
q,v = x[0],x[1]
x_dot[0] = v
x_dot[1] = -self.phi_r1(v) -self.phi_c1(q) + u
return x_dot
# nonlinear observation equations
@staticmethod
def g(t,x,u):
return dm.Koopman.gkoop1(x)
# auxiliary variables (outputs from nonlinear elements)
def phi(self,t,x,u):
'''
outputs the values of the auxiliary variables
'''
q,v = x[0],x[1]
eta = np.zeros(self.n_eta)
eta[0] = self.phi_c1(q)
eta[1] = self.phi_r1(v)
return eta
###########################################################################################
#Dummy forcing laws
def zero_u_func(y,t):
return 1
def rand_u_func(y,t):
return np.random.normal(0.0,0.3)
def sin_u_func(y,t):
return 0.5*signal.square(3 * t)
# return np.sin(3*t)
if __name__== "__main__":
driving_fun = sin_u_func
plant1 = Plant1()
x_0 = np.zeros(plant1.n_x)
fig, axs = plt.subplots(2, 1)
tru = dm.GroundTruth(plant1)
data = tru.generate_data_from_random_trajectories()
t, u, x_tru, y_tru = tru.simulate_system(x_0, driving_fun, 10.0)
axs[0].plot(t, x_tru[:,0], 'k-', label='Ground Truth')
koo = dm.Koopman(plant1, observable='filippos')
koo.learn(data)
_, _, x_koo, y_koo = koo.simulate_system(x_0, driving_fun, 10.0)
axs[0].plot(t, x_koo[:,0], 'g-.', label='Koopman')
dfl = dm.DFL(plant1)
dfl.learn(data)
_, _, x_dfl, y_dfl = dfl.simulate_system(x_0, driving_fun, 10.0)
axs[0].plot(t, x_dfl[:,0], 'r-.', label='DFL')
lrn = dm.L3(plant1, 2, ac_filter=False)
lrn.learn(data)
_, _, x_lrn, y_lrn = lrn.simulate_system(x_0, driving_fun, 10.0)
axs[0].plot(t, x_lrn[:,0], 'b-.', label='L3')
axs[0].legend()
axs[1].plot(t, u, 'k')
axs[1].set_xlabel('time')
axs[0].set_ylabel('q')
axs[1].set_ylabel('u')
plt.show()
|
python
|
# RUN this file for an example adventure.
# THEN go to 02_my_adventure.py to make your own!
from random import randint
def startGame():
print("This is an adventure game.")
input("Press enter to continue the text.")
print("When you see this you will need to respond. Here type 'ok'. Then press enter.")
input("> ")
input("Ready? ...")
startRoom()
def startRoom():
input("You are in a big empty room.")
input("There are four doors.")
input("Which door do you enter?")
print("Type 1, 2, 3, or 4 then press enter.")
door = input("> ")
if door == "1":
input("You walk through door 1.")
emptyRoom()
elif door == "2":
input("You walk through door 2.")
mathTrap()
elif door == "3":
input("You walk through door 3.")
library()
elif door == "4":
pit()
else:
input("that's not a door, try again.")
print()
startRoom()
def emptyRoom():
input("It is an empty room.")
input("But you hear a mysterious voice.")
input("It whispers:")
input('"The password is...password..."')
input("...")
input("Whatever. Press enter leave back to the main room.")
startRoom()
def mathTrap():
input("OH NO it is a math trap.")
num1 = randint(1, 99)
num2 = randint(1, 99)
stringNum1 = str(num1)
stringNum2 = str(num2)
print("Answer the math question correctly to escape:")
answer = input(stringNum1 + " + " + stringNum2 + " = ")
if (int(answer) == num1 + num2):
input("CORRECT!")
input("You escape back to the main room.")
startRoom()
else:
input("INCORRECT!")
gameOver()
def library():
input("You are in a library.")
input("The librarian glares at you.")
input("'What is the password?' she asks.")
print("What do you say?")
password = input("> ")
if password == "password":
input("'How did you know?? Okay then...'")
input("She pulls a book out of a shelf, then the shelf moves...")
secretPassage()
else:
input("'Incorrect!!' she screams, then kicks you out.")
startRoom()
def pit():
input("What is in door 4???")
print("Guess!")
input("Your guess: ")
input("Nope, it's just a bottomless pit. Sorry.")
gameOver()
def secretPassage():
input("You enter a secret passageway.")
input("and there is cake!")
win()
def win():
input("You win!!")
print("congrats :D")
def gameOver():
print("Game Over!")
startGame()
|
python
|
from src.preprocessing.data_filter import DataFilter
from src.preprocessing.dataset import Article, Sentence, Token
class ThreeSentenceDataFilter(DataFilter):
def __init__(self, total_sentence_limit=None, *args, **kwargs):
self.article = None
self.sentence = None
self.last_entity = None
self.total_sentence_count = 0
self.total_sentence_limit = total_sentence_limit
super().__init__(*args, **kwargs)
def filter_articles(self):
missing_ids = 0
wrong_title_spans = 0
for article in self.articles.copy():
if article.is_valid():
wikidata_json = self.page_id_to_wikidata_id.get(int(article.doc_id), None)
if wikidata_json is not None and wikidata_json['id'] is not None:
nkjp_class = self.entity_id_to_nkjp_class.get(wikidata_json['id'], None)
                    nkjp_specific_class = self.entity_id_to_nkjp_specific_class.get(wikidata_json['id'], None)
if nkjp_class is not None:
article.annotate_title(wikidata_json['title'], nkjp_class, nkjp_specific_class)
if article.title_annotation_error:
wrong_title_spans += 1
else:
article.title_annotation_error = True
missing_ids += 1
if not article.is_valid():
self.articles.remove(article)
def set_up(self):
pass
def process_line(self, line: str):
if self.total_sentence_limit is not None and self.total_sentence_limit <= self.total_sentence_count:
return
columns = line[:-1].split('\t')
if len(columns) == 7:
article_no, token, lemma, space, tags, entity, entity_wikidata_id = columns
if self.article is None or article_no != self.article.doc_id:
if self.article is not None:
self.articles.add(self.article)
self.article = Article(article_no, sentence_limit=3)
self.total_sentence_count += 3
if self.sentence is None:
self.sentence = Sentence()
self.article.add_next_sentence(self.sentence)
token = Token(token, lemma, space, tags, entity, entity_wikidata_id)
self.sentence.tokens.append(token)
if entity_wikidata_id != '_':
entity_wikidata_id = int(entity_wikidata_id[1:])
token.nkjp_class = self.entity_id_to_nkjp_class.get(entity_wikidata_id)
token.specific_nkjp_class = self.entity_id_to_nkjp_specific_class.get(entity_wikidata_id)
if token.nkjp_class is not None:
token.start_tag = 'B' if self.last_entity != entity else 'I'
# if nkjp_class is not None:
# print(token, entity, nkjp_class)
self.last_entity = entity
elif len(columns) != 1:
print('Invalid number of columns: %d' % len(columns))
print(columns)
else: # we reached a blank line - meaning the sentence is over
self.sentence = None
def process(base_dir):
ThreeSentenceDataFilter(
None,
'data/unfiltered_datasets/poleval',
'data/training_datasets/wikipedia_three_sentences',
base_dir)\
.filter_data_and_save()
if __name__ == '__main__':
process(r'C:\Users\piotrek\Desktop\inf\magisterka\ner')
|
python
|
import pytest
from pyvipr.examples_models.lopez_embedded import model
from pyvipr.pysb_viz.static_viz import PysbStaticViz
@pytest.fixture
def viz_model():
viz = PysbStaticViz(model)
return viz
def test_viz_exists(viz_model):
assert viz_model
def test_graphs(viz_model):
g_sp = viz_model.species_graph()
g_rxn_bi = viz_model.sp_rxns_bidirectional_graph(two_edges=True)
g_rxn = viz_model.sp_rxns_graph()
g_rules = viz_model.sp_rules_graph()
g_proj_sp = viz_model.projected_graph(g_rxn_bi, 'species_from_bireactions', viz_model.model.reactions_bidirectional)
g_proj_birxns = viz_model.projected_graph(g_rxn_bi, 'bireactions')
g_proj_rules = viz_model.projected_graph(g_rules, 'rules')
n_species = len(viz_model.model.species)
assert len(g_sp.nodes()) == n_species
assert len(g_rxn_bi.nodes()) == n_species + len(viz_model.model.reactions_bidirectional)
assert len(g_rxn.nodes()) == n_species + len(viz_model.model.reactions)
assert len(g_rules.nodes()) == n_species + len(viz_model.model.rules)
assert len(g_proj_sp.nodes()) == n_species
assert len(g_proj_birxns.nodes()) == len(viz_model.model.reactions_bidirectional)
assert len(g_proj_rules.nodes()) == len(viz_model.model.rules)
def test_wrong_projection(viz_model):
with pytest.raises(ValueError):
viz_model._projections_view('wrong_projection')
def test_no_compartments(viz_model):
with pytest.raises(ValueError):
viz_model.compartments_data_graph()
|
python
|
# Generated by Django 2.1.5 on 2019-01-31 18:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ipam', '0023_change_logging'),
]
operations = [
migrations.AlterField(
model_name='vrf',
name='rd',
field=models.CharField(blank=True, max_length=21, null=True, unique=True),
),
]
|
python
|
from itertools import product
with open("day-04.txt") as f:
numbers_str, *boards_str = f.read().rstrip().split("\n\n")
numbers = [int(n) for n in numbers_str.split(",")]
boards = {}
for b, board_str in enumerate(boards_str):
boards[b] = {}
for r, row in enumerate(board_str.splitlines()):
for c, number in enumerate(map(int, row.split())):
boards[b][number] = r, c
boards_rows = [[set() for _ in range(5)] for _ in range(len(boards))]
boards_cols = [[set() for _ in range(5)] for _ in range(len(boards))]
for number, (b, board) in product(numbers, boards.items()):
if number not in board:
continue
row, col = board.pop(number)
boards_rows[b][row].add(number)
boards_cols[b][col].add(number)
if len(boards_rows[b][row]) == 5 or len(boards_cols[b][col]) == 5:
winning_board = b
called_number = number
break
else:
print("No winning board found")
exit(1)
print(sum(boards[winning_board]) * called_number)
|
python
|
import gffutils
import pyfaidx
def select_annotation_type(db, fasta, selectionAnnotationType):
"""
    Return a list of gff3 features of the selected type (e.g. mRNA); the fasta argument is currently unused.
"""
countFeature = db.count_features_of_type(selectionAnnotationType)
featureList = [None] * countFeature
i = 0
for feature in db.features_of_type(selectionAnnotationType):
featureList[i] = feature
i=i+1
featureList = list(filter(None.__ne__, featureList))
return(featureList)
def variant_position_within(coordsVar, coordsInterval):
"""
    Check whether coordsVar is within coordsInterval. Return 1 if it is, 0 otherwise.
"""
if coordsVar.CHROM == coordsInterval.seqid:
if coordsVar.POS >= coordsInterval.start:
if coordsVar.POS <= coordsInterval.end:
return(1)
else:
return(0)
else:
return(0)
return(0)
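# A minimal usage sketch (file names are hypothetical; assumes a GFF3 file and
# a prebuilt gffutils database):
# db = gffutils.create_db('annotation.gff3', dbfn='annotation.db', force=True)
# fasta = pyfaidx.Fasta('genome.fa')
# mrnas = select_annotation_type(db, fasta, 'mRNA')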
|
python
|
import socket
import win32.lib.win32serviceutil as win32serviceutil
import win32.servicemanager as servicemanager
import win32.win32event as win32event
import win32.win32service as win32service
class SMWinServiceBase(win32serviceutil.ServiceFramework):
_svc_name_ = "SampleleService"
_svc_display_name_ = "Sample Service"
_svc_description_ = "Service Sample Description"
@classmethod
def parse_command_line(cls):
win32serviceutil.HandleCommandLine(cls)
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
socket.setdefaulttimeout(60)
def SvcStop(self):
self.stop()
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
self.start()
servicemanager.LogMsg(
servicemanager.EVENTLOG_INFORMATION_TYPE,
servicemanager.PYS_SERVICE_STARTED,
(self._svc_name_, ""),
)
self.main()
def start(self):
pass
def stop(self):
pass
def main(self):
pass
if __name__ == "__main__":
SMWinServiceBase.parse_command_line()
|
python
|
import os
import re
import subprocess
import shlex
from ConfigParser import SafeConfigParser
CONFIG_FILE = os.path.join(os.getcwd(), '.forrest')
def get_config():
config = SafeConfigParser()
config.read(CONFIG_FILE)
return config
def save_config(config):
config.write(open(CONFIG_FILE, 'w'))
def get_input(text, default=''):
response = raw_input(text)
if len(response) == 0:
response = default
return response
def create_bundle(source_dir):
local_command('tar czf /tmp/bundle.tgz -C %s .' % source_dir)
def local_command(command, decoder=None, tty=None):
if tty:
return os.system(command)
else:
dev_null = open(os.devnull, 'w')
output = subprocess.check_output(shlex.split(command))
dev_null.close()
if decoder:
return decoder(output)
else:
return output
|
python
|
from http import HTTPStatus
from django.urls import reverse
from mock import patch
from barriers.models import Company
from core.tests import MarketAccessTestCase
class EditCompaniesTestCase(MarketAccessTestCase):
company_id = "0692683e-5197-4853-a0fe-e43e35b8e7c5"
company_name = "Test Company"
company_data = {
"id": company_id,
"name": company_name,
"created_on": "2020-01-01",
"address": {
"line_1": "123 Test Street",
"town": "London",
},
}
def test_edit_companies_landing_page(self):
"""
Landing page should have the barrier's companies in the form
"""
response = self.client.get(
reverse(
"barriers:edit_companies", kwargs={"barrier_id": self.barrier["id"]}
)
)
assert response.status_code == HTTPStatus.OK
assert "form" in response.context
company_ids = [company["id"] for company in self.barrier["companies"]]
assert response.context["form"].initial["companies"] == company_ids
assert self.client.session["companies"] == self.barrier["companies"]
def test_company_search_page_loads(self):
"""
The search page should load with a form in the context
"""
response = self.client.get(
reverse(
"barriers:search_company", kwargs={"barrier_id": self.barrier["id"]}
)
)
assert response.status_code == HTTPStatus.OK
assert "form" in response.context
@patch("utils.datahub.DatahubClient.post")
def test_company_search_submit(self, mock_post):
"""
Searching should call the Datahub API
"""
mock_post.return_value = {
"count": 1,
"results": [self.company_data],
}
response = self.client.post(
reverse(
"barriers:search_company", kwargs={"barrier_id": self.barrier["id"]}
),
data={"query": "test search"},
)
assert response.status_code == HTTPStatus.OK
assert "form" in response.context
assert "results" in response.context
results = response.context["results"]
assert results["count"] == 1
assert results["results"][0].id == self.company_id
assert results["results"][0].name == self.company_name
@patch("barriers.views.companies.DatahubClient.get_company")
def test_company_detail(self, mock_get_company):
"""
Company Detail should call the Datahub API
"""
mock_get_company.return_value = Company(self.company_data)
response = self.client.get(
reverse(
"barriers:company_detail",
kwargs={
"barrier_id": self.barrier["id"],
"company_id": self.company_id,
},
),
)
assert response.status_code == HTTPStatus.OK
mock_get_company.assert_called_with(self.company_id)
assert response.context["company"].id == self.company_id
assert response.context["company"].name == self.company_name
@patch("utils.api.resources.APIResource.patch")
@patch("barriers.views.companies.DatahubClient.get_company")
def test_add_company(self, mock_get_company, mock_patch):
"""
Add company should change the session, not call the API
"""
mock_get_company.return_value = Company(self.company_data)
response = self.client.post(
reverse(
"barriers:company_detail",
kwargs={
"barrier_id": self.barrier["id"],
"company_id": self.company_id,
},
),
data={"company_id": self.company_id},
)
assert response.status_code == HTTPStatus.FOUND
new_company = {
"id": self.company_id,
"name": self.company_name,
}
assert new_company in self.client.session["companies"]
assert mock_patch.called is False
@patch("utils.api.resources.APIResource.patch")
def test_remove_company(self, mock_patch):
"""
Removing a company should remove it from the session, not call the API
"""
companies = [
{
"id": self.company_id,
"name": self.company_name,
},
{
"id": self.barrier["companies"][0]["id"],
"name": self.barrier["companies"][0]["name"],
},
]
self.update_session({"companies": companies})
response = self.client.post(
reverse(
"barriers:remove_company", kwargs={"barrier_id": self.barrier["id"]}
),
data={"company_id": self.company_id},
)
assert response.status_code == HTTPStatus.FOUND
companies = self.client.session["companies"]
assert {
"id": self.company_id,
"name": self.company_name,
} not in self.client.session["companies"]
assert self.barrier["companies"][0] in self.client.session["companies"]
assert mock_patch.called is False
@patch("utils.api.resources.APIResource.patch")
def test_confirm_companies(self, mock_patch):
"""
Saving should call the API
"""
self.update_session(
{
"companies": [
{
"id": self.company_id,
"name": self.company_name,
}
]
}
)
response = self.client.post(
reverse(
"barriers:edit_companies_session",
kwargs={
"barrier_id": self.barrier["id"],
},
),
data={"companies": [self.company_id]},
)
assert response.status_code == HTTPStatus.FOUND
mock_patch.assert_called_with(
id=self.barrier["id"],
companies=[
{
"id": self.company_id,
"name": self.company_name,
}
],
)
assert "companies" not in self.client.session
|
python
|
#!/usr/bin/env python3
# Reading and Writing files
# Create a new file object and assign it to a variable called file
file = open("spider.txt")
# readline method reads a single line of a file
print(file.readline())
# readline method reads the second line of a file - each time the readline method is called, the file object updates its current position in the file
print (file.readline())
print(file.read())
# We have to close the opened file
file.close()
############# WITH OPEN FILE #############
"""With keyword creates block of code with the work needs to be done with the file inside"""
"""When 'with' is used. Python will automatically close the file"""
with open("spider.txt") as file:
print (file.readline())
with open ("spider.txt") as file:
print (file.readline())
with open ("spider.txt") as file:
for line in file:
print(line.upper())
# Empty lines can be avoided by using STRIP
with open ("spider.txt") as file:
for line in file:
print (line.strip().upper())
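# The header above mentions writing too; a minimal sketch (the file name
# notes.txt is hypothetical - "w" mode creates or overwrites the file):
with open("notes.txt", "w") as file:
    file.write("Hello, file!\n")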
|
python
|
import math
def length_norm(score):
length_tgt = len(score)
return sum(score) / length_tgt
def word_reward(score, reward):
length_tgt = len(score)
return sum(score) - reward * length_tgt
def bounded_word_reward(score, reward, bound):
"""
bound = L_predict
L_predict could be:
1) length_src * alpha
2) average length_tgt * beta
3) model predicted length * gamma
"""
length_tgt = len(score)
bounded_length = min(length_tgt, bound)
return sum(score) - reward * bounded_length
def bounded_adaptive_reward(score, rewards, bound):
if len(rewards) > bound:
rewards = rewards[:bound]
return sum(score) - sum(rewards)
def neg_sigmoid(x):
return 1.0 / (1 + math.exp(x))
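# Minimal usage sketch (the per-token log-probabilities below are made up):
if __name__ == '__main__':
    score = [-0.5, -1.2, -0.3]                       # one hypothesis, 3 tokens
    print(length_norm(score))                        # -2.0 / 3 = -0.6666...
    print(word_reward(score, reward=0.1))            # -2.0 - 0.1 * 3 = -2.3
    print(bounded_word_reward(score, 0.1, bound=2))  # reward capped at 2 tokens: -2.2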
|
python
|
import pandas as pd
while(1):
menu = {1:"Driver Login",
2:"Customer Login",
3:"ZULA Administarator",
4:"Exit"}
intial_cab_drivers = {"id":[1,2,3,4],
"Name":["aaa","bbb","ccc","ddd"],
"Pass":[111,222,333,444],
"Age":[25,36,31,28] }
intial_customers = {"id":[1,2,3,4],
"Name":["ww","xx","yy","zz"],
"Pass":[55,66,77,88],
"Age":[25,36,31,28]
}
intial_locations = {"id":[1,3,4,6,2,7,8,5],
"Name":["A","C","D","F","B","G","H","E"],
"Dist_from_origin":[0,4,7,9,15,18,20,23]
}
intial_cab_positions = {
"Location":["D","G","H","A"],
"cabid":[1,2,3,4]
}
cabdrivers_summary = {
"cabid":{1:
{"Source":["D","E","C"],
"Destination":["H","G","B"],
"CustomerDetail":[4,2,2],
"Fare":[130,50,110],
"ZulaCommision":[39,15,33]
},2:{"Source":["C","E","D"],
"Destination":["B","G","H"],
"CustomerDetail":[4,3,2],
"Fare":[145,50,187],
"ZulaCommision":[87,25,55]
},
3:{"Source":["F","E","D","H"],
"Destination":["A","B","G","E"],
"CustomerDetail":[2,3,4,7],
"Fare":[187,150,145,96],
"ZulaCommision":[55,58,36,47]
},
4:{"Source":["A","C","B"],
"Destination":["E","H","E"],
"CustomerDetail":[5,4,1],
"Fare":[125,30,158],
"ZulaCommision":[65,5,35]
}
}
}
customer_ride_summary = {"custid":{1:
{
"Source":["A","E","C"],
"Destination":["E","G","B"],
"Cab Detail":[3,1,1],
"Fare":[230,50,110]
},
2:
{
"Source":["H","E","G"],
"Destination":["A","G","H"],
"Cab Detail":[4,2,2],
"Fare":[220,40,100]
},
3:
{
"Source":["A","E","C"],
"Destination":["E","G","B"],
"Cab Detail":[5,3,2],
"Fare":[225,45,115]
},
4:
{
"Source":["H","E","F"],
"Destination":["F","H","G"],
"Cab Detail":[5,2,3],
"Fare":[150,45,86]
},
}
}
cab_summary = {"cabid":{1:{"Total Number of Trips":3,
"Total Fare Collected":290,
"Total Zula Commision":87
},
2:{"Total Number of Trips":10,
"Total Fare Collected":2900,
"Total Zula Commision":1000
},
3:{"Total Number of Trips":7,
"Total Fare Collected":1500,
"Total Zula Commision":500
},
4:{"Total Number of Trips":5,
"Total Fare Collected":700,
"Total Zula Commision":150
}
}
}
Welcome = ["Welcome to !!*** ZULA***!!","1.Cab driver login","2.Customer login","3.Administration","4.Quit","Please choose a service"]
for i in Welcome:
print(i)
option = int(input())
if option==1:
id = int(input("Enter your ID: "))
password = int(input("Enter your password: "))
        if id in intial_cab_drivers["id"] and intial_cab_drivers["Pass"][intial_cab_drivers["id"].index(id)] == password:
print("Congratulations You are logged in!")
inp_ = input("Press 1 to know your summary!\nPress 2 to continue\n")
if inp_=="1":
cabid = id
print("Cabid: ",cabid)
print("Cab Driver Name: ",intial_cab_drivers["Name"][cabid-1])
print("Trip Details")
print(pd.DataFrame(cabdrivers_summary["cabid"][cabid]))
continue
else:
if id not in intial_cab_drivers["id"]:
print("Please Enter Your Id correctly")
else:
print("Check Your Password and Try Again")
continue
elif option==2:
print("1.Login")
print("2.Create Account")
print("Choose one option from above")
cust = int(input())
# while(1):
if cust==1:
id = int(input("Enter your ID: "))
password = int(input("Enter your password: "))
            if id in intial_customers["id"] and intial_customers["Pass"][intial_customers["id"].index(id)] == password:
# print("Congratulations You are logged in!")
while(1):
inp_ = input("Press 1 to know your summary!\nPress 2 to continue\n")
if inp_=="1":
custid = id
print("Customerid: ",custid)
print("Customer Name: ",intial_customers["Name"][custid-1])
print("Trip Details")
print(pd.DataFrame(customer_ride_summary["custid"][custid]))
print("Availble Locations are------------------->")
print(intial_locations["Name"])
source = input("Choose source location: ").upper()
destination = input("Choose destination location: ").upper()
# if source== destination:
# print("Invalid Ride")
# continue
locs = intial_locations["Name"]
dist = intial_locations["Dist_from_origin"]
fare = abs(dist[locs.index(source)] - dist[locs.index(destination)])*10
print()
print(f"Your Estimasted Fare is {fare}Rs!")
print()
print("CAB LOCATIONS!!!")
print(pd.DataFrame(intial_cab_positions))
print()
cabride = input("Press Y if you want to start your ride or Press N to Quit ")
if cabride.lower()=="n":
break
distances = intial_cab_positions["Location"]
source_ = dist[locs.index(source)]
mini = 10000
cab_location,cabid = "",1000
for i in distances:
index = intial_locations["Name"].index(i)
temp = intial_locations["Dist_from_origin"][index]
                            dis = abs(temp - source_)
if dis < mini:
mini = dis
cab_location = i
cabidindex = intial_cab_positions["Location"].index(i)
cabid = intial_cab_positions["cabid"][cabidindex]
print(f"Near Available cab is CABID:{cabid},CABLOCATION:{cab_location} ")
if cabride.lower()=="y":
print("Your Ride Started!")
else:
if id not in intial_customers["id"]:
print("Please Enter Your Id correctly")
else:
print("Check Your Password and Try Again")
n = input("N to quit")
if n.lower()=='n':
break
elif cust==2:
id_ = int(input("Enter id "))
name_ = input("Enter Your Name: ")
pass_ = input("Set Your Password: ")
age_ = input("Enter Your Age")
intial_customers["id"].append(id_)
intial_customers["Name"].append(name_)
intial_customers["Pass"].append(pass_)
intial_customers["Age"].append(age_)
print("Thank you account has been sucessfully created!")
break
elif option==3:
inp = input("Press 1 to see Cabs Summary")
if inp=="1":
cabid = int(input("Enter cabid: "))
print("Cabid: ",cabid)
print("Cab Driver Name: ",intial_cab_drivers["Name"][cabid-1])
print("Total Number of Trips: ",cab_summary["cabid"][cabid]["Total Number of Trips"])
print("Total Fare Collected: ",cab_summary["cabid"][cabid]["Total Fare Collected"])
print("Total Zula Commision: ",cab_summary["cabid"][cabid]["Total Zula Commision"])
print("Trip Details--->")
print(pd.DataFrame(cabdrivers_summary["cabid"][cabid]))
print()
continue
elif option==4:
print("Thank you!")
break
|
python
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
class WorkUnit(object):
"""
A WorkUnit instance defines the Work which will be processed by a worker as defined
in the process_pool class. The Job Manager handles the dispatching of the WorkUnit.
It allows only one unique instance of the WorkUnit as defined by get_unique_key()
to be executed.
"""
def __init__(self):
self.in_progress_jobs = None
self.lock = None
def process(self, db_session, logger, process_name):
try:
self.start(db_session, logger, process_name)
except Exception:
logger.exception("WorkUnit.process() hit exception")
finally:
if self.in_progress_jobs is not None and self.lock is not None:
with self.lock:
if self.get_unique_key() in self.in_progress_jobs:
self.in_progress_jobs.remove(self.get_unique_key())
def start(self, db_session, logger, process_name):
raise NotImplementedError("Children must override start()")
def get_unique_key(self):
"""
        Returns a unique value which represents this instance. An example is a
        unique prefix with the job id from a specific DB table (e.g. email_job_1).
"""
raise NotImplementedError("Children must override get_unique_key()")
|
python
|
"""
link: https://leetcode.com/problems/word-ladder
problem: 给起始单词,结尾单词,与单词列表,问能否每次转换一个字母,使用列表中的单词由起始变换到结尾
solution: 无权最短路图,即BFS。难点在于如何构造图,一个很巧妙的思路,增加虚拟节点。将 hit 的相邻节点记为 hi*, h*t, *it,
将 hot 的相邻节点记为 ho*, h*t, *ot,这样两个节点就存在了相连路径。构造图后做BFS即可。
"""
class Solution:
def ladderLength(self, beginWord: str, endWord: str, wordList: List[str]) -> int:
d = collections.defaultdict(list)
wordList.append(beginWord)
for x in wordList:
for k in range(len(x)):
aim = x[:k] + "*" + x[k + 1:]
d[aim].append(x)
d[x].append(aim)
s = {beginWord}
q = [beginWord]
res = 0
while len(q) != 0:
res += 1
q2 = []
for word in q:
for next_word in d[word]:
if next_word == endWord:
return (res + 2) // 2
if next_word not in s:
s.add(next_word)
q2.append(next_word)
q = q2
return 0
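# Example from the problem statement: hit -> hot -> dot -> dog -> cog has length 5.
# print(Solution().ladderLength("hit", "cog", ["hot", "dot", "dog", "lot", "log", "cog"]))  # 5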
|
python
|
import numpy as np
import pandas as pd
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
df_train = pd.read_csv('train.csv')
train=pd.DataFrame(df_train)
train = pd.crosstab(index=train["Type"],columns="count")
type = [[1,"Dog"], [2,"Cat"]]
pet = pd.DataFrame(type, columns = ['Type','Animal'])
results = train.merge(pet,on='Type')
r1 = results[['Animal','count']]
print("")
print("Data from train.csv")
print(train)
print("-------------------")
print("Self-created type key")
print(pet)
print("-------------------")
print('combined data:')
print(r1)
#print("")
|
python
|
#!/usr/bin/env python
import rospy
import numpy as np
from sensor_msgs.msg import CompressedImage,Image # @UnresolvedImport
from duckietown_msgs.msg import AntiInstagramHealth, BoolStamped, AntiInstagramTransform # @UnresolvedImport
from anti_instagram.AntiInstagram import *
from duckietown_utils.jpg import image_cv_from_jpg
from cv_bridge import CvBridge # @UnresolvedImport
from line_detector.timekeeper import TimeKeeper
class AntiInstagramNode():
def __init__(self):
self.node_name = rospy.get_name()
self.active = True
self.locked = False
self.image_pub_switch = rospy.get_param("~publish_corrected_image",False)
# Initialize publishers and subscribers
self.pub_image = rospy.Publisher("~corrected_image", Image, queue_size=1)
self.pub_health = rospy.Publisher("~health", AntiInstagramHealth, queue_size=1,latch=True)
self.pub_transform = rospy.Publisher("~transform", AntiInstagramTransform, queue_size=1, latch=True)
#self.sub_switch = rospy.Subscriber("~switch",BoolStamped, self.cbSwitch, queue_size=1)
#self.sub_image = rospy.Subscriber("~uncorrected_image",Image,self.cbNewImage,queue_size=1)
self.sub_image = rospy.Subscriber("~uncorrected_image", CompressedImage, self.cbNewImage,queue_size=1)
self.sub_click = rospy.Subscriber("~click", BoolStamped, self.cbClick, queue_size=1)
self.trans_timer = rospy.Timer(rospy.Duration.from_sec(20), self.cbPubTrans, True)
# Verbose option
self.verbose = rospy.get_param('line_detector_node/verbose',True)
# Initialize health message
self.health = AntiInstagramHealth()
# Initialize transform message
self.transform = AntiInstagramTransform()
# FIXME: read default from configuration and publish it
self.ai_scale = np.array([2.2728408473337893, 2.2728273205024614, 2.272844346401005])
self.ai_shift = np.array([21.47181119272393, 37.14653160247276, 4.089311860796786])
self.ai = AntiInstagram()
self.corrected_image = Image()
self.bridge = CvBridge()
self.image_msg = None
self.click_on = False
def cbPubTrans(self, _):
self.transform.s[0], self.transform.s[1], self.transform.s[2] = self.ai_shift
self.transform.s[3], self.transform.s[4], self.transform.s[5] = self.ai_scale
self.pub_transform.publish(self.transform)
rospy.loginfo('ai: Color transform published.')
def cbNewImage(self,image_msg):
# memorize image
self.image_msg = image_msg
if self.image_pub_switch:
tk = TimeKeeper(image_msg)
cv_image = self.bridge.imgmsg_to_cv2(image_msg, "bgr8")
corrected_image_cv2 = self.ai.applyTransform(cv_image)
tk.completed('applyTransform')
corrected_image_cv2 = np.clip(corrected_image_cv2, 0, 255).astype(np.uint8)
self.corrected_image = self.bridge.cv2_to_imgmsg(corrected_image_cv2, "bgr8")
tk.completed('encode')
self.pub_image.publish(self.corrected_image)
tk.completed('published')
if self.verbose:
rospy.loginfo('ai:\n' + tk.getall())
def cbClick(self, _):
# if we have seen an image:
if self.image_msg is not None:
self.click_on = not self.click_on
if self.click_on:
self.processImage(self.image_msg)
else:
self.transform.s = [0,0,0,1,1,1]
self.pub_transform.publish(self.transform)
rospy.loginfo('ai: Color transform is turned OFF!')
def processImage(self,msg):
'''
Inputs:
msg - CompressedImage - uncorrected image from raspberry pi camera
Uses anti_instagram library to adjust msg so that it looks like the same
color temperature as a duckietown reference image. Calculates health of the node
        and publishes the corrected image and the health state. The health value
        indicates how good the computed transformation is.
'''
rospy.loginfo('ai: Computing color transform...')
tk = TimeKeeper(msg)
#cv_image = self.bridge.imgmsg_to_cv2(msg,"bgr8")
try:
cv_image = image_cv_from_jpg(msg.data)
except ValueError as e:
rospy.loginfo('Anti_instagram cannot decode image: %s' % e)
return
tk.completed('converted')
self.ai.calculateTransform(cv_image)
tk.completed('calculateTransform')
# if health is much below the threshold value, do not update the color correction and log it.
if self.ai.health <= 0.001:
# health is not good
rospy.loginfo("Health is not good")
else:
self.health.J1 = self.ai.health
self.transform.s[0], self.transform.s[1], self.transform.s[2] = self.ai.shift
self.transform.s[3], self.transform.s[4], self.transform.s[5] = self.ai.scale
rospy.set_param('antiins_shift', self.ai.shift.tolist())
rospy.set_param('antiins_scale', self.ai.scale.tolist())
self.pub_health.publish(self.health)
self.pub_transform.publish(self.transform)
rospy.loginfo('ai: Color transform published.')
if __name__ == '__main__':
# Initialize the node with rospy
rospy.init_node('anti_instagram_node', anonymous=False)
# Create the NodeName object
node = AntiInstagramNode()
# Setup proper shutdown behavior
#rospy.on_shutdown(node.on_shutdown)
# Keep it spinning to keep the node alive
rospy.spin()
|
python
|
# coding: utf-8
import types
import pymssql
from itertools import chain
from .abstract import DatabaseAdapter
class MSSQLAdapter(DatabaseAdapter):
last_table = None
def get_connection(self):
if hasattr(self, 'connection') and self.connection:
return self.connection
params = {
'server': self.params.get('host', 'localhost'),
'user': self.params.get('user'),
'password': self.params.get('password'),
'database': self.params.get('database'),
'autocommit': True,
}
if self.params.get('unix_socket'):
params.update({'unix_socket': self.params.get('unix_socket')})
else:
params.update({'port': self.params.get('port', 1433)})
conn = pymssql.connect(**params)
return conn
def foreign_keys_freeze(self):
self.query("""
DECLARE @sql AS NVARCHAR(max)='';
select @sql = @sql +
'ALTER INDEX ALL ON [' + t.[name] + '] DISABLE;' + CHAR(13)
from sys.tables t
where type = 'u';
select @sql = @sql +
'ALTER INDEX ' + i.[name] + ' ON [' + t.[name] + '] REBUILD;' + CHAR(13)
from sys.key_constraints i
join
sys.tables t on i.parent_object_id = t.object_id
where i.type = 'PK';
exec dbo.sp_executesql @sql
""")
def foreign_keys_unfreeze(self):
self.query('''
DECLARE @sql AS NVARCHAR(max)=''
select @sql = @sql +
'ALTER INDEX ALL ON [' + t.[name] + '] REBUILD;'+CHAR(13)
from
sys.tables t
where type='u'
exec dbo.sp_executesql @sql
''')
    def drop_all(self):
        # Note: GO is a client-side batch separator, not T-SQL, so the two
        # statements are issued as separate queries.
        self.query('drop database {0}'.format(self.params.get('database')))
        self.query('create database {0}'.format(self.params.get('database')))
def reset(self):
pass
def insert(self, table_name, dict_data):
        # With IDENTITY_INSERT on, SQL Server will not accept NULL for the
        # primary key, so drop an explicit null id before building the statement.
if 'id' in dict_data.keys() and dict_data.get('id') is None:
del dict_data['id']
placeholders = ', '.join(['%s'] * len(dict_data))
columns = ', '.join(dict_data.keys())
sql = "INSERT INTO %s ( %s ) VALUES ( %s )" % (table_name, columns, placeholders)
on_sql = f"SET IDENTITY_INSERT {table_name} ON"
off_sql = f"SET IDENTITY_INSERT {table_name} OFF"
if_exists_sql = f"IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [object_id] = OBJECT_ID(N'{table_name}'))"
if 'id' in dict_data.keys():
sql = "%s %s; %s; %s;" % (if_exists_sql, on_sql, sql, off_sql)
return self.query(sql, tuple(dict_data.values()))
def query(self, q: str, params=()):
super().query(q, params)
return self.cursor.execute(q, params)
def column_exists(self, table_name, column_name):
self.query("""
SELECT count(*) as count FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME=%s AND COLUMN_NAME=%s
""", (table_name, column_name))
return bool(self.fetchone()[0])
def table_exists(self, table_name):
self.query("""
SELECT count(*) as table_count FROM INFORMATION_SCHEMA.TABLES WHERE
TABLE_TYPE='BASE TABLE' AND TABLE_NAME=%s
""", table_name)
return bool(self.fetchone()[0])
def get_table_names(self):
self.query("""
SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' ORDER BY 1
""")
        return list(chain.from_iterable(self.fetchall()))
def get_table_schema(self, table_name):
self.query("""
SELECT column_name, data_type, is_nullable FROM INFORMATION_SCHEMA.COLUMNS WHERE
TABLE_NAME = %s ORDER BY LEN(column_name), column_name ASC
""", table_name)
schema = [dict(zip([column[0] for column in self.cursor.description], row)) for row in
self.cursor.fetchall()]
return schema
def get_records_count(self, table_name):
self.query("""
SELECT count(*) AS count FROM {}
""".format(table_name))
fetch = self.fetchone()
return int(fetch[0]) if fetch is not None else 0
    def get_table_as_json(self, table_name, transformer=None):
        # FOR JSON PATH emits column names itself, so no explicit column list is needed.
        self.query("""
            SELECT * FROM {table_name} FOR JSON PATH, INCLUDE_NULL_VALUES
        """.format(table_name=table_name))
results = ''
for row in self.fetchall():
results += row[0]
if isinstance(transformer, types.FunctionType):
results = transformer(results)
return results
def fetchone(self):
return self.cursor.fetchone()
def fetchall(self):
return self.cursor.fetchall()
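# Usage sketch (hypothetical parameters; assumes DatabaseAdapter accepts a
# params dict):
#
#     adapter = MSSQLAdapter(params={'host': 'localhost', 'user': 'sa',
#                                    'password': '...', 'database': 'mydb'})
#     adapter.insert('notes', {'id': 1, 'title': 'hello'})  # wrapped in IDENTITY_INSERT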
|
python
|
class Camera:
def __init__(self, game):
self.game = game
self.dx = 0
self.dy = 0
self.ny = 240
self.is_start = True
def start_camera(self):
self.dx = -2100
self.dy = -2100
def apply(self, obj):
obj.rect.x += self.dx
obj.rect.y += self.dy
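    # Recompute the camera offset each frame: follow the hero horizontally while
    # inside the corridor bounds, and jump vertically (shifting the hero to match)
    # when the hero crosses one of the floor-transition bands below.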
def update(self, x, y):
if self.is_start:
self.start_camera()
self.is_start = False
else:
self.dx = 0
self.dy = 0
if 260 < self.game.corridor.hero.general_x < 2360:
self.dx = -(x - self.game.width // 2 + self.game.corridor.hero.rect.w // 2)
elif 20 > self.game.corridor.hero.general_x and \
self.game.corridor.hero.general_y <= 2270:
if 360 <= self.game.corridor.hero.general_y <= 380 or \
1060 <= self.game.corridor.hero.general_y <= 1080 or \
1760 <= self.game.corridor.hero.general_y <= 1780:
self.dy = -700
self.game.corridor.hero.general_y += 600
self.game.corridor.hero.rect.y += 600
if 700 <= self.game.corridor.hero.general_y <= 750 or \
1400 <= self.game.corridor.hero.general_y <= 1450 or \
2100 <= self.game.corridor.hero.general_y <= 2150:
self.dy = 700
self.game.corridor.hero.general_y -= 440
self.game.corridor.hero.rect.y -= 440
|
python
|
import libres
import threading
from cached_property import cached_property
from contextlib import contextmanager
from libres.modules import errors
missing = object()
required = object()
class StoppableService(object):
""" Services inheriting from this class have their stop_service method
called when the service is discarded.
Note that this only happens when a service is replaced with a new one
and not when libres is stopped (i.e. this is *not* a deconstructor).
"""
def stop_service(self):
pass
class ContextServicesMixin(object):
""" Provides access methods to the context's services. Expects
the class that uses the mixin to provide self.context.
The results are cached for performance.
"""
@cached_property
def is_allocation_exposed(self):
return self.context.get_service('exposure').is_allocation_exposed
@cached_property
def generate_uuid(self):
return self.context.get_service('uuid_generator')
@cached_property
def validate_email(self):
return self.context.get_service('email_validator')
def clear_cache(self):
""" Clears the cache of the mixin. """
try:
del self.is_allocation_exposed
except AttributeError:
pass
try:
del self.generate_uuid
except AttributeError:
pass
try:
del self.validate_email
except AttributeError:
pass
@property
def session_provider(self):
return self.context.get_service('session_provider')
@property
def session(self):
""" Returns the current session. """
return self.session_provider.session()
def close(self):
""" Closes the current session. """
self.session.close()
@property
def begin_nested(self):
return self.session.begin_nested
def commit(self):
return self.session.commit()
def rollback(self):
return self.session.rollback()
class Context(object):
""" Used throughout Libres, the context holds settings like the database
connection string and services like the json dumps/loads functions that
should be used.
    Contexts allow consumers of the Libres library to override these settings /
    services as they wish. It also makes sure that multiple consumers of Libres
    can co-exist in a single process, as each consumer must operate on its
    own context.
Libres holds all contexts in libres.registry and provides a master_context.
When a consumer registers its own context, all lookups happen on the custom
context. If that context can provide a service or a setting, it is used.
If the custom context can't provide a service or a setting, the
master_context is used instead. In other words, the custom context
inherits from the master context.
    Note that contexts are not meant to be changed often. Classes talking to the
    database usually cache data from the context freely. That means basically
that after changing the context you should get a fresh
:class:`~libres.db.scheduler.Scheduler` instance or call
:meth:`~.ContextServicesMixin.clear_cache`.
A context may be registered as follows::
from libres import registry
my_context = registry.register_context('my_app')
See also :class:`~libres.context.registry.Registry`
"""
def __init__(self, name, registry=None, parent=None, locked=False):
self.name = name
self.registry = registry or libres.registry
self.values = {}
self.parent = parent
        self.locked = locked
self.thread_lock = threading.RLock()
def __repr__(self):
return "<Libres Context(name='{}')>".format(self.name)
@contextmanager
def as_current_context(self):
with self.registry.context(self.name):
yield
def switch_to(self):
self.registry.switch_context(self.name)
def lock(self):
with self.thread_lock:
self.locked = True
def unlock(self):
with self.thread_lock:
self.locked = False
def get(self, key):
if key in self.values:
return self.values[key]
elif self.parent:
return self.parent.get(key)
else:
return missing
def set(self, key, value):
if self.locked:
raise errors.ContextIsLocked
with self.thread_lock:
# If a value already exists it could be a stoppable service.
            # Stoppable services are stopped before they are replaced, so they
# can clean up after themselves without having to wait for the GC.
if isinstance(self.values.get(key), StoppableService):
self.values[key].stop_service()
self.values[key] = value
def get_setting(self, name):
return self.get('settings.{}'.format(name))
def set_setting(self, name, value):
with self.thread_lock:
self.set('settings.{}'.format(name), value)
def get_service(self, name):
service_id = '/'.join(('service', name))
service = self.get(service_id)
if service is missing:
raise errors.UnknownService(service_id)
cache_id = '/'.join(('service', name, 'cache'))
cache = self.get(cache_id)
# no cache
if cache is missing:
return service(self)
else:
# first call, cache it!
if cache is required:
self.set(cache_id, service(self))
# nth call, use cached value
return self.get(cache_id)
def set_service(self, name, factory, cache=False):
with self.thread_lock:
service_id = '/'.join(('service', name))
self.set(service_id, factory)
if cache:
cache_id = '/'.join(('service', name, 'cache'))
self.set(cache_id, required)
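# Usage sketch (hypothetical service name): register a factory and resolve it;
# with cache=True the factory runs once and the result is reused afterwards.
#
#     context = Context('my_app')
#     context.set_service('echo', lambda ctx: (lambda x: x), cache=True)
#     echo = context.get_service('echo')  # factory called on first lookup only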
|
python
|
"""
COMMAND: SELECT
Select objects by id or name for further
command processes.
"""
import command
import cache
from util import logger
from api import APIRequests
class Select(command.Command):
@staticmethod
def get_invoke():
return 'SELECT'
@staticmethod
def get_args():
return {
'GUILD(S)|CHANNEL(S)|ROLE(S)|USER(S)': True,
'BY NAME [ID|NAME]': False
}
@staticmethod
def get_help_description():
return 'Select an object by ID or NAME for further command operations.'
def execute(self, passed_args: list):
if len(passed_args) < 1:
            logger.fatal('MISSING 1st ARGUMENT: GUILD(S)|CHANNEL(S)|ROLE(S)|USER(S)')
raise Exception('manual interruption')
# if len(passed_args) < 2:
        # logger.fatal('MISSING 2nd ARGUMENT: ID')
# raise Exception('manual interruption')
api = self.cmd_parser.api_instance
by_name = False
if len(passed_args) > 2 and passed_args[1].upper() == 'BY' and passed_args[2].upper() == 'NAME':
if len(passed_args) < 4:
logger.fatal('MISSING ARGUMENT: [NAME]')
raise Exception('manual interruption')
by_name = True
objecttype = passed_args[0].upper()
identifier = passed_args[1] if len(passed_args) > 1 else None
if by_name:
identifier = passed_args[3]
        def __check_args_length(must: int, argname: str, soft: bool = False) -> bool:
            if len(passed_args) < must:
                if not soft:
                    logger.fatal('MISSING ARGUMENT: [%s]' % argname)
                    raise Exception('manual interruption')
                logger.error('MISSING ARGUMENT: [%s]' % argname)
                return False
            return True
if objecttype == 'GUILD':
__check_args_length(2, 'ID')
response = api.get_guild(identifier, by_name)
APIRequests.check_status_code(response)
cache.selected = cache.Selection('GUILD', response.json())
elif objecttype == 'GUILDS':
response = api.get_users_guilds()
api.check_status_code(response)
cache.selected = cache.Selection('GUILDS', response.json())
        elif objecttype == 'CHANNEL':
            __check_args_length(2, 'ID')
            if by_name and (cache.selected is None or cache.selected.type != 'GUILD'):
                logger.fatal('GUILD needs to be selected to select a channel by name')
                raise Exception('manual interruption')
            guild_id = cache.selected.data['id'] if cache.selected is not None else ''
            response = api.get_channel(guild_id, identifier, by_name)
            APIRequests.check_status_code(response)
            cache.selected = cache.Selection('CHANNEL', response.json())
elif objecttype == 'USER':
pass
elif objecttype == 'ROLE':
pass
else:
logger.error('UNSUPPORTED TYPE: ', objecttype)
raise Exception('manual interruption')
logger.debug('SELECTED:\n - TYPE: ', cache.selected.type, '\n - DATA: ', cache.selected.data)
|
python
|
from testcases import TestCaseWithFixture as TestCase
from django.http import HttpRequest
from django.contrib.auth.models import User, Permission
from core.models import Note
from tastypie.authorization import Authorization, ReadOnlyAuthorization, DjangoAuthorization
from tastypie import fields
from tastypie.resources import Resource, ModelResource
class NoRulesNoteResource(ModelResource):
class Meta:
resource_name = 'notes'
queryset = Note.objects.filter(is_active=True)
authorization = Authorization()
class ReadOnlyNoteResource(ModelResource):
class Meta:
resource_name = 'notes'
queryset = Note.objects.filter(is_active=True)
authorization = ReadOnlyAuthorization()
class DjangoNoteResource(ModelResource):
class Meta:
resource_name = 'notes'
queryset = Note.objects.filter(is_active=True)
authorization = DjangoAuthorization()
class NotAModel(object):
name = 'Foo'
class NotAModelResource(Resource):
name = fields.CharField(attribute='name')
class Meta:
resource_name = 'notamodel'
object_class = NotAModel
authorization = DjangoAuthorization()
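# The tests below exercise the mapping DjangoAuthorization draws between HTTP
# methods and Django model permissions: GET is always allowed, POST requires
# "add", PUT requires "change", DELETE requires "delete", and PATCH needs all three.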
class AuthorizationTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_no_rules(self):
request = HttpRequest()
for method in ('GET', 'POST', 'PUT', 'DELETE'):
request.method = method
self.assertTrue(NoRulesNoteResource()._meta.authorization.is_authorized(request))
def test_read_only(self):
request = HttpRequest()
request.method = 'GET'
self.assertTrue(ReadOnlyNoteResource()._meta.authorization.is_authorized(request))
for method in ('POST', 'PUT', 'DELETE'):
request = HttpRequest()
request.method = method
self.assertFalse(ReadOnlyNoteResource()._meta.authorization.is_authorized(request))
class DjangoAuthorizationTestCase(TestCase):
fixtures = ['note_testdata.json']
def setUp(self):
self.add = Permission.objects.get_by_natural_key('add_note', 'core', 'note')
self.change = Permission.objects.get_by_natural_key('change_note', 'core', 'note')
self.delete = Permission.objects.get_by_natural_key('delete_note', 'core', 'note')
self.user = User.objects.all()[0]
self.user.user_permissions.clear()
def test_no_perms(self):
# sanity check: user has no permissions
self.assertFalse(self.user.get_all_permissions())
request = HttpRequest()
request.method = 'GET'
request.user = self.user
# with no permissions, api is read-only
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
for method in ('POST', 'PUT', 'DELETE'):
request.method = method
self.assertFalse(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_add_perm(self):
request = HttpRequest()
request.user = self.user
# give add permission
request.user.user_permissions.add(self.add)
request.method = 'POST'
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_change_perm(self):
request = HttpRequest()
request.user = self.user
# give change permission
request.user.user_permissions.add(self.change)
request.method = 'PUT'
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_delete_perm(self):
request = HttpRequest()
request.user = self.user
# give delete permission
request.user.user_permissions.add(self.delete)
request.method = 'DELETE'
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_all(self):
request = HttpRequest()
request.user = self.user
request.user.user_permissions.add(self.add)
request.user.user_permissions.add(self.change)
request.user.user_permissions.add(self.delete)
for method in ('GET', 'OPTIONS', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH'):
request.method = method
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_not_a_model(self):
request = HttpRequest()
request.user = self.user
# give add permission
request.user.user_permissions.add(self.add)
request.method = 'POST'
self.assertTrue(NotAModelResource()._meta.authorization.is_authorized(request))
def test_patch_perms(self):
request = HttpRequest()
request.user = self.user
request.method = 'PATCH'
# Not enough.
request.user.user_permissions.add(self.add)
request.user.refresh_from_db()
self.assertFalse(DjangoNoteResource()._meta.authorization.is_authorized(request))
# Still not enough.
request.user.user_permissions.add(self.change)
request.user.refresh_from_db()
self.assertFalse(DjangoNoteResource()._meta.authorization.is_authorized(request))
# Much better.
request.user.user_permissions.add(self.delete)
request.user = User.objects.get(pk=self.user.pk)
self.assertTrue(DjangoNoteResource()._meta.authorization.is_authorized(request))
def test_unrecognized_method(self):
request = HttpRequest()
request.user = self.user
# Check a non-existent HTTP method.
request.method = 'EXPLODE'
self.assertFalse(DjangoNoteResource()._meta.authorization.is_authorized(request))
|
python
|
from django.core.exceptions import ValidationError
from pulpo_forms.fieldtypes.Field import Field
from pulpo_forms.statistics.ListStatistics import ListStatistics
class ListField(Field):
"""
    List field validator, render and analyze methods
"""
def get_methods(self, **kwargs):
base = super(ListField, self).get_methods(**kwargs)
base.append(self.belong_check)
return base
    def belong_check(self, value, **kwargs):
        v = int(value)
        option_ids = [o['id'] for o in kwargs['options']]
        if v not in option_ids:
            raise ValidationError("Invalid value, not among options.")
    def check_consistency(self, field):
        options = field.options
        if options == []:
            raise ValidationError("List fields need at least one option.")
def get_option_labels(self, field):
return field["options"]
def get_statistics(self, data_list, field):
options = self.get_option_labels(field)
list_statistics = ListStatistics(data_list, options)
statistics = super(ListField, self).get_statistics(data_list, field)
statistics.update(list_statistics.getSerializedData())
return statistics
def get_options(self, json, f_id):
for page in json['pages']:
for field in page['fields']:
if (field['field_id'] == f_id):
return field['options']
class Meta:
abstract = True
|
python
|
from sqlalchemy import Column, Integer, Float, String, Date, Time
from shared.core.db import Base
class HourlyMainData(Base):
__tablename__ = 'HourlyMain'
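    # Each measurement is stored as a triple of columns: the value (Float),
    # a QC flag (String), and the units (String).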
Id = Column(Integer, primary_key=True, nullable=False)
StationId = Column(Integer, nullable=False)
Date = Column(Date, nullable=False)
Hour = Column(Time, nullable=False)
HlyAirTmp = Column(Float, nullable=True)
HlyAirTmpQc = Column(String(50), nullable=True, default='')
HlyAirTmpUnits = Column(String(50), nullable=True, default='')
HlyDewPnt = Column(Float, nullable=True)
HlyDewPntQc = Column(String(50), nullable=True, default='')
HlyDewPntUnits = Column(String(50), nullable=True, default='')
HlyEto = Column(Float, nullable=True)
HlyEtoQc = Column(String(50), nullable=True, default='')
HlyEtoUnits = Column(String(50), nullable=True, default='')
HlyNetRad = Column(Float, nullable=True)
HlyNetRadQc = Column(String(50), nullable=True, default='')
HlyNetRadUnits = Column(String(50), nullable=True, default='')
HlyAsceEto = Column(Float, nullable=True)
HlyAsceEtoQc = Column(String(50), nullable=True, default='')
HlyAsceEtoUnits = Column(String(50), nullable=True, default='')
HlyAsceEtr = Column(Float, nullable=True)
HlyAsceEtrQc = Column(String(50), nullable=True, default='')
HlyAsceEtrUnits = Column(String(50), nullable=True, default='')
HlyPrecip = Column(Float, nullable=True)
HlyPrecipQc = Column(String(50), nullable=True, default='')
HlyPrecipUnits = Column(String(50), nullable=True, default='')
HlyRelHum = Column(Float, nullable=True)
HlyRelHumQc = Column(String(50), nullable=True, default='')
HlyRelHumUnits = Column(String(50), nullable=True, default='')
HlyResWind = Column(Float, nullable=True)
HlyResWindQc = Column(String(50), nullable=True, default='')
HlyResWindUnits = Column(String(50), nullable=True, default='')
HlySoilTmp = Column(Float, nullable=True)
HlySoilTmpQc = Column(String(50), nullable=True, default='')
HlySoilTmpUnits = Column(String(50), nullable=True, default='')
HlySolRad = Column(Float, nullable=True)
HlySolRadQc = Column(String(50), nullable=True, default='')
HlySolRadUnits = Column(String(50), nullable=True, default='')
HlyVapPres = Column(Float, nullable=True)
HlyVapPresQc = Column(String(50), nullable=True, default='')
HlyVapPresUnits = Column(String(50), nullable=True, default='')
HlyWindDir = Column(Float, nullable=True)
HlyWindDirQc = Column(String(50), nullable=True, default='')
HlyWindDirUnits = Column(String(50), nullable=True, default='')
HlyWindSpd = Column(Float, nullable=True)
HlyWindSpdQc = Column(String(50), nullable=True, default='')
HlyWindSpdUnits = Column(String(50), nullable=True, default='')
class DailyMainData(Base):
__tablename__ = 'DailyMain'
Id = Column(Integer, primary_key=True, nullable=False)
StationId = Column(Integer, nullable=False)
Date = Column(Date, nullable=False)
DayAirTmpAvg = Column(Float, nullable=True)
DayAirTmpAvgQc = Column(String(50), nullable=True, default='')
DayAirTmpAvgUnits = Column(String(50), nullable=True, default='')
DayAirTmpMax = Column(Float, nullable=True)
DayAirTmpMaxQc = Column(String(50), nullable=True, default='')
DayAirTmpMaxUnits = Column(String(50), nullable=True, default='')
DayAirTmpMin = Column(Float, nullable=True)
DayAirTmpMinQc = Column(String(50), nullable=True, default='')
DayAirTmpMinUnits = Column(String(50), nullable=True, default='')
DayDewPnt = Column(Float, nullable=True)
DayDewPntQc = Column(String(50), nullable=True, default='')
DayDewPntUnits = Column(String(50), nullable=True, default='')
DayEto = Column(Float, nullable=True)
DayEtoQc = Column(String(50), nullable=True, default='')
DayEtoUnits = Column(String(50), nullable=True, default='')
DayAsceEto = Column(Float, nullable=True)
DayAsceEtoQc = Column(String(50), nullable=True, default='')
DayAsceEtoUnits = Column(String(50), nullable=True, default='')
DayAsceEtr = Column(Float, nullable=True)
DayAsceEtrQc = Column(String(50), nullable=True, default='')
DayAsceEtrUnits = Column(String(50), nullable=True, default='')
DayPrecip = Column(Float, nullable=True)
DayPrecipQc = Column(String(50), nullable=True, default='')
DayPrecipUnits = Column(String(50), nullable=True, default='')
DayRelHumAvg = Column(Float, nullable=True)
DayRelHumAvgQc = Column(String(50), nullable=True, default='')
DayRelHumAvgUnits = Column(String(50), nullable=True, default='')
DayRelHumMax = Column(Float, nullable=True)
DayRelHumMaxQc = Column(String(50), nullable=True, default='')
DayRelHumMaxUnits = Column(String(50), nullable=True, default='')
DayRelHumMin = Column(Float, nullable=True)
DayRelHumMinQc = Column(String(50), nullable=True, default='')
DayRelHumMinUnits = Column(String(50), nullable=True, default='')
DaySoilTmpAvg = Column(Float, nullable=True)
DaySoilTmpAvgQc = Column(String(50), nullable=True, default='')
DaySoilTmpAvgUnits = Column(String(50), nullable=True, default='')
DaySoilTmpMax = Column(Float, nullable=True)
DaySoilTmpMaxQc = Column(String(50), nullable=True, default='')
DaySoilTmpMaxUnits = Column(String(50), nullable=True, default='')
DaySoilTmpMin = Column(Float, nullable=True)
DaySoilTmpMinQc = Column(String(50), nullable=True, default='')
DaySoilTmpMinUnits = Column(String(50), nullable=True, default='')
DaySolRadAvg = Column(Float, nullable=True)
DaySolRadAvgQc = Column(String(50), nullable=True, default='')
DaySolRadAvgUnits = Column(String(50), nullable=True, default='')
DaySolRadNet = Column(Float, nullable=True)
DaySolRadNetQc = Column(String(50), nullable=True, default='')
DaySolRadNetUnits = Column(String(50), nullable=True, default='')
DayVapPresAvg = Column(Float, nullable=True)
DayVapPresAvgQc = Column(String(50), nullable=True, default='')
DayVapPresAvgUnits = Column(String(50), nullable=True, default='')
DayVapPresMax = Column(Float, nullable=True)
DayVapPresMaxQc = Column(String(50), nullable=True, default='')
DayVapPresMaxUnits = Column(String(50), nullable=True, default='')
DayWindEne = Column(Float, nullable=True)
DayWindEneQc = Column(String(50), nullable=True, default='')
DayWindEneUnits = Column(String(50), nullable=True, default='')
DayWindEse = Column(Float, nullable=True)
DayWindEseQc = Column(String(50), nullable=True, default='')
DayWindEseUnits = Column(String(50), nullable=True, default='')
DayWindNne = Column(Float, nullable=True)
DayWindNneQc = Column(String(50), nullable=True, default='')
DayWindNneUnits = Column(String(50), nullable=True, default='')
DayWindNnw = Column(Float, nullable=True)
DayWindNnwQc = Column(String(50), nullable=True, default='')
DayWindNnwUnits = Column(String(50), nullable=True, default='')
DayWindRun = Column(Float, nullable=True)
DayWindRunQc = Column(String(50), nullable=True, default='')
DayWindRunUnits = Column(String(50), nullable=True, default='')
DayWindSpdAvg = Column(Float, nullable=True)
DayWindSpdAvgQc = Column(String(50), nullable=True, default='')
DayWindSpdAvgUnits = Column(String(50), nullable=True, default='')
DayWindSsw = Column(Float, nullable=True)
DayWindSswQc = Column(String(50), nullable=True, default='')
DayWindSswUnits = Column(String(50), nullable=True, default='')
DayWindSse = Column(Float, nullable=True)
DayWindSseQc = Column(String(50), nullable=True, default='')
DayWindSseUnits = Column(String(50), nullable=True, default='')
DayWindWnw = Column(Float, nullable=True)
DayWindWnwQc = Column(String(50), nullable=True, default='')
DayWindWnwUnits = Column(String(50), nullable=True, default='')
DayWindWsw = Column(Float, nullable=True)
DayWindWswQc = Column(String(50), nullable=True, default='')
DayWindWswUnits = Column(String(50), nullable=True, default='')
|
python
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from builtins import range
from past.utils import old_div
import sys
from forcebalance.molecule import *
# Script to generate virtual sites and rename atoms in .gro file.
M = Molecule(sys.argv[1])
if 'M' in M.elem:
print("Virtual sites already exist")
sys.exit()
num_mol = int(M.na/3)
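# Iterate over the waters in reverse so that inserting a virtual site after one
# 3-atom water does not shift the indices of the waters still to be processed.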
for i in range(num_mol)[::-1]:
v = i*3 + 3
M.add_virtual_site(v, resid=i+1, elem='M', atomname='MW', resname='SOL', pos=i*3)
M.replace_peratom('resname', 'HOH','SOL')
M.replace_peratom_conditional('resname', 'SOL', 'atomname', 'H1', 'HW1')
M.replace_peratom_conditional('resname', 'SOL', 'atomname', 'H2', 'HW2')
M.replace_peratom_conditional('resname', 'SOL', 'atomname', 'O', 'OW')
M.write('new.gro')
|
python
|
from django.utils import timezone
from django.conf import settings
import datetime
from rest_framework_jwt.settings import api_settings
expires_delta = (api_settings.JWT_REFRESH_EXPIRATION_DELTA) - datetime.timedelta(seconds=200)
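# The 200-second margin makes the reported expiry fall slightly before the real
# refresh window closes, presumably to leave clients time to renew the token.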
def jwt_response_handler(token, user=None, request=None):
return {
'token': token,
'user': user.username,
'expires': timezone.now() + expires_delta
}
|
python
|
import zeit.cms.testing
import zeit.content.article.testing
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'edit.landing.txt',
'edit.txt',
'edit.form.txt',
package='zeit.content.article.edit.browser',
layer=zeit.content.article.testing.WSGI_LAYER)
|
python
|
"""Auxiliar functions that may be used in most modules"""
from typing import List
import numpy as np
def compute_permutation_distance(
distance_matrix: np.ndarray, permutation: List[int]
) -> float:
"""Compute the total route distance of a given permutation
Parameters
----------
distance_matrix
Distance matrix of shape (n x n) with the (i, j) entry indicating the
distance from node i to j. It does not need to be symmetric
permutation
A list with nodes from 0 to n - 1 in any order
Returns
-------
Total distance of the path given in ``permutation`` for the provided
``distance_matrix``
Notes
-----
Suppose the permutation [0, 1, 2, 3], with four nodes. The total distance
of this path will be from 0 to 1, 1 to 2, 2 to 3, and 3 back to 0. This
can be fetched from a distance matrix using:
distance_matrix[ind1, ind2], where
ind1 = [0, 1, 2, 3] # the FROM nodes
ind2 = [1, 2, 3, 0] # the TO nodes
This can easily be generalized to any permutation by using ind1 as the
given permutation, and moving the first node to the end to generate ind2.
"""
ind1 = permutation
ind2 = permutation[1:] + permutation[:1]
return distance_matrix[ind1, ind2].sum()
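# Worked example (hypothetical data): for the tour [0, 1, 2, 3] on
# D = np.array([[0, 1, 2, 3],
#               [1, 0, 4, 5],
#               [2, 4, 0, 6],
#               [3, 5, 6, 0]])
# the result is D[0, 1] + D[1, 2] + D[2, 3] + D[3, 0] = 1 + 4 + 6 + 3 = 14.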
|
python
|
'''
test_var = False
if(test_var == True):
print("okay")
else:
print("this is not true")
number_a = 500.6
number_b = 100.4
if(number_a > number_b):
print(number_a,"is bigger than",number_b)
else:
print(number_b,"is bigger than",number_a)
# name = input("what's your name? ")
# print("your name is",name)
def Multiply(num1,num2):
result = num1*num2
return result
temp = Multiply(2,5)
print(">>",temp)
'''
text = float("1665.5")
print(text*52.5)
action = input("?")
num1 = float(input('number 1'))
num2 = float(input('number 2'))
result = 0
if(action == '+'):
result = num1+num2
elif(action == '*'):
result = num1*num2
else:
    print('this is not a valid action')
print(result)
number = 515.5
text = "1235.2"
number += float(text)
text += str(number)
|
python
|
#!/usr/bin/env python
"""Example script"""
from __future__ import division, print_function
import random
import time
from simanneal import Annealer
import click
import numpy as np
import rasterio
from rasterio.plot import reshape_as_image
from rio_color.operations import parse_operations
from rio_color.utils import to_math_type
def time_string(seconds):
"""Returns time in seconds as a string formatted HHHH:MM:SS."""
s = int(round(seconds)) # round to nearest second
h, s = divmod(s, 3600) # get hours and remainder
m, s = divmod(s, 60) # split remainder into minutes and seconds
return "%2i:%02i:%02i" % (h, m, s)
def progress_report(
curr, best, curr_score, best_score, step, totalsteps, accept, improv, elaps, remain
):
"""Report progress"""
text = """
Current Formula {curr} (hist distance {curr_score})
Best Formula {best} (hist distance {best_score})
Step {step} of {totalsteps}
Acceptance Rate : {accept} %
Improvement Rate: {improv} %
    Time {elaps} ( {remain} Remaining)""".format(
**locals()
)
return text
# Plot globals
fig = None
txt = None
imgs = []
class ColorEstimator(Annealer):
"""Optimizes color using simulated annealing"""
keys = "gamma_red,gamma_green,gamma_blue,contrast".split(",")
def __init__(self, source, reference, state=None):
"""Create a new instance"""
self.src = source.copy()
self.ref = reference.copy()
if not state:
params = dict(gamma_red=1.0, gamma_green=1.0, gamma_blue=1.0, contrast=10)
else:
if self._validate(state):
params = state
else:
raise ValueError("invalid state")
super(ColorEstimator, self).__init__(params)
    def _validate(self, state):
        """Check that a state dict contains all required keys (called from __init__)."""
        # TODO: also validate that the values lie within sensible bounds
        return all(k in state for k in self.keys)
def move(self):
"""Create a state change."""
k = random.choice(self.keys)
multiplier = random.choice((0.95, 1.05))
invalid_key = True
while invalid_key:
# make sure bias doesn't exceed 1.0
if k == "bias":
if self.state[k] > 0.909:
k = random.choice(self.keys)
continue
invalid_key = False
newval = self.state[k] * multiplier
self.state[k] = newval
def cmd(self, state):
"""Get color formula representation of the state."""
ops = (
"gamma r {gamma_red:.2f}, gamma g {gamma_green:.2f}, gamma b {gamma_blue:.2f}, "
"sigmoidal rgb {contrast:.2f} 0.5".format(**state)
)
return ops
def apply_color(self, arr, state):
"""Apply color formula to an array."""
ops = self.cmd(state)
for func in parse_operations(ops):
arr = func(arr)
return arr
def energy(self):
"""Calculate state's energy."""
arr = self.src.copy()
arr = self.apply_color(arr, self.state)
scores = [histogram_distance(self.ref[i], arr[i]) for i in range(3)]
# Important: scale by 100 for readability
return sum(scores) * 100
def to_dict(self):
"""Serialize as a dict."""
return dict(best=self.best_state, current=self.state)
def update(self, step, T, E, acceptance, improvement):
"""Print progress."""
if acceptance is None:
acceptance = 0
if improvement is None:
improvement = 0
if step > 0:
elapsed = time.time() - self.start
remain = (self.steps - step) * (elapsed / step)
            # print('Time {} ({} Remaining)'.format(time_string(elapsed), time_string(remain)))
else:
elapsed = 0
remain = 0
curr = self.cmd(self.state)
curr_score = float(E)
best = self.cmd(self.best_state)
best_score = self.best_energy
report = progress_report(
curr,
best,
curr_score,
best_score,
step,
self.steps,
acceptance * 100,
improvement * 100,
time_string(elapsed),
time_string(remain),
)
print(report)
if fig:
imgs[1].set_data(
reshape_as_image(self.apply_color(self.src.copy(), self.state))
)
imgs[2].set_data(
reshape_as_image(self.apply_color(self.src.copy(), self.best_state))
)
if txt:
txt.set_text(report)
fig.canvas.draw()
def histogram_distance(arr1, arr2, bins=None):
""" This function returns the sum of the squared error
Parameters:
two arrays constrained to 0..1
Returns:
sum of the squared error between the histograms
"""
eps = 1e-6
assert arr1.min() > 0 - eps
assert arr1.max() < 1 + eps
assert arr2.min() > 0 - eps
assert arr2.max() < 1 + eps
if not bins:
bins = [x / 10 for x in range(11)]
hist1 = np.histogram(arr1, bins=bins)[0] / arr1.size
hist2 = np.histogram(arr2, bins=bins)[0] / arr2.size
assert abs(hist1.sum() - 1.0) < eps
assert abs(hist2.sum() - 1.0) < eps
sqerr = (hist1 - hist2) ** 2
return sqerr.sum()
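# Identical inputs produce identical histograms, so the distance is 0.0, e.g.
# histogram_distance(np.linspace(0, 1, 100), np.linspace(0, 1, 100)) == 0.0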
def calc_downsample(w, h, target=400):
"""Calculate downsampling value."""
if w > h:
return h / target
elif h >= w:
return w / target
@click.command()
@click.argument("source")
@click.argument("reference")
@click.option("--downsample", "-d", type=int, default=None)
@click.option("--steps", "-s", type=int, default=5000)
@click.option("--plot/--no-plot", default=True)
def main(source, reference, downsample, steps, plot):
"""Given a source image and a reference image,
Find the rio color formula which results in an
output with similar histogram to the reference image.
Uses simulated annealing to determine optimal settings.
Increase the --downsample option to speed things up.
Increase the --steps to get better results (longer runtime).
"""
global fig, txt, imgs
click.echo("Reading source data...", err=True)
with rasterio.open(source) as src:
if downsample is None:
ratio = calc_downsample(src.width, src.height)
else:
ratio = downsample
w = int(src.width // ratio)
h = int(src.height // ratio)
rgb = src.read((1, 2, 3), out_shape=(3, h, w))
orig_rgb = to_math_type(rgb)
click.echo("Reading reference data...", err=True)
with rasterio.open(reference) as ref:
if downsample is None:
ratio = calc_downsample(ref.width, ref.height)
else:
ratio = downsample
w = int(ref.width / ratio)
h = int(ref.height / ratio)
rgb = ref.read((1, 2, 3), out_shape=(3, h, w))
ref_rgb = to_math_type(rgb)
click.echo("Annealing...", err=True)
est = ColorEstimator(orig_rgb, ref_rgb)
if plot:
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(20, 10))
fig.suptitle("Color Formula Optimization", fontsize=18, fontweight="bold")
txt = fig.text(0.02, 0.05, "foo", family="monospace", fontsize=16)
axs = (
fig.add_subplot(1, 4, 1),
fig.add_subplot(1, 4, 2),
fig.add_subplot(1, 4, 3),
fig.add_subplot(1, 4, 4),
)
fig.tight_layout()
axs[0].set_title("Source")
axs[1].set_title("Current Formula")
axs[2].set_title("Best Formula")
axs[3].set_title("Reference")
imgs.append(axs[0].imshow(reshape_as_image(est.src)))
imgs.append(axs[1].imshow(reshape_as_image(est.src)))
imgs.append(axs[2].imshow(reshape_as_image(est.src)))
imgs.append(axs[3].imshow(reshape_as_image(est.ref)))
fig.show()
schedule = dict(
tmax=25.0, # Max (starting) temperature
tmin=1e-4, # Min (ending) temperature
steps=steps, # Number of iterations
updates=steps / 20, # Number of updates
)
est.set_schedule(schedule)
est.save_state_on_exit = False
optimal, score = est.anneal()
optimal["energy"] = score
ops = est.cmd(optimal)
click.echo("rio color -j4 {} {} {}".format(source, "/tmp/output.tif", ops))
if __name__ == "__main__":
main()
|
python
|
class Card:
def __init__(self, card_type):
"""
card_type 0 is a skipbo card
card_type 1-12 are the normal value cards
        actual_value indicates the value a skipbo card takes on after it is played
"""
self.card_type = card_type
self.actual_value = card_type if card_type > 0 else 0
|
python
|
import numpy as np
from lagom.envs.spaces import Box
from lagom.envs.wrappers import ObservationWrapper
class PartialFlattenDict(ObservationWrapper):
"""
Returns flattened observation from a dictionary space with partial keys into a Box space.
"""
def __init__(self, env, keys):
super().__init__(env)
self.keys = keys
spaces = self.env.observation_space.spaces
assert all([isinstance(space, Box) for space in spaces.values()]) # enforce all Box spaces
# Calculate dimensionality
shape = (int(np.sum([spaces[key].flat_dim for key in self.keys])), )
self._observation_space = Box(low=-np.inf, high=np.inf, shape=shape, dtype=np.float32)
def process_observation(self, observation):
return np.concatenate([observation[key].ravel() for key in self.keys])
@property
def observation_space(self):
return self._observation_space
|
python
|
#
# Copyright 2022 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
import environ
ROOT_DIR = environ.Path(__file__) - 3
ENVIRONMENT = environ.Env()
if ENVIRONMENT.bool("DJANGO_READ_DOT_ENV_FILE", default=False):
# Operating System Environment variables have precedence over variables
# defined in the .env file, that is to say variables from the .env files
# will only be used if not defined as environment variables.
ENV_FILE = str(ROOT_DIR.path(".env"))
print(f"Loading : {ENV_FILE}")
ENVIRONMENT.read_env(ENV_FILE)
print("The .env file has been loaded.")
|
python
|
# coding: utf-8
# # Performance of various Machine Learning Algorithms on Electrical Impedance Tomography Images
#
# ## Copyright (c) 2018, Faststream Technologies
#
# ## Author: Sudhanva Narayana
# In[1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
CURR_DIR = os.path.dirname(os.path.abspath('__file__'))
PARENT_DIR = os.path.abspath(os.path.join(CURR_DIR, os.pardir))
df = pd.read_csv(os.path.join(PARENT_DIR, 'assets', 'datasets', 'eit_data.csv'), index_col=[0], header=[0], skiprows=[1], skipinitialspace=True)
X = df.loc[:, ['gray', 'violet', 'blue', 'green', 'yellow', 'orange', 'red', 'brown']].values.astype(float)
y = df.loc[:, ['target']].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
y_train = y_train.ravel()
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
# ### Classifiers
# In[2]:
classifiers = {}
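# Each classifier below follows the same pattern: fit on the standardized
# training data, then store its test-set accuracy in `classifiers` keyed by name.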
# ### KNN
# In[3]:
classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['knn'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Decision Tree
# In[4]:
classifier = DecisionTreeClassifier(criterion='entropy', random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['decision_tree'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Kernel SVM
# In[5]:
classifier = SVC(kernel='rbf', random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['kernel_svm'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Logistic Regression
# In[6]:
classifier = LogisticRegression(random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['logistic_regression'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Naive Bayes
# In[7]:
classifier = GaussianNB()
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['naive_bayes'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Random Forest
# In[8]:
classifier = RandomForestClassifier(n_estimators=10, criterion='entropy', random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['random_forest'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# ### Support Vector Machines
# In[9]:
classifier = SVC(kernel='linear', random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
classifiers['svm'] = classifier.score(X_test, y_test)
print(classifier.score(X_test, y_test))
# In[10]:
print(classifiers)
# In[11]:
values = list(classifiers.values())
labels = list(classifiers.keys())
values = [round(i * 100, 2) for i in values]
# print(values)
# print(labels)
index = np.arange(len(labels))
# In[12]:
plt.figure(figsize=(15,10))
plt.bar(index, values)
plt.xlabel('Machine Learning Algorithms', fontsize=20)
plt.ylabel('Performance (%)', fontsize=20)
plt.xticks(index, labels, rotation=30, fontsize=15)
plt.yticks(fontsize=20)
plt.title('Performance of Machine Learning algorithms on EIT Images', fontsize=20)
plt.show()
# In[13]:
plt.figure(figsize=(15,10))
plt.plot(index, values)
plt.xlabel('Machine Learning Algorithms', fontsize=20)
plt.ylabel('Performance (%)', fontsize=20)
plt.xticks(index, labels, rotation=30, fontsize=15)
plt.yticks(fontsize=20)
plt.title('Performance of Machine Learning algorithms on EIT Images', fontsize=20)
plt.show()
|
python
|
import os
import numpy as np
import torch
import nibabel as nib
from glob import glob
import scipy.io
import random
from PIL import Image
import elastic_transform as elt
from torch.utils.data import Dataset
import torchvision.transforms.functional as F
def load_nifty(full_file_name):
img = nib.load(full_file_name)
#dtype = img.get_data_dtype() # F8 is 64-bit floating-point Number
data = img.get_fdata()
return data
def tensor_2_numpy_image(tensor):
img_out = np.moveaxis(tensor.numpy()[:,:,:].squeeze(), 0, -1)
return img_out
def to_img(batch_of_images):
    img = batch_of_images[0]
    img = tensor_2_numpy_image(img)
    img -= np.min(img)
    img *= 255.0 / img.max()
    img = img.astype(np.uint8)
    return img
def cvt1to3channels(one_channel):
return np.stack((one_channel,)*3, axis=-1)
def load_dataset(src_path, mask_path, validation_portion=0.05):
# 1- set the paths
src_format = 'mat'
mask_format = 'nii'
src_file_format = '*.{}'.format(src_format)
mask_file_format = '*.{}'.format(mask_format)
all_src_img = glob(os.path.join(src_path,src_file_format))
all_mask_img = glob(os.path.join(mask_path,mask_file_format))
all_src_img.sort()
all_mask_img.sort()
# 2- Find the matching pairs
src_msk_file_pair_list = []
for i,src_f in enumerate(all_src_img):
base_src_name = os.path.basename(src_f)
base_src_name = base_src_name.split('.')[0]
src_id1, src_id2 = base_src_name.split('_')[1:3]
for j,msk_f in enumerate(all_mask_img):
base_msk_name = os.path.basename(msk_f)
base_msk_name = base_msk_name.split('.')[0]
msk_id1, msk_id2 = base_msk_name.split('_')[1:3]
if src_id1 == msk_id1 and src_id2 == msk_id2:
src_msk_file_pair_list.append([src_f,msk_f])
# 3- load every single frame and stores it into a list
src = []
msk = []
for i in range(len(src_msk_file_pair_list)):
src_f, msk_f = src_msk_file_pair_list[i]
mat = scipy.io.loadmat(src_f)
if 'N' in mat:
src_mat = mat["N"]
msk_mat = load_nifty(msk_f)
for j in range(min(src_mat.shape[2], msk_mat.shape[2])):
src.append(np.uint8(src_mat[:,:,j]))
msk.append(np.uint8(msk_mat[:,:,j]))
src = np.array(src)
msk = np.array(msk)
validation_size = int(len(src) * validation_portion)
train_size = len(src)-validation_size
src_train, src_val = np.split(src, [train_size])
msk_train, msk_val = np.split(msk, [train_size])
return src_train, msk_train, src_val, msk_val
# 5- Define Dataset model
class MouseMRIDS(Dataset):
def __init__(self,
src,
msk,
transform = None,
augmentation=True):
self.src = src
self.msk = msk
        # Drop slices whose mask is not strictly binary (min 0 and max 1).
        indices_with_problem = np.where(~np.logical_and(
            np.min(self.msk, axis=(1, 2)) == 0,
            np.max(self.msk, axis=(1, 2)) == 1))[0]
        if len(indices_with_problem) > 0:
            self.src = np.delete(self.src, indices_with_problem, axis=0)
            self.msk = np.delete(self.msk, indices_with_problem, axis=0)
self.transform = transform
self.augmentation = augmentation
def __len__(self):
return len(self.src)
def __getitem__(self,idx):
if random.random() > 0.5 or not self.augmentation:
src_img = self.src[idx]
msk_img = self.msk[idx]
else:
src_img, msk_img = elt.get_elastic_transforms(self.src[idx],
self.msk[idx])
src_im = Image.fromarray(np.uint8(cvt1to3channels(src_img)))
msk_im = Image.fromarray(np.uint8(cvt1to3channels(msk_img)))
        # Apply the same transformation to the two images
if self.transform:
if random.random() > 0.5 and self.augmentation:
src_im = F.vflip(src_im)
msk_im = F.vflip(msk_im)
if random.random() > 0.5 and self.augmentation:
src_im = F.hflip(src_im)
msk_im = F.hflip(msk_im)
if random.random() > 0.5 and self.augmentation:
angle=np.random.choice([90,180,270])
src_im = F.rotate(src_im,angle)
msk_im = F.rotate(msk_im,angle)
src_im = self.transform(src_im)
msk_im = self.transform(msk_im)
msk_im = (msk_im - torch.min(msk_im)) / torch.max(msk_im)
return src_im,\
msk_im[1,:,:].expand(1,-1,-1).type(torch.float)
|
python
|
import functools
from typing import Type, Generic, TypeVar, Dict, Any, Optional
from drf_yasg.utils import swagger_auto_schema
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework_dataclasses.serializers import DataclassSerializer
from thairod.utils.decorators import swagger_example
T = TypeVar('T')
class TGSerializer(DataclassSerializer[T], Generic[T]):
    @classmethod
    def parse_request(cls, request: Request) -> T:
        raise NotImplementedError()
class AutoSerialize:
def to_response(self) -> Response:
return Response(self.to_data())
def to_data(self) -> Dict[str, Any]:
return self.__class__.serializer()(self).data
@classmethod
def from_data(cls: Type[T], data: Dict[str, Any]) -> T:
ser = cls.serializer()(data=data)
ser.is_valid(raise_exception=True)
return ser.save()
@classmethod
def from_get_request(cls: Type[T], request: Request) -> T:
return cls.from_data(request.query_params.dict())
@classmethod
def from_post_request(cls: Type[T], request: Request) -> T:
return cls.from_data(request.data)
@classmethod
@functools.lru_cache
def serializer(cls: Type[T]) -> Type[TGSerializer[T]]:
class Serializer(TGSerializer[cls]):
class Meta:
dataclass = cls
ref_name = cls.__name__
@classmethod
def parse_request(cls, request: Request) -> T:
ser = cls(data=request.data)
ser.is_valid(raise_exception=True)
return ser.save()
if hasattr(cls, 'example') and callable(cls.example):
return swagger_example(cls.example())(Serializer)
else:
return Serializer
def swagger_auto_serialize_schema(body_type: Optional[Type[AutoSerialize]], response_type: Type[AutoSerialize], **kwds):
return swagger_auto_schema(
request_body=body_type.serializer() if body_type is not None else None,
responses={200: response_type.serializer()},
**kwds
)
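# Hypothetical usage sketch: a dataclass that inherits AutoSerialize gains a
# cached DataclassSerializer plus request/response helpers, e.g.
#
#     @dataclass
#     class Ping(AutoSerialize):
#         message: str
#
#     Ping(message='hi').to_response()   # -> Response({'message': 'hi'})
#     Ping.from_data({'message': 'hi'})  # -> Ping(message='hi')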
|
python
|
"""A config store holds the configuration data for running system-of-systems models with smif:
- model runs
- system-of-systems models
- model definitions
- strategies
- scenarios and scenario variants
- narratives
"""
from abc import ABCMeta, abstractmethod
class ConfigStore(metaclass=ABCMeta):
"""A ConfigStore must implement each of the abstract methods defined in this interface
"""
# region Model runs
@abstractmethod
def read_model_runs(self):
"""Read all system-of-system model runs
Returns
-------
list[~smif.controller.modelrun.ModelRun]
"""
@abstractmethod
def read_model_run(self, model_run_name):
"""Read a system-of-system model run
Parameters
----------
model_run_name : str
Returns
-------
~smif.controller.modelrun.ModelRun
"""
@abstractmethod
def write_model_run(self, model_run):
"""Write system-of-system model run
Parameters
----------
model_run : ~smif.controller.modelrun.ModelRun
"""
@abstractmethod
def update_model_run(self, model_run_name, model_run):
"""Update system-of-system model run
Parameters
----------
model_run_name : str
model_run : ~smif.controller.modelrun.ModelRun
"""
@abstractmethod
def delete_model_run(self, model_run_name):
"""Delete a system-of-system model run
Parameters
----------
model_run_name : str
"""
# endregion
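    # Concrete stores (file- or database-backed, for example) implement each
    # region's read/write/update/delete set; the same pattern repeats for every
    # configuration type below.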
# region System-of-systems models
@abstractmethod
def read_sos_models(self):
"""Read all system-of-system models
Returns
-------
list[~smif.model.sos_model.SosModel]
"""
@abstractmethod
def read_sos_model(self, sos_model_name):
"""Read a specific system-of-system model
Parameters
----------
sos_model_name : str
Returns
-------
~smif.model.sos_model.SosModel
"""
@abstractmethod
def write_sos_model(self, sos_model):
"""Write system-of-system model
Parameters
----------
sos_model : ~smif.model.sos_model.SosModel
"""
@abstractmethod
def update_sos_model(self, sos_model_name, sos_model):
"""Update system-of-system model
Parameters
----------
sos_model_name : str
sos_model : ~smif.model.sos_model.SosModel
"""
@abstractmethod
def delete_sos_model(self, sos_model_name):
"""Delete a system-of-system model
Parameters
----------
sos_model_name : str
"""
# endregion
# region Models
@abstractmethod
def read_models(self):
"""Read all models
Returns
-------
list[~smif.model.Model]
"""
@abstractmethod
def read_model(self, model_name):
"""Read a model
Parameters
----------
model_name : str
Returns
-------
~smif.model.Model
"""
@abstractmethod
def write_model(self, model):
"""Write a model
Parameters
----------
model : ~smif.model.Model
"""
@abstractmethod
def update_model(self, model_name, model):
"""Update a model
Parameters
----------
model_name : str
model : ~smif.model.Model
"""
@abstractmethod
def delete_model(self, model_name):
"""Delete a model
Parameters
----------
model_name : str
"""
# endregion
# region Scenarios
@abstractmethod
def read_scenarios(self):
"""Read scenarios
Returns
-------
list[~smif.model.ScenarioModel]
"""
@abstractmethod
def read_scenario(self, scenario_name):
"""Read a scenario
Parameters
----------
scenario_name : str
Returns
-------
~smif.model.ScenarioModel
"""
@abstractmethod
def write_scenario(self, scenario):
"""Write scenario
Parameters
----------
scenario : ~smif.model.ScenarioModel
"""
@abstractmethod
def update_scenario(self, scenario_name, scenario):
"""Update scenario
Parameters
----------
scenario_name : str
scenario : ~smif.model.ScenarioModel
"""
@abstractmethod
def delete_scenario(self, scenario_name):
"""Delete scenario from project configuration
Parameters
----------
scenario_name : str
"""
# endregion
# region Scenario Variants
@abstractmethod
def read_scenario_variants(self, scenario_name):
"""Read variants of a given scenario
Parameters
----------
scenario_name : str
Returns
-------
list[dict]
"""
@abstractmethod
def read_scenario_variant(self, scenario_name, variant_name):
"""Read a scenario variant
Parameters
----------
scenario_name : str
variant_name : str
Returns
-------
dict
"""
@abstractmethod
def write_scenario_variant(self, scenario_name, variant):
"""Write scenario to project configuration
Parameters
----------
scenario_name : str
variant : dict
"""
@abstractmethod
def update_scenario_variant(self, scenario_name, variant_name, variant):
"""Update scenario to project configuration
Parameters
----------
scenario_name : str
variant_name : str
variant : dict
"""
@abstractmethod
def delete_scenario_variant(self, scenario_name, variant_name):
"""Delete scenario from project configuration
Parameters
----------
scenario_name : str
variant_name : str
"""
# endregion
# region Narratives
@abstractmethod
def read_narrative(self, sos_model_name, narrative_name):
"""Read narrative from sos_model
Parameters
----------
sos_model_name : str
narrative_name : str
"""
# endregion
# region Strategies
@abstractmethod
def read_strategies(self, modelrun_name):
"""Read strategies for a given model run
Parameters
----------
        modelrun_name : str
Returns
-------
list[dict]
"""
@abstractmethod
def write_strategies(self, modelrun_name, strategies):
"""Write strategies for a given model_run
Parameters
----------
        modelrun_name : str
strategies : list[dict]
"""
# endregion
|
python
|
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
import os
path_prefix = os.path.join('cvat', 'apps', 'annotation')
BUILTIN_FORMATS = (
os.path.join(path_prefix, 'cvat.py'),
os.path.join(path_prefix, 'pascal_voc.py'),
os.path.join(path_prefix, 'yolo.py'),
os.path.join(path_prefix, 'coco.py'),
os.path.join(path_prefix, 'mask.py'),
os.path.join(path_prefix, 'tfrecord.py'),
)
|
python
|
dataFile = open("Day10_Data.txt")
asteroidCoordinates =[]
for corY,line in enumerate(dataFile):
for corX,c in enumerate(line):
if(c=="#"):
asteroidCoordinates.append([corX,corY])
curMax=0
astBaseX = astBaseY =0
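# For each candidate base asteroid, count the asteroids it can see: asteroids on
# the same side of the base (the left/right split below) that share the same
# slope block each other, so each distinct slope per side is counted once; the
# four axis-aligned directions are tracked separately via the found* flags.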
for astBase in range( len(asteroidCoordinates)):
seenDivisionsR = []
seenDivisionsL =[]
seenAsteroids =[]
foundRight = foundLeft = foundUp = foundDown = False
foundCount =0
    for observeAst in range(len(asteroidCoordinates)):
        if astBase == observeAst:
            continue
        asteroidX = asteroidCoordinates[observeAst][0]
        asteroidY = asteroidCoordinates[observeAst][1]
        xDifference = asteroidCoordinates[astBase][0] - asteroidX
        yDifference = asteroidCoordinates[astBase][1] - asteroidY
        if yDifference == 0:
            if xDifference > 0:
                if not foundLeft:
                    foundLeft = True
                    seenAsteroids.append([asteroidX, asteroidY])
            else:
                if not foundRight:
                    foundRight = True
                    seenAsteroids.append([asteroidX, asteroidY])
        elif xDifference == 0:
            if yDifference > 0:
                if not foundUp:
                    foundUp = True
                    seenAsteroids.append([asteroidX, asteroidY])
            else:
                if not foundDown:
                    foundDown = True
                    seenAsteroids.append([asteroidX, asteroidY])
        else:
            # A repeated slope on the same side means the nearer asteroid hides the farther one.
            div = xDifference / yDifference
            seenDivisions = seenDivisionsR if xDifference >= 0 else seenDivisionsL
            if div not in seenDivisions:
                seenDivisions.append(div)
                seenAsteroids.append([asteroidX, asteroidY])
    foundCount = len(seenAsteroids)
print(foundCount,seenAsteroids)
if(foundCount>curMax):
astBaseX=asteroidCoordinates[astBase][0]
astBaseY = asteroidCoordinates[astBase][1]
curMax = foundCount
print(curMax,astBaseX,astBaseY)
|
python
|
# -*- encoding: utf-8 -*-
#
#
# Copyright (C) 2006-2011 André Wobst <[email protected]>
#
# This file is part of PyX (http://pyx.sourceforge.net/).
#
# PyX is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PyX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyX; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
# Just a quick'n'dirty ascii art (I'll do a nice PyX plot later on):
#
#
# node1 *
# | \
# | \ neighbor2
# | \
# | \
# neighbor3 |element * node3
# | /
# | /
# | / neighbor1
# | /
# node2 *
import struct, binascii, zlib, os, tempfile
import bbox, canvasitem, color, pdfwriter, unit
class node_pt:
def __init__(self, coords_pt, value):
self.coords_pt = coords_pt
self.value = value
class node(node_pt):
def __init__(self, coords, value):
node_pt.__init__(self, [unit.topt(coord) for coord in coords], value)
class element:
def __init__(self, nodes, neighbors=None):
self.nodes = nodes
self.neighbors = neighbors
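# Scale a point coordinate into the 24-bit integer range spanned by
# [min_pt, max_pt] and keep the low three bytes of the big-endian packing.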
def coords24bit_pt(coords_pt, min_pt, max_pt):
return struct.pack(">I", int((coords_pt-min_pt)*16777215.0/(max_pt-min_pt)))[1:]
class PDFGenericResource(pdfwriter.PDFobject):
def __init__(self, type, name, content):
pdfwriter.PDFobject.__init__(self, type, name)
self.content = content
def write(self, file, writer, registry):
file.write(self.content)
class mesh(canvasitem.canvasitem):
def __init__(self, elements, check=1):
self.elements = elements
if check:
colorspacestring = ""
for element in elements:
if len(element.nodes) != 3:
raise ValueError("triangular mesh expected")
try:
for node in element.nodes:
if not colorspacestring:
colorspacestring = node.value.colorspacestring()
elif node.value.colorspacestring() != colorspacestring:
raise ValueError("color space mismatch")
except AttributeError:
raise ValueError("gray, rgb or cmyk color values expected")
for node in element.nodes:
if len(node.coords_pt) != 2:
raise ValueError("two dimensional coordinates expected")
def bbox(self):
return bbox.bbox_pt(min([node.coords_pt[0] for element in self.elements for node in element.nodes]),
min([node.coords_pt[1] for element in self.elements for node in element.nodes]),
max([node.coords_pt[0] for element in self.elements for node in element.nodes]),
max([node.coords_pt[1] for element in self.elements for node in element.nodes]))
def data(self, bbox):
return "".join(["\000%s%s%s" % (coords24bit_pt(node.coords_pt[0], bbox.llx_pt, bbox.urx_pt),
coords24bit_pt(node.coords_pt[1], bbox.lly_pt, bbox.ury_pt),
node.value.to8bitstring())
for element in self.elements for node in element.nodes])
def processPS(self, file, writer, context, registry, bbox):
if writer.mesh_as_bitmap:
from pyx import bitmap, canvas
import Image
c = canvas.canvas()
c.insert(self)
i = Image.open(c.pipeGS("pngalpha", resolution=writer.mesh_as_bitmap_resolution, seekable=True))
i.load()
b = bitmap.bitmap_pt(self.bbox().llx_pt, self.bbox().lly_pt, i)
# we slightly shift the bitmap to re-center it, as the bitmap might contain some additional border
# unfortunately we need to construct another bitmap instance for that ...
b = bitmap.bitmap_pt(self.bbox().llx_pt + 0.5*(self.bbox().width_pt()-b.bbox().width_pt()),
self.bbox().lly_pt + 0.5*(self.bbox().height_pt()-b.bbox().height_pt()), i)
b.processPS(file, writer, context, registry, bbox)
else:
thisbbox = self.bbox()
bbox += thisbbox
file.write("""<< /ShadingType 4
/ColorSpace %s
/BitsPerCoordinate 24
/BitsPerComponent 8
/BitsPerFlag 8
/Decode [%f %f %f %f %s]
/DataSource currentfile /ASCIIHexDecode filter /FlateDecode filter
>> shfill\n""" % (self.elements[0].nodes[0].value.colorspacestring(),
thisbbox.llx_pt, thisbbox.urx_pt, thisbbox.lly_pt, thisbbox.ury_pt,
" ".join(["0 1" for value in self.elements[0].nodes[0].value.to8bitstring()])))
file.write(binascii.b2a_hex(zlib.compress(self.data(thisbbox))))
file.write(">\n")
def processPDF(self, file, writer, context, registry, bbox):
if writer.mesh_as_bitmap:
from pyx import bitmap, canvas
import Image
c = canvas.canvas()
c.insert(self)
i = Image.open(c.pipeGS("pngalpha", resolution=writer.mesh_as_bitmap_resolution, seekable=True))
i.load()
b = bitmap.bitmap_pt(self.bbox().llx_pt, self.bbox().lly_pt, i)
# we slightly shift the bitmap to re-center it, as the bitmap might contain some additional border
# unfortunately we need to construct another bitmap instance for that ...
b = bitmap.bitmap_pt(self.bbox().llx_pt + 0.5*(self.bbox().width_pt()-b.bbox().width_pt()),
self.bbox().lly_pt + 0.5*(self.bbox().height_pt()-b.bbox().height_pt()), i)
b.processPDF(file, writer, context, registry, bbox)
else:
thisbbox = self.bbox()
bbox += thisbbox
d = self.data(thisbbox)
if writer.compress:
filter = "/Filter /FlateDecode\n"
d = zlib.compress(d)
else:
filter = ""
name = "shading-%s" % id(self)
shading = PDFGenericResource("shading", name, """<<
/ShadingType 4
/ColorSpace %s
/BitsPerCoordinate 24
/BitsPerComponent 8
/BitsPerFlag 8
/Decode [%f %f %f %f %s]
/Length %i
%s>>
stream
%s
endstream\n""" % (self.elements[0].nodes[0].value.colorspacestring(),
thisbbox.llx_pt, thisbbox.urx_pt, thisbbox.lly_pt, thisbbox.ury_pt,
" ".join(["0 1" for value in self.elements[0].nodes[0].value.to8bitstring()]),
len(d), filter, d))
registry.add(shading)
registry.addresource("Shading", name, shading)
file.write("/%s sh\n" % name)
|
python
|
'''OpenGL extension ARB.transform_feedback3
This module customises the behaviour of the
OpenGL.raw.GL.ARB.transform_feedback3 to provide a more
Python-friendly API
Overview (from the spec)
This extension further extends the transform feedback capabilities
provided by the EXT_transform_feedback, NV_transform_feedback, and
NV_transform_feedback2 extensions. Those extensions provided a new
transform feedback mode, where selected vertex attributes can be recorded
to a buffer object for each primitive processed by the GL.
This extension provides increased flexibility in how vertex attributes can
be written to buffer objects. Previous extensions allowed applications to
record a set of attributes interleaved into a single buffer object
(interleaved mode) or to record into multiple objects, but with only a
single attribute per buffer (separate mode). This extension extends
interleaved mode to write into multiple buffers, with multiple attributes
per buffer. This capability is supported for all three styles of
transform feedback:
- "EXT"-style GLSL transform feedback (EXT_transform_feedback), where a
list of varyings is provided prior to linking a program object and is
used whenever that program object is used.
- "NV"-style GLSL transform feedback (NV_transform_feedback), where
"locations" of active varyings are queried after linking and are then
passed to a function that sets the active transform feedback varyings
for the program object. Unlike the "EXT"-style mode, the set of
varyings to capture can be changed without relinking.
- Transform feedback for fixed-function or assembly vertex/geometry
shaders (NV_transform_feedback), where applications specify a set of
canonical attribute enums/numbers to capture.
Additionally, this extension adds new support for multiple separate
vertex streams. New geometry shader functionality provided by the
ARB_gpu_shader5 and NV_gpu_program5 extensions allows geometry shaders
to direct each vertex arbitrarily at a specified vertex stream. For
example, a geometry program might write each "regular" vertex it emits
to one vertex stream while writing some per-primitive data it computes
to a second vertex stream. This extension allows applications to
choose a vertex stream for each buffer object it writes to, and allows
the vertices written to each vertex stream to be recorded in separate
buffer objects. Only one stream may be selected for rasterization,
and in the initial implementation, the geometry shader output topology
must be POINTS if multiple streams are used. When geometry shaders
are not used, or when an old geometry shader not writing multiple
streams is used, all vertices produced by the GL are directed at the
stream numbered zero. The set of transform feedback-related query
targets is extended to accommodate multiple vertex streams, so it is
possible to count the number of processed and recorded primitives for
each stream separately.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/transform_feedback3.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.transform_feedback3 import *
from OpenGL.raw.GL.ARB.transform_feedback3 import _EXTENSION_NAME
def glInitTransformFeedback3ARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
glGetQueryIndexediv=wrapper.wrapper(glGetQueryIndexediv).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
### END AUTOGENERATED SECTION
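# --- Hedged usage sketch (beyond the autogenerated section) ------------------
# Count primitives written to one transform feedback stream, one of the
# per-stream query targets the overview describes. Assumes a current GL
# context and that glInitTransformFeedback3ARB() returned True;
# glBeginQueryIndexed/glEndQueryIndexed come from the raw binding imported
# above, the remaining names from OpenGL.GL.
def primitives_written(stream, query_id, draw_callback):
    from OpenGL.GL import (GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN,
                           GL_QUERY_RESULT, glGetQueryObjectuiv)
    glBeginQueryIndexed(GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN, stream, query_id)
    draw_callback()  # issue the captured draw call(s) while the query is active
    glEndQueryIndexed(GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN, stream)
    return glGetQueryObjectuiv(query_id, GL_QUERY_RESULT)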
|
python
|
"""
Meshing: Make and plot a 3D prism mesh
"""
from fatiando import mesher
from fatiando.vis import myv
mesh = mesher.PrismMesh(bounds=(-2, 2, -3, 3, 0, 1), shape=(4, 4, 4))  # split the volume into 4 cells along each axis
myv.figure()
plot = myv.prisms(mesh)
axes = myv.axes(plot)
myv.show()
|
python
|
import click
@click.command()
def main():
print("This is the CLI!")
if __name__ == '__main__':
main()
|
python
|
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('h_dim', default=32,
help='Hidden dim in various models.')
flags.DEFINE_integer('rnn_dim', default=256,
help='RNN hidden dim.')
flags.DEFINE_integer('rnn_n_layers', default=2,
help='Number of layers for RNNs.')
flags.DEFINE_float('rnn_drop', default=0.1,
help='Dropout rate in RNNs.')
flags.DEFINE_integer('n_latent', default=24,
help='Latent dimension for vaes.')
flags.DEFINE_integer('n_batch', default=128,
help='Minibatch size to train.')
flags.DEFINE_integer('visualize_every', default=10,
help='Frequency of visualization.')
flags.DEFINE_integer('n_iter', default=200000,
help='Number of iteration to train. Might not be used if '
'n_epoch is used.')
flags.DEFINE_integer('n_epoch', default=50,
help='Number of epochs to train. Might not be used if '
'n_iter is used.')
flags.DEFINE_integer('n_workers', default=4,
help='Sets num workers for data loaders.')
flags.DEFINE_integer('seed', default=0,
help='Sets global seed.')
flags.DEFINE_string("vis_root", default='vis',
help='root folder for visualization and logs.')
flags.DEFINE_float('decay', default=0.99,
                   help='Decay rate (e.g. for learning-rate scheduling).')
flags.DEFINE_float('lr', default=1e-3,
help='Set learning rate for optimizers.')
flags.DEFINE_bool("debug", default=False,
help='Enables debug mode.')
flags.DEFINE_bool('highdrop', default=False,
help='Enables high dropout to encourage copy.')
flags.DEFINE_bool('highdroptest', default=False,
help='Applies high dropout in test as well.')
flags.DEFINE_float("highdropvalue", default=0.,
help='High dropout value to encourage copying.')
flags.DEFINE_bool('copy', default=False,
help='Enable copy in seq2seq models')
flags.DEFINE_string('model_path', default='',
help="Model path to load a pretrained model")
flags.DEFINE_bool('extract_codes', default=False,
help='Extract VQVAE codes for training and test set given a '
'pretrained vae')
flags.DEFINE_bool('filter_model', default=False,
help='To run filter model experiments.')
flags.DEFINE_bool('test', default=False,
help='Only runs evaluations.')
flags.DEFINE_string('tensorboard', default=None,
help='Use tensorboard for logging losses.')
flags.DEFINE_bool('kl_anneal', default=False,
help='Enables kl annealing.')
flags.DEFINE_integer('decoder_reset', default=-1,
help='Enables decoder reset for vae to prevent posterior collapse.')
flags.DEFINE_string("resume", default='',
help='Path to the main model to resume training')
flags.DEFINE_float("gclip", default=-1,
help='gradient clip')
flags.DEFINE_integer("gaccum", default=1,
help='gradient accumulation')
flags.DEFINE_integer("warmup_steps", default=-1,
help="noam warmup_steps")
|
python
|
"""
Compared with model_baseline, do not use correlation output for skip link
Compared to model_baseline_fixed, added return values to test whether nsample is set reasonably.
"""
import tensorflow as tf
import numpy as np
import math
import sys
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(BASE_DIR, '../../utils'))
sys.path.append(os.path.join(BASE_DIR, '../..'))
sys.path.append(os.path.join(BASE_DIR, '../../tf_ops/sampling'))
import tf_util
from net_utils import *
def placeholder_inputs(batch_size, num_point, num_frames):
pointclouds_pl = tf.placeholder(tf.float32, shape=(batch_size, num_point * num_frames, 3 + 3))
labels_pl = tf.placeholder(tf.int32, shape=(batch_size, num_point * num_frames))
labelweights_pl = tf.placeholder(tf.float32, shape=(batch_size, num_point * num_frames))
masks_pl = tf.placeholder(tf.float32, shape=(batch_size, num_point * num_frames))
return pointclouds_pl, labels_pl, labelweights_pl, masks_pl
def get_model(point_cloud, num_frames, is_training, bn_decay=None):
""" Semantic segmentation PointNet, input is BxNx3, output Bxnum_class """
end_points = {}
batch_size = point_cloud.get_shape()[0].value
num_point = point_cloud.get_shape()[1].value // num_frames
l0_xyz = point_cloud[:, :, 0:3]
l0_time = tf.concat([tf.ones([batch_size, num_point, 1]) * i for i in range(num_frames)], \
axis=-2)
l0_points = tf.concat([point_cloud[:, :, 3:], l0_time], axis=-1)
RADIUS1 = np.array([0.98, 0.99, 1.0], dtype='float32')
RADIUS2 = RADIUS1 * 2
RADIUS3 = RADIUS1 * 4
RADIUS4 = RADIUS1 * 8
l1_xyz, l1_time, l1_points, l1_indices = meteor_direct_module(l0_xyz, l0_time, l0_points, npoint=2048, radius=RADIUS1, nsample=32, mlp=[32,32,128], mlp2=None, group_all=False, knn=False, is_training=is_training, bn_decay=bn_decay, scope='layer1')
l2_xyz, l2_time, l2_points, l2_indices = meteor_direct_module(l1_xyz, l1_time, l1_points, npoint=512, radius=RADIUS2, nsample=32, mlp=[64,64,256], mlp2=None, group_all=False, knn=False, is_training=is_training, bn_decay=bn_decay, scope='layer2')
l3_xyz, l3_time, l3_points, l3_indices = meteor_direct_module(l2_xyz, l2_time, l2_points, npoint=128, radius=RADIUS3, nsample=32, mlp=[128,128,512], mlp2=None, group_all=False, knn=False, is_training=is_training, bn_decay=bn_decay, scope='layer3')
l4_xyz, l4_time, l4_points, l4_indices = meteor_direct_module(l3_xyz, l3_time, l3_points, npoint=64, radius=RADIUS4, nsample=32, mlp=[256,256,1024], mlp2=None, group_all=False, knn=False, is_training=is_training, bn_decay=bn_decay, scope='layer4')
# Feature Propagation layers
l3_points = pointnet_fp_module(l3_xyz, l4_xyz, l3_points, l4_points, [256,256], is_training, bn_decay, scope='fa_layer1')
l2_points = pointnet_fp_module(l2_xyz, l3_xyz, l2_points, l3_points, [256,256], is_training, bn_decay, scope='fa_layer2')
l1_points = pointnet_fp_module(l1_xyz, l2_xyz, l1_points, l2_points, [256,128], is_training, bn_decay, scope='fa_layer3')
l0_points = pointnet_fp_module(l0_xyz, l1_xyz, l0_points, l1_points, [128,128], is_training, bn_decay, scope='fa_layer4')
    # Final per-point classifier head (12 classes).
net = tf_util.conv1d(l0_points, 12, 1, padding='VALID', activation_fn=None, scope='fc2')
return net, end_points
def get_loss(pred, label, mask, end_points, label_weights):
""" pred: BxNx3,
label: BxN,
mask: BxN
"""
classify_loss = tf.losses.sparse_softmax_cross_entropy( labels=label, \
logits=pred, \
weights=label_weights, \
reduction=tf.losses.Reduction.NONE)
classify_loss = tf.reduce_sum(classify_loss * mask) / (tf.reduce_sum(mask) + 1)
tf.summary.scalar('classify loss', classify_loss)
tf.add_to_collection('losses', classify_loss)
return classify_loss
if __name__=='__main__':
with tf.Graph().as_default():
        inputs = tf.zeros((32, 1024 * 2, 6))  # two frames of 1024 points, xyz + features
        outputs = get_model(inputs, 2, tf.constant(True))  # num_frames argument was missing
print(outputs)
|
python
|
"""
Authorization API: JWT-based login, registration, and current-user lookup.
"""
from re import fullmatch
from typing import Optional
from datetime import timedelta, datetime
from jose import JWTError, jwt
from passlib.hash import bcrypt
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from fastapi import APIRouter, Depends, HTTPException, status
from app.utils.user_db import UserDB
from app.utils.message import Message, log
router = APIRouter(prefix="/api/authorization", tags=["Authorizations"])
user_db = UserDB()
SECRET_KEY = "7505d3e581d01c02fd31667cdc67cdb64173a9d4f715e73bf0a8e196fa02a15c"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="api/authorization/login")  # match the router prefix above
def verify_password(plain_password, hashed_password):
"""
    Check a plain-text password against its bcrypt hash.
"""
return bcrypt.verify(plain_password, hashed_password)
def hash_password(plain_password):
"""
    Hash a plain-text password with bcrypt.
"""
return bcrypt.hash(plain_password)
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
"""
    Create a signed JWT from `data`, expiring after `expires_delta`
    (15 minutes by default).
"""
to_encode = data.copy()
if expires_delta:
expire = datetime.utcnow() + expires_delta
else:
expire = datetime.utcnow() + timedelta(minutes=15)
to_encode.update({"exp": expire})
encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
return encoded_jwt
def authenticate_user(username: str, password: str):
"""
Look for user in user_db.json (TinyDB).
Parameters:
username (str): username from form
password (str): password from form
Returns:
bool: False if a user doesn't exist
dict: Dict with user info if it does exist
"""
user = user_db.get_user_data(username)
if isinstance(user, Message):
log(user)
return False
if not verify_password(password, user["password_hash"]):
return False
return user
async def get_current_user(token: str = Depends(oauth2_scheme)):
"""
    Resolve the current user from a bearer token; raise 401 if it cannot be validated.
"""
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
try:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("username")
if username is None:
raise credentials_exception
except JWTError as jwt_error:
raise credentials_exception from jwt_error
user = user_db.get_user_data(username)
if isinstance(user, Message):
log(user)
raise credentials_exception
return user
@router.post("/login")
async def generate_token(form: OAuth2PasswordRequestForm = Depends()):
"""
    Validate credentials and return a bearer access token.
"""
user = authenticate_user(form.username, form.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = create_access_token(
data={"username": user["username"]}, expires_delta=access_token_expires
)
return {"access_token": access_token, "token_type": "bearer"}
@router.post("/register")
async def create_user(form: OAuth2PasswordRequestForm = Depends()):
"""
    Validate the registration form and create a new user.
"""
username = form.username
password = form.password
    email = form.scopes[0]  # the e-mail address is passed in via the first OAuth2 scope field
if not fullmatch("[A-Za-z0-9-_]+", username):
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail="Username must contain only upper and lower case characters, "
+ "numbers, and symbols - or _ ",
headers={"WWW-Authenticate": "Bearer"},
)
if len(password) < 8:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail="Password must be at least 8 characters long.",
headers={"WWW-Authenticate": "Bearer"},
)
if not fullmatch("^[a-zA-Z0-9]+@[a-zA-Z0-9]+.[A-Za-z]+$", email):
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail="Incorrect e-mail address.",
headers={"WWW-Authenticate": "Bearer"},
)
if user_db.does_user_exist(username):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Given user already exists",
headers={"WWW-Authenticate": "Bearer"},
)
return user_db.add_user(form.username, hash_password(form.password), email)
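# --- Hedged usage sketch (not part of the router) ----------------------------
# Exercising the endpoints with FastAPI's TestClient; assumes an application
# that includes this router. Field values here are illustrative only.
def _demo_requests():
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    app = FastAPI()
    app.include_router(router)
    client = TestClient(app)
    client.post("/api/authorization/register",
                data={"username": "alice", "password": "s3cretpass",
                      "scope": "alice@example.com"})  # e-mail rides in the scope field
    token = client.post("/api/authorization/login",
                        data={"username": "alice", "password": "s3cretpass"}).json()
    return token["access_token"]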
|
python
|
from webapp import db, login_manager
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin  # provides is_authenticated, is_active, etc.
@login_manager.user_loader  # reload the user object from the user id stored in the session
def load_user(user_id):
return Users.query.get(user_id)
class Users(db.Model, UserMixin):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(500))
def __init__(self, username, password):
self.username = username
self.password_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return f"Username: {self.username}"
class Abonnenten(db.Model):
__tablename__ = 'abonnenten'
abonnenten_url = db.Column(db.String(100), primary_key=True)
datum = db.Column(db.DateTime, default=datetime.utcnow)
def __init__(self, abonnenten_url):
self.abonnenten_url = abonnenten_url
def __repr__(self):
return f"Abonnent: {self.abonnenten_url}"
class Abonniert(db.Model):
__tablename__ = 'abonniert'
abonniet_url = db.Column(db.String(100), primary_key=True)
datum = db.Column(db.DateTime, default=datetime.utcnow)
def __init__(self, abonniet_url):
self.abonniet_url = abonniet_url
def __repr__(self):
return f"Abonniert: {self.abonniet_url}"
class Source(db.Model):
__tablename__ = 'source'
id = db.Column(db.Integer, primary_key=True)
source_url = db.Column(db.String(100), index=True)
targets_total = db.Column(db.Integer)
datum = db.Column(db.DateTime, default=datetime.utcnow)
targets_raw = db.relationship('Targets_raw', backref='targets_raw_quelle')
    targets_done = db.relationship('Targets_done', backref='targets_done_quelle')  # was 'Targets_raw', a copy-paste slip
def __init__(self, source_url):
self.source_url = source_url
def __repr__(self):
return f"Target-Source: {self.source_url} vom: {self.datum}"
class Targets_raw(db.Model):
__tablename__ = 'targets_raw'
id = db.Column(db.Integer, primary_key=True)
target_url = db.Column(db.String(100), index=True)
source_id = db.Column(db.Integer, db.ForeignKey('source.id'))
def __init__(self, target_url, source_id):
self.target_url = target_url
self.source_id = source_id
def __repr__(self):
return f"Target-Account: {self.target_url} und Source-ID: {self.source_id}"
class Targets_done(db.Model):
__tablename__ = 'targets_done'
id = db.Column(db.Integer, primary_key=True)
source_id = db.Column(db.Integer, db.ForeignKey('source.id'))
target_url = db.Column(db.String(100), index=True)
target_abonnenten = db.Column(db.Integer)
target_abonniert = db.Column(db.Integer)
match = db.Column(db.String(10))
datum_bearbeitet = db.Column(db.DateTime, default=datetime.utcnow)
pics_liked = db.Column(db.Integer)
followed = db.Column(db.DateTime)
unfollowed = db.Column(db.DateTime)
followed_back = db.Column(db.DateTime)
t5_indicator = db.Column(db.String(3))
t1_indicator = db.Column(db.String(3))
t5_timestamp = db.Column(db.DateTime)
t1_timestamp = db.Column(db.DateTime)
def __init__(self, target_url, target_abonnenten, target_abonniert, source_id):
self.target_url = target_url
self.target_abonnenten = target_abonnenten
self.target_abonniert = target_abonniert
self.source_id = source_id
def __repr__(self):
return f"Target-URL: {self.target_url} bearbeitet am {self.datum_bearbeitet}, Anzahl Abonnenten: {self.target_abonnenten}, Anzahl Abonniert: {self.target_abonniert}"
class Statistiken(db.Model):
__tablename__ = "statistik"
id = db.Column(db.Integer, primary_key=True)
source_id = db.Column(db.Integer)
targets_total = db.Column(db.Integer)
pics_liked = db.Column(db.Integer)
followed = db.Column(db.Integer)
unfollowed = db.Column(db.Integer)
followed_back = db.Column(db.Integer)
def __init__(self, source_id, targets_total):
self.source_id = source_id
self.targets_total = targets_total
class Counter(db.Model):
__tablename__ = "counter"
    datum = db.Column(db.DateTime, default=lambda: datetime.now().date(), primary_key=True)  # callable: evaluated per insert, not once at import
like_counter = db.Column(db.Integer)
follow_counter = db.Column(db.Integer)
class Blacklist(db.Model):
__tablename__ = "blacklist"
id = db.Column(db.Integer, primary_key=True)
url = db.Column(db.String(100))
    datum = db.Column(db.DateTime, default=lambda: datetime.now().date())  # callable: evaluated per insert
def __init__(self, url):
self.url = url
class Historical_follower(db.Model):
__tablename__ = "historical_follower"
id = db.Column(db.Integer, primary_key=True)
target_url = db.Column(db.String(100))
    datum = db.Column(db.DateTime, default=lambda: datetime.now().date())  # callable: evaluated per insert
def __init__(self, target_url):
self.target_url = target_url
class Tasks(db.Model):
__tablename__ = "tasks"
task_id = db.Column(db.String(72), primary_key=True)
task_type = db.Column(db.String(21))
timestamp = db.Column(db.DateTime, default=datetime.utcnow)
taskid = db.relationship('Taskstatus', backref="status")
def __init__(self, task_id, task_type):
self.task_id = task_id
self.task_type = task_type
class Taskstatus(db.Model):
__tablename__ = "taskstatus"
id = db.Column(db.Integer, primary_key=True)
taskid = db.Column(db.String(72), db.ForeignKey('tasks.task_id'))
target_url = db.Column(db.String(100))
check0 = db.Column(db.String(100))
check1 = db.Column(db.String(100))
check2 = db.Column(db.String(100))
check3 = db.Column(db.String(100))
check4 = db.Column(db.String(100))
check5 = db.Column(db.String(100))
check6 = db.Column(db.String(100))
match = db.Column(db.String(4))
followed = db.Column(db.DateTime)
unfollowed = db.Column(db.DateTime)
pics_liked = db.Column(db.Integer)
t5_timestamp = db.Column(db.DateTime)
t1_timestamp = db.Column(db.DateTime)
def __init__(self, target_url):
self.target_url = target_url
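# --- Hedged usage sketch (not part of the models) ----------------------------
# Creating a user and verifying the stored hash; assumes an application context
# and an initialized `db` (e.g. after db.create_all()).
def _demo_user():
    user = Users("demo", "s3cretpass")
    db.session.add(user)
    db.session.commit()
    assert user.check_password("s3cretpass")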
|
python
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# /*##########################################################################
#
# Copyright (c) 2016 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
"""
Pan and zoom interaction to plug on a matplotlib Figure.
Interaction:
- Zoom in/out with the mouse wheel
- Pan figures by dragging the mouse with left button pressed
- Select a zoom-in area by dragging the mouse with right button pressed
It provides a figure_pz function to create a Figure with interaction.
Example:
.. code-block:: python
import matplotlib.pyplot as plt
from mpl_interaction import figure_pz
fig = figure_pz()
ax = fig.add_subplot(1, 1, 1)
ax.plot((1, 2, 1))
plt.show()
The :class:`PanAndZoom` class can be used to add interaction
to an existing Figure.
Example:
.. code-block:: python
import matplotlib.pyplot as plt
from mpl_interaction import PanAndZoom
fig = plt.figure()
pan_zoom = PanAndZoom(fig) # Add support for pan and zoom
ax = fig.add_subplot(1, 1, 1)
ax.plot((1, 2, 1))
plt.show()
Known limitations:
- Only support linear and log scale axes.
- Zoom area not working well with keep aspect ratio.
- Interfere with matplotlib toolbar.
"""
import logging
import math
import warnings
import weakref
import matplotlib.pyplot as _plt
import numpy
from PySide2.QtCore import Qt
from PySide2.QtGui import QIcon
from PySide2.QtWidgets import QAction, QLabel
from ... import Dict
__all__ = ['figure_pz', 'MplInteraction', 'PanAndZoom']
class MplInteraction(object):
"""Base class for class providing interaction to a matplotlib Figure."""
def __init__(self, figure):
"""
Args:
figure (figure): The matplotlib figure to attach the behavior to.
"""
self._fig_ref = weakref.ref(figure)
self._cids = []
def __del__(self):
"""Disconnnect."""
self.disconnect()
def _add_connection(self, event_name, callback):
"""Called to add a connection to an event of the figure.
Args:
event_name (str): The matplotlib event name to connect to.
callback (callback): The callback to register to this event.
"""
cid = self.figure.canvas.mpl_connect(event_name, callback)
self._cids.append(cid)
def disconnect(self):
"""Disconnect interaction from Figure."""
if self._fig_ref is not None:
figure = self._fig_ref()
if figure is not None:
for cid in self._cids:
figure.canvas.mpl_disconnect(cid)
self._fig_ref = None
@property
def figure(self):
"""The Figure this interaction is connected to or
None if not connected."""
return self._fig_ref() if self._fig_ref is not None else None
def _axes_to_update(self, event):
"""Returns two sets of Axes to update according to event.
Takes care of multiple axes and shared axes.
Args:
event (MouseEvent): Matplotlib event to consider
Returns:
tuple: Axes for which to update xlimits and ylimits.
2-tuple of set (xaxes, yaxes)
"""
x_axes, y_axes = set(), set()
# Go through all axes to enable zoom for multiple axes subplots
for ax in self.figure.axes:
if ax.contains(event)[0]:
# For twin x axes, makes sure the zoom is applied once
shared_x_axes = set(ax.get_shared_x_axes().get_siblings(ax))
if x_axes.isdisjoint(shared_x_axes):
x_axes.add(ax)
# For twin y axes, makes sure the zoom is applied once
shared_y_axes = set(ax.get_shared_y_axes().get_siblings(ax))
if y_axes.isdisjoint(shared_y_axes):
y_axes.add(ax)
return x_axes, y_axes
def _draw(self):
"""Conveninent method to redraw the figure."""
self.figure.canvas.draw()
class ZoomOnWheel(MplInteraction):
"""Class providing zoom on wheel interaction to a matplotlib Figure.
This class extends the `MplInteraction` class.
Supports subplots, twin Axes and log scales.
"""
def __init__(self, figure=None, scale_factor=1.1):
"""
Args:
figure (figure): The matplotlib figure to attach the behavior to.
scale_factor (float): The scale factor to apply on wheel event.
"""
super(ZoomOnWheel, self).__init__(figure)
self._add_connection('scroll_event', self._on_mouse_wheel)
self.scale_factor = scale_factor
@staticmethod
def _zoom_range(begin, end, center, scale_factor, scale):
"""Compute a 1D range zoomed around center.
Args:
begin (float): The begin bound of the range
end (float): The end bound of the range
center (float): The center of the zoom (i.e., invariant point)
scale_factor (float): The scale factor to apply
scale (str): The scale of the axis
Returns:
tuple: The zoomed range (min, max)
"""
if begin < end:
min_, max_ = begin, end
else:
min_, max_ = end, begin
if scale == 'linear':
old_min, old_max = min_, max_
elif scale == 'log':
old_min = numpy.log10(min_ if min_ > 0. else numpy.nextafter(0, 1))
center = numpy.log10(
center if center > 0. else numpy.nextafter(0, 1))
old_max = numpy.log10(max_) if max_ > 0. else 0.
else:
logging.warning('Zoom on wheel not implemented for scale "%s"' %
scale)
return begin, end
offset = (center - old_min) / (old_max - old_min)
range_ = (old_max - old_min) / scale_factor
new_min = center - offset * range_
new_max = center + (1. - offset) * range_
if scale == 'log':
try:
new_min, new_max = 10.**float(new_min), 10.**float(new_max)
except OverflowError: # Limit case
new_min, new_max = min_, max_
if new_min <= 0. or new_max <= 0.: # Limit case
new_min, new_max = min_, max_
if begin < end:
return new_min, new_max
else:
return new_max, new_min
def _on_mouse_wheel(self, event):
"""Mouse wheel event."""
if event.step > 0:
scale_factor = self.scale_factor
else:
scale_factor = 1. / self.scale_factor
# Go through all axes to enable zoom for multiple axes subplots
x_axes, y_axes = self._axes_to_update(event)
for ax in x_axes:
transform = ax.transData.inverted()
xdata, ydata = transform.transform_point((event.x, event.y))
xlim = ax.get_xlim()
xlim = self._zoom_range(xlim[0], xlim[1], xdata, scale_factor,
ax.get_xscale())
ax.set_xlim(xlim)
        for ax in y_axes:
            # Recompute the data coordinates per axis: the original reused
            # ydata from the x loop, which is undefined when x_axes is empty.
            transform = ax.transData.inverted()
            xdata, ydata = transform.transform_point((event.x, event.y))
            ylim = ax.get_ylim()
            ylim = self._zoom_range(ylim[0], ylim[1], ydata, scale_factor,
                                    ax.get_yscale())
            ax.set_ylim(ylim)
if x_axes or y_axes:
self._draw()
class PanAndZoom(ZoomOnWheel):
"""Class providing pan & zoom interaction to a matplotlib Figure.
Left button for pan, right button for zoom area and zoom on wheel.
Support subplots, twin Axes and log scales.
This class extends the `ZoomOnWheel` class.
"""
def __init__(self, figure=None, scale_factor=1.1):
"""
Args:
figure (figure): The matplotlib figure to attach the behavior to.
scale_factor (float): The scale factor to apply on wheel event.
"""
super(PanAndZoom, self).__init__(figure, scale_factor)
self._add_connection('button_press_event', self._on_mouse_press)
self._add_connection('button_release_event', self._on_mouse_release)
self._add_connection('motion_notify_event', self._on_mouse_motion)
self._pressed_button = None # To store active button
self._axes = None # To store x and y axes concerned by interaction
self._event = None # To store reference event during interaction
self.options = Dict(dict(report_point_position=True,))
self.logger = None
self._statusbar_label = None
#self._get_images_path()
#self._add_toolbar_tools()
self._style_figure()
self._ix_iy_old = (0, 0)
def _get_images_path(self):
"""Get the path to images.
Returns:
str: path
Raises:
Exception: path error
"""
# to be removed
try: # Get tool image path
from pathlib import Path
from ... import _gui
imgs_path = Path(_gui.__file__).parent / '_imgs'
            if not imgs_path.is_dir():
                print(f'Bad file path for images! {imgs_path}')
imgs_path = None
except Exception as e:
print('ERROR: ', e)
imgs_path = None
self.imgs_path = imgs_path
return imgs_path
def _add_toolbar_tools(self):
"""Add tools."""
# TODO: Outdated - to be removed
from matplotlib.backend_tools import ToolToggleBase # ToolBase
class ToolPointPosition(ToolToggleBase):
'''Tools.'''
default_keymap = 'Ctrl+p'
description = 'Click to get point coordinate printed'
default_toggled = False
image = None # str(imgs_path)
def __init__(self, *args, parent=None, **kwargs):
super().__init__(*args, **kwargs)
if parent is None:
                    raise ValueError('Pass a parent')  # raising a bare string is invalid in Python 3
self.parent = parent
def enable(self, *args):
self.parent.options.report_point_position = True
def disable(self, *args):
self.parent.options.report_point_position = False
fig = self.figure
imgs_path = self.imgs_path
toolbar = self.toolbar = fig.canvas.manager.toolbar
# Get tool manager
# TODO: Remove use of tool manager just use PySide2 bare as below
# ToolbarQt --- https://github.com/matplotlib/matplotlib/blob/master/lib/matplotlib/backends/backend_qt5.py
tm = fig.canvas.manager.toolmanager
self.tm = tm
# Tool: Print point location
ToolPointPosition.image = str(imgs_path / 'click.png')
with warnings.catch_warnings():
warnings.simplefilter("ignore")
tm.add_tool("Point_position", ToolPointPosition, parent=self)
fig.canvas.manager.toolbar.add_tool(tm.get_tool("Point_position"),
"toolgroup")
# Tool: Copy to Clipboard
from matplotlib.backend_tools import ToolCopyToClipboard
ToolCopyToClipboard.image = str(imgs_path / 'copy.png')
with warnings.catch_warnings():
warnings.simplefilter("ignore")
            # Overwrites Ctrl+C and issues a warning
tm.add_tool("Copy_to_clipboard", ToolCopyToClipboard)
fig.canvas.manager.toolbar.add_tool(tm.get_tool("Copy_to_clipboard"),
"toolgroup")
if 1: # add QT Pieces
toolbar.action_ascale = QAction(
QIcon(str(imgs_path / 'auto_zoom.png')), 'Auto scale', toolbar)
toolbar.action_ascale.setShortcut('A')
toolbar.action_ascale.setShortcutContext(Qt.WindowShortcut)
toolbar.action_ascale.setStatusTip('Autoscale')
toolbar.action_ascale.triggered.connect(self.auto_scale)
toolbar.addAction(toolbar.action_ascale)
# Status Bar: Second label to report
figManager = fig.canvas.manager # plt.get_current_fig_manager()
status_bar = figManager.window.statusBar()
self._status_label_2 = QLabel(status_bar)
self._status_label_2.setText('')
status_bar.addWidget(self._status_label_2)
#from matplotlib.backends.backend_qt5 import StatusbarQt
#st = StatusbarQt(figManager.window, figManager.toolmanager)
# figManager.statusbar.set_message('')
def auto_scale(self):
"""Auto scaler."""
for ax in self.figure.axes:
ax.autoscale()
# self.figure.canvas.flush_events()
self.figure.canvas.draw()
def _style_figure(self):
"""Style figure."""
#self.figure.dpi = 150
pass
@staticmethod
def _pan_update_limits(ax, axis_id, event, last_event):
"""Compute limits with applied pan.
        Args:
            ax (Axes): The axes whose limits are panned
            axis_id (int): ID of the axis (0 for x, 1 for y)
            event (event): The event
            last_event (event): The previous event
Returns:
double: New limit
Raises:
ValueError: Value error
OverflowError: Overflow error
"""
assert axis_id in (0, 1)
if axis_id == 0:
lim = ax.get_xlim()
scale = ax.get_xscale()
else:
lim = ax.get_ylim()
scale = ax.get_yscale()
pixel_to_data = ax.transData.inverted()
data = pixel_to_data.transform_point((event.x, event.y))
last_data = pixel_to_data.transform_point((last_event.x, last_event.y))
if scale == 'linear':
delta = data[axis_id] - last_data[axis_id]
new_lim = lim[0] - delta, lim[1] - delta
elif scale == 'log':
try:
delta = math.log10(data[axis_id]) - \
math.log10(last_data[axis_id])
new_lim = [
pow(10., (math.log10(lim[0]) - delta)),
pow(10., (math.log10(lim[1]) - delta))
]
except (ValueError, OverflowError):
new_lim = lim # Keep previous limits
else:
logging.warning('Pan not implemented for scale "%s"' % scale)
new_lim = lim
return new_lim
def _pan(self, event):
"""Pan.
Args:
event (event): The event
"""
if event.name == 'button_press_event': # begin pan
self._event = event
elif event.name == 'button_release_event': # end pan
self._event = None
elif event.name == 'motion_notify_event': # pan
if self._event is None:
return
if event.x != self._event.x:
for ax in self._axes[0]:
xlim = self._pan_update_limits(ax, 0, event, self._event)
ax.set_xlim(xlim)
if event.y != self._event.y:
for ax in self._axes[1]:
ylim = self._pan_update_limits(ax, 1, event, self._event)
ax.set_ylim(ylim)
if event.x != self._event.x or event.y != self._event.y:
self._draw()
self._event = event
def _zoom_area(self, event):
"""Zoom
Args:
event (event): The event
"""
if event.name == 'button_press_event': # begin drag
self._event = event
self._patch = _plt.Rectangle(xy=(event.xdata, event.ydata),
width=0,
height=0,
fill=False,
linewidth=1.,
linestyle='solid',
color='black')
self._event.inaxes.add_patch(self._patch)
elif event.name == 'button_release_event': # end drag
self._patch.remove()
del self._patch
if (abs(event.x - self._event.x) < 3 or
abs(event.y - self._event.y) < 3):
return # No zoom when points are too close
x_axes, y_axes = self._axes
for ax in x_axes:
pixel_to_data = ax.transData.inverted()
begin_pt = pixel_to_data.transform_point((event.x, event.y))
end_pt = pixel_to_data.transform_point(
(self._event.x, self._event.y))
min_ = min(begin_pt[0], end_pt[0])
max_ = max(begin_pt[0], end_pt[0])
if not ax.xaxis_inverted():
ax.set_xlim(min_, max_)
else:
ax.set_xlim(max_, min_)
for ax in y_axes:
pixel_to_data = ax.transData.inverted()
begin_pt = pixel_to_data.transform_point((event.x, event.y))
end_pt = pixel_to_data.transform_point(
(self._event.x, self._event.y))
min_ = min(begin_pt[1], end_pt[1])
max_ = max(begin_pt[1], end_pt[1])
if not ax.yaxis_inverted():
ax.set_ylim(min_, max_)
else:
ax.set_ylim(max_, min_)
self._event = None
elif event.name == 'motion_notify_event': # drag
if self._event is None:
return
if event.inaxes != self._event.inaxes:
return # Ignore event outside plot
self._patch.set_width(event.xdata - self._event.xdata)
self._patch.set_height(event.ydata - self._event.ydata)
self._draw()
def _on_mouse_press(self, event):
"""Mouse press event
Args:
event (event): The event
"""
if self._pressed_button is not None:
return # Discard event if a button is already pressed
if event.button in (1, 3): # Start
x_axes, y_axes = self._axes_to_update(event)
if x_axes or y_axes:
self._axes = x_axes, y_axes
self._pressed_button = event.button
if self._pressed_button == 1: # pan
self._pan(event)
if self.options.report_point_position: # check if we want to report point
self._report_point_position(event)
elif self._pressed_button == 3: # zoom area
self._zoom_area(event)
def _on_mouse_release(self, event):
"""Mouse release event
Args:
event (event): The event
"""
if self._pressed_button == event.button:
if self._pressed_button == 1: # pan
self._pan(event)
elif self._pressed_button == 3: # zoom area
self._zoom_area(event)
self._pressed_button = None
def _on_mouse_motion(self, event):
"""Mouse motion event
Args:
event (event): The event
"""
if self._pressed_button == 1: # pan
self._pan(event)
elif self._pressed_button == 3: # zoom area
self._zoom_area(event)
def _report_point_position(self, event):
"""Report point position
Args:
event (event): the event
"""
ix, iy = event.xdata, event.ydata
if hasattr(self, '_ix_iy_old'):
ix_old, iy_old = self._ix_iy_old
else:
ix_old, iy_old = (ix, iy)
self._ix_iy_old = ix, iy
_text = f'(x,y) = ({ix:.4f}, {iy:.4f}) Δ last point ({ix-ix_old:.4f}, {iy-iy_old:.4f})'
if self.logger:
self.logger.info(_text)
if self._statusbar_label:
self._statusbar_label.setText(_text)
#print(_text)
def figure_pz(*args, **kwargs):
"""matplotlib.pyplot.figure with pan and zoom interaction."""
#import warnings
# warnings.filterwarnings(action='ignore')
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fig = _plt.figure(*args, **kwargs)
fig.pan_zoom = PanAndZoom(fig)
# warnings.resetwarnings()
return fig
"""
if __name__ == "__main__":
import matplotlib.pyplot as plt
fig = figure_pz()
# Alternative:
# fig = plt.figure()
# pan_zoom = PanAndZoom(fig)
nrow, ncol = 2, 3
ax1 = fig.add_subplot(nrow, ncol, 1)
ax1.set_title('basic')
ax1.plot((1, 2, 3))
ax2 = fig.add_subplot(nrow, ncol, 2)
ax2.set_title('log + twinx')
ax2.set_yscale('log')
ax2.plot((1, 2, 1))
ax2bis = ax2.twinx()
ax2bis.plot((3, 2, 1), color='red')
ax3 = fig.add_subplot(nrow, ncol, 3)
ax3.set_title('inverted y axis')
ax3.plot((1, 2, 3))
lim = ax3.get_ylim()
ax3.set_ylim(lim[1], lim[0])
ax4 = fig.add_subplot(nrow, ncol, 4)
ax4.set_title('keep ratio')
ax4.axis('equal')
ax4.imshow(numpy.arange(100).reshape(10, 10))
ax5 = fig.add_subplot(nrow, ncol, 5)
ax5.set_xlabel('symlog scale + twiny')
ax5.set_xscale('symlog')
ax5.plot((1, 2, 3))
ax5bis = ax5.twiny()
ax5bis.plot((3, 2, 1), color='red')
# The following is taken from:
# http://matplotlib.org/examples/axes_grid/demo_curvelinear_grid.html
from mpl_toolkits.axisartist import Subplot
from mpl_toolkits.axisartist.grid_helper_curvelinear import \
GridHelperCurveLinear
def tr(x, y): # source (data) to target (rectilinear plot) coordinates
x, y = numpy.asarray(x), numpy.asarray(y)
return x + 0.2 * y, y - x
def inv_tr(x, y):
x, y = numpy.asarray(x), numpy.asarray(y)
return x - 0.2 * y, y + x
grid_helper = GridHelperCurveLinear((tr, inv_tr))
ax6 = Subplot(fig, nrow, ncol, 6, grid_helper=grid_helper)
fig.add_subplot(ax6)
ax6.set_title('non-ortho axes')
xx, yy = tr([3, 6], [5.0, 10.])
ax6.plot(xx, yy)
ax6.set_aspect(1.)
ax6.set_xlim(0, 10.)
ax6.set_ylim(0, 10.)
ax6.axis["t"] = ax6.new_floating_axis(0, 3.)
ax6.axis["t2"] = ax6.new_floating_axis(1, 7.)
ax6.grid(True)
plt.show()
"""
|
python
|
var1 = 'Geeks'
print("Original String :-", var1)
print("Updated String :- ", var1[:5] + 'for' + 'Geeks') # statement 1
|