content (string, 0–894k chars) · type (string, 2 classes)
import random
import datetime

from dateutil.relativedelta import relativedelta


def gen_sale(store_no, store_id, date):
    # Seasonality: double sales on Christmas Eve, scale up the other holiday months.
    seasonality = 1
    if date[5:] == '12-24':
        seasonality = 2
    elif int(date[5:7]) == 12:
        seasonality = 1.75
    elif int(date[5:7]) == 11:
        seasonality = 1.5
    elif int(date[5:7]) == 10:
        seasonality = 1.2
    amount = '{0:.2f}'.format(random.random() * 20000 * seasonality)
    sale = {
        'amount_1': amount,
        'amount_2': None,   # illustration of multiple different sales numbers for a store
        'sales_ticks': [],  # Sales by the minute
        'store_no': store_no,
        'date': date,
        'store_id': store_id
    }
    return sale


def gen_sales(store):
    sale_records = []
    sd = [int(x) for x in store['open_date'].split('-')]
    start_date = datetime.date(year=sd[0], month=sd[1], day=sd[2])
    if store['close_date'] is None:
        close_date = datetime.date.today().isoformat()
    else:
        close_date = store['close_date']
    ed = [int(x) for x in close_date.split('-')]
    end_date = datetime.date(year=ed[0], month=ed[1], day=ed[2])
    while start_date <= end_date:
        sale_record = gen_sale(store['store_no'], store['_id'], start_date.isoformat())
        sale_records.append(sale_record)
        start_date += datetime.timedelta(days=1)
    return sale_records


# When passed the genesis date of the business, this creates collection names
# for every month from then until today.
def gen_sales_collections(start_date_str):
    labels = []
    sd = [int(x) for x in start_date_str.split('-')]
    start_date = datetime.date(year=sd[0], month=sd[1], day=1)
    today = datetime.date.today()
    today = today - datetime.timedelta(days=(today.day - 1))
    while start_date <= today:
        label = 'sales_{0}_{1}'.format(start_date.year, start_date.month)
        labels.append(label)
        start_date += relativedelta(months=1)
    return labels


def determine_sales_collections(start_date_str, end_date_str):
    labels = []
    sd = [int(x) for x in start_date_str.split('-')]
    start_date = datetime.date(year=sd[0], month=sd[1], day=1)
    ed = [int(x) for x in end_date_str.split('-')]
    end_date = datetime.date(year=ed[0], month=ed[1], day=1)
    while start_date <= end_date:
        label = 'sales_{0}_{1}'.format(start_date.year, start_date.month)
        labels.append(label)
        start_date += relativedelta(months=1)
    return labels


def add_store_id_to_sales(stores):
    pass
python
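A minimal usage sketch for the generators above; the store record fields are inferred from the keys the functions read, not from a documented schema:

store = {'store_no': 1, '_id': 'abc123', 'open_date': '2018-11-01', 'close_date': '2019-01-15'}  # hypothetical record
daily_records = gen_sales(store)                      # one sale dict per day the store was open
month_labels = gen_sales_collections('2018-11-01')    # ['sales_2018_11', 'sales_2018_12', ...]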
"""https://open.kattis.com/problems/kornislav""" nums = list(map(int, input().split())) nums.remove(max(nums)) print(min(nums) * max(nums))
python
from random import shuffle

# Splits the raw Yahoo ratings file into b shards, shuffles each shard,
# then regroups the ratings user-wise into the output file.

# ########## input ##########
b = 10
raw_data = 'yahoo_raw_train'
userwise_data = 'yahoo_userwise_train_split%d' % b
# ###########################

fr = open(raw_data, 'r')
nr = int(fr.readline())
for i in range(b):
    f = open('raw%d' % i, 'w')
    if i == b - 1:
        tt = nr - i * (nr // b)
        f.write('%d\n' % (tt))
        for j in range(tt):
            line = fr.readline()
            l = line.split(',')
            u = int(l[0])
            v = int(l[1])
            r = float(l[2])
            f.write('%d,%d,%f\n' % (u, v, r))
    else:
        f.write('%d\n' % (nr // b))
        for j in range(nr // b):
            line = fr.readline()
            l = line.split(',')
            u = int(l[0])
            v = int(l[1])
            r = float(l[2])
            f.write('%d,%d,%f\n' % (u, v, r))
    f.close()
fr.close()
print('split raw done\n')

for i in range(b):
    data = []
    f = open('raw%d' % i, 'r')
    fw = open('raw_shuffle%d' % i, 'w')
    f.readline()
    lines = f.readlines()
    for line in lines:
        t = line.split(',')
        u = int(t[0])
        v = int(t[1])
        r = float(t[2])
        data.append((u, v, r))
    shuffle(data)
    shuffle(data)
    fw.write('%d\n' % len(data))
    for d in data:
        fw.write('%d,%d,%f\n' % (d[0], d[1], d[2]))
    f.close()
    fw.close()
print('shuffle done\n')

fl = []
fn = []
for i in range(b):
    f = open('raw_shuffle%d' % i, 'r')
    nn = int(f.readline())
    fn.append(nn)
    fl.append(f)
fw = open(userwise_data, 'w')
for i in range(b):
    du = {}
    for j in range(b):
        if i == b - 1:
            for k in range(fn[j] // b + fn[j] % b):
                li = fl[j].readline().split(',')
                u = int(li[0])
                v = int(li[1])
                r = float(li[2])
                if u in du:
                    du[u].append((v, r))
                else:
                    du[u] = []
                    du[u].append((v, r))
        else:
            for k in range(fn[j] // b):
                li = fl[j].readline().split(',')
                u = int(li[0])
                v = int(li[1])
                r = float(li[2])
                if u in du:
                    du[u].append((v, r))
                else:
                    du[u] = []
                    du[u].append((v, r))
    for u in du:
        fw.write('%d:\n' % u)
        for (v, r) in du[u]:
            fw.write('%d,%f\n' % (v, r))
for i in range(b):
    fl[i].close()
fw.close()
python
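The input format is implied by the reads above: a first line holding the number of ratings, then one user,item,rating row per line. A tiny hand-made file for a dry run (the values are invented):

with open('yahoo_raw_train', 'w') as f:
    f.write('3\n')        # number of ratings
    f.write('1,10,4.0\n')  # user,item,rating
    f.write('1,11,2.5\n')
    f.write('2,10,5.0\n')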
class Solution:
    def singleNumber(self, nums):
        res = 0
        # XOR is associative and commutative, and XORing a number with itself
        # gives 0, so every duplicated value cancels out.
        for i in nums:
            res ^= i
        return res


z = Solution()
nums = [4, 2, 1, 2, 1]
print(z.singleNumber(nums))
python
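The same cancellation can be written as a one-line fold; a small equivalent sketch, not part of the original snippet:

from functools import reduce
from operator import xor

print(reduce(xor, [4, 2, 1, 2, 1], 0))  # 4 -- every duplicated value cancels to 0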
from typing import Dict, Optional, Tuple import uuid import pandas as pd from tqdm import tqdm_notebook def flatten_df( df: pd.DataFrame, i: int = 0, columns_map: Optional[Dict[str, str]] = None, p_bar: Optional[tqdm_notebook] = None, ) -> Tuple[pd.DataFrame, Dict[str, str]]: """Expand lists and dicts to new columns named after list element number or dict key and containing respective cell values. If new name conflicts with an existing column, a short hash is used. Almost as fast as json_normalize but supports lists. Args: df: a dataframe to expand i: start index of columns slice, since there's no need to iterate twice over completely expanded column columns_map: a dict with old name references {new_name: old} p_bar: a progress bar Returns: A flat dataframe with new columns from expanded lists and dicts and a columns map dict with old name references {new_name: old} Examples: >>> df = pd.DataFrame({"links": [[{"im": "http://www.im.com/illinoi"}, ... {"ITW website": "http://www.itw.com"}]]}) >>> flat_df, cols_map = flatten_df(df) >>> flat_df links_0_im links_1_ITW website 0 http://www.im.com/illinoi http://www.itw.com >>> cols_map {'links_0_im': 'links', 'links_1_ITW website': 'links'} """ if not columns_map: columns_map = {} if not p_bar: p_bar = tqdm_notebook( total=len(df.columns), desc="Flattening df", unit="columns" ) for c in df.columns[i:]: flattened_columns = expand_column(df, c) if flattened_columns.empty: i += 1 p_bar.update(1) continue def name_column(x): new_name = f"{c}_{x}" if new_name in df.columns: new_name = f"{c}_{uuid.uuid1().hex[:5]}" if c in columns_map: columns_map[new_name] = columns_map[c] else: columns_map[new_name] = c return new_name flattened_columns = flattened_columns.rename(columns=name_column) df = pd.concat([df[:], flattened_columns[:]], axis=1).drop(c, axis=1) columns_map.pop(c, None) p_bar.total = len(df.columns) return flatten_df(df, i, columns_map, p_bar) return df, columns_map def expand_column(df: pd.DataFrame, column: str) -> pd.DataFrame: mask = df[column].map(lambda x: (isinstance(x, list) or isinstance(x, dict))) collection_column = df[mask][column] return collection_column.apply(pd.Series)
python
# -*- coding: utf-8 -*-
import uuid

import scrapy
from scrapy import Selector

from GAN_data.items import GanDataItem


class UmeiSpider(scrapy.Spider):
    name = 'umei'
    # allowed_domains = ['https://www.umei.cc/tags/meinv_1.htm']
    start_urls = ['https://www.umei.cc/tags/meinv_1.htm']

    def parse(self, response):
        for src in Selector(response).xpath("//div[@class='TypeList']/ul/li/a/@href").extract():
            yield scrapy.Request(src, callback=self.parse_img_link)
        if response.xpath("//div[@class='NewPages']/ul/li/a[text()='下一页']/@href").extract():
            next_page = response.xpath("//div[@class='NewPages']/ul/li/a[text()='下一页']/@href").get()
            yield scrapy.Request(response.urljoin(next_page), callback=self.parse)

    # Crawl the concrete image links ("下一页" in the XPath selectors means "next page")
    def parse_img_link(self, response):
        item = GanDataItem()
        img_link = Selector(response).xpath("//div[@class='ImageBody']/p/a/img/@src").get()
        item['name'] = str(uuid.uuid4()).replace("-", "") + '.jpg'
        item['src'] = img_link
        yield item
        if response.xpath("//div[@class='NewPages']/ul/li/a[text()='下一页']/@href").get() != "#":
            next_img = response.xpath("//div[@class='NewPages']/ul/li/a[text()='下一页']/@href").get()
            yield scrapy.Request(response.urljoin(next_img), callback=self.parse_img_link)
python
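A hedged note on running the spider: inside the Scrapy project that defines GanDataItem it would normally be launched with "scrapy crawl umei"; a programmatic equivalent is sketched below (the FEEDS output path is an arbitrary example, and FEEDS requires a reasonably recent Scrapy):

from scrapy.crawler import CrawlerProcess

process = CrawlerProcess(settings={"FEEDS": {"umei_items.json": {"format": "json"}}})
process.crawl(UmeiSpider)
process.start()  # blocks until the crawl finishes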
from supervised_gym.experience import ExperienceReplay, DataCollector from supervised_gym.models import * # SimpleCNN, SimpleLSTM from supervised_gym.recorders import Recorder from supervised_gym.utils.utils import try_key from torch.optim import Adam, RMSprop from torch.optim.lr_scheduler import ReduceLROnPlateau from torch.nn import CrossEntropyLoss import torch import numpy as np import time from tqdm import tqdm if torch.cuda.is_available(): DEVICE = torch.device("cuda:0") else: DEVICE = torch.device("cpu") def train(rank, hyps, verbose=False): """ This is the main training function. Argue a set of hyperparameters and this function will train a model to solve an openai gym task given an AI oracle. Args: rank: int the index of the distributed training system. hyps: dict a dict of hyperparams keys: str vals: object verbose: bool determines if the function should print status updates """ # Set random seeds hyps['seed'] = try_key(hyps,'seed', int(time.time())) torch.manual_seed(hyps["seed"]) np.random.seed(hyps["seed"]) # Initialize Data Collector and Begin Collecting Data # DataCollector's Initializer does Important changes to hyps data_collector = DataCollector(hyps) data_collector.dispatch_runners() # Initialize model model = globals()[hyps["model_type"]](**hyps) model.to(DEVICE) # Record experiment settings recorder = Recorder(hyps, model) # initialize trainer trainer = Trainer(hyps, model, recorder, verbose=verbose) # Loop training n_epochs = hyps["n_epochs"] if hyps["exp_name"] == "test": n_epochs = 2 hyps["n_eval_steps"] = 1000 for epoch in range(n_epochs): if verbose: print() print("Starting Epoch", epoch, "--", hyps["save_folder"]) # Run environments, automatically fills experience replay's # shared_exp tensors time_start = time.time() data_collector.await_runners() if verbose: print("Data Collection:", time.time()-time_start) trainer.train(model, data_collector.exp_replay) data_collector.dispatch_runners() if verbose: print("\nValidating") for val_sample in tqdm(range(hyps["n_val_samples"])): trainer.validate(epoch, model, data_collector) trainer.end_epoch(epoch) data_collector.terminate_runners() trainer.end_training() class Trainer: """ This class handles the training of the model. """ def __init__(self, hyps, model, recorder, verbose=True): """ Args: hyps: dict keys: str vals: object model: torch.Module recorder: Recorder an object for recording the details of the experiment verbose: bool if true, some functions will print updates to the console """ self.hyps = hyps self.model = model self.recorder = recorder self.verbose = verbose self.set_optimizer_and_scheduler( self.model, self.hyps["optim_type"], self.hyps["lr"] ) self.loss_fxn = globals()[self.hyps["loss_fxn"]]() def set_optimizer_and_scheduler(self, model, optim_type, lr, *args, **kwargs): """ Initializes an optimizer using the model parameters and the hyperparameters. Also sets a scheduler for the optimizer's learning rate. Args: model: Model or torch.Module any object that implements a `.parameters()` member function that returns a sequence of torch.Parameters optim_type: str (one of [Adam, RMSprop]) the type of optimizer. 
lr: float the learning rate Returns: optim: torch optimizer the model optimizer """ self.optim = globals()[optim_type]( list(model.parameters()), lr=lr ) self.scheduler = ReduceLROnPlateau( self.optim, mode='min', factor=try_key(self.hyps,"factor", 0.5), patience=try_key(self.hyps, "patience", 5), threshold=try_key(self.hyps, "threshold", 0.01), verbose=self.verbose ) def reset_model(self, model, batch_size): """ Determines what type of reset to do. If the data is provided in a random order, the model is simply reset. If, however, the data is provided in sequence, we must store the h value from the first forward loop in the last training loop. """ if self.hyps["randomize_order"]: model.reset(batch_size=batch_size) else: model.reset_to_step(step=1) def train(self, model, data_iter): """ This function handles the actual training. It loops through the available data from the experience replay to train the model. Args: model: torch.Module the model to be trained data_iter: iterable an iterable of the collected experience/data. each iteration must return a dict of data with the keys: obs: torch Float Tensor (N, S, C, H, W) actns: torch Long Tensor (N,S) dones: torch Long Tensor (N,S) n_targs: None or torch LongTensor (N,S) The iter must also implement the __len__ member so that the data can be easily looped through. """ if torch.cuda.is_available(): torch.cuda.empty_cache() model.train() model.reset(self.hyps['batch_size']) for i,data in enumerate(data_iter): iter_start = time.time() self.optim.zero_grad() obs = data['obs'] actns = data['actns'].to(DEVICE) dones = data["dones"] self.reset_model(model, len(obs)) # model uses dones if it is recurrent logits = model(obs.to(DEVICE), dones.to(DEVICE)) loss = self.loss_fxn( logits.reshape(-1, logits.shape[-1]), actns.flatten() ) # Backprop and update loss.backward() self.optim.step() # Calc acc categs = None if "n_targs" not in data else data["n_targs"] accs = self.calc_accs( # accs is a dict of floats logits=logits, targs=actns, categories=categs, prepender="train" ) # Record metrics metrics = { "train_loss": loss.item(), **accs} self.recorder.track_loop(metrics) self.print_loop( i, len(data_iter), loss.item(), accs["train_acc"], iter_start ) if self.hyps["exp_name"] == "test" and i >= 2: break self.scheduler.step( np.mean(self.recorder.metrics["train_loss"]) ) def calc_accs(self, logits, targs, categories=None, prepender=""): """ Calculates the average accuracy over the batch for each possible category Args: logits: torch float tensor (B, N, K) the model predictions. the last dimension must be the same number of dimensions as possible target values. targs: torch long tensor (B, N) the targets for the predictions categories: torch long tensor (B, N) or None if None, this value is ignored. Otherwise it specifies categories for accuracy calculations. prepender: str a string to prepend to all keys in the accs dict Returns: accs: dict keys: str total: float the average accuracy over all categories <categories_type_n>: float the average accuracy over this particular category. for example, if one of the categories is named 1, the key will be "1" and the value will be the average accuracy over that particular category. 
""" logits = logits.reshape(-1, logits.shape[-1]) argmaxes = torch.argmax(logits, dim=-1).squeeze() targs = targs.reshape(-1) acc = (argmaxes.long()==targs.long()).float().mean() accs = { prepender + "_acc": acc.item() } if type(categories) == torch.Tensor: # (B, N) categories = categories.reshape(-1).data.long() cats = {*categories.numpy()} for cat in cats: argmxs = argmaxes[categories==cat] trgs = targs[categories==cat] acc = (argmxs.long()==trgs.long()).float().mean() accs[prepender+"_acc_"+str(cat)] = acc.item() return accs def print_loop(self, loop_count, max_loops, loss, acc, iter_start): """ Printing statement for inner loop in the epoch. Args: loop_count: int the current loop max_loops: int the number of loops in the epoch loss: float the calculated loss acc: float the calculated accuracy iter_start: float a timestamp collected at the start of the loop """ s = "Loss:{:.5f} | Acc:{:.5f} | {:.0f}% | t:{:.2f}" s = s.format( loss, acc, loop_count/max_loops*100, time.time()-iter_start ) print(s, end=len(s)//4*" " + "\r") def validate(self, epoch, model, data_collector): """ Validates the performance of the model directly on an environment. Steps the learning rate scheduler based on the performance of the model. Args: runner: ValidationRunner """ if torch.cuda.is_available(): torch.cuda.empty_cache() # run model directly on an environment with torch.no_grad(): # Returned tensors are mainly of shape (n_eval_steps,) model.reset(batch_size=1) eval_data = data_collector.val_runner.rollout( model, n_tsteps=self.hyps["n_eval_steps"], n_eps=self.hyps["n_eval_eps"] ) # Calc Loss logits = eval_data["logits"] # already CUDA (N, K) targs = eval_data["targs"].to(DEVICE) # (N,) n_targs = eval_data["n_targs"] # (N,) or None loss = self.loss_fxn(logits, targs) # Calc Acc accs = self.calc_accs( # accs is a dict logits, targs, n_targs, prepender="val" ) eval_eps = self.hyps["n_eval_eps"] eval_steps = self.hyps["n_eval_steps"] divisor = eval_eps if eval_steps is None else eval_steps avg_rew = eval_data["rews"].sum()/divisor metrics = { "val_loss": loss.item(), "val_rew": avg_rew.item(), **accs } # Extra metrics if using gordongames variant if "gordongames" in self.hyps["env_type"]: keys = ["n_items", "n_targs", "n_aligned"] dones = eval_data["dones"].reshape(-1) inpts = {key: eval_data[key].reshape(-1) for key in keys} inpts = {key: val[dones==1] for key,val in inpts.items()} targ_accs = self.calc_targ_accs( **inpts, prepender="val" ) metrics = {**metrics, **targ_accs} inpts = {k:v.cpu().data.numpy() for k,v in inpts.items()} inpts["epoch"] = [ epoch for i in range(len(inpts["n_items"])) ] self.recorder.to_df(**inpts) self.recorder.track_loop(metrics) def calc_targ_accs(self, n_targs, n_items, n_aligned, prepender="val", **kwargs ): """ Calculates the accuracy of the episodes with regards to matching the correct number of objects. Args: n_targs: ndarray or long tensor (N,) Collects the number of targets in the episode only relevant if using a gordongames environment variant n_items: ndarray or long tensor (N,) Collects the number of items over the course of the episode. only relevant if using a gordongames environment variant n_aligned: ndarray or long tensor (N,) Collects the number of items that are aligned with targets over the course of the episode. 
only relevant if using a gordongames environment variant prepender: str a simple string prepended to each key in the returned dict Returns: metrics: dict keys: str "error": float the difference between the number of target objects and the number of item objects "coef_of_var": float the coefficient of variation. The avg error divided by the goal size "stddev": float the standard deviation of the n_item responses. "mean_resp": float the mean response of the n_item responses. """ fxns = { "error": calc_error, "coef_of_var": coef_of_var, "stddev": stddev, "mean_resp": mean_resp, } metrics = dict() if type(n_targs) == torch.Tensor: n_targs = n_targs.detach().cpu().numpy() if type(n_items) == torch.Tensor: n_items = n_items.detach().cpu().numpy() if type(n_aligned) == torch.Tensor: n_aligned = n_aligned.detach().cpu().numpy() inpts = { "n_items": n_items, "n_targs": n_targs, "n_aligned":n_aligned, } categories = set(n_targs.astype(np.int)) for key,fxn in fxns.items(): metrics[prepender+"_"+ key] = fxn(**inpts) # Calc for each specific target count for cat in categories: targs = n_targs[n_targs==cat] items = n_items[n_targs==cat] aligned = n_aligned[n_targs==cat] if len(targs)==0 or len(items)==0 or len(aligned)==0: continue metrics[prepender+"_"+key+"_"+str(cat)] = fxn( n_items=items, n_targs=targs, n_aligned=aligned, ) return metrics def end_epoch(self, epoch): """ Records, prints, cleans up the epoch statistics. Call this function at the end of the epoch. Args: epoch: int the epoch that has just finished. """ self.recorder.save_epoch_stats( epoch, self.model, self.optim, verbose=self.verbose ) self.recorder.reset_stats() def end_training(self): """ Perform all cleanup actions here. Mainly recording the best metrics. """ pass def mean_resp(n_items, **kwargs): """ Args: n_items: ndarray (same dims as n_targs) Returns: mean: float the standard deviation of the responses """ return n_items.mean() def stddev(n_items, **kwargs): """ Args: n_items: ndarray (same dims as n_targs) Returns: std: float the standard deviation of the responses """ return n_items.std() def calc_error(n_items, n_targs, **kwargs): """ The square root of the mean squared distance between n_items and n_targs. Args: n_items: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: error: float the square root of the average squared distance from the goal. 
""" return np.sqrt(((n_items-n_targs)**2).mean()) def coef_of_var(n_items, n_targs, **kwargs): """ Returns the coefficient of variation which is the error divided by the average n_targs Args: n_items: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: coef_var: float the error divided by the average n_targs """ return n_items.std()/n_targs.mean() def perc_aligned(n_aligned, n_targs, **kwargs): """ Calculates the percent of items that are aligned Args: n_aligned: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_aligned) Returns: perc: float the average percent aligned over all entries """ perc = n_aligned/n_targs return perc.mean()*100 def perc_unaligned(n_items, n_aligned, n_targs, **kwargs): """ Calculates the percent of items that are unaligned Args: n_items: ndarray (same dims as n_targs) n_aligned: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: perc: float the average percent unaligned over all entries """ perc = (n_items-n_aligned)/n_targs return perc.mean()*100 def perc_over(n_items, n_targs, **kwargs): """ Calculates the average proportion in which the number of items was greater than the number of targets. If the number of items was less than or equal to the number of targets, that entry is counted as 0% Args: n_items: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: perc: float the average amount of items over the number of targets """ n_items = n_items.copy() n_items[n_items<n_targs] = n_targs[n_items<n_targs] perc = (n_items-n_targs)/n_targs return perc.mean()*100 def perc_under(n_items, n_targs, **kwargs): """ Calculates the average proportion in which the number of items was less than the number of targets. If the number of items was greater than or equal to the number of targets, that entry is counted as 0% Args: n_items: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: perc: float the average amount of items less than the number of targets """ n_items = n_items.copy() n_items[n_items>n_targs] = n_targs[n_items>n_targs] perc = (n_targs-n_items)/n_targs return perc.mean()*100 def perc_off(n_items, n_targs, **kwargs): """ Calculates the average proportion in which the number of items was different than the number of targets. Args: n_items: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_items) Returns: perc: float the average amount of items different than the number of targets """ perc = torch.abs(n_targs-n_items)/n_targs return perc.mean()*100 def perc_correct(n_aligned, n_targs, **kwargs): """ Calculates the average proportion in which the number of aligned items is equal to the number of targets. Args: n_aligned: ndarray (same dims as n_targs) n_targs: ndarray (same dims as n_aligned) Returns: perc: float the average number of entries in which the number of aligned items is equal to the number of targets. """ perc = (n_aligned == n_targs) return perc.mean()*100
python
""" This module contains tools for handling evaluation specifications. """ import warnings from operator import itemgetter from ruamel.yaml import YAML from panoptic_parts.utils.utils import ( _sparse_ids_mapping_to_dense_ids_mapping as dict_to_numpy, parse__sid_pid2eid__v2) from panoptic_parts.specs.dataset_spec import DatasetSpec class PartPQEvalSpec(object): """ This class creates an evaluation specification from a YAML specification file and provides convenient attributes from the specification and useful functions. Moreover, it provides defaults and specification checking. """ def __init__(self, spec_path): """ Args: spec_path: a YAML evaluation specification """ with open(spec_path) as fd: espec = YAML().load(fd) self._spec_version = espec['version'] self._dspec = DatasetSpec(espec['dataset_spec_path']) self.ignore_label = espec['ignore_label'] # Dataset ids -> evaluation ids self.dataset_sid_pid2eval_sid_pid = espec['dataset_sid_pid2eval_sid_pid'] self.dataset_sid2eval_sid = espec['dataset_sid2eval_sid'] # Evaluation scene+part ids -> Evaluation flat part ids (for 'flat' part segmentation) self.eval_sid_pid2eval_pid_flat = espec['eval_sid_pid2eval_pid_flat'] # Evaluation ids -> Labels self.eval_sid2scene_label = espec['eval_sid2scene_label'] self.eval_pid_flat2scene_part_label = espec['eval_pid_flat2scene_part_label'] # Get all valid evaluation sid and sid_pids eval_sid_total = set(self.dataset_sid2eval_sid.values()) eval_sid_total.remove('IGNORED') self.eval_sid_total = list(eval_sid_total) eval_sid_pid_total = set(self.dataset_sid_pid2eval_sid_pid.values()) eval_sid_pid_total.remove('IGNORED') self.eval_sid_pid_total = list(eval_sid_pid_total) assert max(self.eval_sid_total) <= 99, "sid should not be larger than 99_99" assert max(self.eval_sid_pid_total) <= 9999, "sid_pid should not be larger than 99_99" # NEW: self.eval_sid_things = espec['eval_sid_things'] self.eval_sid_stuff = espec['eval_sid_stuff'] self.eval_sid_parts = espec['eval_sid_parts'] self.eval_sid_no_parts = espec['eval_sid_no_parts'] eval_sid_total_th_st = list(set(self.eval_sid_things + self.eval_sid_stuff)) eval_sid_total_p_np = list(set(self.eval_sid_parts + self.eval_sid_no_parts)) if not set(eval_sid_total_p_np) == set(eval_sid_total): raise ValueError('The defined set of scene classes with and without parts' 'is not equal to the total set of scene categories.') if not set(eval_sid_total_th_st) == set(eval_sid_total): raise ValueError('The defined set of things and stuff scene classes ' 'is not equal to the total set of scene categories.') self._extract_useful_attributes() def _extract_useful_attributes(self): self.dataset_spec = self._dspec sids_eval2pids_eval = dict() for class_key in self.eval_sid_pid_total: class_id = class_key // 100 if class_id in sids_eval2pids_eval.keys(): if class_key % 100 not in sids_eval2pids_eval[class_id]: sids_eval2pids_eval[class_id].append(class_key % 100) else: sids_eval2pids_eval[class_id] = [class_key % 100] for class_key in self.eval_sid_pid_total: scene_id = class_key // 100 part_id = class_key % 100 assert part_id != self.ignore_label, \ "part-level class cannot be the same as ignore label: {}".format(self.ignore_label) assert part_id != 0, "part-level class cannot be 0. 
sid_pid: {}".format(class_key) assert part_id >= 0, "part-level class cannot be a negative number: {}".format(part_id) assert part_id <= 99, "part-level class cannot be larger than 99: {}".format(part_id) assert scene_id != self.ignore_label, \ "scene-level class cannot be the same as ignore label: {}".format(self.ignore_label) assert scene_id != 0, "scene-level class cannot be 0. sid_pid: {}".format(class_key) assert scene_id >= 0, "scene-level class cannot be a negative number: {}".format(scene_id) assert scene_id <= 99, "scene-level class cannot be larger than 99: {}".format(scene_id) cat_definition = dict() cat_definition['num_cats'] = len(self.eval_sid_total) cat_definition['cat_def'] = list() for sid in self.eval_sid_total: cat_def = dict() cat_def['sem_cls'] = [sid] if sid in self.eval_sid_parts: if sid in sids_eval2pids_eval.keys(): if len(sids_eval2pids_eval[sid]) > 1: cat_def['parts_cls'] = sids_eval2pids_eval[sid] else: # TODO(daan): make sure this is the behavior we want raise ValueError("Semantic category {} only has 1 part id defined in the EvalSpec: {}, " "so in our format it is not treated as a class with parts. " "In the EvalSpec, remove it as a class with parts.".format(sid, sids_eval2pids_eval[sid])) else: raise ValueError("Semantic category {} has no part ids defined in the EvalSpec, " "so it cannot be treated as a class with parts. " "In the EvalSpec, remove it as a class with parts.".format(sid)) else: cat_def['parts_cls'] = [1] if sid in sids_eval2pids_eval.keys(): if len(sids_eval2pids_eval[sid]) > 1: warnings.warn("Note: Semantic category {} will be treated as a class without parts according to EvalSpec, " "even though there are {} parts defined for it.".format(sid, len(sids_eval2pids_eval[sid])), Warning) cat_definition['cat_def'].append(cat_def) self.cat_definition = cat_definition class SegmentationPartsEvalSpec(object): """ This class creates an evaluation specification from a YAML specification file and provides convenient attributes from the specification and useful functions. Moreover, it provides defaults and specification checking. 
Accessible specification attributes: - dataset_spec: the associated dataset specification - Nclasses: the number of evaluated classes (including ignored and background) - scene_part_classes: list of str, the names of the scene-part classes for evaluation, ordered by the eval id - eid_ignore: the eval_id to be ignored in evaluation - sid_pid2eval_id: dict, maps all sid_pid (0-99_99) to an eval_id, according to the template in specification yaml - sp2e_np: np.ndarray, shape: (10000,), sid_pid2eval_id as an array for dense gathering, position i has the sid_pid2eval_id[i] value Member functions: - """ def __init__(self, spec_path): """ Args: spec_path: a YAML evaluation specification """ with open(spec_path) as fd: espec = YAML().load(fd) self._spec_version = espec['version'] self.sid_pid2eid__template = espec['sid_pid2eid__template'] self.eval_id2scene_part_class = espec['eval_id2scene_part_class'] self._dspec = DatasetSpec(espec['dataset_spec_path']) self._extract_useful_attributes() def _extract_useful_attributes(self): self.dataset_spec = self._dspec self.sid_pid2eval_id = parse__sid_pid2eid__v2(self.sid_pid2eid__template) # TODO(panos): here we assume that IGNORE eval_id exists and is the max eval_id self.eid_ignore = max(self.sid_pid2eval_id.values()) self.sp2e_np = dict_to_numpy(self.sid_pid2eval_id, self.eid_ignore) self.scene_part_classes = list( map(itemgetter(1), sorted(self.eval_id2scene_part_class.items()))) self.Nclasses = len(self.scene_part_classes)
python
from typing import Tuple

import numpy as np
from tensorflow import Tensor

from decompose.distributions.distribution import Distribution
from decompose.distributions.normal import Normal
from decompose.distributions.product import Product


class NormalNormal(Product):

    def fromUnordered(self, d0: Distribution, d1: Distribution) -> Normal:
        if isinstance(d0, Normal) and isinstance(d1, Normal):
            return(self.product(d0, d1))
        else:
            raise ValueError("Expecting Normal and Normal")

    def product(self, n0: Normal, n1: Normal) -> Normal:
        mu = self.mu(n0, n1)
        tau = self.tau(n0, n1)
        otherParams = self.productParams(n0, n1)
        pd = Normal(mu=mu, tau=tau, **otherParams)
        return(pd)

    def mu(self, n0, n1) -> Tensor:
        # Precision-weighted mean of the two factors.
        mu0, tau0 = n0.mu, n0.tau
        mu1, tau1 = n1.mu, n1.tau
        tau = self.tau(n0, n1)
        mu = (mu0*tau0 + mu1*tau1)/tau
        return(mu)

    def tau(self, n0, n1) -> Tensor:
        # Precisions add when multiplying two Gaussian densities.
        tau = n0.tau + n1.tau
        return(tau)
python
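The class above implements the standard product-of-Gaussians identity: for densities N(mu0, 1/tau0) and N(mu1, 1/tau1), the product is proportional to a Gaussian with precision tau0 + tau1 and mean (mu0*tau0 + mu1*tau1)/(tau0 + tau1). A small self-contained numpy check of those formulas, independent of the decompose package (the names below are not from the snippet):

import numpy as np

mu0, tau0 = 1.0, 2.0   # first factor: mean and precision
mu1, tau1 = -0.5, 0.5  # second factor

tau = tau0 + tau1
mu = (mu0 * tau0 + mu1 * tau1) / tau

def normal_pdf(x, m, t):
    return np.sqrt(t / (2 * np.pi)) * np.exp(-0.5 * t * (x - m) ** 2)

# The ratio of the product to the combined Gaussian should be constant in x.
x = np.linspace(-3, 3, 5)
ratio = normal_pdf(x, mu0, tau0) * normal_pdf(x, mu1, tau1) / normal_pdf(x, mu, tau)
assert np.allclose(ratio, ratio[0])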
import spacy
from spacy.lang.en.stop_words import STOP_WORDS
from string import punctuation
from heapq import nlargest


class Summarizer:
    def __init__(self):
        print("Summarizer is being initialized...")

    def summarize(self, text):
        # test1 = inputField.get('1.0', tk.END)
        # test2 = numField.get()
        # print(test1)
        # print(test2)
        stopwords = list(STOP_WORDS)
        # document1 ="""Machine learning (ML) is the scientific study of algorithms and statistical models that computer systems use to progressively improve their performance on a specific task. Machine learning algorithms build a mathematical model of sample data, known as "training data", in order to make predictions or decisions without being explicitly programmed to perform the task. Machine learning algorithms are used in the applications of email filtering, detection of network intruders, and computer vision, where it is infeasible to develop an algorithm of specific instructions for performing the task. Machine learning is closely related to computational statistics, which focuses on making predictions using computers. The study of mathematical optimization delivers methods, theory and application domains to the field of machine learning. Data mining is a field of study within machine learning, and focuses on exploratory data analysis through unsupervised learning.In its application across business problems, machine learning is also referred to as predictive analytics."""
        document1 = text
        nlp = spacy.load('en_core_web_sm')
        docx = nlp(document1)
        mytokens = [token.text for token in docx]

        # Word frequencies, normalised by the most frequent non-stopword.
        word_frequencies = {}
        for word in docx:
            if word.text not in stopwords:
                if word.text not in word_frequencies.keys():
                    word_frequencies[word.text] = 1
                else:
                    word_frequencies[word.text] += 1
        maximum_frequency = max(word_frequencies.values())
        for word in word_frequencies.keys():
            word_frequencies[word] = (word_frequencies[word] / maximum_frequency)
        # print(word_frequencies)

        # Score sentences (shorter than 30 words) by the summed frequency of their words.
        sentence_list = [sentence for sentence in docx.sents]
        sentence_scores = {}
        for sent in sentence_list:
            for word in sent:
                if word.text.lower() in word_frequencies.keys():
                    if len(sent.text.split(' ')) < 30:
                        if sent not in sentence_scores.keys():
                            sentence_scores[sent] = word_frequencies[word.text.lower()]
                        else:
                            sentence_scores[sent] += word_frequencies[word.text.lower()]
        # print(sentence_scores)

        summarized_sentences = nlargest(5, sentence_scores, key=sentence_scores.get)
        # print(summarized_sentences)
        final_sentences = [w.text for w in summarized_sentences]
        summary = ' '.join(final_sentences)
        print("---------")
        print(document1)
        print("---------")
        # print(summary)
        return summary
python
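A minimal usage sketch for the class above; it assumes the en_core_web_sm spaCy model is installed, which spacy.load requires:

summarizer = Summarizer()
article = "..."  # any long English text
print(summarizer.summarize(article))  # the five highest-scoring sentences joined together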
from django.shortcuts import render, redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token from rest_framework.decorators import api_view, permission_classes from rest_framework.permissions import AllowAny from rest_framework.status import ( HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, HTTP_200_OK ) from rest_framework.response import Response from rest_framework.views import APIView from django.contrib.auth import authenticate from .models import Nutrient, Record, Symptomrecord, Diseaserecord, Foodrecord, Foodlist, Selfcarediary from .serializers import NutrientsSerializer from rest_framework.views import APIView from rest_framework import permissions, status import infermedica_api # import Symp from .serializers import SelfcarediarySerializer import requests,json infermedica_api.configure(app_id='945555e1', app_key='be2ee424c225c567086a084637a359de') def home(request): if request.user.is_authenticated(): return render(request, 'drug/home.html',{}) return redirect('accounts/login') def loginpage(request): return render(request, 'drug/login.html', {}) def search(symptom): api = infermedica_api.get_api() data = api.search(symptom["orth"]) return data def nutrients(request): if request.user.is_authenticated(): return render(request, 'drug/nutrients.html', {}) return redirect('accounts/login') def selfdiary(request): if request.user.is_authenticated(): return render(request, 'drug/selfdiary.html', {}) return redirect('accounts/login') def analytics(request): if request.user.is_authenticated(): return render(request, 'drug/analytics.html', {}) return redirect('accounts/login') class Prescription(APIView): @csrf_exempt def post(self,request): medicname = request.data.get("text") # import pdb; pdb.set_trace() data = requests.get("https://api.fda.gov/drug/label.json?search="+medicname).json() return Response(data, status=status.HTTP_200_OK) def medication(request): if request.user.is_authenticated(): return render(request, 'drug/medication.html', {}) return redirect('accounts/login.html') class ParseD(APIView): @csrf_exempt def post(self,request): sentence = request.data.get("text") dbrow = Record(user=request.user,search_query=sentence) dbrow.save() api = infermedica_api.get_api() response = api.parse(sentence).to_dict()["mentions"] mysymptomlist = [] templist = {} print("reached templist") for data in response: templist["orth"] = data["orth"] templist["id"] = data["id"] mysymptomlist.append(templist.copy()) finalsearchdata = [] print("reached finalserach") for symptom in mysymptomlist: callsearchdata = api.search(symptom['orth']) finalsearchdata.extend(callsearchdata) finaldict = {} print("conversion") for dictdata in finalsearchdata: finaldict[dictdata['label']] = dictdata['id'] symprow = Symptomrecord(user_record=dbrow,present_symptoms=dictdata['label'],present_symptoms_id=dictdata['id']) symprow.save() return Response(finaldict, status=status.HTTP_200_OK) class Condition(APIView): @csrf_exempt def post(self, request): api = infermedica_api.API(app_id='945555e1', app_key='be2ee424c225c567086a084637a359de') # r = infermedica_api.Diagnosis(app_id='945555e1', app_key='be2ee424c225c567086a084637a359de') data = api.conditions_list() # r = requests.post(url, data=json.dumps({'text': text}),headers={'Authorization': apiKey, 'Content-Type': 'application/json'}) return Response({"test":data}, status=status.HTTP_200_OK) # class Search(APIView): class Diagnosis(APIView): @csrf_exempt def post(self,request): try: present_symptoms = 
request.data.getlist('choices[]') absent_symptoms = request.data.getlist('unchoices[]') except AttributeError: present_symptoms = request.data.get('choices') absent_symptoms = request.data.get('unchoices') query_text = request.data.get('queryText') recordobject = Record.objects.get(user=request.user,search_query=query_text) api = infermedica_api.get_api() re = infermedica_api.Diagnosis(sex=request.data.get("gender"), age=request.data.get("age")) for symptom in present_symptoms: re.add_symptom(symptom, 'present') for symptom in absent_symptoms: re.add_symptom(symptom, 'absent') re= api.diagnosis(re).to_dict() for dictdata in re['conditions']: diseaseobject = Diseaserecord(user_record=recordobject, probable_diseases=dictdata['name'], probable_diseases_id=dictdata['id']) diseaseobject.save() return Response({"test":re}, status=status.HTTP_200_OK) # call diagnosis class Symptom(APIView): @csrf_exempt def post(self,request): api = infermedica_api.get_api() response = api.parse(sentence).to_dict()["mentions"] # import pdb; pdb.set_trace() mysymptomlist = {} for data in response: mysymptomlist["orth"] = data["orth"] mysymptomlist["id"] = data["id"] data.append(api.symptom_details(mysymptomlist["id"])) return Response({"test":data},status=status.HTTP_200_OK) # @csrf_exempt # @api_view(["POST"]) # @permission_classes((AllowAny,)) # def login(request): # username = request.data.get("username") # password = request.data.get("password") # if username is None or password is None: # return Response({'error': 'Please provide both username and password'}, # status=HTTP_400_BAD_REQUEST) # user = authenticate(username=username, password=password) # if not user: # return Response({'error': 'Invalid Credentials'}, # status=HTTP_404_NOT_FOUND) # token, restdetails = Token.objects.get_or_create(user=user) # return Response({'token': token.key, "hasuraid": user.id}, # status=HTTP_200_OK) # @csrf_exempt # @api_view(["GET"]) # def sample_api(request): # data = {'sample_data': 123} # return Response(data, status=HTTP_200_OK) class HeartRateApi(APIView): @csrf_exempt def get(self, request): try: heartrate = HeartRate.objects.all() hserializer = HeartRateSerializer(heartrate) heartrate_data = hserializer.data return Response(heartrate_data, status=status.HTTP_200_OK) except: return Response({'success': False, 'message': 'No details found for given date'}, status=status.HTTP_400_BAD_REQUEST) @csrf_exempt def post(self, request, user): request_data = request.data.copy() request_data['user'] = user singleroomaval = request_data.get('singleroomaval','') doubleroomaval = request_data.get('doubleroomaval','') if singleroomaval != '': if int(singleroomaval) > 5 or int(singleroomaval) < 0: return Response({"success": False,"message": "Availability must be between 0 and 5."}, status=status.HTTP_400_BAD_REQUEST) if doubleroomaval != '': if int(doubleroomaval) > 5 or int(doubleroomaval) < 0: return Response({"success": False,"message": "Availability must be between 0 and 5."}, status=status.HTTP_400_BAD_REQUEST) try: booking = Booking.objects.get(date=datebooking) bserializer = BookingSerializer(booking, data=request_data, partial=True) except: bserializer = BookingSerializer(data=request_data) if bserializer.is_valid(): bserializer.save() return Response(bserializer.data, status=status.HTTP_200_OK) return Response(bserializer.errors, status=status.HTTP_400_BAD_REQUEST) class NutrientsApi(APIView): @csrf_exempt def get(self, request): try: nutrients = Nutrient.objects.all() nserializer = NutrientsSerializer(nutrients) 
nutrient_data = nserializer.data return Response(nutrient_data, status=status.HTTP_200_OK) except: return Response({'success': False, 'message': 'No details found for given date'}, status=status.HTTP_400_BAD_REQUEST) @csrf_exempt def post(self, request): request_data = request.data.copy() request_data["user"] = request.user.pk mealval = request_data.get('meal') data = { "query":mealval, "timezone": "US/Eastern" } result = requests.post('https://trackapi.nutritionix.com/v2/natural/nutrients', data, headers={"x-app-id":"94f5edb6","x-app-key":"8bb3ae712275e9810ceec3b583e2727d"}) calories = 0 fat = 0 sugar = 0 protein = 0 carbs = 0 vita = 0 vitb = 0 vitc = 0 vitd = 0 vite = 0 foodlist = "" for fooditem in result.json()["foods"]: foodlist += fooditem["food_name"]+"; " calories+=fooditem["nf_calories"] fat+=fooditem["nf_total_fat"] sugar+=fooditem["nf_sugars"] protein+=fooditem["nf_protein"] carbs+=fooditem["nf_total_carbohydrate"] nutlist = fooditem["full_nutrients"] vita+=nutlist[22]["value"]+nutlist[24]["value"] vitb+=nutlist[38]["value"]+nutlist[40]["value"] vitc+=nutlist[33]["value"] vitd+=nutlist[29]["value"] vite+=nutlist[27]["value"] foodrecord = Foodrecord(user=request.user,search_query=mealval,calories=calories,fat=fat,sugars=sugar,protein=protein,carbohydrates=carbs,vitamina=vita,vitaminbcomplex=vitb,vitaminc=vitc,vitamind=vitd,vitamine=vite) foodrecord.save() for fooditem in result.json()["foods"]: foodlistobj = Foodlist(food_record=foodrecord,food_item=fooditem["food_name"]) foodlistobj.save() response = { "foodlist":foodlist, "calories":calories, "fat":fat, "sugars":sugar, "protein":protein, "carbohydrates":carbs, "vitamina":vita, "vitaminbcomplex":vitb, "vitaminc":vitc, "vitamind":vitd, "vitamine":vite } # nserializer = NutrientsSerializer(data=request.data) # if nserializer.is_valid(): # nserializer.save() return Response(response, status=status.HTTP_200_OK) # return Response(nserializer.errors, status=status.HTTP_400_BAD_REQUEST) class SelfdiaryApi(APIView): def post(self, request): request_data = request.data.copy() request_data["user"] = request.user.pk sserializer = SelfcarediarySerializer(data=request_data) if sserializer.is_valid(): sserializer.save() return Response(sserializer.data, status=status.HTTP_200_OK) return Response(sserializer.errors, status=status.HTTP_400_BAD_REQUEST) def get(self, request): try: selfdiary = Selfcarediary.objects.filter(user=request.user) resplist = [] for qset in selfdiary: resplist.append({"diary":qset.diary,"date":qset.date}) return Response({"data":resplist}, status=status.HTTP_200_OK) except: return Response({"success": False}, status=status.HTTP_400_BAD_REQUEST)
python
import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D class HelicalGenerator(): def __init__(self, start_pos, des_pos, # total_time, dt, z_max=0.01, start_vel=[0,0,0], des_vel=[0,0,0], m=1): # self.theta = 0 self.x = 0 self.y = 0 self.z = 0 # self.dt = dt # self.z_max = z_max # self.r = self.theta # self.total_time = total_time self.x1 = start_pos[0] self.y1 = start_pos[1] self.z1 = start_pos[2] self.x2 = des_pos[0] self.y2 = des_pos[1] self.z2 = des_pos[2] self.start_x_vel = start_vel[0] self.start_y_vel = start_vel[1] self.start_z_vel = start_vel[2] self.des_x_vel = des_vel[0] self.des_y_vel = des_vel[1] self.des_z_vel = des_vel[2] self.d = np.sqrt((self.x1 - self.x2)**2 + (self.y1 - self.y2)**2) self.t0 = np.tan((self.y2 - self.y1)/(self.x1 - self.x2)) self.rev = 1 self.m = m def helical_traj(self, t): # self.theta = t # self.r = self.theta/30 # self.x = 1.25 * self.r*np.cos(self.theta) # self.y = 1.25 * self.r*np.sin(self.theta) # self.z = 0.2 + self.z_max*self.theta self.x = self.x1 + self.m * t * self.d * np.cos(2 * np.pi * self.rev * t + self.t0) self.y = self.y1 + self.m * t * self.d * np.sin(2 * np.pi * self.rev * t + self.t0) self.z = self.z1 + t * (self.z2 - self.z1) def calculate_position(self, c, t): self.helical_traj(t) if c == 0: return self.x if c == 1: return self.y if c == 2: return self.z def calculate_velocity(self, c, t): if c == 0: return (self.m * t*self.d * -np.sin(2*np.pi*self.rev*t+self.t0)*(2*np.pi*self.rev)) + \ (np.cos(2*np.pi*self.rev*t+self.t0) * self.m * self.d) if c == 1: return (self.m * t*self.d * np.cos(2*np.pi*self.rev*t+self.t0)*(2*np.pi*self.rev)) + \ (np.sin(2*np.pi*self.rev*t+self.t0) * self.m * self.d) if c == 2: return self.z2 - self.z1 # def helical_getVel(self): # self.x = self.r*np.cos(self.theta) # self.y = self.r*np.sin(self.theta) # self.z = self.z_max*self.theta # def helical_getTraj(self, t): # theta = np.radians(np.linspace(180, 180*2.5, int(self.total_time/self.dt))) # return self.helical_traj(theta[t]) if __name__ == "__main__": x_2 = [] y_2 = [] z_2 = [] x_v = [] y_v = [] z_v = [] # hell = TrajectoryGenerator() # ax = plt.axes(projection='3d') # theta = np.radians(np.linspace(180,180*2.5,1000)) # for xx in theta: # hell.helical_traj(xx) # x_2.append(hell.x) # y_2.append(hell.y) # z_2.append(hell.z) # hell = HelicalGenerator(1, 0.001) # ax = plt.axes(projection='3d') # for xx in np.arange(1000): # hell.helical_getTraj(xx) # x_2.append(hell.x) # y_2.append(hell.y) # z_2.append(hell.z) import os print(os.getcwd()) import sys sys.path.append("../") sys.path.append("./ConcentricTubeRobot/") from CurvatureController import UzController from CTR_model import CTRobotModel, plot_3D no_of_tubes = 3 # ONLY WORKS FOR 3 TUBES for now initial_q = [-0.2858, -0.2025, -0.0945, 0, 0, 0] tubes_length = 1e-3 * np.array([431, 332, 174]) # length of tubes curve_length = 1e-3 * np.array([103, 113, 134]) # length of the curved part of tubes Uzdt = 0.1 # physical parameters E = np.array([ 6.4359738368e+10, 5.2548578304e+10, 4.7163091968e+10]) # E stiffness J = 1.0e-11 * np.array([0.0120, 0.0653, 0.1686]) # J second moment of inertia I = 1.0e-12 * np.array([0.0601, 0.3267, 0.8432]) # I inertia G = np.array([2.5091302912e+10, 2.1467424256e+10, 2.9788923392e+10] ) # G torsion constant Ux = np.array([21.3, 13.108, 3.5]) # constant U curvature vectors for each tubes Uy = np.array([0, 0, 0]) ctr = CTRobotModel(no_of_tubes, tubes_length, curve_length, initial_q, E, J, I, G, Ux, Uy) ctr_model = lambda q,uz:ctr.moving_CTR(q,uz) 
model = lambda q,uz:UzController(q,uz, dt=Uzdt, model=ctr_model).Uz_controlled_model() ax = plt.axes(projection='3d') a_ans = (2*np.pi)/4 start_pos = [0, 0, 0.05] q_start = np.array([0.0101, 0.0101, 0.0101, -a_ans, -a_ans, -a_ans]) # a_ans, a_ans, a_ans uz_0 = np.array([0.0, 0.0, 0.0]) (r1,r2,r3,Uz) = model(q_start, uz_0) plot_3D(ax, r1, r2, r3) start_pos = r1[-1] print(start_pos) des_pos = [0.145, -0.145, 0.145] hell = HelicalGenerator(start_pos, des_pos, m=0.3) # ax = plt.axes(projection='3d') for xx in np.linspace(0,1,100): hell.helical_traj(xx) x_2.append(hell.x) y_2.append(hell.y) z_2.append(hell.z) x_v.append(hell.calculate_velocity(0, xx)) y_v.append(hell.calculate_velocity(1, xx)) z_v.append(hell.calculate_velocity(2, xx)) ax.plot3D(x_2, y_2, z_2) ax.scatter(x_2[-1], y_2[-1], z_2[-1], label='({:03f},{:03f},{:03f})'.format(x_2[-1], y_2[-1], z_2[-1])) ax.legend() ax.set_xlabel('x') ax.set_ylabel('y') ax.set_zlabel('z ') plt.subplots(1) tt = np.arange(0, 1, 0.01) plt.plot(tt, x_v, label='x') plt.plot(tt, y_v, label='y') plt.plot(tt, z_v, label='z') plt.title('xyz velocity') plt.legend() plt.show()
python
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import math import numpy as np import paddle import paddle.nn as nn import paddle.nn.functional as F __all__ = ['ProtoTypical', 'AMSoftmaxLoss', 'CMSoftmax'] class AMSoftmaxLoss(nn.Layer): """Additive margin softmax loss. Additive margin softmax loss is usefully for training neural networks for speaker recognition/verification. Notes: The loss itself contains parameters that need to pass to optimizer for gradient descends. References: Wang, Feng, et al. “Additive Margin Softmax for Face Verification.” IEEE Signal Processing Letters, vol. 25, no. 7, 2018, pp. 926–930. """ def __init__(self, feature_dim: int, n_classes: int, eps: float = 1e-5, margin: float = 0.3, scale: float = 30.0): super(AMSoftmaxLoss, self).__init__() self.w = paddle.create_parameter((feature_dim, n_classes), 'float32') self.eps = eps self.scale = scale self.margin = margin self.nll_loss = nn.NLLLoss() self.n_classes = n_classes def forward(self, logits, label): logits = F.normalize(logits, p=2, axis=1, epsilon=self.eps) wn = F.normalize(self.w, p=2, axis=0, epsilon=self.eps) cosine = paddle.matmul(logits, wn) y = paddle.zeros((logits.shape[0], self.n_classes)) for i in range(logits.shape[0]): y[i, label[i]] = self.margin pred = F.log_softmax((cosine - y) * self.scale, -1) return self.nll_loss(pred, label), pred class ProtoTypical(nn.Layer): """Proto-typical loss as described in [1]. Reference: [1] Chung, Joon Son, et al. “In Defence of Metric Learning for Speaker Recognition.” Interspeech 2020, 2020, pp. 2977–2981. 
""" def __init__(self, s=20.0, eps=1e-8): super(ProtoTypical, self).__init__() self.nll_loss = nn.NLLLoss() self.eps = eps self.s = s def forward(self, logits): assert logits.ndim == 3, ( f'the input logits must be a ' + f'3d tensor of shape [n_spk,n_uttns,emb_dim],' + f'but received logits.ndim = {logits.ndim}') import pdb pdb.set_trace() logits = F.normalize(logits, p=2, axis=-1, epsilon=self.eps) proto = paddle.mean(logits[:, 1:, :], axis=1, keepdim=False).transpose( (1, 0)) # [emb_dim, n_spk] query = logits[:, 0, :] # [n_spk, emb_dim] similarity = paddle.matmul(query, proto) * self.s #[n_spk,n_spk] label = paddle.arange(0, similarity.shape[0]) log_sim = F.log_softmax(similarity, -1) return self.nll_loss(log_sim, label), log_sim class AngularMargin(nn.Layer): def __init__(self, margin=0.0, scale=1.0): super(AngularMargin, self).__init__() self.margin = margin self.scale = scale def forward(self, outputs, targets): outputs = outputs - self.margin * targets return self.scale * outputs class LogSoftmaxWrapper(nn.Layer): def __init__(self, loss_fn): super(LogSoftmaxWrapper, self).__init__() self.loss_fn = loss_fn self.criterion = paddle.nn.KLDivLoss(reduction="sum") def forward(self, outputs, targets, length=None): targets = F.one_hot(targets, outputs.shape[1]) try: predictions = self.loss_fn(outputs, targets) except TypeError: predictions = self.loss_fn(outputs) predictions = F.log_softmax(predictions, axis=1) loss = self.criterion(predictions, targets) / targets.sum() return loss class AdditiveAngularMargin(AngularMargin): def __init__(self, margin=0.0, scale=1.0, feature_dim=256, n_classes=1000, easy_margin=False): super(AdditiveAngularMargin, self).__init__(margin, scale) self.easy_margin = easy_margin self.w = paddle.create_parameter((feature_dim, n_classes), 'float32') self.cos_m = math.cos(self.margin) self.sin_m = math.sin(self.margin) self.th = math.cos(math.pi - self.margin) self.mm = math.sin(math.pi - self.margin) * self.margin self.nll_loss = nn.NLLLoss() self.n_classes = n_classes def forward(self, logits, targets): # logits = self.drop(logits) logits = F.normalize(logits, p=2, axis=1, epsilon=1e-8) wn = F.normalize(self.w, p=2, axis=0, epsilon=1e-8) cosine = logits @ wn #cosine = outputs.astype('float32') sine = paddle.sqrt(1.0 - paddle.square(cosine)) phi = cosine * self.cos_m - sine * self.sin_m # cos(theta + m) if self.easy_margin: phi = paddle.where(cosine > 0, phi, cosine) else: phi = paddle.where(cosine > self.th, phi, cosine - self.mm) target_one_hot = F.one_hot(targets, self.n_classes) outputs = (target_one_hot * phi) + ((1.0 - target_one_hot) * cosine) outputs = self.scale * outputs pred = F.log_softmax(outputs, axis=-1) return self.nll_loss(pred, targets), pred class CMSoftmax(AngularMargin): def __init__(self, margin=0.0, margin2=0.0, scale=1.0, feature_dim=256, n_classes=1000, easy_margin=False): super(CMSoftmax, self).__init__(margin, scale) self.easy_margin = easy_margin self.w = paddle.create_parameter((feature_dim, n_classes), 'float32') self.cos_m = math.cos(self.margin) self.sin_m = math.sin(self.margin) self.th = math.cos(math.pi - self.margin) self.mm = math.sin(math.pi - self.margin) * self.margin self.nll_loss = nn.NLLLoss() self.n_classes = n_classes self.margin2 = margin2 def forward(self, logits, targets): logits = F.normalize(logits, p=2, axis=1, epsilon=1e-8) wn = F.normalize(self.w, p=2, axis=0, epsilon=1e-8) cosine = logits @ wn sine = paddle.sqrt(1.0 - paddle.square(cosine)) phi = cosine * self.cos_m - sine * self.sin_m # cos(theta + m) if 
self.easy_margin: phi = paddle.where(cosine > 0, phi, cosine) else: phi = paddle.where(cosine > self.th, phi, cosine - self.mm) target_one_hot = F.one_hot(targets, self.n_classes) outputs = (target_one_hot * phi) + ( (1.0 - target_one_hot) * cosine) - target_one_hot * self.margin2 outputs = self.scale * outputs pred = F.log_softmax(outputs, axis=-1) return self.nll_loss(pred, targets), pred
python
from enum import Enum

from pydantic import BaseModel


class DeleteBookResponseStatus(Enum):
    """Status codes for deleting a book."""

    success = "book deleted"
    borrowed = "book still borrowed"
    fail = "book not deleted"


class DeleteBookResponseModel(BaseModel):
    """Response body for the delete-book endpoint."""

    status: DeleteBookResponseStatus
python
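A short usage sketch of the response model above (the response object is a hypothetical example; only the two classes come from the snippet):

response = DeleteBookResponseModel(status=DeleteBookResponseStatus.success)
print(response.status)        # DeleteBookResponseStatus.success
print(response.status.value)  # "book deleted"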
import string

"""
- Assignment for the Logic for Computation course.
- Authors: Paulo Henrique Diniz de Lima Alencar, Yan Rodrigues and Alysson Lucas Pinheiro.
- Professor: Alexandre Arruda.
"""

# Alphabet
atoms = list(string.ascii_lowercase)
operatores = ["#", ">", "&", "-"]
delimiters = ["(", ")"]


# Removing blank spaces
def format(formula: str) -> str:
    return formula.replace(" ", "")


# Looking for minor issues
def lexer_analyzer(formula: str) -> tuple:
    open_p = close_p = 0
    if len(formula) == 1 and formula[0] in operatores:
        return False, False
    elif len(formula) == 1 and formula[0] in delimiters:
        return False, False
    tokens = []
    paranteses_state = True
    for i in formula:
        if i == "(":
            open_p += 1
        if i == ")":
            close_p += 1
        tokens.append(i)
    if open_p != close_p:
        paranteses_state = False
        return False, paranteses_state
    flag = False  # flag tracks whether there is at least one atom in the formula
    for token in tokens:
        if token in atoms:
            flag = True
        if token not in atoms and token not in operatores and token not in delimiters:
            return False, paranteses_state
    return flag, paranteses_state


# Check major issues
def verify(curr: str, next: str, prev="-") -> bool:
    if curr == "-":
        if prev in atoms:
            return False
        if not (next in atoms or next == "-" or next == "("):
            return False
    elif curr == "(":
        if not (next in atoms or next == "(" or next == "-"):
            return False
    elif curr in atoms:
        if not (next in operatores or next == ")"):
            return False
    elif curr in operatores:
        if not (next in atoms or next == "(" or next == "-"):
            return False
    else:
        if not (next == ")" or next in operatores):
            return False
    return True


def semantic_analyzer(formula: str) -> bool:
    formula = format(formula)
    response, paranteses_state = lexer_analyzer(formula)
    state = True
    if response:
        if formula[-1] in operatores:  # an operator in the last position is invalid
            return False
        if paranteses_state == False:
            if formula[0] == "(":
                if formula[-1] != ")":
                    return False
            else:
                if formula[-1] == ")":
                    return False
        for i in range(0, len(formula) - 1):
            if i == 0:
                state = verify(formula[i], formula[i + 1])
            else:
                state = verify(formula[i], formula[i + 1], formula[i - 1])
            if state == False:
                break
        return state
    else:
        return False


def is_formula(formula: str) -> bool:
    return semantic_analyzer(formula)
python
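A few example calls for the checker above; the expected results follow from the grammar encoded in verify, where "#", ">", "&" and "-" are the connectives the snippet defines:

print(is_formula("p & (q > r)"))  # True: atoms joined by binary connectives, balanced parentheses
print(is_formula("p & > q"))      # False: two connectives in a row
print(is_formula("(p"))           # False: unbalanced parentheses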
from typing import Tuple, Union import pygame from pygame_gui.core.colour_gradient import ColourGradient from pygame_gui.core.ui_font_dictionary import UIFontDictionary from pygame_gui.core.utility import render_white_text_alpha_black_bg, apply_colour_to_surface from pygame_gui.elements.text.html_parser import CharStyle class StyledChunk: """ Takes care of turning styling and some ordinary text into a rendered pygame Surface of the text in an appropriate style. :param font_size: The size of the font to use. :param font_name: The name of the font to use. :param chunk: The chunk of normal string text we are styling. :param style: The bold/italic/underline style of the text. :param colour: The colour or gradient of the text. :param bg_colour: The colour or gradient of the text background. :param is_link: True if the chunk is a link. :param link_href: The target of the link if it is one. :param link_style: The style for link text. :param position: Surface position of this chunk of text. :param font_dictionary: The UI's font dictionary where all loaded fonts are stored. """ def __init__(self, font_size: int, font_name: str, chunk: str, style: CharStyle, colour: Union[pygame.Color, ColourGradient], bg_colour: Union[pygame.Color, ColourGradient], is_link: bool, link_href: str, link_style: CharStyle, position: Tuple[int, int], font_dictionary: UIFontDictionary): self.style = style self.chunk = chunk self.font_size = font_size self.font_name = font_name self.is_link = is_link self.link_href = link_href self.link_style = link_style self.font = font_dictionary.find_font(font_size, font_name, self.style.bold, self.style.italic) if self.is_link: self.normal_colour = self.link_style['link_text'] self.hover_colour = self.link_style['link_hover'] self.selected_colour = self.link_style['link_selected'] self.link_normal_underline = self.link_style['link_normal_underline'] self.link_hover_underline = self.link_style['link_hover_underline'] else: self.normal_colour = colour self.hover_colour = None self.selected_colour = None self.link_normal_underline = False self.link_hover_underline = False self.colour = self.normal_colour self.bg_colour = bg_colour self.position = position self.is_hovered = False self.is_selected = False if self.style.underline or (self.is_hovered and self.link_hover_underline) or \ (self.link_normal_underline and not self.is_hovered): self.font.set_underline(True) if len(self.chunk) > 0: if not isinstance(self.colour, ColourGradient): if isinstance(self.bg_colour, ColourGradient) or self.bg_colour.a != 255: self.rendered_chunk = render_white_text_alpha_black_bg(self.font, self.chunk) apply_colour_to_surface(self.colour, self.rendered_chunk) else: self.rendered_chunk = self.font.render(self.chunk, True, self.colour, self.bg_colour).convert_alpha() else: self.rendered_chunk = render_white_text_alpha_black_bg(self.font, self.chunk) self.colour.apply_gradient_to_surface(self.rendered_chunk) else: self.rendered_chunk = pygame.surface.Surface((0, 0), flags=pygame.SRCALPHA, depth=32) metrics = self.font.metrics(self.chunk) self.ascent = self.font.get_ascent() self.width = self.font.size(self.chunk)[0] self.height = self.font.size(self.chunk)[1] self.advance = 0 for i in range(len(self.chunk)): if len(metrics[i]) == 5: self.advance += metrics[i][4] self.rect = pygame.Rect(self.position, (self.width, self.height)) self.metrics_changed_after_redraw = False self.unset_underline_style() def unset_underline_style(self): """ Un-sets the underline style. 
This is a function we have to call on our loaded font before rendering. """ self.font.set_underline(False) def redraw(self): """ Renders the 'chunk' text to the 'rendered_chunk' surface. """ if self.style.underline or (self.is_hovered and self.link_hover_underline) or \ (self.link_normal_underline and not self.is_hovered): self.font.set_underline(True) if len(self.chunk) > 0: if isinstance(self.colour, ColourGradient): self.rendered_chunk = render_white_text_alpha_black_bg(self.font, self.chunk) self.colour.apply_gradient_to_surface(self.rendered_chunk) else: if isinstance(self.bg_colour, ColourGradient) or self.bg_colour.a != 255: self.rendered_chunk = render_white_text_alpha_black_bg(self.font, self.chunk) apply_colour_to_surface(self.colour, self.rendered_chunk) else: self.rendered_chunk = self.font.render(self.chunk, True, self.colour, self.bg_colour).convert_alpha() else: self.rendered_chunk = pygame.surface.Surface((0, 0), flags=pygame.SRCALPHA, depth=32) self.font.set_underline(False) new_metrics = self.font.metrics(self.chunk) new_ascent = self.font.get_ascent() new_width = self.font.size(self.chunk)[0] new_height = self.font.size(self.chunk)[1] new_advance = sum(new_metrics[i][4] for i in range(len(self.chunk)) if len(new_metrics[i]) == 5) if (new_ascent == self.ascent and new_width == self.width and new_height == self.height and new_advance == self.advance): self.metrics_changed_after_redraw = False else: self.metrics_changed_after_redraw = True self.ascent = new_ascent self.width = new_width self.height = new_height self.advance = new_advance self.rect = pygame.Rect(self.position, (self.width, self.height)) def on_hovered(self): """ Handles hovering over this text chunk with the mouse. Used for links. """ if not self.is_selected: self.colour = self.hover_colour self.is_hovered = True self.redraw() def on_unhovered(self): """ Handles hovering over this text chunk with the mouse. Used for links. """ if not self.is_selected: self.colour = self.normal_colour self.is_hovered = False self.redraw() def on_selected(self): """ Handles clicking on this text chunk with the mouse. Used for links. TODO: Should this be set_active/set_inactive? To be internally consistent with buttons. """ self.colour = self.selected_colour self.is_selected = True self.redraw() def on_unselected(self): """ Handles clicking on this text chunk with the mouse. Used for links. """ self.colour = self.normal_colour self.is_selected = False self.redraw()
python
import os import sys sys.path.append('..') sys.path.append('.') import mitogen VERSION = '%s.%s.%s' % mitogen.__version__ author = u'Network Genomics' copyright = u'2021, the Mitogen authors' exclude_patterns = ['_build', '.venv'] extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinxcontrib.programoutput', 'domainrefs'] # get rid of version from <title>, it messes with piwik html_title = 'Mitogen Documentation' html_show_copyright = False html_show_sourcelink = False html_show_sphinx = False html_sidebars = {'**': ['globaltoc.html', 'github.html']} html_additional_pages = {'ansible': 'ansible.html'} html_static_path = ['_static'] html_theme = 'alabaster' html_theme_options = { 'font_family': "Georgia, serif", 'head_font_family': "Georgia, serif", 'fixed_sidebar': True, 'show_powered_by': False, 'pink_2': 'fffafaf', 'pink_1': '#fff0f0', } htmlhelp_basename = 'mitogendoc' intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} language = None master_doc = 'toc' project = u'Mitogen' pygments_style = 'sphinx' release = VERSION source_suffix = '.rst' templates_path = ['_templates'] todo_include_todos = False version = VERSION domainrefs = { 'gh:commit': { 'text': '%s', 'url': 'https://github.com/dw/mitogen/commit/%s', }, 'gh:issue': { 'text': '#%s', 'url': 'https://github.com/dw/mitogen/issues/%s', }, 'gh:pull': { 'text': '#%s', 'url': 'https://github.com/dw/mitogen/pull/%s', }, 'ans:mod': { 'text': '%s module', 'url': 'https://docs.ansible.com/ansible/latest/modules/%s_module.html', }, 'ans:conn': { 'text': '%s connection plug-in', 'url': 'https://docs.ansible.com/ansible/latest/plugins/connection/%s.html', }, 'freebsd:man2': { 'text': '%s(2)', 'url': 'https://www.freebsd.org/cgi/man.cgi?query=%s', }, 'linux:man1': { 'text': '%s(1)', 'url': 'http://man7.org/linux/man-pages/man1/%s.1.html', }, 'linux:man2': { 'text': '%s(2)', 'url': 'http://man7.org/linux/man-pages/man2/%s.2.html', }, 'linux:man3': { 'text': '%s(3)', 'url': 'http://man7.org/linux/man-pages/man3/%s.3.html', }, 'linux:man7': { 'text': '%s(7)', 'url': 'http://man7.org/linux/man-pages/man7/%s.7.html', }, } rst_epilog = """ .. |mitogen_version| replace:: %(VERSION)s .. |mitogen_url| replace:: `mitogen-%(VERSION)s.tar.gz <https://networkgenomics.com/try/mitogen-%(VERSION)s.tar.gz>`__ """ % locals()
python
from ..definitions.method import MethodDefinition
from ..definitions.outputparameter import OutputParameterDefinition
from .method import ServiceMethod


class ServiceOutputParameter(object):

    def __call__(self, name, convertType=None, many=False, optional=False, page=False, per_page=None):
        def decorator(func):
            # Fall back to the function name when no explicit name is given
            if name:
                _name = name
            else:
                _name = func.__name__
            if not hasattr(func, ServiceMethod.PARAM):
                methodDefinition = MethodDefinition(func)
                setattr(func, ServiceMethod.PARAM, methodDefinition)
            else:
                methodDefinition = getattr(func, ServiceMethod.PARAM)
            parameter = OutputParameterDefinition(_name, convertType, many, optional, page, per_page)
            methodDefinition.outputs.append(parameter)
            return func
        return decorator

    # The parameters are just for intellisense
    def __init__(self, name='', convertType=None, many=False, optional=False, page=False, per_page=None):
        return
python
# -*- coding: utf-8 -*- import pdb import argparse import sys as sys import logging as logging import time as time import oneapi as oneapi import oneapi.models as models import oneapi.utils as mod_utils logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s') parser = argparse.ArgumentParser() parser.add_argument("-s", "--server", help="Address of the server (default=https://oneapi.infobip.com)") parser.add_argument("username", help="Login") parser.add_argument("password", help="Password") parser.add_argument("address", help="Destination address") parser.add_argument("-p", "--port", help="local port for delivery notification") parser.add_argument("-d", "--data_format", help="Type of data used in request, can be url or json (default=url)") parser.add_argument("-a", "--accept", help="Type of data used for response, can be url or json (default=url)") parser.add_argument("-l", "--is_legacy", help="Support pre 2013 OMA specifications for URI", action='store_true') args = parser.parse_args() data_format = "url" if args.data_format: if (args.data_format == "json"): data_format = "json" port = 7090 if args.port: port = int(args.port) header = None if 'accept' in locals(): if args.accept: header = {"accept" : args.accept} # example:initialize-sms-client sms_client = oneapi.SmsClient(args.username, args.password, args.server) # ---------------------------------------------------------------------------------------------------- # example:prepare-message-without-notify-url sms = models.SMSRequest() sms.address = args.address sms.notify_url = 'http://{}:{}'.format('localhost', port) sms.callback_data = 'Any string' sms.filter_criteria = "py_test_"+mod_utils.get_random_alphanumeric_string() # ---------------------------------------------------------------------------------------------------- # example:send-message result = sms_client.subscribe_messages_sent_notification(sms, header, data_format, args.is_legacy) # store client correlator because we can later query for the delivery status with it: resource_url = result.resource_url # ---------------------------------------------------------------------------------------------------- if not result.is_success(): print 'Error sending message:', result.exception sys.exit(1) print 'Is success = ', result.is_success() print 'Resource URL = ', result.resource_url server = dummyserver.DummyWebWerver(port) server.start_wait_and_shutdown(15) requests = server.get_requests() if not requests: print 'No requests received' sys.exit(1) for method, path, http_body in requests: inbound_notif = oneapi.SmsClient.unserialize_inbound_message(http_body) print inbound_notif #Few seconds later we can delete the subscription time.sleep(10) sms_client = oneapi.SmsClient(args.username, args.password, args.server) sms_client.delete_messages_sent_subscription(resource_url) # ----------------------------------------------------------------------------------------------------
python
from ixnetwork_restpy.base import Base from ixnetwork_restpy.files import Files class IPv6_Encapsulation_Header(Base): __slots__ = () _SDM_NAME = 'ipv6Encapsulation' _SDM_ATT_MAP = { 'Security Paramaters Index': 'ipv6Encapsulation.header.spi', 'Sequence Number': 'ipv6Encapsulation.header.sequenceNumber', } def __init__(self, parent): super(IPv6_Encapsulation_Header, self).__init__(parent) @property def Security_Paramaters_Index(self): from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Security Paramaters Index'])) @property def Sequence_Number(self): from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Sequence Number'])) def add(self): return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
python
# -*- coding: utf-8 -*- """spear2sc.spear_utils: Utitlity methods to read SPEAR files""" def process_line(line): """ (list of str) -> list of list of float Parses line, a line of time, frequency and amplitude data output by SPEAR in the 'text - partials' format. Returns a list of timepoints. Each timepoint is a list of floats in the form: [<time in s>, <frequency in Hz>, <amplitude 0.0-1.0>] >>> process_line('0.145 443.309723 0.112565 0.1575 443.597656 0.124895') [[0.145, 443.309723, 0.112565], [0.1575, 443.597656, 0.124895]] """ partial = [] split_line = line.strip().split() while len(split_line) > 0: time_point = [] for i in range(3): item = float(split_line.pop(0)) time_point.append(item) partial.append(time_point) return pad_duration(partial) index_time = 0 index_freq = 1 index_amp = 2 def get_durations(partial): """Converts partial's absolute time offsets into durations Note, that the size of duration's list is one element smaller than partial's entry count. :param partial: Sound partial, [<time in s>, <frequency in Hz>, <amplitude 0.0-1.0>] :type partial: list :return: A list of partial's duration, e.g. partial's time envelope :rtype: list """ res = [] for x in range(1, len(partial)): res.append((partial[x][index_time] - partial[x - 1][index_time])) return res def pad_duration(partial): """Pads the envelope of the partial if it has a time offset Auxiliary node added to the envelope to smooth the transition. Coefficients are empirical :param partial: :type partial: list :return: :rtype: list """ offset = partial[0][index_time] if offset > 0: next_node = partial[1] pad_node = [[0, 0, 0], [offset * 0.99, 0, 0], [offset * 0.999, next_node[index_freq] * 0.9, next_node[index_amp] * 0.9]] padded_partial = pad_node + partial return padded_partial return partial
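# Illustrative usage sketch (not part of the original module): feeding the doctest line
# from process_line() through the helpers above. Because this partial starts at 0.145 s,
# pad_duration() prepends three ramp-in nodes, giving five envelope points and four
# segment durations.
example = process_line('0.145 443.309723 0.112565 0.1575 443.597656 0.124895')
print(len(example))            # 5 envelope points after padding
print(get_durations(example))  # four inter-point durations in seconds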
python
import threading
import os
import json
import time

from rsa import sign

from server import server_start
from client import send
from var import my_id

if os.name == "nt":
    os.system("cls")
else:
    os.system("clear")

if os.sys.argv[1] == "server":
    # Pass the function itself; calling server_start() here would block before the
    # thread is even created.
    server = threading.Thread(target=server_start)
    server.start()
else:
    while True:
        participate = input("who do you want to send it to?")
        action = input("what do you want to do?")
        if action == "hello":
            msg = ['!HELLO!']
        elif action == "ip":
            ask_ip = input("who's ip?")
            msg = ['?IP?', f'{ask_ip}']
        elif action == "msg":
            msg = f"[{input('msg:')}]"
        print(send(1, f'["ID", "{my_id}"]', eval(f"['{int(time.time())}','{my_id}', ['!CAST!', {participate.split(' ')}, ['{int(time.time())}','{my_id}', {msg}]]]")))
python
from allennlp_dataframe_mapper.transforms.base import RegistrableTransform # NOQA from allennlp_dataframe_mapper.transforms.hash_name import HashName # NOQA from allennlp_dataframe_mapper.transforms.preprocessing import ( # NOQA FlattenTransformer, LabelEncoder, Logarithmer, MinMaxScaler, StandardScaler, )
python
## Classes for future implementation

def stats_player(name_player, data_df):
    # Boolean mask selecting only the rows belonging to the requested player
    condition = data_df["Player"] == name_player
    df_single_player = data_df[condition]
    return df_single_player


class Player():
    def __init__(self, three_shot, two_shot, one_shot):
        self.three_shot = three_shot
        self.two_shot = two_shot
        self.one_shot = one_shot
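# Illustrative usage sketch (not part of the original module): a made-up DataFrame just
# to exercise stats_player() and Player; the "Points" column is invented for the demo.
import pandas as pd

demo = pd.DataFrame({"Player": ["Ana", "Bea", "Ana"], "Points": [3, 2, 1]})
print(stats_player("Ana", demo))                               # the two rows for "Ana"
print(Player(three_shot=1, two_shot=4, one_shot=2).two_shot)   # 4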
python
# Calculating the square root of a number.
n = 81 ** (1/2)
print(f'The square root of 81 is {n}')
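# Illustrative note (not part of the original snippet): raising to the power 1/2 is
# equivalent to math.sqrt for non-negative numbers.
import math
print(81 ** 0.5, math.sqrt(81))  # 9.0 9.0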
python
import cmsisdsp as dsp import numpy as np import cmsisdsp.fixedpoint as f import cmsisdsp.mfcc as mfcc import scipy.signal as sig from mfccdebugdata import * from cmsisdsp.datatype import Q31 import cmsisdsp.datatype as dt mfccq31=dsp.arm_mfcc_instance_q31() sample_rate = 16000 FFTSize = 256 numOfDctOutputs = 13 freq_min = 64 freq_high = sample_rate / 2 numOfMelFilters = 20 windowQ31 = dt.convert(sig.hamming(FFTSize, sym=False),Q31) filtLen,filtPos,packedFiltersQ31 = mfcc.melFilterMatrix(Q31,freq_min, freq_high, numOfMelFilters,sample_rate,FFTSize) dctMatrixFiltersQ31 = mfcc.dctMatrix(Q31,numOfDctOutputs, numOfMelFilters) status=dsp.arm_mfcc_init_q31(mfccq31,FFTSize,numOfMelFilters,numOfDctOutputs, dctMatrixFiltersQ31, filtPos,filtLen,packedFiltersQ31,windowQ31) print("Init status = %d" % status) tmp=np.zeros(2*FFTSize,dtype=np.int32) debugQ31 = f.toQ31(debug) errorStatus,resQ31=dsp.arm_mfcc_q31(mfccq31,debugQ31,tmp) print("MFCC status = %d" % errorStatus) res=(1<<8)*f.Q31toF32(resQ31) print(res) print(ref) print("FFT Length = %d" % mfccq31.fftLen()) print("Nb MEL Filters = %d" % mfccq31.nbMelFilters()) print("Nb DCT Outputs = %d" % mfccq31.nbDctOutputs())
python
import os import pandas as pd import matplotlib.pyplot as plt def get_speedup(precision: str, df1: pd.DataFrame, df2: pd.DataFrame, sys: str, dev: str) -> list: speedup = [{} for x in range(2, 11)] d1: pd.DataFrame = df1.copy() d2: pd.DataFrame = df2.copy() d1 = d1[d1['precision'] == precision] d2 = d2[d2['precision'] == precision] dimensions = ['5000x38', '16063x280', '3602x5888', '8555x5177', '54675x1973'] dataset_tags = ['ALL-AML', 'Lung', 'TCGA', 'GTEX', 'ExpO'] k = [x for x in range(2, 11)] for i, d in enumerate(dimensions): res1 = d1[d1['dimension'] == d].sort_values(by='k') res2 = d2[d2['dimension'] == d].sort_values(by='k') t1 = res1['time'].tolist() t2 = res2['time'].tolist() if len(t1) != len(t2): print(f'ERORR: {sys} in {dev} with size of {d}') continue for j, t in enumerate(t1): speedup[j][dataset_tags[i]] = t2[j]/t1[j] return pd.DataFrame(speedup, index=k) if __name__ == '__main__': in_path: str = os.path.join('.', 'datawarehouse', 'system', 'system_times.csv') df = pd.read_csv(in_path, header=0) base = df[df['device']=='base_code'] fig_system=['lab', 'lab', 'devcloud', 'devcloud', 'devcloud_dual', 'lab_hybrid', \ 'devcloud_openmp', 'devcloud_openmp', 'lab_openmp', 'lab_openmp'] fig_dev=['cpu', 'igpu', 'cpu', 'igpu', 'dual_gpu', 'hybrid', 'cpu', 'gpu', 'cpu', 'gpu'] title = ['Intel Core i7-10700 (oneAPI)', 'Intel UHD 630 (oneAPI)', \ 'Intel i9-10920X (oneAPI)', 'Intel Iris Xe DG1 (oneAPI)', \ 'Dual (Intel Iris Xe DG1)', 'i7-10700 + UHD 630', 'Intel i9-10920X (OpenMP)', \ 'Intel Iris Xe DG1 (OpenMP)', 'Intel Core i7-10700 (OpenMP)', 'Intel UHD 630 (OpenMP)'] for i in range(len(fig_system)): sys = fig_system[i] base_sys = base[base['system'] == sys] dev = fig_dev[i] test = df[df['system']==sys] test = test[test['device']==dev] speedup = get_speedup('simple', test, base_sys, sys, dev) fig, ax = plt.subplots() speedup.plot( kind='bar', figsize=(10,10), color = ['#2196f3', '#ef553b', '#00cc96', '#636efa', '#ffa15a'], width=0.8, linewidth=10, ecolor='blue', ax = ax ) ax.legend(loc='upper center', ncol=2, prop={"size":25}) ax.grid(linestyle='-', color='#B0BEC5') ax.set_ylim(0,4) plt.title(title[i], loc='center', fontsize=40) plt.ylabel('Speedup', fontsize=30) ax.xaxis.label.set_size(30) ax.ticklabel_format(axis='y', style='sci', scilimits=(-3, 3), useOffset=False) ax.tick_params(axis='both', which='major', labelsize=25) ax.plot([-0.5, 8.5], [1, 1], 'black', linestyle='dashed', linewidth=3) # Linea de speedup 1 fig.savefig('speedup_'+sys+'_'+dev+'.png', format='png')
python
from typing import TypedDict, Optional class IMeasInfo(TypedDict): file_tag: str entity_tag: str metric_name: str time_offset_hrs_mins: str address: str aggregation_strategy: Optional[str] equation: Optional[str]
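# Illustrative usage sketch (not part of the original module): a hypothetical record
# matching the IMeasInfo shape above; every value here is invented for the demo.
sample: IMeasInfo = {
    "file_tag": "plant_a",
    "entity_tag": "pump_01",
    "metric_name": "flow_rate",
    "time_offset_hrs_mins": "+05:30",
    "address": "40001",
    "aggregation_strategy": "mean",
    "equation": None,
}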
python
import re import subprocess import sys import time import traceback import uuid from collections import namedtuple from PySide2.QtCore import (QObject, QRunnable, Qt, QThreadPool, QTimer, Signal, Slot) from PySide2.QtWidgets import (QApplication, QMainWindow, QPlainTextEdit, QProgressBar, QPushButton, QVBoxLayout, QWidget) # tag::parser[] def timestr_to_seconds(s): """ Convert a string in the format 00:00:00 into seconds. """ hours, minutes, seconds = s.split(":") hours = int(hours) * 3600 minutes = int(minutes) * 60 seconds = int(seconds) return hours + minutes + seconds total_re = re.compile("Total time: (\d\d:\d\d:\d\d)") elapsed_re = re.compile("Elapsed time: (\d\d:\d\d:\d\d)") def time_to_percent_parser(l): """ Extract the elepsed time value and the total time value, and use them to calculate a % complete. """ total_time = None elapsed_time = None output = "".join(l) # Turn into a single string. m = total_re.findall(output) if m: # Should only be one of these. total_time = timestr_to_seconds(m[0]) m = elapsed_re.findall(output) if m: # Get the last match (latest result) using -1 on the list. elapsed_time = timestr_to_seconds(m[-1]) # If we have both the latest, and the target, we can calculate %. if total_time and elapsed_time: return int(100 * elapsed_time / total_time) # end::parser[] class WorkerSignals(QObject): """ Defines the signals available from a running worker thread. Supported signals are: finished: No data result: str """ result = Signal(str) # Send back the output from the process as a string. progress = Signal(int) # Return an integer 0-100 showing the current progress. finished = Signal() class SubProcessWorker(QRunnable): """ ProcessWorker worker thread Inherits from QRunnable to handle worker thread setup, signals and wrap-up. :param command: command to execute with `subprocess`. """ def __init__(self, command, parser=None): super().__init__() # Store constructor arguments (re-used for processing). self.signals = WorkerSignals() # The command to be executed. self.command = command # The parser function to extract the progress information. self.parser = parser # tag::workerRun[] @Slot() def run(self): """ Initialize the runner function with passed args, kwargs. """ result = [] with subprocess.Popen( # <1> self.command, bufsize=1, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, # <2> universal_newlines=True, ) as proc: while proc.poll() is None: data = proc.stdout.readline() # <3> result.append(data) if self.parser: # <4> value = self.parser(result) if value: self.signals.progress.emit(value) output = "".join(result) self.signals.result.emit(output) # end::workerRun[] class MainWindow(QMainWindow): def __init__(self): super().__init__() layout = QVBoxLayout() self.text = QPlainTextEdit() layout.addWidget(self.text) self.progress = QProgressBar() self.progress.setRange(0, 100) self.progress.setValue(0) layout.addWidget(self.progress) btn_run = QPushButton("Execute") btn_run.clicked.connect(self.start) layout.addWidget(btn_run) w = QWidget() w.setLayout(layout) self.setCentralWidget(w) # Thread runner self.threadpool = QThreadPool() self.show() # tag::start[] def start(self): # Create a runner self.runner = SubProcessWorker( command="python dummy_script.py", parser=time_to_percent_parser ) self.runner.signals.result.connect(self.result) self.runner.signals.progress.connect(self.progress.setValue) self.threadpool.start(self.runner) # end::start[] def result(self, s): self.text.appendPlainText(s) app = QApplication(sys.argv) w = MainWindow() app.exec_()
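# Illustrative standalone check (not part of the original example): the parser works on
# the accumulated stdout lines; with a total of 00:01:40 (100 s) and a latest elapsed of
# 00:00:50 (50 s) it reports 50% complete.
demo_output = [
    "Total time: 00:01:40\n",
    "Elapsed time: 00:00:25\n",
    "Elapsed time: 00:00:50\n",
]
print(time_to_percent_parser(demo_output))  # 50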
python
import os from flask import Flask class Config: def __init__(self, app: Flask = None) -> None: self.app = None if app: self.init_app(app) def init_app(self, app: Flask) -> None: config = self.get_user_config() app.config.update(config) @staticmethod def get_user_config(): from flask import Config config = Config('/') config.from_object('alerta.settings') config.from_pyfile('/etc/alertad.conf', silent=True) config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True) config['DEBUG'] = get_config('DEBUG', default=True, type=bool, config=config) config['BASE_URL'] = get_config('BASE_URL', default='', type=str, config=config) config['USE_PROXYFIX'] = get_config('USE_PROXYFIX', default=False, type=bool, config=config) config['SECRET_KEY'] = get_config('SECRET_KEY', default='', type=str, config=config) database_url = ( # The following database settings are deprecated. os.environ.get('MONGO_URI', None) or os.environ.get('MONGODB_URI', None) or os.environ.get('MONGOHQ_URL', None) or os.environ.get('MONGOLAB_URI', None) ) # Use app config for DATABASE_URL if no env var from above override it config['DATABASE_URL'] = get_config('DATABASE_URL', default=database_url, type=str, config=config) config['DATABASE_NAME'] = get_config('DATABASE_NAME', default=None, type=str, config=config) config['AUTH_REQUIRED'] = get_config('AUTH_REQUIRED', default=None, type=bool, config=config) config['AUTH_PROVIDER'] = get_config('AUTH_PROVIDER', default=None, type=str, config=config) config['ADMIN_USERS'] = get_config('ADMIN_USERS', default=[], type=list, config=config) config['SIGNUP_ENABLED'] = get_config('SIGNUP_ENABLED', default=True, type=bool, config=config) config['CUSTOMER_VIEWS'] = get_config('CUSTOMER_VIEWS', default=False, type=bool, config=config) config['OAUTH2_CLIENT_ID'] = get_config('OAUTH2_CLIENT_ID', default=None, type=str, config=config) config['OAUTH2_CLIENT_SECRET'] = get_config('OAUTH2_CLIENT_SECRET', default=None, type=str, config=config) config['ALLOWED_EMAIL_DOMAINS'] = get_config('ALLOWED_EMAIL_DOMAINS', default=[], type=list, config=config) config['AZURE_TENANT'] = get_config('AZURE_TENANT', default=None, type=str, config=config) config['GITHUB_URL'] = get_config('GITHUB_URL', default=None, type=str, config=config) config['ALLOWED_GITHUB_ORGS'] = get_config('ALLOWED_GITHUB_ORGS', default=[], type=list, config=config) config['GITLAB_URL'] = get_config('GITLAB_URL', default=None, type=str, config=config) if 'ALLOWED_GITLAB_GROUPS' in os.environ: config['ALLOWED_OIDC_ROLES'] = get_config('ALLOWED_GITLAB_GROUPS', default=[], type=list, config=config) config['KEYCLOAK_URL'] = get_config('KEYCLOAK_URL', default=None, type=str, config=config) config['KEYCLOAK_REALM'] = get_config('KEYCLOAK_REALM', default=None, type=str, config=config) if 'ALLOWED_KEYCLOAK_ROLES' in os.environ: config['ALLOWED_OIDC_ROLES'] = get_config('ALLOWED_KEYCLOAK_ROLES', default=[], type=list, config=config) config['LDAP_BIND_PASSWORD'] = get_config('LDAP_BIND_PASSWORD', default=None, type=str, config=config) config['OIDC_ISSUER_URL'] = get_config('OIDC_ISSUER_URL', default=None, type=str, config=config) config['ALLOWED_OIDC_ROLES'] = get_config('ALLOWED_OIDC_ROLES', default=[], type=list, config=config) config['CORS_ORIGINS'] = get_config('CORS_ORIGINS', default=[], type=list, config=config) config['MAIL_FROM'] = get_config('MAIL_FROM', default=None, type=str, config=config) config['SMTP_PASSWORD'] = get_config('SMTP_PASSWORD', default=None, type=str, config=config) config['GOOGLE_TRACKING_ID'] = get_config('GOOGLE_TRACKING_ID', 
                                                   default=None, type=str, config=config)

        # housekeeping
        delete_expired_hrs = (
            os.environ.get('DEFAULT_EXPIRED_DELETE_HRS', None)
            or os.environ.get('HK_EXPIRED_DELETE_HRS', None)
        )
        # the env var is a string, so convert it before the hours -> seconds arithmetic
        delete_expired = int(delete_expired_hrs) * 60 * 60 if delete_expired_hrs else None
        config['DELETE_EXPIRED_AFTER'] = get_config('DELETE_EXPIRED_AFTER', default=delete_expired, type=int, config=config)

        delete_info_hrs = (
            os.environ.get('DEFAULT_INFO_DELETE_HRS', None)
            or os.environ.get('HK_INFO_DELETE_HRS', None)
        )
        delete_info = int(delete_info_hrs) * 60 * 60 if delete_info_hrs else None
        config['DELETE_INFO_AFTER'] = get_config('DELETE_INFO_AFTER', default=delete_info, type=int, config=config)

        # plugins
        config['PLUGINS'] = get_config('PLUGINS', default=[], type=list, config=config)

        # blackout plugin
        config['BLACKOUT_DURATION'] = get_config('BLACKOUT_DURATION', default=None, type=int, config=config)
        config['NOTIFICATION_BLACKOUT'] = get_config('NOTIFICATION_BLACKOUT', default=None, type=bool, config=config)
        config['BLACKOUT_ACCEPT'] = get_config('BLACKOUT_ACCEPT', default=[], type=list, config=config)

        # reject plugin
        config['ORIGIN_BLACKLIST'] = get_config('ORIGIN_BLACKLIST', default=[], type=list, config=config)
        config['ALLOWED_ENVIRONMENTS'] = get_config('ALLOWED_ENVIRONMENTS', default=[], type=list, config=config)

        # webhooks
        config['DEFAULT_ENVIRONMENT'] = get_config('DEFAULT_ENVIRONMENT', default=None, type=str, config=config)

        # Runtime config check
        if config['CUSTOMER_VIEWS'] and not config['AUTH_REQUIRED']:
            raise RuntimeError('Must enable authentication to use customer views')

        if config['CUSTOMER_VIEWS'] and not config['ADMIN_USERS']:
            raise RuntimeError('Customer views is enabled but there are no admin users')

        if config['DEFAULT_ENVIRONMENT'] not in config['ALLOWED_ENVIRONMENTS']:
            raise RuntimeError(f"Default environment \"{config['DEFAULT_ENVIRONMENT']}\" not in list of allowed environments")

        return config


def get_config(key, default=None, type=None, **kwargs):

    if key in os.environ:
        rv = os.environ[key]
        if type == bool:
            return rv.lower() in ['yes', 'on', 'true', 't', '1']
        elif type == list:
            return rv.split(',')
        elif type is not None:
            try:
                rv = type(rv)
            except ValueError:
                rv = default
        return rv

    try:
        rv = kwargs['config'].get(key, default)
    except KeyError:
        rv = default
    return rv
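# Illustrative usage sketch (not part of the original module): get_config() coerces
# environment-variable strings; lists are comma-split and booleans accept yes/on/true/t/1.
# The variable names below are invented for the demo.
import os

os.environ["DEMO_ORIGINS"] = "https://a.example,https://b.example"
os.environ["DEMO_FLAG"] = "on"
print(get_config("DEMO_ORIGINS", default=[], type=list, config={}))   # two origins
print(get_config("DEMO_FLAG", default=False, type=bool, config={}))   # True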
python
""" The sensors module contains the base definition for a generic sensor call and the implementation of all the specific sensors """ from __future__ import print_function from qds_sdk.qubole import Qubole from qds_sdk.resource import Resource from argparse import ArgumentParser import logging import json log = logging.getLogger("qds_sensors") class SensorCmdLine: @staticmethod def check(sensor_class, args): """ Method to call Sensor.check after parsing args from cmdline :param sensor_class: sensor class :param args: inline arguments :return: True or False """ parser = SensorCmdLine.parsers(sensor_class) parsed = parser.parse_args(args) return sensor_class.check(json.loads(parsed.data)) @staticmethod def parsers(sensor_class): argparser = ArgumentParser(prog=sensor_class.usage, description=sensor_class.description) subparsers = argparser.add_subparsers() #Check check = subparsers.add_parser("check", help="Check a Sensor") check.add_argument("-d", "--data", dest="data", required=True, help="String containing a valid json object") check.set_defaults(func=Sensor.check) return argparser class Sensor(Resource): """ qds_sdk.Sensor is the base Qubole sensor class. Different types of Qubole sensors can subclass this. """ @classmethod def check(cls, data): """ Method to call the sensors api with json payload :param data: valid json object :return: True or False """ conn = Qubole.agent() return conn.post(cls.rest_entity_path, data=data)['status'] class FileSensor(Sensor): rest_entity_path = "sensors/file_sensor" usage = ("qds.py filesensor check -d 'json string'") description = "File Sensor client for Qubole Data Services" class PartitionSensor(Sensor): rest_entity_path = "sensors/partition_sensor" usage = ("qds.py partitionsensor check -d 'json string'") description = "Hive Partition Sensor client for Qubole Data Services"
python
from types import SimpleNamespace from typing import Any, cast from unittest.mock import Mock import pytest from playbacker.track import Shared, SoundTrack, StreamBuilder from playbacker.tracks.file import FileSounds, FileTrack from tests.conftest import get_audiofile_mock, get_tempo @pytest.fixture def file_track(stream_builder: StreamBuilder): return FileTrack(shared=Shared(), stream_builder=stream_builder) def test_get_sound_none(file_track: FileTrack): file_track.sounds = FileSounds(None) assert file_track.get_sound() is None def test_get_sound_with_sound(file_track: FileTrack): mock, prop = get_audiofile_mock() file_track.sounds = FileSounds(mock) file_track.get_sound() prop.assert_called_once() @pytest.mark.parametrize( ("result", "expected_current_frame"), (("myval", 0), (None, 100)) ) def test_callback( file_track: FileTrack, monkeypatch: pytest.MonkeyPatch, result: Any, expected_current_frame: int, ): mock = Mock() mock.return_value = result monkeypatch.setattr(SoundTrack, "callback", mock) file_track.enabled = False assert file_track.callback(100) is result assert file_track.current_frame == expected_current_frame def prepare_for_getting_new_frame(file_track: FileTrack): file_track.shared.tempo = get_tempo(bpm=120) file_track.stream = cast(Any, SimpleNamespace(sample_rate=44100)) @pytest.mark.parametrize(("position", "expected"), ((0, 0), (-10, 0), (10, 55125))) def test_get_new_frame(file_track: FileTrack, position: int, expected: int): prepare_for_getting_new_frame(file_track) file_track.shared.position = position assert file_track.get_new_frame() == expected def test_resume(file_track: FileTrack): prepare_for_getting_new_frame(file_track) file_track.current_frame = 10 file_track.shared.position = 10 file_track.pause() file_track.resume() assert not file_track.paused assert file_track.current_frame == 55125 @pytest.mark.parametrize("has_sound", (True, False)) def test_start_with_sound( file_track: FileTrack, monkeypatch: pytest.MonkeyPatch, has_sound: bool ): monkeypatch.setattr(SoundTrack, "start", Mock()) file_track.enabled = True sound = cast(Any, object()) if has_sound else None file_track.sounds = FileSounds(sound) file_track.start(file=sound) assert file_track.enabled is has_sound
python
""" Pytest firewallreader """ import pickle import pytest from nftfw.rulesreader import RulesReader from nftfw.ruleserr import RulesReaderError from nftfw.firewallreader import FirewallReader from .configsetup import config_init @pytest.fixture def cf(): # pylint: disable=invalid-name """ Get config from configsetup """ _cf = config_init() try: _rules = RulesReader(_cf) # this is an internal convention _cf.rulesreader = _rules except RulesReaderError as e: assert e is not None, 'RulesReaderError - str(e)' return _cf @pytest.fixture def firewallreader(cf): """ Firewall reader """ _fr = FirewallReader(cf, 'incoming') return _fr def test_reader(firewallreader): """ Validate information from firewall reader """ records = firewallreader.records assert len(records) == 16, "Should be 16 records" file = open('newdata/firewallreader.pickle', 'wb') pickle.dump(records, file) file.close() file = open('srcdata/firewallreader.pickle', 'rb') reference = pickle.load(file) file.close() for i in range(len(reference)): # pylint: disable=consider-using-enumerate ref = reference[i] rec = records[i] for ix in ['baseaction', 'action', 'ports', 'content', 'ip', 'ip6']: if ix in ref: assert rec[ix] == ref[ix]
python
from django.urls import path from errors import views app_name = 'errors' urlpatterns = [ path('403.html', views.view_403, name="403"), path('405.html', views.view_405, name="405"), path('404.html', views.view_404, name="404"), ]
python
"""Service module to store package loggers""" import logging import sys def configure_logger(): logger = logging.getLogger(name='lexibot') console_handler = logging.StreamHandler(stream=sys.stdout) console_handler.setFormatter( logging.Formatter('%(filename)s:%(lineno)d %(message)s')) logger.addHandler(console_handler) logger.setLevel(logging.INFO) if __name__ == "__main__": pass
python
import argparse import glob import math import ntpath import os import shutil import pyedflib import numpy as np import pandas as pd import mxnet as mx from sleepstage import stage_dict from logger import get_logger # Have to manually define based on the dataset ann2label = { "Sleep stage W": 0, "Sleep stage N1": 1, "Sleep stage N2": 2, "Sleep stage N3": 3, "Sleep stage 4": 3, # Follow AASM Manual "Sleep stage R": 4, "Sleep stage ?": 6, "Movement time": 5 } def main(): parser = argparse.ArgumentParser() parser.add_argument("--data_dir", type=str, default="./data/haaglanden/recordings", help="File path to the Haaglanden dataset.") parser.add_argument("--output_dir", type=str, default="./data/haaglanden/recordings/eeg_channel_C4_A1", help="Directory where to save outputs.") parser.add_argument("--select_ch", type=str, default="EEG C4-M1", help="Name of the channel in the dataset.") parser.add_argument("--log_file", type=str, default="info_ch_extract.log", help="Log file.") args = parser.parse_args() # Output dir if not os.path.exists(args.output_dir): os.makedirs(args.output_dir) else: shutil.rmtree(args.output_dir) os.makedirs(args.output_dir) args.log_file = os.path.join(args.output_dir, args.log_file) # Create logger logger = get_logger(args.log_file, level="info") # Select channel select_ch = args.select_ch # Read raw and annotation from EDF files psg_fnames = glob.glob(os.path.join(args.data_dir, "*PSG.edf")) ann_fnames = glob.glob(os.path.join(args.data_dir, "*sleepscoring.edf")) psg_fnames.sort() ann_fnames.sort() psg_fnames = np.asarray(psg_fnames) ann_fnames = np.asarray(ann_fnames) for i in range(len(psg_fnames)): logger.info("Loading ...") logger.info("Signal file: {}".format(psg_fnames[i])) logger.info("Annotation file: {}".format(ann_fnames[i])) psg_f = pyedflib.EdfReader(psg_fnames[i]) ann_f = pyedflib.EdfReader(ann_fnames[i]) assert psg_f.getStartdatetime() == ann_f.getStartdatetime() start_datetime = psg_f.getStartdatetime() logger.info("Start datetime: {}".format(str(start_datetime))) file_duration = psg_f.getFileDuration() logger.info("File duration: {} sec".format(file_duration)) epoch_duration = psg_f.datarecord_duration if psg_f.datarecord_duration == 60: # Fix problems of SC4362F0-PSG.edf, SC4362FC-Hypnogram.edf epoch_duration = epoch_duration / 2 logger.info("Epoch duration: {} sec (changed from 60 sec)".format(epoch_duration)) elif psg_f.datarecord_duration == 30: logger.info("Epoch duration: {} sec".format(epoch_duration)) elif psg_f.datarecord_duration == 1: epoch_duration = epoch_duration * 30 logger.info("Epoch_duration: {} sec (changed from 1 sec)".format(epoch_duration)) else: logger.info("Epoch duration: {} sec".format(epoch_duration)) # Extract signal from the selected channel ch_names = psg_f.getSignalLabels() ch_samples = psg_f.getNSamples() select_ch_idx = -1 for s in range(psg_f.signals_in_file): if ch_names[s] == select_ch: select_ch_idx = s break if select_ch_idx == -1: raise Exception("Channel not found.") sampling_rate = psg_f.getSampleFrequency(select_ch_idx) n_epoch_samples = int(epoch_duration * sampling_rate) psg_f_orig = psg_f.readSignal(select_ch_idx) res_psg_f = np.mod(ch_samples[select_ch_idx], n_epoch_samples) signals = psg_f_orig[:(ch_samples[select_ch_idx]-res_psg_f)].reshape(-1, n_epoch_samples) logger.info("Select channel: {}".format(select_ch)) logger.info("Select channel samples: {}".format(ch_samples[select_ch_idx])) logger.info("Sample rate: {}".format(sampling_rate)) # Sanity check n_epochs = psg_f.datarecords_in_file if 
psg_f.datarecord_duration == 60: # Fix problems of SC4362F0-PSG.edf, SC4362FC-Hypnogram.edf n_epochs = n_epochs * 2 elif psg_f.datarecord_duration == 1: n_epochs = np.floor(n_epochs/30) assert len(signals) == n_epochs, f"signal: {signals.shape} != {n_epochs}" # Generate labels from onset and duration annotation labels = [] total_duration = 0 ann_onsets, ann_durations, ann_stages = ann_f.readAnnotations() d_idx = np.where(ann_durations < 30) ann_onsets = np.delete(ann_onsets, d_idx) ann_durations = np.delete(ann_durations, d_idx) ann_stages = np.delete(ann_stages, d_idx) for a in range(len(ann_stages)): onset_sec = int(ann_onsets[a]) duration_sec = int(ann_durations[a]) ann_str = "".join(ann_stages[a]) # Sanity check assert onset_sec == total_duration # Get label value label = ann2label[ann_str] # Compute # of epoch for this stage if duration_sec % epoch_duration != 0: logger.info(f"Something wrong: {duration_sec} {epoch_duration}") raise Exception(f"Something wrong: {duration_sec} {epoch_duration}") duration_epoch = int(duration_sec / epoch_duration) # Generate sleep stage labels label_epoch = np.ones(duration_epoch, dtype=np.int) * label labels.append(label_epoch) total_duration += duration_sec logger.info("Include onset:{}, duration:{}, label:{} ({})".format( onset_sec, duration_sec, label, ann_str )) labels = np.hstack(labels) # Remove annotations that are longer than the recorded signals labels = labels[:len(signals)] # Get epochs and their corresponding labels x = signals.astype(np.float32) y = labels.astype(np.int32) # Select only sleep periods w_edge_mins = 30 nw_idx = np.where(y != stage_dict["W"])[0] start_idx = nw_idx[0] - (w_edge_mins * 2) end_idx = nw_idx[-1] + (w_edge_mins * 2) if start_idx < 0: start_idx = 0 if end_idx >= len(y): end_idx = len(y) - 1 select_idx = np.arange(start_idx, end_idx+1) logger.info("Data before selection: {}, {}".format(x.shape, y.shape)) x = x[select_idx] y = y[select_idx] logger.info("Data after selection: {}, {}".format(x.shape, y.shape)) # Remove movement and unknown move_idx = np.where(y == stage_dict["MOVE"])[0] unk_idx = np.where(y == stage_dict["UNK"])[0] if len(move_idx) > 0 or len(unk_idx) > 0: remove_idx = np.union1d(move_idx, unk_idx) logger.info("Remove irrelavant stages") logger.info(" Movement: ({}) {}".format(len(move_idx), move_idx)) logger.info(" Unknown: ({}) {}".format(len(unk_idx), unk_idx)) logger.info(" Remove: ({}) {}".format(len(remove_idx), remove_idx)) logger.info(" Data before removal: {}, {}".format(x.shape, y.shape)) select_idx = np.setdiff1d(np.arange(len(x)), remove_idx) x = x[select_idx] y = y[select_idx] logger.info(" Data after removal: {}, {}".format(x.shape, y.shape)) # Save filename = ntpath.basename(psg_fnames[i]).replace("PSG.edf", ".npz") save_dict = { "x": x, "y": y, "fs": sampling_rate, "ch_label": select_ch, "start_datetime": start_datetime, "file_duration": file_duration, "epoch_duration": epoch_duration, "n_all_epochs": n_epochs, "n_epochs": len(x), } np.savez(os.path.join(args.output_dir, filename), **save_dict) logger.info("\n=======================================\n") if __name__ == "__main__": main()
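# Illustrative standalone sketch (not part of the original script): the epoch reshaping
# above trims the signal to a whole number of 30 s epochs before reshaping. Synthetic
# numbers here; only the arithmetic mirrors the code.
import numpy as np

demo_rate, demo_epoch_sec = 100, 30
raw = np.arange(demo_rate * 95)                   # 95 s of samples
n_epoch_samples = int(demo_epoch_sec * demo_rate)
res = len(raw) % n_epoch_samples
epochs = raw[:len(raw) - res].reshape(-1, n_epoch_samples)
print(epochs.shape)                               # (3, 3000): 3 whole epochs, tail dropped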
python
# Generated by Django 3.2.7 on 2021-09-28 13:56 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Station', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('firstname', models.CharField(default='A.', max_length=255)), ('insertion', models.CharField(blank=True, max_length=255, null=True)), ('lastname', models.CharField(default='Noniem', max_length=255)), ('moderated', models.BooleanField(default=False)), ('moderated_by_fk', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), ('station_fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ns_zuil.station')), ], ), ]
python
_base_ = [ '../_base_/datasets/dota.py', '../_base_/schedules/schedule_1x.py', '../../_base_/default_runtime.py' ] model = dict( type='OrientedRCNN', backbone=dict( type='SwinTransformer', embed_dims=96, depths=[2, 2, 6, 2], num_heads=[3, 6, 12, 24], window_size=7, mlp_ratio=4, qkv_bias=True, qk_scale=None, drop_rate=0., attn_drop_rate=0., drop_path_rate=0.2, patch_norm=True, out_indices=(0, 1, 2, 3), with_cp=False, convert_weights=True, init_cfg=dict(type='Pretrained', checkpoint='swin_tiny_patch4_window7_224.pth')), neck=dict( type='FPN', in_channels=[96, 192, 384, 768], out_channels=256, num_outs=5), rpn_head=dict( type='OrientedRPNHead', in_channels=256, feat_channels=256, anchor_generator=dict( type='AnchorGenerator', scales=[6], ratios=[0.5, 1.0, 2.0], strides=[4, 8, 16, 32, 64]), bbox_coder=dict( type='MidpointOffsetCoder', target_means=[.0, .0, .0, .0, .0, .0], target_stds=[1.0, 1.0, 1.0, 1.0, 0.5, 0.5]), loss_cls=dict( type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), roi_head=dict( type='OBBStandardRoIHead', bbox_roi_extractor=dict( type='OBBSingleRoIExtractor', roi_layer=dict(type='RoIAlignRotated', out_size=7, sample_num=2), out_channels=256, extend_factor=(1.4, 1.2), featmap_strides=[4, 8, 16, 32]), bbox_head=dict( type='OBBShared2FCBBoxHead', start_bbox_type='obb', end_bbox_type='obb', in_channels=256, fc_out_channels=1024, roi_feat_size=7, num_classes=37, bbox_coder=dict( type='OBB2OBBDeltaXYWHTCoder', target_means=[0., 0., 0., 0., 0.], target_stds=[0.1, 0.1, 0.2, 0.2, 0.1]), reg_class_agnostic=True, loss_cls=dict( type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) # model training and testing settings train_cfg = dict( rpn=dict( assigner=dict( type='MaxIoUAssigner', pos_iou_thr=0.7, neg_iou_thr=0.3, min_pos_iou=0.3, match_low_quality=True, gpu_assign_thr=200, ignore_iof_thr=-1), sampler=dict( type='RandomSampler', num=1000, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=False), allowed_border=0, pos_weight=-1, debug=False), rpn_proposal=dict( nms_across_levels=False, nms_pre=5000, nms_post=5000, max_num=5000, nms_thr=0.8, min_bbox_size=0), rcnn=dict( assigner=dict( type='MaxIoUAssigner', pos_iou_thr=0.5, neg_iou_thr=0.5, min_pos_iou=0.5, match_low_quality=False, ignore_iof_thr=-1, iou_calculator=dict(type='OBBOverlaps')), sampler=dict( type='OBBRandomSampler', num=2000, pos_fraction=0.25, neg_pos_ub=-1, add_gt_as_proposals=True), pos_weight=-1, debug=False)) test_cfg = dict( rpn=dict( nms_across_levels=False, nms_pre=5000, nms_post=5000, max_num=5000, nms_thr=0.8, min_bbox_size=0), rcnn=dict( score_thr=0.005, nms=dict(type='obb_nms', iou_thr=0.1), max_per_img=2000)) optimizer = dict( _delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, paramwise_cfg=dict( custom_keys={ 'absolute_pos_embed': dict(decay_mult=0.), 'relative_position_bias_table': dict(decay_mult=0.), 'norm': dict(decay_mult=0.) })) lr_config = dict(warmup_iters=1000, step=[9, 11]) runner = dict(max_epochs=12)
python
import sys, math nums = map(int, sys.stdin.readlines()[1:]) gauss = lambda x: (x/2.0)*(1+x) total = gauss(len(nums)-1) a = max(nums) nums.remove(a) b = max(nums) nums.remove(b) if a == b: cnt = gauss(1 + nums.count(a)) else: cnt = 1 + nums.count(b) shit_fmt = lambda x: math.floor(x*100.0)/100.0 # b/c hackerrank is dumb. print '{:.2f}'.format(shit_fmt(cnt/total))
python
downloadable_dataset_urls = { "ag-raw-train": { "filename": "train.csv", "url": ("https://raw.githubusercontent.com/mhjabreel/CharCnn_Keras/master/" "data/ag_news_csv/train.csv"), "md5": "b1a00f826fdfbd249f79597b59e1dc12", "untar": False, "unzip": False, }, "ag-raw-test": { "filename": "test.csv", "url": ("https://raw.githubusercontent.com/mhjabreel/CharCnn_Keras/master/data/" "ag_news_csv/test.csv"), "md5": "d52ea96a97a2d943681189a97654912d", "untar": False, "unzip": False, }, "imdb-raw": { "filename": "aclImdb_v1.tar.gz", "url": "https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz", "md5": "7c2ac02c03563afcf9b574c7e56c153a", "untar": True, "unzip": False, }, "yelp-raw": { "filename": "yelp_review_polarity_csv.tgz", "url": "https://s3.amazonaws.com/fast-ai-nlp/yelp_review_polarity_csv.tgz", "md5": "0f09b3af1a79c136ef9ca5f29df9ed9a", "untar": True, "unzip": False, }, "mr-raw": { "filename": "rt-polaritydata.tar.gz", "url": "http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.tar.gz", "md5": "50c1c2c047b4225e148e97aa7708c34e", "untar": True, "unzip": False, }, "snli-raw": { "filename": "snli_1.0.zip", "url": "https://nlp.stanford.edu/projects/snli/snli_1.0.zip", "md5": "981c3df556bbaea3f17f752456d0088c", "untar": False, "unzip": True, }, "mnli-raw": { "filename": "multinli_1.0.zip", "url": "https://cims.nyu.edu/~sbowman/multinli/multinli_1.0.zip", "md5": "0f70aaf66293b3c088a864891db51353", "untar": False, "unzip": True, }, "processed-datasets": { "filename": "datasets.tgz", "url": "https://fibber-data.s3.amazonaws.com/datasets_v0.3.tgz", "md5": "910846005ada814bcda2125435c24fd1", "untar": True, "unzip": False, }, "mr-demo": { "filename": "mr-demo.tgz", "url": "https://fibber-data.s3.amazonaws.com/mr-demo.tgz", "md5": "8ee6b6eda12a7c1282cb903c713085c5", "untar": True, "unzip": False, } }
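# Illustrative checksum sketch (not part of the original module): one way the "md5"
# fields above could be verified after a download, using plain hashlib.
import hashlib


def md5_of(path, chunk_size=1 << 20):
    digest = hashlib.md5()
    with open(path, "rb") as handle:
        for block in iter(lambda: handle.read(chunk_size), b""):
            digest.update(block)
    return digest.hexdigest()

# e.g. md5_of("train.csv") == downloadable_dataset_urls["ag-raw-train"]["md5"]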
python
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # this is a windows documentation stub, actual code lives in the .ps1 # file of the same name ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: win_wait_for_process version_added: '2.7' short_description: Waits for a process to exist or not exist before continuing. description: - Waiting for a process to start or stop. - This is useful when Windows services behave poorly and do not enumerate external dependencies in their manifest. options: process_name_exact: description: - The name of the process(es) for which to wait. type: str process_name_pattern: description: - RegEx pattern matching desired process(es). type: str sleep: description: - Number of seconds to sleep between checks. - Only applies when waiting for a process to start. Waiting for a process to start does not have a native non-polling mechanism. Waiting for a stop uses native PowerShell and does not require polling. type: int default: 1 process_min_count: description: - Minimum number of process matching the supplied pattern to satisfy C(present) condition. - Only applies to C(present). type: int default: 1 pid: description: - The PID of the process. type: int owner: description: - The owner of the process. - Requires PowerShell version 4.0 or newer. type: str pre_wait_delay: description: - Seconds to wait before checking processes. type: int default: 0 post_wait_delay: description: - Seconds to wait after checking for processes. type: int default: 0 state: description: - When checking for a running process C(present) will block execution until the process exists, or until the timeout has been reached. C(absent) will block execution untile the processs no longer exists, or until the timeout has been reached. - When waiting for C(present), the module will return changed only if the process was not present on the initial check but became present on subsequent checks. - If, while waiting for C(absent), new processes matching the supplied pattern are started, these new processes will not be included in the action. type: str default: present choices: [ absent, present ] timeout: description: - The maximum number of seconds to wait for a for a process to start or stop before erroring out. type: int default: 300 author: - Charles Crossan (@crossan007) ''' EXAMPLES = r''' - name: Wait 300 seconds for all Oracle VirtualBox processes to stop. (VBoxHeadless, VirtualBox, VBoxSVC) win_wait_for_process: process_name: 'v(irtual)?box(headless|svc)?' state: absent timeout: 500 - name: Wait 300 seconds for 3 instances of cmd to start, waiting 5 seconds between each check win_wait_for_process: process_name_exact: cmd state: present timeout: 500 sleep: 5 process_min_count: 3 ''' RETURN = r''' elapsed: description: The elapsed seconds between the start of poll and the end of the module. returned: always type: float sample: 3.14159265 matched_processes: description: List of matched processes (either stopped or started) returned: always type: complex contains: name: description: The name of the matched process returned: always type: str sample: svchost owner: description: The owner of the matched process returned: when supported by PowerShell type: str sample: NT AUTHORITY\SYSTEM pid: description: The PID of the matched process returned: always type: int sample: 7908 '''
python
from IComprehension import Comprehension from service.preprocess import ( check_answers_and_get_answer_sentence_matches, check_questions_and_get_question_tokens, _removeStopWords ) from service.qualifier import find_similarity,find_question_similarity class wiki(Comprehension): def __init__(self, para, ques, ans): self.paragraph = para.split(".") self.questions = [i for i in ques.split("\n")] self.answers = [j for j in ans.split(";")] def validate_input(self) -> bool: self.answer_matches = check_answers_and_get_answer_sentence_matches( self.paragraph, self.answers ) self.question_tokens = check_questions_and_get_question_tokens(self.questions) if self.answer_matches and self.question_tokens: return True else: return False def getQuestionMaches(self): sentenses = [] for sentense in self.paragraph: sentenses.append(_removeStopWords(str(sentense).split())) #print(sentenses) self.question_tokens = check_questions_and_get_question_tokens(self.questions) self.result = find_question_similarity(sentenses, self.question_tokens) def process_data(self) -> list: self.result = find_question_similarity(self.answer_matches, self.question_tokens) def get_results(self): return ";".join(self.result) def solve_puzzle(paragraphs, questions, answers): w = wiki(paragraphs, questions, answers) w.getQuestionMaches() #w.get_results() """ if w.validate_input(): w.process_data() result = w.get_results() return result """ paragraph1 = "Zebras are several species of African equids (horse family) united by their distinctive black and white stripes. Their stripes come in different patterns, unique to each individual. They are generally social animals that live in small harems to large herds. Unlike their closest relatives, horses and donkeys, zebras have never been truly domesticated. There are three species of zebras: the plains zebra, the Grévy's zebra and the mountain zebra. The plains zebra and the mountain zebra belong to the subgenus Hippotigris, but Grévy's zebra is the sole species of subgenus Dolichohippus. The latter resembles an ass, to which it is closely related, while the former two are more horse-like. All three belong to the genus Equus, along with other living equids. The unique stripes of zebras make them one of the animals most familiar to people. They occur in a variety of habitats, such as grasslands, savannas, woodlands, thorny scrublands, mountains, and coastal hills. However, various anthropogenic factors have had a severe impact on zebra populations, in particular hunting for skins and habitat destruction. Grévy's zebra and the mountain zebra are endangered. While plains zebras are much more plentiful, one subspecies, the quagga, became extinct in the late 19th century – though there is currently a plan, called the Quagga Project, that aims to breed zebras that are phenotypically similar to the quagga in a process called breeding back." questions1 = "Which Zebras are endangered? \ \n What is the aim of the Quagga Project? \ \n Which animals are some of their closest relatives? \ \n Which are the three species of zebras? \ \n Which subgenus do the plains zebra and the mountain zebra belong to?" 
answers1 = "subgenus Hippotigris;the plains zebra, the Grévy's zebra and the mountain zebra;horses and donkeys;aims to breed zebras that are phenotypically similar to the quagga;Grévy's zebra and the mountain zebra" correct_answers1 = "Grévy's zebra and the mountain zebra;aims to breed zebras that are phenotypically similar to the quagga;horses and donkeys;the plains zebra, the Grévy's zebra and the mountain zebra;subgenus Hippotigris" solve_puzzle(paragraph1, questions1, answers1)
python
from random import randint import numpy as np from qiskit import execute, BasicAer from qiskit.circuit.quantumcircuit import QuantumCircuit cards = ["H", "H", "X", "X", "CX", "RX", "RX"] def run(circuit: QuantumCircuit): # use local simulator backend = BasicAer.get_backend('qasm_simulator') results = execute(circuit, backend=backend, shots=1024).result() answer = results.get_counts() max_value = 0 max_key = "" for key, value in answer.items(): if value > max_value: max_value = value max_key = key print(answer) if max_key == "00": print("Both players stay grounded :(") return 0 elif max_key == "01": print("Player 1 is excited!") return 1 elif max_key == "10": print("Player 2 is excited!") return 2 elif max_key == "11": print("Both players are excited!") return 3 return def place_gate(player, field, qubit): card = player.pop() print(f"now inserting card {card} from player {qubit+1}") if card == "H": field.h(qubit) elif card == "X": field.x(qubit) elif card == "RX": field.rx(np.pi/2, qubit) elif card == "CX": if qubit == 0: field.cx(qubit, qubit + 1) else: field.cx(qubit, qubit - 1) return def create_playing_field(player1: list, player2: list) -> QuantumCircuit: field = QuantumCircuit(2, 2) player1.reverse() player2.reverse() while len(player1) > 0: place_gate(player1, field, 0) while len(player2) > 0: place_gate(player2, field, 1) field.measure(0, 0) field.measure(1, 1) return field def generate_deck() -> list: deck = [] for j in range(4): for i in range(len(cards)): deck.append(cards[i]) return deck def shuffle_deck(deck: list): for i in range(len(deck) * 5): j = randint(0, len(deck) - 1) k = randint(0, len(deck) - 1) temp = deck[j] deck[j] = deck[k] deck[k] = temp return def deal_starting_hands(player1: list, player2: list, deck: list): for i in range(0, 4, 2): player1.append(deck.pop()) player2.append(deck.pop()) return def draw_from_deck(deck: list) -> str: return deck.pop() def replace(replacement_choice, card, player): player.remove(replacement_choice) player.append(card) return def draw(player: list, deck: list): card = draw_from_deck(deck) print("Card drawn from deck is:" + card) user_choice = "?" while user_choice != "y" and user_choice != "n": user_choice = input("Do you want this card? 
(y/n)") if user_choice == "y": player.append(card) else: deck.insert(0, card) # put the card on the bottom of the deck return def fix_hand(player: list) -> list: new_hand = [] print("Your current hand is setup like this:") print(player) i = 0 while len(player) > 0: replacement_choice = input(f"Choose one of your cards to be on position {i} :") while replacement_choice not in player: replacement_choice = input(f"Choose one of your cards to be on position {i} :") new_hand.insert(len(new_hand), replacement_choice) player.remove(replacement_choice) print("Cards remaining in previous hands") print(player) i = i + 1 print("New hand") print(new_hand) print() return new_hand class Game: deck = generate_deck() shuffle_deck(deck) player1 = [] player1_wins = 0 player2 = [] player2_wins = 0 rounds = int(input("Enter number of rounds: ")) print("The exciting game begins!") current_round = 0 while current_round <= rounds: countdown = 4 print("#" * (current_round + 1), end="") print(f"ROUND {current_round}", end="") print("#" * (current_round + 1)) print() deal_starting_hands(player1, player2, deck) while countdown != 0: print("\nPlayer 1") print(player1) draw(player1, deck) print("\nPlayer 2") print(player2) draw(player2, deck) countdown = countdown - 1 print(f"{countdown} dealings remain before the players have to see who's Excited!") if countdown == 0: print("Next turn is going to be Exciting!!!") print("Both players get to fix their hands in the order they desire!") player1 = fix_hand(player1) player2 = fix_hand(player2) playing_field = create_playing_field(player1, player2) print(playing_field.draw()) round_result = run(playing_field) if round_result == "1": player1_wins = player1_wins + 1 elif round_result == "2": player2_wins = player2_wins + 1 current_round = current_round + 1 if player1_wins > player2_wins: print("PLAYER ONE WAS MOST EXCITED!") elif player2_wins > player1_wins: print("PLAYER TWO WAS MOST EXCITED!") else: print("PLAYERS WERE EQUALLY EXCITED!")
python
import unittest import datetime import pandas as pd from simple_ranker import Ranker class RankerTest(unittest.TestCase): def setUp(self): self.current_year = datetime.datetime.now().year def test_rank_by_PE_returns_lowest_first(self): pe_rank = { 'name': 'pe', 'ascending': True } data = pd.DataFrame({ 'code': ['ANZ', 'CBA', 'NAB'], 'pe': [3.0, 1.0, 2.0], }, index=pd.to_datetime( [datetime.date(self.current_year, 6, 20)] * 3), dtype=float ) ranker = Ranker(data, [pe_rank], [], limit=50) results = ranker.process() self.assertTrue(results[0:1]['code'][0] == 'CBA') def test_rank_by_ROE_return_highest_first_after_filtering(self): roe_rank = { 'name': 'roe', 'max': 0.70, 'ascending': False } data = pd.DataFrame({ 'code': ['ANZ', 'CBA', 'NAB'], 'roe': [0.70, 0.71, 0.69]}, index=pd.to_datetime( [datetime.date(self.current_year, 6, 20)] * 3 ), dtype=float ) ranker = Ranker(data, [roe_rank], [], limit=50) results = ranker.process() self.assertTrue(results[0:1]['code'][0] == 'ANZ') def test_rank_and_filter_removes_too_small_companies(self): market_cap_filter = { 'name': 'market_cap', 'min': 5000000 } roe_rank = { 'name': 'roe', 'max': 0.70, 'ascending': False } data = pd.DataFrame({ 'code': ['SMALL', 'ANZ', 'CBA', 'NAB'], 'roe': [0.50, 0.40, 0.41, 0.39], 'market_cap': [1000000] + [6000000] * 3}, index=pd.to_datetime( [datetime.date(self.current_year, 6, 20)] * 4 ), dtype=float ) ranker = Ranker(data, [roe_rank], [market_cap_filter], limit=50) results = ranker.process() self.assertTrue(results[0:1]['code'][0] == 'CBA') def test_rank_ROE_and_PE_returns_correct_top(self): roe_rank = { 'name': 'roe', 'ascending': False } pe_rank = { 'name': 'pe', 'ascending': True } data = pd.DataFrame({ 'code': ['ANZ', 'CBA', 'NAB', 'WST'], 'pe': [3, 4, 5, 6], 'roe': [0.30, 0.50, 0.80, 0.70]}, index=pd.to_datetime( [datetime.date(self.current_year, 6, 20)] * 4 ), dtype=float ) ranker = Ranker(data, [pe_rank, roe_rank], [], limit=50) results = ranker.process() # Output should look like this: # code pe_rank roe_rank total_rank # ANZ 1 4 5 # CBA 2 3 5 # NAB 3 1 4 -- first pick # WST 4 2 6 -- last pick self.assertTrue(results[0:1]['code'][0] == 'NAB') self.assertTrue(results[-1:]['code'][0] == 'WST') def test_rank_ROE_avg_3_returns_correct_top(self): roe_rank = { 'name': 'roe', 'max': 0.8, 'average': 3, 'ascending': False } # Push last 3 years into a list date_array = [ datetime.date(self.current_year - i, 6, 20) for i in range(3)] data = pd.DataFrame({ 'code': ['ANZ'] * 3 + ['CBA'] * 3 + ['NAB'] * 3, 'roe': [0.1, 0.2, 0.5] + [0.7, 0.1, 0.2] + [0.1, 0.2, 0.4]}, index=pd.to_datetime(date_array * 3), dtype=float ) ranker = Ranker(data, [roe_rank], [], limit=50) results = ranker.process() self.assertTrue(results[0:1]['code'][0] == 'CBA') self.assertTrue(results[-1:]['code'][0] == 'NAB') if __name__ == '__main__': unittest.run()
python
from . import crop
from . import info
from . import inpaint
from . import pool
from . import unstack
python
""" interchange_regression_utilities Utilities to help with running the interchange regression tests """ from setuptools import find_packages, setup setup( name="interchange_regression_utilities", author="Open Force Field Consortium", author_email="[email protected]", license="MIT", packages=find_packages(), entry_points={ "console_scripts": [ "create_openmm_systems=interchange_regression_utilities.commands." "create_openmm_systems:main", "compare_openmm_systems=interchange_regression_utilities.commands." "compare_openmm_systems:main", ], }, python_requires=">=3.6", )
python
from main.model import Font from main.views import fetch_css import requests import datetime import random import string SNAPSHOTTER_URL = "http://localhost:3000/" def populate(): with open('urls.txt', 'r') as f: urls = f.read().split('\n')[:10] for url in urls: print 'Processing', url, '...' font_string = fetch_css(url) if font_string: f = Font(name=font_string, site_url=url, image_url=get_snapshot_url(url), updated=datetime.datetime.now() ) f.save() def get_snapshot_url(url): lst = [random.choice(string.ascii_letters + string.digits) for n in xrange(12)] uniquid = "".join(lst) img = requests.get(SNAPSHOTTER_URL, params={'url' : url}).content with open('static/media/' + uniquid + '.jpg', 'w') as image: image.write(img) return '/static/media/' + uniquid + '.jpg' if __name__ == '__main__': populate()
python
import pandas as pd TITLE_NAME = "Auto List" SOURCE_NAME = "auto_list" LABELS = ["Team", "Match", "Starting position", "Plate Assignments", "Total Success", "Total Attempt and Success", "Scale Success", "Switch Success", "First Time", "Last Time", "Action 1", "Action 2", "Action 3", "Action 4", "Action 5" ] def get_rows(manager): auto_data_points = ["Auto scale", "Auto switch", "Auto scale attempt", "Auto switch attempt"] for entry in manager.entries: if not entry.board.alliance() == "N": times = {i: [] for i in auto_data_points} actions = [] for data_point in auto_data_points: for occurrence_time in entry.look(data_point): times[data_point].append(occurrence_time) actions.append((occurrence_time, data_point)) if not actions: continue actions = sorted(actions, key=lambda x: x[0]) # sort by the first item in tuple num_actions = len(actions) action_list = [] for i in range(5): if i < num_actions: action_list.append(actions[i][1]) else: action_list.append("None") switch_auto_successes = entry.count("Auto switch") scale_auto_successes = entry.count("Auto scale") switch_auto_attempts = entry.count("Auto switch attempt") scale_auto_attempts = entry.count("Auto scale attempt") starting_pos = entry.final_value("Starting position", default=0) starting_pos_str = ["None", "Left", "Center", "Right"][starting_pos] if manager.tba_available: plate_assignments = manager.tba.match(key='2018dar_qm49')['score_breakdown']['red']['tba_gameData'] if entry.board.alliance() == "R": scale_assignment = plate_assignments[1] switch_assignment = plate_assignments[0] else: for i, v in enumerate(plate_assignments): if v == "R": plate_assignments[i] = "L" elif v == "L": plate_assignments[i] = "R" plate_assignments = plate_assignments scale_assignment = plate_assignments[1] switch_assignment = plate_assignments[0] row_data = { "Team": entry.team, "Match": entry.match, "Starting position": starting_pos_str, "Scale assignment": scale_assignment, "Switch assignment": switch_assignment, "Total Success": switch_auto_successes + scale_auto_successes, "Total Attempt and Success": (switch_auto_successes + switch_auto_attempts + scale_auto_successes + scale_auto_attempts), "Scale Success": scale_auto_successes, "Switch Success": switch_auto_successes, "First Time": actions[0][0] if num_actions > 0 else 0, "Last Time": actions[-1][0] if num_actions > 0 else 0, "Action 1": action_list[0], "Action 2": action_list[1], "Action 3": action_list[2], "Action 4": action_list[3], "Action 5": action_list[4] } else: row_data = { "Team": entry.team, "Match": entry.match, "Starting position": starting_pos_str, "Plate Assignments": "", "Total Success": switch_auto_successes + scale_auto_successes, "Total Attempt and Success": (switch_auto_successes + switch_auto_attempts + scale_auto_successes + scale_auto_attempts), "Scale Success": scale_auto_successes, "Switch Success": switch_auto_successes, "First Time": actions[0][0] if num_actions > 0 else 0, "Last Time": actions[-1][0] if num_actions > 0 else 0, "Action 1": action_list[0], "Action 2": action_list[1], "Action 3": action_list[2], "Action 4": action_list[3], "Action 5": action_list[4] } yield row_data def compute_table(manager): table = pd.DataFrame(get_rows(manager), columns=LABELS)[LABELS] return table
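# --- Hedged usage sketch (not part of the original module) ---
# compute_table(manager) returns a pandas DataFrame with the LABELS columns.
# The `manager` object (with .entries, .tba_available and .tba) comes from the
# surrounding scouting framework and is assumed here; the CSV path is
# illustrative only.
def export_auto_list(manager, path="auto_list.csv"):
    table = compute_table(manager)
    # Put the most productive autonomous runs first
    table = table.sort_values("Total Success", ascending=False)
    table.to_csv(path, index=False)
    return table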
python
#!/usr/bin/env python #!vim:fileencoding=UTF-8 import subprocess jobid = ( ("sf_0002", "A_onlyAICG"), ("sf_0004", "A_onlyAICG"), ("sf_0009", "I_ELE_HIS0_P1all"), ("sf_0010", "I_ELE_HIS0_P1all"), ("sf_0011", "G_ELE_HIS0_noP"), ("sf_0012", "G_ELE_HIS0_noP"), ("sf_0015", "J_ELE_HIS0_P2act"), ("sf_0016", "J_ELE_HIS0_P2act"), ("sf_0017", "K_ELE_HIS0_P2all"), ("sf_0018", "K_ELE_HIS0_P2all"), ("sf_0020", "A_onlyAICG"), ("sf_0021", "A_onlyAICG"), ("sf_0022", "A_onlyAICG"), ("sf_0023", "G_ELE_HIS0_noP"), ("sf_0024", "G_ELE_HIS0_noP"), ("sf_0025", "G_ELE_HIS0_noP"), ("sf_0026", "K_ELE_HIS0_P2all"), ("sf_0027", "K_ELE_HIS0_P2all"), ("sf_0028", "K_ELE_HIS0_P2all"), ("sf_0029", "J_ELE_HIS0_P2act"), ("sf_0030", "J_ELE_HIS0_P2act"), ("sf_0031", "J_ELE_HIS0_P2act"), ("sf_0032", "I_ELE_HIS0_P1all"), ("sf_0033", "I_ELE_HIS0_P1all"), ("sf_0034", "I_ELE_HIS0_P1all"), ("sf_0035", "L"), ("sf_0036", "L"), ("sf_0037", "L"), ("sf_0038", "L"), ("sf_0039", "L"), ("sf_0040", "T"), ("sf_0041", "T"), ("sf_0042", "T"), ("sf_0043", "T"), ("sf_0044", "T"), ("sf_0045", "S"), ("sf_0046", "S"), ("sf_0047", "S"), ) pathroot = "/home/hori/mapk/cafemol/" for job in jobid: jobname = job[0] group = job[1] wd = pathroot + jobname cmdline = "20130702_3.py polar_f3.out " + jobname p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait() cmdline = "gnuplot ../hist_pol.gnu; gnuplot ../hist_pol_png.gnu" p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait() cmdline = "mv hist_pol.png ../../plot/%s/%s_hist_pol.png" % (group,jobname) p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait() cmdline = "mv hist_pol_1.png ../../plot/%s/%s_hist_pol_1.png" % (group,jobname) p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait() cmdline = "mv hist_pol_2.png ../../plot/%s/%s_hist_pol_2.png" % (group,jobname) p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait() cmdline = "mv hist_pol_3.png ../../plot/%s/%s_hist_pol_3.png" % (group,jobname) p = subprocess.Popen(cmdline, shell=True, cwd=wd) p.wait()
python
"""aubergine: create REST APIs using API-first approach.""" from setuptools import setup, find_packages CLASSIFIERS = [ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 3.6', 'Topic :: Software Development'] with open('README.rst') as readme: LONG_DESCRIPTION = readme.read() setup( name='aubergine', license='MIT', description=__doc__, use_scm_version=True, long_description=LONG_DESCRIPTION, platforms=["Linux", "Unix"], setup_requires=['setuptools_scm'], install_requires=['nadia', 'falcon', 'ymlref'], tests_require=['pytest', 'pytest-mock'], author='Konrad Jałowiecki <[email protected]>', author_email='[email protected]', packages=find_packages(exclude=['tests', 'tests.*', 'examples']), keywords='openapi rest api' )
python
import json import os from datetime import datetime, timedelta import pytz import calculate_daily_payment_data import calculate_market_data import config from manage_transactions import get_first_transaction_timestamp from util import logging STORE_FINAL_DATA_GENERAL = '/terra-data/v2/final/general' log = logging.get_custom_logger(__name__, config.LOG_LEVEL) def final_data_general(): os.makedirs(STORE_FINAL_DATA_GENERAL, exist_ok=True) max_time = datetime.utcnow() max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC) stop_processing = False date_to_process = get_first_transaction_timestamp() # date_last_processed = _get_last_processed_date() # date_to_process = max(date_to_process, date_last_processed + timedelta(days=1)) log.debug('generate final data: general') if date_to_process >= max_time: return while not stop_processing: final_data = {} payment_data = calculate_daily_payment_data.get_data_for_date(date_to_process) file_path = os.path.join(STORE_FINAL_DATA_GENERAL, date_to_process.strftime('%Y-%m-%d') + '.json') if not os.path.isfile(file_path): for symbol in payment_data.keys(): final_data[symbol] = {} log.debug('creating final general data for ' + date_to_process.strftime('%Y-%m-%d')) # Amount of Coins # Velocity market_data = calculate_market_data.get_data(symbol, date_to_process) if not market_data: return final_data[symbol]['amount_of_coins'] = market_data['circulating_supply'] final_data[symbol]['velocity_m1'] = payment_data[symbol]['total_amount'] / market_data['circulating_supply'] if len(final_data.keys()) > 0: with open(file_path, 'w') as file: file.write(json.dumps(final_data)) date_to_process += timedelta(days=1) if date_to_process >= max_time: stop_processing = True
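# --- Hedged entry-point sketch (not part of the original module) ---
# final_data_general() walks day by day from the first transaction to today
# and writes one JSON file per day; this minimal guard assumes the upstream
# payment and market data stores referenced above are already populated.
if __name__ == '__main__':
    final_data_general()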
python
import re

import requests

'''
Basic version: scrape videos from xiaohuar.com
'''

response = requests.get('http://www.xiaohuar.com/v/')
# print(response.status_code)
# print(response.content)
# print(response.text)

urls = re.findall(r'class="items".*?href="(.*?)"', response.text, re.S)  # re.S lets "." also match newlines, so the whole text is matched as a single line
# print(urls)

url = urls[2]
result = requests.get(url)
mp4_url = re.findall(r'id="media".*?src="(.*?)"', result.text, re.S)[0]

video = requests.get(mp4_url)

with open('./a.mp4', 'wb') as f:
    f.write(video.content)
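# --- Hedged extension sketch (not part of the original script) ---
# The script above only downloads urls[2]; the helper below sketches how the
# same two regexes could be applied to every detail page. The output file
# names and the assumption that each page embeds an id="media" video tag are
# illustrative only.
def download_all(dest_dir='.'):
    for i, page_url in enumerate(urls):
        page = requests.get(page_url)
        matches = re.findall(r'id="media".*?src="(.*?)"', page.text, re.S)
        if not matches:  # some pages may not embed a video
            continue
        data = requests.get(matches[0]).content
        with open('{0}/video_{1}.mp4'.format(dest_dir, i), 'wb') as out:
            out.write(data)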
python
# coding: utf-8 """ Uptrends API v4 This document describes Uptrends API version 4. This Swagger environment also lets you execute API methods directly. Please note that this is not a sandbox environment: these API methods operate directly on your actual Uptrends account. For more information, please visit https://www.uptrends.com/api. # noqa: E501 OpenAPI spec version: 1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from uptrends.api_client import ApiClient class MonitorCheckApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def monitor_check_get_account_monitor_checks(self, **kwargs): # noqa: E501 """Returns all monitor check data. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_account_monitor_checks(async_req=True) >>> result = thread.get() :param async_req bool :param str error_level: Error level filter that should be applied. (default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. :param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_account_monitor_checks_with_http_info(**kwargs) # noqa: E501 else: (data) = self.monitor_check_get_account_monitor_checks_with_http_info(**kwargs) # noqa: E501 return data def monitor_check_get_account_monitor_checks_with_http_info(self, **kwargs): # noqa: E501 """Returns all monitor check data. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_account_monitor_checks_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param str error_level: Error level filter that should be applied. (default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. :param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['error_level', 'cursor', 'sorting', 'take', 'start', 'end', 'preset_period'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_account_monitor_checks" % key ) params[key] = val del params['kwargs'] if 'take' in params and params['take'] > 100: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_account_monitor_checks`, must be a value less than or equal to `100`") # noqa: E501 if 'take' in params and params['take'] < 0: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_account_monitor_checks`, must be a value greater than or equal to `0`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'error_level' in params: query_params.append(('ErrorLevel', params['error_level'])) # noqa: E501 if 'cursor' in params: query_params.append(('Cursor', params['cursor'])) # noqa: E501 if 'sorting' in params: query_params.append(('Sorting', params['sorting'])) # noqa: E501 if 'take' in params: query_params.append(('Take', params['take'])) # noqa: E501 if 'start' in params: query_params.append(('Start', params['start'])) # noqa: E501 if 'end' in params: query_params.append(('End', params['end'])) # noqa: E501 if 'preset_period' in params: query_params.append(('PresetPeriod', params['preset_period'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='MonitorCheckResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_http_details(self, monitor_check_id, **kwargs): # noqa: E501 """Returns HTTP details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_http_details(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :return: HttpDetailsResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_http_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_http_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 return data def monitor_check_get_http_details_with_http_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns HTTP details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_http_details_with_http_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :return: HttpDetailsResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_http_details" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_http_details`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}/Http', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='HttpDetailsResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_monitor_check(self, monitor_guid, **kwargs): # noqa: E501 """Returns monitor check data for a specific monitor. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_monitor_check(monitor_guid, async_req=True) >>> result = thread.get() :param async_req bool :param str monitor_guid: The Guid of the monitor to get monitor checks for. (required) :param str error_level: Error level filter that should be applied. (default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. 
:param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_monitor_check_with_http_info(monitor_guid, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_monitor_check_with_http_info(monitor_guid, **kwargs) # noqa: E501 return data def monitor_check_get_monitor_check_with_http_info(self, monitor_guid, **kwargs): # noqa: E501 """Returns monitor check data for a specific monitor. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_monitor_check_with_http_info(monitor_guid, async_req=True) >>> result = thread.get() :param async_req bool :param str monitor_guid: The Guid of the monitor to get monitor checks for. (required) :param str error_level: Error level filter that should be applied. (default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. :param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['monitor_guid', 'error_level', 'cursor', 'sorting', 'take', 'start', 'end', 'preset_period'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_monitor_check" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_guid' is set if ('monitor_guid' not in params or params['monitor_guid'] is None): raise ValueError("Missing the required parameter `monitor_guid` when calling `monitor_check_get_monitor_check`") # noqa: E501 if 'take' in params and params['take'] > 100: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_monitor_check`, must be a value less than or equal to `100`") # noqa: E501 if 'take' in params and params['take'] < 0: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_monitor_check`, must be a value greater than or equal to `0`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_guid' in params: path_params['monitorGuid'] = params['monitor_guid'] # noqa: E501 query_params = [] if 'error_level' in params: query_params.append(('ErrorLevel', params['error_level'])) # noqa: E501 if 'cursor' in params: query_params.append(('Cursor', params['cursor'])) # noqa: E501 if 'sorting' in params: query_params.append(('Sorting', params['sorting'])) # noqa: E501 if 'take' in params: query_params.append(('Take', params['take'])) # noqa: E501 if 'start' in params: query_params.append(('Start', params['start'])) # noqa: E501 if 'end' in params: query_params.append(('End', params['end'])) # noqa: E501 if 'preset_period' in params: query_params.append(('PresetPeriod', params['preset_period'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/Monitor/{monitorGuid}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='MonitorCheckResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_monitor_group_data(self, monitor_group_guid, **kwargs): # noqa: E501 """Returns monitor check data for a specific monitor group. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_monitor_group_data(monitor_group_guid, async_req=True) >>> result = thread.get() :param async_req bool :param str monitor_group_guid: The Guid of the monitor group to get monitor checks for. (required) :param str error_level: Error level filter that should be applied. 
(default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. :param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_monitor_group_data_with_http_info(monitor_group_guid, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_monitor_group_data_with_http_info(monitor_group_guid, **kwargs) # noqa: E501 return data def monitor_check_get_monitor_group_data_with_http_info(self, monitor_group_guid, **kwargs): # noqa: E501 """Returns monitor check data for a specific monitor group. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_monitor_group_data_with_http_info(monitor_group_guid, async_req=True) >>> result = thread.get() :param async_req bool :param str monitor_group_guid: The Guid of the monitor group to get monitor checks for. (required) :param str error_level: Error level filter that should be applied. (default = NoError and above) :param str cursor: A cursor value that should be used for traversing the dataset. :param str sorting: Sorting direction based on timestamp. :param int take: The number of records to return (Max value = 100) :param datetime start: The start of a custom period (can't be used together with the PresetPeriod parameter) :param datetime end: The end of a custom period :param str preset_period: The requested time period. :return: MonitorCheckResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['monitor_group_guid', 'error_level', 'cursor', 'sorting', 'take', 'start', 'end', 'preset_period'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_monitor_group_data" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_group_guid' is set if ('monitor_group_guid' not in params or params['monitor_group_guid'] is None): raise ValueError("Missing the required parameter `monitor_group_guid` when calling `monitor_check_get_monitor_group_data`") # noqa: E501 if 'take' in params and params['take'] > 100: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_monitor_group_data`, must be a value less than or equal to `100`") # noqa: E501 if 'take' in params and params['take'] < 0: # noqa: E501 raise ValueError("Invalid value for parameter `take` when calling `monitor_check_get_monitor_group_data`, must be a value greater than or equal to `0`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_group_guid' in params: path_params['monitorGroupGuid'] = params['monitor_group_guid'] # noqa: E501 query_params = [] if 'error_level' in params: query_params.append(('ErrorLevel', params['error_level'])) # noqa: E501 if 'cursor' in params: query_params.append(('Cursor', params['cursor'])) # noqa: E501 if 'sorting' in params: query_params.append(('Sorting', params['sorting'])) # noqa: E501 if 'take' in params: query_params.append(('Take', params['take'])) # noqa: E501 if 'start' in params: query_params.append(('Start', params['start'])) # noqa: E501 if 'end' in params: query_params.append(('End', params['end'])) # noqa: E501 if 'preset_period' in params: query_params.append(('PresetPeriod', params['preset_period'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/MonitorGroup/{monitorGroupGuid}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='MonitorCheckResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_multistep_details(self, monitor_check_id, **kwargs): # noqa: E501 """Returns Multi-Step API details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_multistep_details(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. 
(required) :return: MsaDetailsResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_multistep_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_multistep_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 return data def monitor_check_get_multistep_details_with_http_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns Multi-Step API details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_multistep_details_with_http_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :return: MsaDetailsResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_multistep_details" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_multistep_details`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}/MultiStepAPI', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='MsaDetailsResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_screenshots(self, monitor_check_id, screenshot_id, **kwargs): # noqa: E501 """Gets a specific screenshot for a specified monitor check # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_screenshots(monitor_check_id, screenshot_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the screenshot data for. (required) :param str screenshot_id: The screenshot Id of the screenshot to get. 
(required) :return: ScreenshotResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_screenshots_with_http_info(monitor_check_id, screenshot_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_screenshots_with_http_info(monitor_check_id, screenshot_id, **kwargs) # noqa: E501 return data def monitor_check_get_screenshots_with_http_info(self, monitor_check_id, screenshot_id, **kwargs): # noqa: E501 """Gets a specific screenshot for a specified monitor check # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_screenshots_with_http_info(monitor_check_id, screenshot_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the screenshot data for. (required) :param str screenshot_id: The screenshot Id of the screenshot to get. (required) :return: ScreenshotResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id', 'screenshot_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_screenshots" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_screenshots`") # noqa: E501 # verify the required parameter 'screenshot_id' is set if ('screenshot_id' not in params or params['screenshot_id'] is None): raise ValueError("Missing the required parameter `screenshot_id` when calling `monitor_check_get_screenshots`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 if 'screenshot_id' in params: path_params['screenshotId'] = params['screenshot_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}/Screenshot/{screenshotId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ScreenshotResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_single_monitor_check(self, monitor_check_id, **kwargs): # noqa: E501 """Returns a single monitor check. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_single_monitor_check(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The Id of the monitor check to get the data for. (required) :return: SingleMonitorCheckResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_single_monitor_check_with_http_info(monitor_check_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_single_monitor_check_with_http_info(monitor_check_id, **kwargs) # noqa: E501 return data def monitor_check_get_single_monitor_check_with_http_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns a single monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_single_monitor_check_with_http_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The Id of the monitor check to get the data for. (required) :return: SingleMonitorCheckResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_single_monitor_check" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_single_monitor_check`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SingleMonitorCheckResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_transaction_details(self, monitor_check_id, **kwargs): # noqa: E501 """Returns transaction step details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_transaction_details(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :return: TransactionDetailsResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_transaction_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_transaction_details_with_http_info(monitor_check_id, **kwargs) # noqa: E501 return data def monitor_check_get_transaction_details_with_http_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns transaction step details for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_transaction_details_with_http_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :return: TransactionDetailsResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_transaction_details" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_transaction_details`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}/Transaction', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TransactionDetailsResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def monitor_check_get_waterfall_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns waterfall information for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_waterfall_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :param int step: For transaction waterfalls only: the transaction step to get the waterfall for. :return: WaterfallResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.monitor_check_get_waterfall_info_with_http_info(monitor_check_id, **kwargs) # noqa: E501 else: (data) = self.monitor_check_get_waterfall_info_with_http_info(monitor_check_id, **kwargs) # noqa: E501 return data def monitor_check_get_waterfall_info_with_http_info(self, monitor_check_id, **kwargs): # noqa: E501 """Returns waterfall information for a monitor check. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.monitor_check_get_waterfall_info_with_http_info(monitor_check_id, async_req=True) >>> result = thread.get() :param async_req bool :param int monitor_check_id: The monitor check Id to get the detailed data for. (required) :param int step: For transaction waterfalls only: the transaction step to get the waterfall for. :return: WaterfallResponse If the method is called asynchronously, returns the request thread. """ all_params = ['monitor_check_id', 'step'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method monitor_check_get_waterfall_info" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'monitor_check_id' is set if ('monitor_check_id' not in params or params['monitor_check_id'] is None): raise ValueError("Missing the required parameter `monitor_check_id` when calling `monitor_check_get_waterfall_info`") # noqa: E501 collection_formats = {} path_params = {} if 'monitor_check_id' in params: path_params['monitorCheckId'] = params['monitor_check_id'] # noqa: E501 query_params = [] if 'step' in params: query_params.append(('step', params['step'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['basicauth'] # noqa: E501 return self.api_client.call_api( '/MonitorCheck/{monitorCheckId}/Waterfall', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='WaterfallResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
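# --- Hedged usage sketch (not part of the generated client) ---
# Minimal example of driving the generated MonitorCheckApi. It assumes the
# uptrends package re-exports the usual swagger-codegen Configuration class
# with username/password for the "basicauth" scheme used above; the
# credentials are placeholders.
def example_fetch_latest_checks():
    import uptrends
    config = uptrends.Configuration()
    config.username = "api-login"     # placeholder credential
    config.password = "api-password"  # placeholder credential
    api = MonitorCheckApi(ApiClient(config))
    # Fetch up to 10 recent checks across the whole account
    return api.monitor_check_get_account_monitor_checks(take=10)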
python
"""Compose new Django User models that follow best-practices for international names and authenticate via email instead of username.""" # This file: # 1. define directory as module # 2. set default app config # pylint: disable=invalid-name __version__ = "2.0a1" # https://docs.djangoproject.com/en/stable/ref/applications/#configuring-applications default_app_config = "improved_user.apps.ImprovedUserConfig" # pylint: enable=invalid-name
python
import utm as UTM import math import unittest class UTMTestCase(unittest.TestCase): def assert_utm_equal(self, a, b, precision=6): self.assertAlmostEqual(a[0], b[0], precision) self.assertAlmostEqual(a[1], b[1], precision) self.assertEqual(a[2], b[2]) self.assertEqual(a[3].upper(), b[3].upper()) def assert_latlon_equal(self, a, b, precision=5): self.assertAlmostEqual(a[0], b[0], precision) self.assertAlmostEqual(a[1], b[1], precision) class KnownValuesGRS80(UTMTestCase): # Known UTM values were projected from latitude and longitude values # using GeographicLib (onto GRS80 ellipsoid!). As this library has a # much higher series expansion and a different implementation we can # assume they are more accurate and use this as reference. known_values = [ # Aachen, Germany ( (50.77534556, 6.08388667), (294408.662941387, 5628897.512984829, 32, 'U'), {'northern': True}, ), # New York, USA ( (40.71435000, -74.00597000), (583959.959045332, 4507523.086854665, 18, 'T'), {'northern': True}, ), # Wellington, New Zealand ( (-41.28646000, 174.77623611), (313783.980049117, 5427057.313755062, 60, 'G'), {'northern': False}, ), # Capetown, South Africa ( (-33.92486889, 18.42405500), (261877.350976653, 6243185.700844696, 34, 'H'), {'northern': False}, ), # Mendoza, Argentina ( (-32.89018000, -68.84405000), (514586.227836383, 6360876.825073616, 19, 'h'), {'northern': False}, ), # Fairbanks, Alaska, USA ( (64.83777806, -147.71638889), (466013.322449279, 7190567.781669118, 6, 'W'), {'northern': True}, ), # Ben Nevis, Scotland, UK ( (56.79680000, -5.00601000), (377485.765670114, 6296561.854117111, 30, 'V'), {'northern': True}, ), # Latitude 84 ( (84, -5.00601), (476594.34011230164, 9328501.361833721, 30, 'X'), {'northern': True}, ), ] def test_from_latlon(self): '''from_latlon should give known result with known input''' for latlon, utm, _ in self.known_values: result = UTM.from_latlon(*latlon) self.assert_utm_equal(utm, result) def test_to_latlon(self): '''to_latlon should give known result with known input''' for latlon, utm, utm_kw in self.known_values: result = UTM.to_latlon(*utm) self.assert_latlon_equal(latlon, result) result = UTM.to_latlon(*utm[0:3], **utm_kw) self.assert_latlon_equal(latlon, result) def test_from_latlon_roundtrip(self): '''from_latlon look how good roundtrip fits''' for latlon, utm, utm_kw in self.known_values: utmr = UTM.from_latlon(*latlon) result = UTM.to_latlon(*utmr[0:3], **utm_kw) # we should get the same values as the initial input self.assert_latlon_equal(latlon, result, 5) def test_to_latlon_roundtrip(self): '''to_latlon look how good roundtrip fits''' for latlon, utm, utm_kw in self.known_values: latlonr = UTM.to_latlon(*utm) # disable strict lat/lon range check, because roundtrip # of "Latitude 84" is 84.00000000000011... 
which is outside range result = UTM.from_latlon(*latlonr, strict=False) # we should get the same values as the initial input self.assert_latlon_equal(utm, result, 5) class BadInput(UTMTestCase): def test_from_latlon_range_checks(self): '''from_latlon should fail with out-of-bounds input''' self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, -100, 0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, -80.1, 0) # test valid range for i in range(-8000, 8400 + 1): UTM.from_latlon(i / 100.0, 0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 84.1, 0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 100, 0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, -300) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, -180.1) # test valid range for i in range(-18000, 18000): UTM.from_latlon(0, i / 100.0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 180) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 180.1) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 300) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, -100, -300) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 100, -300) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, -100, 300) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 100, 300) # test range check for parameter force_zone_number UTM.from_latlon(0, 45, force_zone_number=1) UTM.from_latlon(0, 45, force_zone_number=60) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 45, force_zone_number=0) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 45, force_zone_number=-1) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 45, force_zone_number=61) # test range check for parameter strict # test out of range longitudes self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, -180.1, strict=False) UTM.from_latlon(0, -180, strict=False) UTM.from_latlon(0, 0, strict=False) UTM.from_latlon(0, 45, strict=False) UTM.from_latlon(0, 179.999, strict=False) self.assertRaises(UTM.OutOfRangeError, UTM.from_latlon, 0, 180, strict=False) # test out of range latitudes UTM.from_latlon(85.0, 0, strict=False) UTM.from_latlon(84.0, 0, strict=False) UTM.from_latlon(-80.0, 0, strict=False) UTM.from_latlon(-81.0, 0, strict=False) def test_to_latlon_range_checks(self): '''to_latlon should fail with out-of-bounds input''' # validate input self.assertRaises( ValueError, UTM.to_latlon, 500000, 100000, 32, 'U', northern=True) self.assertRaises( ValueError, UTM.to_latlon, 500000, 100000, 32, '') self.assert_latlon_equal((0.904730614584, 9.0), UTM.to_latlon(500000, 100000, 32, '', northern=True)) self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 100000, 32, 'UU') # test easting range self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 0, 5000000, 32, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 99999, 5000000, 32, 'U') # valid range for i in range(100000, 999999 + 1, 1000): UTM.to_latlon(i, 5000000, 32, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 1000000, 5000000, 32, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 100000000000, 5000000, 32, 'U') # test northing range self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, -100000, 32, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, -1, 32, 'U') # valid range for i in range(10, 10000000 + 1, 1000): UTM.to_latlon(500000, i, 32, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 10000001, 32, 'U') self.assertRaises( UTM.OutOfRangeError, 
UTM.to_latlon, 500000, 50000000, 32, 'U') # test zone numbers self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, -1, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 0, 'U') # valid range for i in range(1, 60 + 1): UTM.to_latlon(500000, 5000000, i, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 61, 'U') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 1000, 'U') # test zone letters self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'A') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'B') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'I') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'O') # there are no zone numbers 32, 34 and 36 in X self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'X') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 34, 'X') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 36, 'X') # valid range for i in range(ord('C'), ord('X') + 1): i = chr(i) if i != 'I' and i != 'O': UTM.to_latlon(500000, 5000000, 31, i) self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'Y') self.assertRaises( UTM.OutOfRangeError, UTM.to_latlon, 500000, 5000000, 32, 'Z') class SpecialZones(unittest.TestCase): def assert_zone_equal(self, result, expected_number, expected_letter): self.assertEqual(result[2], expected_number) self.assertEqual(result[3].upper(), expected_letter.upper()) # test 31X, 33X, 35X, 37X def test_zones_X(self): # test lower left and upper left self.assert_zone_equal(UTM.from_latlon(72, 0), 31, 'X') self.assert_zone_equal(UTM.from_latlon(72, 9), 33, 'X') self.assert_zone_equal(UTM.from_latlon(72, 21), 35, 'X') self.assert_zone_equal(UTM.from_latlon(72, 33), 37, 'X') self.assert_zone_equal(UTM.from_latlon(72, 42), 38, 'X') self.assert_zone_equal(UTM.from_latlon(84, 0), 31, 'X') self.assert_zone_equal(UTM.from_latlon(84, 9), 33, 'X') self.assert_zone_equal(UTM.from_latlon(84, 21), 35, 'X') self.assert_zone_equal(UTM.from_latlon(84, 33), 37, 'X') self.assert_zone_equal(UTM.from_latlon(84, 42), 38, 'X') # test inside self.assert_zone_equal(UTM.from_latlon(72, 6), 31, 'X') self.assert_zone_equal(UTM.from_latlon(72, 12), 33, 'X') self.assert_zone_equal(UTM.from_latlon(72, 18), 33, 'X') self.assert_zone_equal(UTM.from_latlon(72, 24), 35, 'X') self.assert_zone_equal(UTM.from_latlon(72, 30), 35, 'X') self.assert_zone_equal(UTM.from_latlon(72, 36), 37, 'X') # test 31V and 32V def test_inside(self): # test 31V self.assert_zone_equal(UTM.from_latlon(56, 0), 31, 'V') self.assert_zone_equal(UTM.from_latlon(56, 2.999999), 31, 'V') # test 32V self.assert_zone_equal(UTM.from_latlon(56, 3), 32, 'V') self.assert_zone_equal(UTM.from_latlon(56, 6), 32, 'V') self.assert_zone_equal(UTM.from_latlon(56, 9), 32, 'V') self.assert_zone_equal(UTM.from_latlon(56, 11.999999), 32, 'V') self.assert_zone_equal(UTM.from_latlon(60, 3), 32, 'V') self.assert_zone_equal(UTM.from_latlon(60, 6), 32, 'V') self.assert_zone_equal(UTM.from_latlon(60, 9), 32, 'V') self.assert_zone_equal(UTM.from_latlon(60, 11.999999), 32, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 3), 32, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 6), 32, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 9), 32, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 11.999999), 32, 'V') def test_left_of(self): 
self.assert_zone_equal(UTM.from_latlon(55.999999, 2.999999), 31, 'U') self.assert_zone_equal(UTM.from_latlon(56, 2.999999), 31, 'V') self.assert_zone_equal(UTM.from_latlon(60, 2.999999), 31, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 2.999999), 31, 'V') self.assert_zone_equal(UTM.from_latlon(64, 2.999999), 31, 'W') def test_right_of(self): self.assert_zone_equal(UTM.from_latlon(55.999999, 12), 33, 'U') self.assert_zone_equal(UTM.from_latlon(56, 12), 33, 'V') self.assert_zone_equal(UTM.from_latlon(60, 12), 33, 'V') self.assert_zone_equal(UTM.from_latlon(63.999999, 12), 33, 'V') self.assert_zone_equal(UTM.from_latlon(64, 12), 33, 'W') def test_below(self): self.assert_zone_equal(UTM.from_latlon(55.999999, 3), 31, 'U') self.assert_zone_equal(UTM.from_latlon(55.999999, 6), 32, 'U') self.assert_zone_equal(UTM.from_latlon(55.999999, 9), 32, 'U') self.assert_zone_equal(UTM.from_latlon(55.999999, 11.999999), 32, 'U') self.assert_zone_equal(UTM.from_latlon(55.999999, 12), 33, 'U') def test_above(self): self.assert_zone_equal(UTM.from_latlon(64, 3), 31, 'W') self.assert_zone_equal(UTM.from_latlon(64, 6), 32, 'W') self.assert_zone_equal(UTM.from_latlon(64, 9), 32, 'W') self.assert_zone_equal(UTM.from_latlon(64, 11.999999), 32, 'W') self.assert_zone_equal(UTM.from_latlon(64, 12), 33, 'W') class TestProject(unittest.TestCase): def test_version(self): self.assertTrue(isinstance(UTM.__version__, str) and '.' in UTM.__version__) class TestForcingAntiMeridian(unittest.TestCase): def assert_equal_lon(self, result, expected_lon): _, lon = UTM.to_latlon(*result[:4], strict=False) self.assertAlmostEqual(lon, expected_lon, 4) def test_force_east(self): # Force point just west of anti-meridian to east zone 1 self.assert_equal_lon( UTM.from_latlon(0, 179.9, 1, 'N'), 179.9) def test_force_west(self): # Force point just east of anti-meridian to west zone 60 self.assert_equal_lon( UTM.from_latlon(0, -179.9, 60, 'N'), -179.9) class TestModAngle(unittest.TestCase): def test_modangle(self): # range: [-pi, pi) # lower bound self.assertAlmostEqual(UTM.mod_angle(-math.pi), -math.pi, 6) self.assertAlmostEqual(UTM.mod_angle(-math.pi + 0.1), -math.pi + 0.1, 6) self.assertAlmostEqual(UTM.mod_angle(-math.pi / 2.0), -math.pi / 2.0, 6) # upper bound self.assertAlmostEqual(UTM.mod_angle(math.pi / 2.0), math.pi / 2.0, 6) self.assertAlmostEqual(UTM.mod_angle(math.pi - 0.1), math.pi - 0.1, 6) self.assertAlmostEqual(UTM.mod_angle(math.pi - 0.00001), math.pi - 0.00001, 6) self.assertAlmostEqual(UTM.mod_angle(math.pi), -math.pi, 6) if __name__ == '__main__': unittest.main() # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
python
# Software License Agreement (Apache 2.0 License) # # Copyright (c) 2021, The Ohio State University # Center for Design and Manufacturing Excellence (CDME) # The Artificially Intelligent Manufacturing Systems Lab (AIMS) # All rights reserved. # # Author: Adam Exley from typing import Union import numpy as np from klampt import WorldModel from ..urdf import URDFReader class ForwardKinematics(): """Calculates the forward kinematics of the robot in the active URDF""" def __init__(self) -> None: self.load() def load(self): u_reader = URDFReader() self.world = WorldModel(u_reader.path) self.robot = self.world.robot(0) # Get link IDs link_ids = [self.robot.link(idx).getName() for idx in range(self.robot.numLinks())] # Get mapping self.link_map = {k:link_ids.index(k) for k in u_reader.mesh_names} self.link_idxs = [x for x in self.link_map.values()] def calc(self, p_in: Union[list, np.ndarray]): """Calculate mesh poses based on joint angles""" angs = np.zeros(self.robot.numLinks()) angs[self.link_idxs[1:]] = p_in # base link does not have angle # Set angles self.robot.setConfig(angs) poses = np.zeros((7,4,4)) # Get pose for idx,i in zip(self.link_idxs, range(len(self.link_idxs))): trans = self.robot.link(idx).getTransform() poses[i,3,3] = 1 poses[i,:3,3] = trans[1] poses[i,:3,:3] = np.reshape(trans[0],(3,3),'F') # Use Fortran mapping for reshape # Returns N x 4 x 4 array return poses
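# --- Usage sketch (added illustration, not part of the original module) ---
# A hedged example of driving the class above; it assumes an active URDF has
# already been configured so that URDFReader() resolves to a 6-DOF arm with a
# fixed base link (hence six joint angles feeding seven mesh links).
if __name__ == "__main__":
    fk = ForwardKinematics()
    joint_angles = np.zeros(6)            # one angle per movable joint (assumed 6-DOF)
    mesh_poses = fk.calc(joint_angles)    # 7 x 4 x 4 homogeneous transforms, one per mesh
    print(mesh_poses.shape)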
python
import os
from pwn import *

class tools():
    def __init__(self, binary, crash):
        self.binary = binary
        self.crash = crash
        # wrap in list() so the result is indexable on Python 3, where filter() returns an iterator
        self.core_list = list(filter(lambda x: "core" in x, os.listdir('.')))
        self.core = self.core_list[0]

    def gdb(self, command):
        # run a single gdb command against the binary and its core dump and return the output
        popen = os.popen('gdb '+self.binary+' '+self.core+' --batch -ex "'+command+'"')
        return popen.read()

    def ROPsearch(self, register):
        # look for a "call <register>" gadget first, then fall back to "jmp <register>"
        popen = os.popen('ROPgadget --binary '+self.binary+' |grep ": call '+register+'"|cut -d \' \' -f1')
        s = popen.read()
        if (s != ''):
            rop = p32(int(s,16))
        else:
            popen = os.popen('ROPgadget --binary '+self.binary+' |grep ": jmp '+register+'"|cut -d \' \' -f1')
            s = popen.read()
            if (s != ''):
                rop = p32(int(s,16))
            else:
                log.info('Can\'t find jmp|call '+register+'')
                rop = -1
        return rop

    def get_data(self, size, addr):
        # read `size` giant words starting at `addr` out of the core dump via gdb's x/ command
        data = str()
        s = self.gdb('x /'+str(size)+'gx '+hex(addr))
        i = size
        j = 1
        while(i):
            aline = s.split(':\t')[j].split('\n')[0]
            if aline == '':
                break
            if(i>1):
                # gdb prints two quadwords per output line; pack both
                data += p64(int(aline.split('\t')[0],16))
                data += p64(int(aline.split('\t')[1],16))
                i -= 2
                if(j <= size/2):
                    j += 1
            else:
                data += p64(int(aline,16))
                i -= 1
        return data
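# --- Usage sketch (added illustration, not part of the original module) ---
# A hedged example of how this helper might be driven once a crash has produced
# a core dump in the working directory; './vuln', 'crash_input' and the address
# 0x0804a000 are hypothetical placeholders.
if __name__ == '__main__':
    t = tools('./vuln', 'crash_input')
    gadget = t.ROPsearch('eax')          # packed address of a "call/jmp eax" gadget, or -1
    leaked = t.get_data(4, 0x0804a000)   # 4 quadwords read out of the core dump
    print(repr(gadget), repr(leaked))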
python
# -*- coding: utf-8 -*- """ Created on Sat Jun 19 10:36:38 2021 @author: mahdi """ import numpy as np from scipy.linalg import toeplitz import matplotlib.pyplot as plt from matplotlib import cm from matplotlib import rc from matplotlib.pyplot import figure import matplotlib.colors as mcolors import matplotlib as mpl from numpy import linalg as LA # %% Figure settings # figure(num=None, figsize=(8, 7), dpi=100, facecolor='w', edgecolor='k') # plt.rcParams['figure.figsize'] = (13, 9) plt.style.use(['default']) # plt.style.use('dracula.mplstyle') rc('font', **{'family': 'serif', 'serif': ['Times']}) font = {'size': 9} mpl.rc('font', **font) plt.rcParams['font.size'] = '9' plt.rcParams["font.family"] = "Times New Roman" # %% Functions def generate_A(n): """ Generate the A toeplitz matrix of input. Parameters ---------- n : int Length of the input data. Returns ------- A: numpy arra The toeplitz input matrix. """ # Bernouli sequence as input U = np.random.binomial(size=n, n=1, p=0.5) # U = np.arange(1, 6) for i in range(len(U)): if U[i] == 0: U[i] = -1 A = toeplitz(U) n_row = A.shape[1] for i in range(n_row): A[i+1:, i] = 0 A = np.transpose(A) return A * 10 def parameters_t(m): """ Generate the paramters vector. Parameters ---------- m : int length of the parameter. Returns ------- None. """ param_vec = np.zeros(m) for i in range(m-1): param_vec[i+1] = 0.3 * np.power(0.5, i) + 3 * i * np.power(0.8, i) return param_vec # %% data_length = 100 A = generate_A(data_length) theta_vec = parameters_t(data_length) fig, ax = plt.subplots() ax.stem(theta_vec) db_r = 15 # SRN in dB y_bar = A @ theta_vec sigma_2 = ((np.sum(np.power(y_bar, 2)))/len(y_bar))/np.power(10, db_r/10) sigma = np.sqrt(sigma_2) w = np.random.normal(0, sigma, len(y_bar)) y = y_bar + w # %% setting parameters m_steps = 10 # m in the paper, range of the maximum order searching n_trials = 3 # number of trials to average over alpha = 4 beta = 4 db_vec = np.arange(0, 20, 0.5) Zsm_upmat = np.zeros((m_steps, len(db_vec)), dtype=np.csingle) Zsm_lomat = np.zeros((m_steps, len(db_vec)), dtype=np.csingle) c = 0 # Zsm_mat[0, :] = np.transpose(db_vec) for db in db_vec: # db_temp = 10 sigma_2 = ((np.sum(np.power(y_bar, 2)))/len(y_bar))/(np.power(10, db/10)) sigma = np.sqrt(sigma_2) Xsm_vec = np.zeros((m_steps, n_trials), dtype=np.csingle) Jsm_vec = np.zeros((m_steps, 1), dtype=np.csingle) Zsm_upvec = np.zeros((m_steps, n_trials), dtype=np.csingle) Zsm_lovec = np.zeros((m_steps, n_trials), dtype=np.csingle) for m in range(n_trials): for i in range(m_steps): Asm = A[:, 0:i+1] theta_m = theta_vec[0:i+1] theta_hat = np.linalg.inv(Asm.transpose() @ Asm) @ Asm.transpose()\ @ y theta_hat.resize(len(y_bar)) # Asm_temp = np.hstack((Asm, np.zeros((data_length, # data_length-(i+1))))) y_hat = A @ theta_hat Xsm = (np.power(LA.norm((y - y_hat), 2), 2))/data_length Xsm_vec[i, m] = Xsm Jsm = np.power(LA.norm((theta_hat - theta_vec), 2), 2) Jsm_vec[i] = Jsm mw = (1 - ((i+1)/data_length)) * sigma_2 Ksm = (2 * alpha * sigma / np.sqrt(data_length)) * np.sqrt( np.power((alpha*sigma), 2) + Xsm - (mw/2) + 0.j) Usm = Xsm - mw + (2 * np.power((alpha * sigma), 2)/data_length)\ + Ksm Lsm = Xsm - mw + (2 * np.power((alpha * sigma), 2)/data_length)\ - Ksm Zsm_up = Usm + (i+1)/data_length * sigma_2 +\ beta * np.sqrt(2 * m) * sigma_2 / data_length Zsm_lo = Lsm + (i+1)/data_length * sigma_2 -\ beta * np.sqrt(2 * m) * sigma_2 / data_length Zsm_upvec[i, m] = Zsm_up Zsm_lovec[i, m] = Zsm_lo Xsm_mean = np.mean(Xsm_vec, axis=1) Zsm_upmean = np.mean(Zsm_upvec, axis=1) Zsm_lomean = 
np.mean(Zsm_lovec, axis=1) Zsm_upmat[:, c] = Zsm_upmean Zsm_lomat[:, c] = Zsm_lomean c = c+1
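# Worked example (added illustration) of the SNR-to-noise-variance conversion used above:
# for db = 15, 10**(db/10) = 10**1.5 ≈ 31.62, so
#     sigma_2 = mean(y_bar**2) / 31.62
# i.e. the injected Gaussian noise carries roughly 1/31.6 of the mean signal power.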
python
import warnings from sympy.testing.pytest import ( raises, warns, ignore_warnings, warns_deprecated_sympy, Failed, ) from sympy.utilities.exceptions import SymPyDeprecationWarning # Test callables def test_expected_exception_is_silent_callable(): def f(): raise ValueError() raises(ValueError, f) # Under pytest raises will raise Failed rather than AssertionError def test_lack_of_exception_triggers_AssertionError_callable(): try: raises(Exception, lambda: 1 + 1) assert False except Failed as e: assert "DID NOT RAISE" in str(e) def test_unexpected_exception_is_passed_through_callable(): def f(): raise ValueError("some error message") try: raises(TypeError, f) assert False except ValueError as e: assert str(e) == "some error message" # Test with statement def test_expected_exception_is_silent_with(): with raises(ValueError): raise ValueError() def test_lack_of_exception_triggers_AssertionError_with(): try: with raises(Exception): 1 + 1 assert False except Failed as e: assert "DID NOT RAISE" in str(e) def test_unexpected_exception_is_passed_through_with(): try: with raises(TypeError): raise ValueError("some error message") assert False except ValueError as e: assert str(e) == "some error message" # Now we can use raises() instead of try/catch # to test that a specific exception class is raised def test_second_argument_should_be_callable_or_string(): raises(TypeError, lambda: raises("irrelevant", 42)) def test_warns_catches_warning(): with warnings.catch_warnings(record=True) as w: with warns(UserWarning): warnings.warn("this is the warning message") assert len(w) == 0 def test_warns_raises_without_warning(): with raises(Failed): with warns(UserWarning): pass def test_warns_hides_other_warnings(): # This isn't ideal but it's what pytest's warns does: with warnings.catch_warnings(record=True) as w: with warns(UserWarning): warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) assert len(w) == 0 def test_warns_continues_after_warning(): with warnings.catch_warnings(record=True) as w: finished = False with warns(UserWarning): warnings.warn("this is the warning message") finished = True assert finished assert len(w) == 0 def test_warns_many_warnings(): # This isn't ideal but it's what pytest's warns does: with warnings.catch_warnings(record=True) as w: finished = False with warns(UserWarning): warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) warnings.warn("this is the other message", RuntimeWarning) finished = True assert finished assert len(w) == 0 def test_warns_match_matching(): with warnings.catch_warnings(record=True) as w: with warns(UserWarning, match="this is the warning message"): warnings.warn("this is the warning message", UserWarning) assert len(w) == 0 def test_warns_match_non_matching(): with warnings.catch_warnings(record=True) as w: with raises(Failed): with warns(UserWarning, match="this is the warning message"): warnings.warn("this is not the expected warning message", UserWarning) assert len(w) == 0 def _warn_sympy_deprecation(): SymPyDeprecationWarning( feature="foo", useinstead="bar", issue=1, deprecated_since_version="0.0.0" ).warn() def test_warns_deprecated_sympy_catches_warning(): with warnings.catch_warnings(record=True) as w: with warns_deprecated_sympy(): _warn_sympy_deprecation() assert len(w) == 0 def 
test_warns_deprecated_sympy_raises_without_warning(): with raises(Failed): with warns_deprecated_sympy(): pass def test_warns_deprecated_sympy_hides_other_warnings(): # This isn't ideal but it's what pytest's deprecated_call does: with warnings.catch_warnings(record=True) as w: with warns_deprecated_sympy(): _warn_sympy_deprecation() warnings.warn("this is the other message", RuntimeWarning) assert len(w) == 0 def test_warns_deprecated_sympy_continues_after_warning(): with warnings.catch_warnings(record=True) as w: finished = False with warns_deprecated_sympy(): _warn_sympy_deprecation() finished = True assert finished assert len(w) == 0 def test_warns_deprecated_sympy_many_warnings(): # This isn't ideal but it's what pytest's warns_deprecated_sympy does: with warnings.catch_warnings(record=True) as w: finished = False with warns_deprecated_sympy(): _warn_sympy_deprecation() warnings.warn("this is the other message", RuntimeWarning) _warn_sympy_deprecation() warnings.warn("this is the other message", RuntimeWarning) warnings.warn("this is the other message", RuntimeWarning) finished = True assert finished assert len(w) == 0 def test_ignore_ignores_warning(): with warnings.catch_warnings(record=True) as w: with ignore_warnings(UserWarning): warnings.warn("this is the warning message") assert len(w) == 0 def test_ignore_does_not_raise_without_warning(): with warnings.catch_warnings(record=True) as w: with ignore_warnings(UserWarning): pass assert len(w) == 0 def test_ignore_allows_other_warnings(): with warnings.catch_warnings(record=True) as w: # This is needed when pytest is run as -Werror # the setting is reverted at the end of the catch_Warnings block. warnings.simplefilter("always") with ignore_warnings(UserWarning): warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) assert len(w) == 1 assert isinstance(w[0].message, RuntimeWarning) assert str(w[0].message) == "this is the other message" def test_ignore_continues_after_warning(): with warnings.catch_warnings(record=True) as w: finished = False with ignore_warnings(UserWarning): warnings.warn("this is the warning message") finished = True assert finished assert len(w) == 0 def test_ignore_many_warnings(): with warnings.catch_warnings(record=True) as w: # This is needed when pytest is run as -Werror # the setting is reverted at the end of the catch_Warnings block. warnings.simplefilter("always") with ignore_warnings(UserWarning): warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) warnings.warn("this is the warning message", UserWarning) warnings.warn("this is the other message", RuntimeWarning) warnings.warn("this is the other message", RuntimeWarning) assert len(w) == 3 for wi in w: assert isinstance(wi.message, RuntimeWarning) assert str(wi.message) == "this is the other message"
python
# They behave like 'sets' from mathematics
# Values must be unique
# Can be used to deduplicate the data from a list that contains duplicates
# Often used for looking up values, since sets are faster than lists
# Sets can grow and shrink as needed

# create an empty instance

# here, a set containing vowels

# This is a quick way to create a new set from the individual letters of a string

# set difference (difference)

# intersection

# union
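# --- Example code (added illustration; only the comments above come from the
# --- original file, the variable names below are assumptions) ---
s = set()                           # create an empty instance
vowels = {'a', 'e', 'i', 'o', 'u'}  # here, a set containing vowels
letters = set('letters')            # set from the letters of a string -> {'l', 'e', 't', 'r', 's'}

print(letters - vowels)             # set difference
print(letters & vowels)             # intersection
print(letters | vowels)             # union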
python
# convert2.py
# A program to convert Celsius temps to Fahrenheit
# This version issues heat and cold warnings.

def main():
    celsius = float(input("What is the Celsius temperature?"))
    fahrenheit = 9/5 * celsius + 32
    print("The temperature is", fahrenheit, "degrees fahrenheit.")
    # Print warnings for extreme temps
    if fahrenheit > 90:
        print("It's really hot out there. Be careful!")
    if fahrenheit < 30:
        print("Brrrrr. Be sure to dress warmly!")

main()
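# Example run (added illustration; the transcript below is reproduced by hand):
#   What is the Celsius temperature?100
#   The temperature is 212.0 degrees fahrenheit.
#   It's really hot out there. Be careful!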
python
import pickle
import os
from pprint import pprint

with open('data.pk', 'rb') as f:
    data = pickle.load(f)

data.reset_index(inplace=True, drop=True)
user_list = set(data['name'])
authors = data.groupby('name')
# pprint(authors.groups)
# print(type(authors.groups))
authors_list = {}
for user, index in authors.groups.items():
    user = user.replace('/',' ').split(' ')[1] # parse nick_name from full_name
    if user == '':
        continue
    # print(user)
    # authors_list[user.split('/')[0].replace(' ','')] = list(index)
    authors_list[user]=list(index)
# print(authors_list)

content_count = {}
existed_user = []
if os.path.exists('user.txt'):
    with open('user.txt', 'r', encoding='utf-8') as fp: # open and read previous users
        while True:
            user = fp.readline()
            if not user:
                break
            existed_user.append(user.replace('\n',''))

for user in authors_list.keys():
    # if user in deleted_user or user == '':
    if user == '':
        continue
    else:
        content_count[user] = len(authors_list[user])

res = sorted(content_count.items(), key=(lambda x:x[1]), reverse=True)

last = 0
last_rank = 0
last_cnt = 0
joint_rank_cnt = 0
for i, content in enumerate(res):
    content = list(content)
    if content[1] == last_cnt:
        print(str(last_rank) + ' place', content[0], str(content[1]) + ' times')
        joint_rank_cnt+=1
    else:
        last_rank+=joint_rank_cnt+1
        joint_rank_cnt = 0
        last_cnt = content[1]
        print(str(last_rank) + ' place ', content[0], str(content[1]) + ' times')
    last = last_rank

for user in existed_user:
    if user not in authors_list.keys(): # If there's someone who didn't say a word
        print(str(last+1+joint_rank_cnt) + ' place ', user, '0 times')

# print(data['name'][5202])

with open('user.txt','w',encoding='utf-8') as fp: # save user_list to user.txt
    for user in authors_list.keys():
        fp.write(user+'\n')
    for user in existed_user:
        if user not in authors_list.keys():
            fp.write(user+'\n')
python
# Write a program that reads a number from 0 to 9999 and shows each of its digits separately on screen.
'''num = str(input('Enter a number from 0 to 9999: '))
print(
    'The number {} is split across the places:\n'
    'units: {}\n'
    'tens: {}\n'
    'hundreds: {}\n'
    'thousands: {}\n'.format(num, num[3], num[2], num[1], num[0])
)
'''
num = int(input('Enter a number from 0 to 9999: '))
n = int(num)
u = n // 1 % 10       # units digit
d = n // 10 % 10      # tens digit
c = n // 100 % 10     # hundreds digit
m = n // 1000 % 10    # thousands digit
print(
    '\033[4;35mThe number \033[31m{}\033[4;35m is split across the places:\033[m\n'
    'units: \033[31m{}\033[m\n'
    'tens: \033[31m{}\033[m\n'
    'hundreds: \033[31m{}\033[m\n'
    'thousands: \033[31m{}\033[m\n'.format(num, u, d, c, m)
)
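# Worked example (added illustration): for an input of 1234,
#   1234 // 1    % 10 == 4  (units)
#   1234 // 10   % 10 == 3  (tens)
#   1234 // 100  % 10 == 2  (hundreds)
#   1234 // 1000 % 10 == 1  (thousands)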
python
from .vault import kubeconfig_context_entry def test_kubeconfig_context_entry_minikube(): mock_context_entry = { 'name': 'minikube', 'context': { 'cluster': 'minikube-cluster', 'user': 'minikube-user', } } assert kubeconfig_context_entry('minikube') == mock_context_entry
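# A hedged sketch (not taken from the .vault module) of an implementation that
# would satisfy the test above; the '<name>-cluster' / '<name>-user' naming
# convention is inferred from the expected dictionary, so treat it as an assumption.
def _kubeconfig_context_entry_sketch(name):
    return {
        'name': name,
        'context': {
            'cluster': name + '-cluster',
            'user': name + '-user',
        },
    }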
python
from dagster import repository from simple_lakehouse.pipelines import simple_lakehouse_pipeline @repository def simple_lakehouse(): return [simple_lakehouse_pipeline]
python
# -*- coding: UTF-8 -*- __license__=""" Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) This software is dual-licenced under the Apache 2.0 and the 2-clauses BSD license. For details, see license.txt """ __version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' __doc__='Dictionary files'
python
from __future__ import absolute_import from __future__ import print_function from keras.datasets import stock_one from keras.models import Sequential from keras.layers.core import Dense, TimeDistributedDense, Dropout, Activation, Merge from keras.regularizers import l2, l1 from keras.constraints import maxnorm from keras.optimizers import SGD, Adam, RMSprop from keras.layers.embeddings import Embedding from keras.layers.recurrent import LSTM, GRU from keras.utils import np_utils from keras.objectives import to_categorical, categorical_crossentropy from keras.datasets import tianchi import numpy as np batch_size = 128 nb_epoch = 2000 norm = 'minmax' hidden_units = 256 step = 1 nb_sample = 100 test_days = 30 n = 2 train_days = 427 - test_days*n tg=-1 train_split = 0.8 features= [0,1] np.random.seed(1337) # for reproducibility def load_data(sz, train_split, norm, step, features): # the data, shuffled and split between tran and test sets (X, Y, mins, maxs) = tianchi.load_data(csv_path='/home/zhaowuxia/dl_tools/datasets/tianchi/total_itp_pca2.csv', norm = norm, sz = sz, maxlen = None, step=step, reverse=False) print(X.shape, Y.shape) sz = X.shape[0] train_sz = max(1, int(sz * train_split)) X_train = X[:train_sz, :, features] y_train = Y[:train_sz, :, features] X_test = X[train_sz:, :, features] y_test = Y[train_sz:, :, features] print(X_train.shape, y_train.shape) print(X_test.shape, y_test.shape) mins = mins[:, features] maxs = maxs[:, features] print(np.fabs(y_train - X_train).mean(), np.fabs(y_test - X_test).mean()) return (X_train, y_train, X_test, y_test, mins, maxs) def build_model(): model = Sequential() #model.add(Embedding(bins, 256)) model.add(LSTM(input_dim=len(features), output_dim=hidden_units,init='glorot_normal', return_sequences=True, truncate_gradient=tg)) #model.add(LSTM(input_dim=hidden_units, output_dim=hidden_units,init='glorot_normal', return_sequences=True, truncate_gradient=tg)) #model.add(Dropout(0.5)) #model.add(LSTM(input_dim=hidden_units, output_dim=hidden_units,init='glorot_normal', return_sequences=True, truncate_gradient=tg)) #model.add(Dropout(0.5)) model.add(TimeDistributedDense(hidden_units, len(features))) #model.add(Activation('relu')) #sgd=SGD(lr=1e-3, momentum=0.95, nesterov=True, clipnorm=5.0) #rms = RMSprop(clipnorm=5.0) model.compile(loss='mae', optimizer='adam') return model def write_csv(save_path, gnd, pred): # gnd: [T, 1] # pred: [T, 1] T = pred.shape[0] with open(save_path, 'w') as f: for j in range(len(features)): f.write('pred,gnd,') f.write('\n') for i in range(T): if i >= len(gnd): for j in range(len(features)): f.write('%.4f,0,'%pred[i][j]) f.write('\n') else: for j in range(len(features)): f.write('%.4f,%.4f,'%(pred[i][j], gnd[i][j])) f.write('\n') def write_ans(save_path, pred): print(pred.shape) T = pred.shape[0] with open(save_path, 'w') as f: for i in range(T): f.write('201409%02d,%d,%d\n'%(i+1, pred[i][0], pred[i][1])) def recurrent_predict(model, x_history, pred_step, return_sequences=True): # x_history : [nb_sample, T, 1] # pred_step : int print('Predicting...') print(x_history.shape, pred_step) T = x_history.shape[1] nb_samples = x_history.shape[0] x = np.zeros([nb_samples, T+pred_step, len(features)]) x[:, :T] = x_history y = [] for i in range(pred_step): if i > 0 and i % 100 == 0: print('%d steps finishes'%i) y=model.predict(x[:, :T+i, :], verbose=0) if return_sequences: x[:, T+i, :] = y[:, T+i-1, :] else: x[:, T+i, :] = y.reshape(x[:, T+i, :].shape) if return_sequences: x[:, 1:T, :] = y[:, :T-1, :] print('Finish predicting') return 
x def compute_loss(gnd, pred): # gnd: [T, k] # pred: [T, k] error = np.fabs(gnd-pred)/gnd mean_error = error.mean(0) for i in mean_error: print('%.4f'%i) return mean_error if __name__=='__main__': (X_train, y_train, X_test, y_test, mins, maxs) = load_data(nb_sample, train_split, norm, step, features) X = X_test.copy().mean(0, keepdims=True) y = y_test.copy().mean(0, keepdims=True) X_train = X_train[:, :train_days] y_train = y_train[:, :train_days] X_test = X_test[:, :train_days+test_days] y_test = y_test[:, :train_days+test_days] print(X_train.shape, y_train.shape) print(X_test.shape, y_test.shape) #write_csv('csv2/train2_1lstm_sz%d.csv'%(nb_sample), X[0], X[0]) model = build_model() #model.load_weights('models2/2fea/train2_1lstm%d_model_mae_sz%d_%d'%(hidden_units, nb_sample, train_days)) model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=1, validation_data=(X_test, y_test), save_path='models2/2fea/train2_1lstm%d_model_mae_sz%d_%d'%(hidden_units, nb_sample, train_days)) model.save_weights('models2/2fea/train2_1lstm%d_model_mae_sz%d_%d_final'%(hidden_units, nb_sample, train_days), overwrite=True) model.load_weights('models2/2fea/train2_1lstm%d_model_mae_sz%d_%d'%(hidden_units, nb_sample, train_days)) score = model.evaluate(X, y, batch_size=batch_size) print('Test score:', score) gnd = np.concatenate((X, y[:,-1:,:]), axis=1).mean(0, keepdims=True) gndo = (gnd[0]+1)/2*(maxs-mins)+mins pred1 = recurrent_predict(model, X[:, :train_days+test_days], 2*test_days, return_sequences=True) write_csv('csv2/2fea/train2_1lstm%d_mae_%d_%d_%d.csv'%(hidden_units, nb_sample, train_days+test_days, 2*test_days), gnd[0], pred1[0]) pred2 = recurrent_predict(model, X[:, :train_days/2], train_days/2+2*test_days, return_sequences=True) write_csv('csv2/2fea/train2_1lstm%d_mae_%d_%d_%d.csv'%(hidden_units, nb_sample, train_days/2, train_days/2+2*test_days), gnd[0], pred2[0]) for step in range(0, test_days*(n-1), test_days): pred = recurrent_predict(model, gnd[:, step:train_days+test_days+step], test_days, return_sequences=True) pred = (pred[0]+1)/2*(maxs-mins)+mins error1 = compute_loss(gndo[train_days+test_days+step:train_days+2*test_days+step], gndo[train_days+step:train_days+test_days+step]) error2 = compute_loss(gndo[train_days+test_days+step:train_days+2*test_days+step], pred[train_days+test_days:train_days+2*test_days]) print('move step = ', step, ': T-1 loss = ', error1, ', rel error = ', error2)
python
import numpy as np import math from scipy.optimize import linear_sum_assignment from contourMergeTrees_helpers import * def branchMappingDistance(nodes1,topo1,rootID1,nodes2,topo2,rootID2,editCost,traceback=False): memT = dict() #=================================================================== # Recursive helper function that computes edit distance between two subtrees rooted in (parent1,curr1),(parent2,curr2) def editDistance_branch(curr1,parent1,curr2,parent2): #=============================================================================== # if both trees are empty, return 0 if(curr1<0 and curr2<0): return 0 #=============================================================================== # If first tree empty, delete entire second subtree if(curr1<0): if((curr1,parent1,curr2,parent2) not in memT): #----------------------------------------------------------------------- # If second subtree has only one branch, return deletion cost of this branch if(len(topo2[curr2])==0): memT[(curr1,parent1,curr2,parent2)] = editCost(None,None,nodes2[curr2],nodes2[parent2]) #----------------------------------------------------------------------- # If second subtree has more than one branch, try all decompositions else: c = float("inf") for child2_mb in topo2[curr2]: c_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue c_ += editDistance_branch(curr1,parent1,child2,curr2) c = min(c,c_) memT[(curr1,parent1,curr2,parent2)] = c return memT[(curr1,parent1,curr2,parent2)] #=============================================================================== # If second tree empty, delete entire first subtree if(curr2<0): if((curr1,parent1,curr2,parent2) not in memT): #----------------------------------------------------------------------- # If first subtree has only one branch, return deletion cost of this branch if(len(topo1[curr1])==0): memT[(curr1,parent1,curr2,parent2)] = editCost(nodes1[curr1],nodes1[parent1],None,None) #----------------------------------------------------------------------- # If first subtree has more than one branch, try all decompositions else: c = float("inf") for child1_mb in topo1[curr1]: c_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue c_ += editDistance_branch(child1,curr1,curr2,parent2) c = min(c,c_) memT[(curr1,parent1,curr2,parent2)] = c return memT[(curr1,parent1,curr2,parent2)] #=============================================================================== # If both trees not empty, find optimal edit operation if((curr1,parent1,curr2,parent2) not in memT): #--------------------------------------------------------------------------- # If both trees only have one branch, return edit cost between the two branches if(len(topo1[curr1])==0 and len(topo2[curr2])==0): memT[(curr1,parent1,curr2,parent2)] = editCost(nodes1[curr1],nodes1[parent1],nodes2[curr2],nodes2[parent2]) #--------------------------------------------------------------------------- # If first tree only has one branch, try all decompositions of second tree elif(len(topo1[curr1])==0): d = float("inf") for child2_mb in topo2[curr2]: d_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue d_ += editDistance_branch(-1,-1,child2,curr2) d = min(d,d_) memT[(curr1,parent1,curr2,parent2)] = d #--------------------------------------------------------------------------- # If second tree only has one branch, try all decompositions of 
first tree elif(len(topo2[curr2])==0): d = float("inf") for child1_mb in topo1[curr1]: d_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue d_ += editDistance_branch(child1,curr1,-1,-1) d = min(d,d_) memT[(curr1,parent1,curr2,parent2)] = d #--------------------------------------------------------------------------- # If both trees have more than one branch, try all decompositions of both trees else: d = float("inf") #----------------------------------------------------------------------- # Try all possible main branches of first tree (child1_mb) and all possible main branches of second tree (child2_mb) # Then try all possible matchings of subtrees # Special case of binary trees is treated differently for performance if(len(topo1[curr1])==2 and len(topo2[curr2])==2): child11 = topo1[curr1][0] child12 = topo1[curr1][1] child21 = topo2[curr2][0] child22 = topo2[curr2][1] d = min(d,editDistance_branch(child11,parent1,child21,parent2) + editDistance_branch(child12,curr1,child22,curr2)) d = min(d,editDistance_branch(child12,parent1,child22,parent2) + editDistance_branch(child11,curr1,child21,curr2)) d = min(d,editDistance_branch(child11,parent1,child22,parent2) + editDistance_branch(child12,curr1,child21,curr2)) d = min(d,editDistance_branch(child12,parent1,child21,parent2) + editDistance_branch(child11,curr1,child22,curr2)) # For non-binary trees use compute distance through maximum matching else: for child1_mb in topo1[curr1]: topo1_ = topo1[curr1].copy() topo1_.remove(child1_mb) for child2_mb in topo2[curr2]: d_ = editDistance_branch(child1_mb,parent1,child2_mb,parent2) topo2_ = topo2[curr2].copy() topo2_.remove(child2_mb) deg = max(len(topo1_),len(topo2_)) matchMatrix = np.zeros((deg,deg)) for i in range(deg): child1 = topo1_[i] if i<len(topo1_) else -1 for j in range(deg): child2 = topo2_[j] if j<len(topo2_) else -1 matchMatrix[i,j] = editDistance_branch(child1,curr1,child2,curr2) row_ind, col_ind = linear_sum_assignment(matchMatrix) d_ += matchMatrix[row_ind, col_ind].sum() d = min(d,d_) #----------------------------------------------------------------------- # Try to continue main branch on one child of first tree and delete all other subtrees # Then match continued branch to current branch in second tree for child1_mb in topo1[curr1]: d_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1 == child1_mb): continue d_ += editDistance_branch(child1,curr1,-1,-1) d = min(d,d_) #----------------------------------------------------------------------- # Try to continue main branch on one child of second tree and delete all other subtrees # Then match continued branch to current branch in first tree for child2_mb in topo2[curr2]: d_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2 == child2_mb): continue d_ += editDistance_branch(-1,-1,child2,curr2) d = min(d,d_) memT[(curr1,parent1,curr2,parent2)] = d return memT[(curr1,parent1,curr2,parent2)] #=================================================================== # Recursive helper function that computes the optimal edit mapping between two subtrees rooted in (parent1,curr1),(parent2,curr2) given the memoization table from distance computation def editDistance_branch_traceback(curr1,parent1,curr2,parent2): #=============================================================================== # base case if(curr1<0 and curr2<0): return [] 
#=============================================================================== # base case (first tree null) if(curr1<0): if(len(topo2[curr2])==0): return [((-1,-1),(curr2,parent2))] else: c = memT[(curr1,parent1,curr2,parent2)] for child2_mb in topo2[curr2]: c_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue c_ += editDistance_branch(curr1,parent1,child2,curr2) if(c==c_): match = editDistance_branch_traceback(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue match += editDistance_branch_traceback(curr1,parent1,child2,curr2) return match #=============================================================================== # base case (second tree null) if(curr2<0): if(len(topo1[curr1])==0): return [((curr1,parent1),(-1,-1))] else: c = memT[(curr1,parent1,curr2,parent2)] for child1_mb in topo1[curr1]: c_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue c_ += editDistance_branch(child1,curr1,curr2,parent2) if(c==c_): match = editDistance_branch_traceback(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue match += editDistance_branch_traceback(child1,curr1,curr2,parent2) return match #=============================================================================== # both trees not null #------------------------------------------------ # both trees leaves if(len(topo1[curr1])==0 and len(topo2[curr2])==0): #print((curr1,parent1)," ",(curr2,parent2)) return [((curr1,parent1),(curr2,parent2))] #------------------------------------------------ # first tree leave elif(len(topo1[curr1])==0): d = memT[(curr1,parent1,curr2,parent2)] for child2_mb in topo2[curr2]: d_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue d_ += editDistance_branch(-1,-1,child2,curr2) if(d==d_): match = editDistance_branch_traceback(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2==child2_mb): continue match += editDistance_branch_traceback(-1,-1,child2,curr2) return match #------------------------------------------------ # second tree leave elif(len(topo2[curr2])==0): d = memT[(curr1,parent1,curr2,parent2)] for child1_mb in topo1[curr1]: d_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue d_ += editDistance_branch(child1,curr1,-1,-1) if(d==d_): match = editDistance_branch_traceback(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1==child1_mb): continue match += editDistance_branch_traceback(child1,curr1,-1,-1) return match #------------------------------------------------ # both trees inner nodes else: d = memT[(curr1,parent1,curr2,parent2)] if(len(topo1[curr1])==2 and len(topo2[curr2])==2): child11 = topo1[curr1][0] child12 = topo1[curr1][1] child21 = topo2[curr2][0] child22 = topo2[curr2][1] if(d == editDistance_branch(child11,parent1,child21,parent2) + editDistance_branch(child12,curr1,child22,curr2)): return editDistance_branch_traceback(child11,parent1,child21,parent2) + editDistance_branch_traceback(child12,curr1,child22,curr2) if(d == editDistance_branch(child12,parent1,child22,parent2) + editDistance_branch(child11,curr1,child21,curr2)): return editDistance_branch_traceback(child12,parent1,child22,parent2) + editDistance_branch_traceback(child11,curr1,child21,curr2) if(d == editDistance_branch(child11,parent1,child22,parent2) + 
editDistance_branch(child12,curr1,child21,curr2)): return editDistance_branch_traceback(child11,parent1,child22,parent2) + editDistance_branch_traceback(child12,curr1,child21,curr2) if(d == editDistance_branch(child12,parent1,child21,parent2) + editDistance_branch(child11,curr1,child22,curr2)): return editDistance_branch_traceback(child12,parent1,child21,parent2) + editDistance_branch_traceback(child11,curr1,child22,curr2) else: for child1_mb in topo1[curr1]: topo1_ = topo1[curr1].copy() topo1_.remove(child1_mb) for child2_mb in topo2[curr2]: d_ = editDistance_branch(child1_mb,parent1,child2_mb,parent2) topo2_ = topo2[curr2].copy() topo2_.remove(child2_mb) deg = max(len(topo1_),len(topo2_)) matchMatrix = np.zeros((deg,deg)) for i in range(deg): child1 = topo1_[i] if i<len(topo1_) else -1 for j in range(deg): child2 = topo2_[j] if j<len(topo2_) else -1 matchMatrix[i,j] = editDistance_branch(child1,curr1,child2,curr2) row_ind, col_ind = linear_sum_assignment(matchMatrix) d_ += matchMatrix[row_ind, col_ind].sum() if(d == d_): match = editDistance_branch_traceback(child1_mb,parent1,child2_mb,parent2) for i in range(len(row_ind)): child1 = topo1_[row_ind[i]] if row_ind[i]<len(topo1_) else -1 child2 = topo2_[col_ind[i]] if col_ind[i]<len(topo2_) else -1 match += editDistance_branch_traceback(child1,curr1,child2,curr2) return match for child1_mb in topo1[curr1]: d_ = editDistance_branch(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1 == child1_mb): continue d_ += editDistance_branch(child1,curr1,-1,-1) if(d==d_): match_ = editDistance_branch_traceback(child1_mb,parent1,curr2,parent2) for child1 in topo1[curr1]: if(child1 == child1_mb): continue match_ += editDistance_branch_traceback(child1,curr1,-1,-1) return match_ for child2_mb in topo2[curr2]: d_ = editDistance_branch(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2 == child2_mb): continue d_ += editDistance_branch(-1,-1,child2,curr2) if(d==d_): match_ = editDistance_branch_traceback(curr1,parent1,child2_mb,parent2) for child2 in topo2[curr2]: if(child2 == child2_mb): continue match_ += editDistance_branch_traceback(-1,-1,child2,curr2) return match_ #=================================================================== # if traceback flag set, return distance and mapping, otherwise only distance if(traceback): return editDistance_branch(topo1[rootID1][0],rootID1,topo2[rootID2][0],rootID2),editDistance_branch_traceback(topo1[rootID1][0],rootID1,topo2[rootID2][0],rootID2) else: return editDistance_branch(topo1[rootID1][0],rootID1,topo2[rootID2][0],rootID2)
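# A hedged example of an editCost callback compatible with branchMappingDistance
# above (illustration only, not the cost function shipped with this code base).
# Branches are taken to be scalar-valued (node, parent) pairs; deleting a branch
# costs its persistence, relabeling costs the L1 distance between endpoints.
def example_edit_cost(n1, p1, n2, p2):
    if n1 is None:                        # branch exists only in the second tree -> deletion
        return abs(n2 - p2)
    if n2 is None:                        # branch exists only in the first tree -> deletion
        return abs(n1 - p1)
    return abs(n1 - n2) + abs(p1 - p2)    # cost of matching the two branches

# distance = branchMappingDistance(nodes1, topo1, root1, nodes2, topo2, root2, example_edit_cost)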
python
from django.contrib import admin from .models import Customer, User admin.site.register(Customer) admin.site.register(User)
python
import numpy as np from models.robots.robot import MujocoRobot from utils.mjcf_utils import xml_path_completion class Sawyer(MujocoRobot): """ Sawyer is a witty single-arm robot designed by Rethink Robotics. """ def __init__( self, pos=[0, 0, 0.913], rot=[0, 0, 0], xml_path="robots/sawyer/robot.xml" ): super().__init__(xml_path_completion(xml_path)) self._setup_base_pose(pos, rot) @property def bottom_offset(self): return np.array([0, 0, 0]) @property def dof(self): return 7 @property def bodies(self): return ["sawyer_link_{}".format(x) for x in range(1, 8)] @property def joints(self): return ["sawyer_joint_{}".format(x) for x in range(1, 8)] @property def actuators(self): return ["sawyer_torq_j{}".format(x) for x in range(1, 8)] @property def contact_geoms(self): return ["sawyer_link_{}_collision".format(x) for x in range(8)] @property def visual_geoms(self): return ["sawyer_link_{}_visual".format(x) for x in range(8)] @property def init_qpos(self): return np.array([0, 0, -1.18, 0.00, 2.18, 0.00, 0.57, -1.57]) @property def base_name(self): return 'sawyer_base' @property def eef_name(self): return "sawyer_right_hand"
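# --- Usage sketch (added illustration, not part of the original module) ---
# A hedged example of inspecting the model; it assumes the sawyer XML asset and
# the surrounding MujocoRobot framework resolve correctly via xml_path_completion.
if __name__ == "__main__":
    sawyer = Sawyer()
    print(sawyer.dof)        # 7
    print(sawyer.joints)     # ['sawyer_joint_1', ..., 'sawyer_joint_7']
    print(sawyer.eef_name)   # 'sawyer_right_hand'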
python
from tool.runners.python import SubmissionPy from collections import defaultdict import operator class JulesSubmission(SubmissionPy): def run(self, s): def find_nearest(points, x, y): min_distance = 1000 curr_nearest_point = -1 number_having_min_distance = 0 for point in points: distance = abs(x - point[0]) + abs(y - point[1]) if distance == min_distance: number_having_min_distance += 1 if distance < min_distance: min_distance = distance number_having_min_distance = 1 curr_nearest_point = points.index(point) assert curr_nearest_point != -1 return curr_nearest_point, number_having_min_distance def calculate_grid(offset): grid = defaultdict(int) for x in range(min_x - offset, max_x + offset): for y in range(min_y - offset, max_y + offset): point, number_having_min_distance = find_nearest(points, x, y) if number_having_min_distance == 1: grid[point] += 1 return grid points = [] for line in s.split('\n'): x, y = line.split(', ') points.append([int(x), int(y)]) x_list = [x[0] for x in points] y_list = [x[1] for x in points] min_x, max_x = min(x_list), max(x_list) min_y, max_y = min(y_list), max(y_list) offset = 1 grid = calculate_grid(offset) grid2 = calculate_grid(offset + 1) x = max_x y = max_y old_score = 0 score = 1 while old_score != score: old_score = score score_grid = 0 score_grid2 = 1 while score_grid != score_grid2: id_point = max(grid.items(), key=operator.itemgetter(1))[0] score_grid = grid[id_point] score_grid2 = grid2[id_point] del grid[id_point] x, y = points[id_point] return score_grid
python
from django.contrib import messages from django.shortcuts import render, get_object_or_404, redirect from applications.filetracking.models import File, Tracking from applications.ps1.models import IndentFile,StockEntry from applications.globals.models import ExtraInfo, HoldsDesignation, Designation from django.template.defaulttags import csrf_token from django.http import HttpResponse, HttpResponseRedirect, JsonResponse from django.contrib.auth.decorators import login_required from django.db import IntegrityError from django.core import serializers from django.contrib.auth.models import User from timeit import default_timer as time from notification.views import office_module_notif @login_required(login_url = "/accounts/login/") def ps1(request): """ The function is used to create indents by faculty. It adds the indent datails to the indet_table of Purchase and Store module @param: request - trivial. @variables: uploader - Employee who creates file. subject - Title of the file. description - Description of the file. upload_file - Attachment uploaded while creating file. file - The file object. extrainfo - The Extrainfo object. holdsdesignations - The HoldsDesignation object. context - Holds data needed to make necessary changes in the template. item_name- Name of the item to be procured quantity - Qunat of the item to be procured present_stock=request.POST.get('present_stock') estimated_cost=request.POST.get('estimated_cost') purpose=request.POST.get('purpose') specification=request.POST.get('specification') indent_type=request.POST.get('indent_type') nature=request.POST.get('nature') indigenous=request.POST.get('indigenous') replaced =request.POST.get('replaced') budgetary_head=request.POST.get('budgetary_head') expected_delivery=request.POST.get('expected_delivery') sources_of_supply=request.POST.get('sources_of_supply') head_approval=False director_approval=False financial_approval=False purchased =request.POST.get('purchased') """ des = HoldsDesignation.objects.all().select_related().filter(user = request.user).first() if str(des.designation) == "student": return redirect('/dashboard') if request.user.extrainfo.id == '132': return redirect("/purchase-and-store/entry/") if request.method =="POST": try: if 'save' in request.POST: uploader = request.user.extrainfo subject = request.POST.get('title') description = request.POST.get('desc') design = request.POST.get('design') designation = Designation.objects.get(id = HoldsDesignation.objects.select_related('user','working','designation').get(id = design).designation_id) upload_file = request.FILES.get('myfile') item_name=request.POST.get('item_name') quantity= request.POST.get('quantity') present_stock=request.POST.get('present_stock') estimated_cost=request.POST.get('estimated_cost') purpose=request.POST.get('purpose') specification=request.POST.get('specification') indent_type=request.POST.get('indent_type') nature=request.POST.get('nature') indigenous=request.POST.get('indigenous') replaced =request.POST.get('replaced') budgetary_head=request.POST.get('budgetary_head') expected_delivery=request.POST.get('expected_delivery') sources_of_supply=request.POST.get('sources_of_supply') head_approval=False director_approval=False financial_approval=False purchased =False file=File.objects.create( uploader=uploader, description=description, subject=subject, designation=designation, upload_file=upload_file ) IndentFile.objects.create( file_info=file, item_name= item_name, quantity=quantity, present_stock=present_stock, 
estimated_cost=estimated_cost, purpose=purpose, specification=specification, indent_type=indent_type, nature=nature, indigenous=indigenous, replaced = replaced , budgetary_head=budgetary_head, expected_delivery=expected_delivery, sources_of_supply=sources_of_supply, head_approval=head_approval, director_approval=director_approval, financial_approval=financial_approval, purchased =purchased, ) if 'send' in request.POST: uploader = request.user.extrainfo subject = request.POST.get('title') description = request.POST.get('desc') design = request.POST.get('design') designation = Designation.objects.get(id = HoldsDesignation.objects.select_related('user','working','designation').get(id = design).designation_id) upload_file = request.FILES.get('myfile') item_name=request.POST.get('item_name') quantity= request.POST.get('quantity') present_stock=request.POST.get('present_stock') estimated_cost=request.POST.get('estimated_cost') purpose=request.POST.get('purpose') specification=request.POST.get('specification') indent_type=request.POST.get('indent_type') nature=request.POST.get('nature') indigenous=request.POST.get('indigenous') replaced =request.POST.get('replaced') budgetary_head=request.POST.get('budgetary_head') expected_delivery=request.POST.get('expected_delivery') sources_of_supply=request.POST.get('sources_of_supply') head_approval=False director_approval=False financial_approval=False purchased = False file = File.objects.create( uploader=uploader, description=description, subject=subject, designation=designation, upload_file=upload_file ) IndentFile.objects.create( file_info=file, item_name= item_name, quantity=quantity, present_stock=present_stock, estimated_cost=estimated_cost, purpose=purpose, specification=specification, indent_type=indent_type, nature=nature, indigenous=indigenous, replaced = replaced , budgetary_head=budgetary_head, expected_delivery=expected_delivery, sources_of_supply=sources_of_supply, head_approval=head_approval, director_approval=director_approval, financial_approval=financial_approval, purchased =purchased, ) current_id = request.user.extrainfo remarks = request.POST.get('remarks') sender = request.POST.get('design') current_design = HoldsDesignation.objects.select_related('user','working','designation').get(id=sender) receiver = request.POST.get('receiver') try: receiver_id = User.objects.get(username=receiver) except Exception as e: messages.error(request, 'Enter a valid Username') return redirect('/filetracking/') receive = request.POST.get('recieve') try: receive_design = Designation.objects.get(name=receive) except Exception as e: messages.error(request, 'Enter a valid Designation') return redirect('/ps1/') upload_file = request.FILES.get('myfile') Tracking.objects.create( file_id=file, current_id=current_id, current_design=current_design, receive_design=receive_design, receiver_id=receiver_id, remarks=remarks, upload_file=upload_file, ) office_module_notif(request.user, receiver_id) messages.success(request,'Indent Filed Successfully!') finally: message = "FileID Already Taken.!!" 
file = File.objects.select_related('uploader__user','uploader__department','designation').all() extrainfo = ExtraInfo.objects.select_related('user','department').all() holdsdesignations = HoldsDesignation.objects.select_related('user','working','designation').all() designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user = request.user) context = { 'file': file, 'extrainfo': extrainfo, 'holdsdesignations': holdsdesignations, 'designations': designations, } return render(request, 'ps1/composeIndent.html', context) # @login_required(login_url = "/accounts/login") # def compose_indent(request): # file = File.objects.select_related('uploader__user','uploader__department','designation').all() # extrainfo = ExtraInfo.objects.select_related('user','department').all() # holdsdesignations = HoldsDesignation.objects.select_related('user','working','designation').all() # designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user = request.user) # context = { # 'file': file, # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, # 'designations': designations, # } # return render(request, 'ps1/composeIndent.html', context) @login_required(login_url = "/accounts/login") def composed_indents(request): """ The function is used to get all the files created by user(employee). It gets all files created by user by filtering file(table) object by user i.e, uploader. It displays user and file details of a file(table) of filetracking(model) in the template of 'Saved files' tab. @param: request - trivial. @variables: draft - The File object filtered by uploader(user). extrainfo - The Extrainfo object. context - Holds data needed to make necessary changes in the template. """ # draft = File.objects.filter(uploader=request.user.extrainfo) # draft = File.objects.filter(uploader=request.user.extrainfo).order_by('-upload_date') # print(File.objects) # extrainfo = ExtraInfo.objects.all() # designation = Designation.objects.get(id=HoldsDesignation.objects.get(user=request.user).designation_id) designation = HoldsDesignation.objects.filter(user=request.user) context = { # 'draft': draft, # 'extrainfo': extrainfo, 'designation': designation, } return render(request, 'ps1/composed_indents.html', context) def drafts(request): """ The function is used to get all the files created by user(employee). It gets all files created by user by filtering file(table) object by user i.e, uploader. It displays user and file details of a file(table) of filetracking(model) in the template of 'Saved files' tab. @param: request - trivial. @variables: draft - The File object filtered by uploader(user). extrainfo - The Extrainfo object. context - Holds data needed to make necessary changes in the template. 
""" # draft = File.objects.filter(uploader=request.user.extrainfo) # draft = File.objects.filter(uploader=request.user.extrainfo).order_by('-upload_date') # print(File.objects) # extrainfo = ExtraInfo.objects.all() # designation = Designation.objects.get(id=HoldsDesignation.objects.get(user=request.user).designation_id) designation = HoldsDesignation.objects.filter(user=request.user) context = { # 'draft': draft, # 'extrainfo': extrainfo, 'designation': designation, } return render(request, 'ps1/drafts.html', context) @login_required(login_url = "/accounts/login") def indentview(request,id): tracking_objects=Tracking.objects.all() tracking_obj_ids=[obj.file_id for obj in tracking_objects] draft_indent = IndentFile.objects.filter(file_info__in=tracking_obj_ids) draft=[indent.file_info.id for indent in draft_indent] draft_files=File.objects.filter(id__in=draft).order_by('-upload_date') indents=[file.indentfile for file in draft_files] extrainfo = ExtraInfo.objects.all() abcd = HoldsDesignation.objects.get(pk=id) s = str(abcd).split(" - ") designations = s[1] context = { 'indents' : indents, 'extrainfo': extrainfo, 'designations': designations, } return render(request, 'ps1/indentview.html', context) @login_required(login_url = "/accounts/login") def draftview(request,id): indents= IndentFile.objects.filter(file_info__in=request.user.extrainfo.uploaded_files.all()).select_related('file_info') indent_ids=[indent.file_info for indent in indents] filed_indents=Tracking.objects.filter(file_id__in=indent_ids) filed_indent_ids=[indent.file_id for indent in filed_indents] draft = list(set(indent_ids) - set(filed_indent_ids)) draft_indent=IndentFile.objects.filter(file_info__in=draft).values("file_info") draft_files=File.objects.filter(id__in=draft_indent).order_by('-upload_date') extrainfo = ExtraInfo.objects.all() abcd = HoldsDesignation.objects.get(pk=id) s = str(abcd).split(" - ") designations = s[1] context = { 'draft': draft_files, 'extrainfo': extrainfo, 'designations': designations, } return render(request, 'ps1/draftview.html', context) @login_required(login_url = "/accounts/login") def indentview2(request,id): indent_files = IndentFile.objects.all().values('file_info') print(indent_files) in_file = Tracking.objects.filter(file_id__in=indent_files,receiver_id=request.user).order_by("-receive_date") #print (File.designation) abcd = HoldsDesignation.objects.get(pk=id) s = str(abcd).split(" - ") designations = s[1] context = { 'in_file': in_file, 'designations': designations, } return render(request, 'ps1/indentview2.html', context) @login_required(login_url = "/accounts/login") def inward(request): """ The function is used to get all the Indent files received by user(employee) from other employees which are filtered from Tracking(table) objects by current user i.e.receiver_id. It displays files received by user from other employees of a Tracking(table) of filetracking(model) in the 'Inbox' tab of template. @param: request - trivial. @variables: in_file - The Tracking object filtered by receiver_id i.e, present working user. context - Holds data needed to make necessary changes in the template. 
""" designation = HoldsDesignation.objects.filter(user=request.user) in_file=Tracking.objects.filter(receiver_id=request.user).order_by('-receive_date') context = { 'in_file': in_file, 'designation': designation, } return render(request, 'ps1/inwardIndent.html', context) @login_required(login_url = "/accounts/login") def confirmdelete(request,id): file = File.objects.get(pk = id) context = { 'j': file, } return render(request, 'ps1/confirmdelete.html',context) @login_required(login_url = "/accounts/login") def forwardindent(request, id): """ The function is used to forward Indent files received by user(employee) from other employees which are filtered from Tracking(table) objects by current user i.e. receiver_id to other employees. It also gets track of file created by uploader through all users involved in file along with their remarks and attachments It displays details file of a File(table) and remarks and attachments of user involved in file of Tracking(table) of filetracking(model) in the template. @param: request - trivial. id - id of the file object which the user intends to forward to other employee. @variables: file - The File object. track - The Tracking object. remarks = Remarks posted by user. receiver = Receiver to be selected by user for forwarding file. receiver_id = Receiver_id who has been selected for forwarding file. upload_file = File attached by user. extrainfo = ExtraInfo object. holdsdesignations = HoldsDesignation objects. context - Holds data needed to make necessary changes in the template. """ # start = timer() # end = timer() indent=IndentFile.objects.select_related('file_info').get(file_info=id) file=indent.file_info # start = timer() track = Tracking.objects.select_related('file_id__uploader__user','file_id__uploader__department','file_id__designation','current_id__user','current_id__department', 'current_design__user','current_design__working','current_design__designation','receiver_id','receive_design').filter(file_id=file) # end = timer() if request.method == "POST": if 'finish' in request.POST: file.complete_flag = True file.save() if 'send' in request.POST: current_id = request.user.extrainfo remarks = request.POST.get('remarks') sender = request.POST.get('sender') current_design = HoldsDesignation.objects.select_related('user','working','designation').get(id=sender) receiver = request.POST.get('receiver') try: receiver_id = User.objects.get(username=receiver) except Exception as e: messages.error(request, 'Enter a valid destination') designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations': designations, 'file': file, 'track': track, } return render(request, 'ps1/forwardindent.html', context) receive = request.POST.get('recieve') try: receive_design = Designation.objects.get(name=receive) except Exception as e: messages.error(request, 'Enter a valid Designation') designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations': designations, 'file': file, 'track': track, } return render(request, 'ps1/forwardindent.html', context) # receive_design = receive_designation[0] upload_file = request.FILES.get('myfile') # return HttpResponse ("success") Tracking.objects.create( file_id=file, current_id=current_id, current_design=current_design, 
receive_design=receive_design, receiver_id=receiver_id, remarks=remarks, upload_file=upload_file, ) check=str(request.user) val=str(request.POST.get('approval')) # if val=="accept": # print("correct") # if check=="ptandon" or check=="atul" or check=="prabin16" or check=="subirs" or check=="prabir": # indent.head_approval=True # elif check=="director": # indent.director_approval=True # elif check=="rizwan": # indent.financial_approval=True # else: # if check=="ptandon" or check=="atul" or check=="prabin16" or check=="subirs" or check=="prabir": # indent.head_approval=False # elif check=="director": # indent.director_approval=False # elif check=="rizwan": # indent.financial_approval=False designs =[] designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) for designation in designations : s = str(designation).split(" - ") designs.append(s[1]) if val=="accept": if any(d in designs for d in ("HOD (ME)", "HOD (ECE)", "CSE HOD", "HOD (Design)", "HOD (NS)")): indent.head_approval=True elif "Director" in designs: indent.director_approval=True indent.financial_approval=True else: if any(d in designs for d in ("HOD (ME)", "HOD (ECE)", "CSE HOD", "HOD (Design)", "HOD (NS)")): indent.head_approval=False elif "Director" in designs: indent.director_approval=False indent.financial_approval=False indent.save() messages.success(request, 'Indent File sent successfully') # start = timer() extrainfo = ExtraInfo.objects.select_related('user','department').all() holdsdesignations = HoldsDesignation.objects.select_related('user','working','designation').all() designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations':designations, 'file': file, 'track': track, 'indent':indent, } return render(request, 'ps1/forwardindent.html', context) @login_required(login_url = "/accounts/login") def createdindent(request, id): """ The function is used to forward created indent files by user(employee) . @param: request - trivial. id - id of the file object which the user intends to forward to other employee. @variables: file - The File object. track - The Tracking object. remarks = Remarks posted by user. receiver = Receiver to be selected by user for forwarding file. receiver_id = Receiver_id who has been selected for forwarding file. upload_file = File attached by user. extrainfo = ExtraInfo object. holdsdesignations = HoldsDesignation objects. context - Holds data needed to make necessary changes in the template. 
""" # start = timer() # end = timer() indent=IndentFile.objects.select_related('file_info').get(file_info=id) file=indent.file_info # start = timer() track = Tracking.objects.select_related('file_id__uploader__user','file_id__uploader__department','file_id__designation','current_id__user','current_id__department', 'current_design__user','current_design__working','current_design__designation','receiver_id','receive_design').filter(file_id=file) # end = timer() if request.method == "POST": if 'finish' in request.POST: file.complete_flag = True file.save() if 'send' in request.POST: current_id = request.user.extrainfo remarks = request.POST.get('remarks') sender = request.POST.get('sender') current_design = HoldsDesignation.objects.select_related('user','working','designation').get(id=sender) receiver = request.POST.get('receiver') try: receiver_id = User.objects.get(username=receiver) except Exception as e: messages.error(request, 'Enter a valid destination') designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations': designations, 'file': file, 'track': track, } return render(request, 'ps1/createdindent.html', context) receive = request.POST.get('recieve') try: receive_design = Designation.objects.get(name=receive) except Exception as e: messages.error(request, 'Enter a valid Designation') designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations': designations, 'file': file, 'track': track, } return render(request, 'ps1/createdindent.html', context) # receive_design = receive_designation[0] upload_file = request.FILES.get('myfile') # return HttpResponse ("success") Tracking.objects.create( file_id=file, current_id=current_id, current_design=current_design, receive_design=receive_design, receiver_id=receiver_id, remarks=remarks, upload_file=upload_file, ) messages.success(request, 'Indent File sent successfully') # start = timer() extrainfo = ExtraInfo.objects.select_related('user','department').all() holdsdesignations = HoldsDesignation.objects.select_related('user','working','designation').all() designations = HoldsDesignation.objects.select_related('user','working','designation').filter(user=request.user) context = { # 'extrainfo': extrainfo, # 'holdsdesignations': holdsdesignations, 'designations':designations, 'file': file, 'track': track, 'indent':indent, } return render(request, 'ps1/createdindent.html', context) def AjaxDropdown1(request): print('brefore post') if request.method == 'POST': value = request.POST.get('value') # print(value) hold = Designation.objects.filter(name__startswith=value) # for h in hold: # print(h) print('secnod method') holds = serializers.serialize('json', list(hold)) context = { 'holds' : holds } return HttpResponse(JsonResponse(context), content_type='application/json') def AjaxDropdown(request): print('asdasdasdasdasdasdasdas---------------\n\n') # Name = ['student','co-ordinator','co co-ordinator'] # design = Designation.objects.filter(~Q(name__in=(Name))) # hold = HoldsDesignation.objects.filter(Q(designation__in=(design))) # arr = [] # for h in hold: # arr.append(ExtraInfo.objects.filter(user=h.user)) if request.method == 'POST': value = request.POST.get('value') # print(value) users = User.objects.filter(username__startswith=value) users = 
serializers.serialize('json', list(users)) context = { 'users': users } return HttpResponse(JsonResponse(context), content_type='application/json') def test(request): return HttpResponse('success') @login_required(login_url = "/accounts/login") def delete(request,id): file = File.objects.get(pk = id) file.delete() # Not required #draft = File.objects.filter(uploader=request.user.extrainfo) #extrainfo = ExtraInfo.objects.all() #context = { # 'draft': draft, # 'extrainfo': extrainfo, #} #problem over here no need of render since it doesnot affect the url #return render(request, 'filetracking/drafts.html', context) return redirect('/ps1/composed_indents/') @login_required(login_url = "/accounts/login") def Stock_Entry(request): if request.method=='GET' : return HttpResponseRedirect('../stock_view') if request.method =="POST": #dealing_assistant_id=request.POST.get('dealing_assistant_id') id=request.POST.get('id') temp1=File.objects.get(id=id) temp=IndentFile.objects.get(file_info=temp1) dealing_assistant_id=request.user.extrainfo item_id=temp item_name=request.POST.get('item_name') vendor=request.POST.get('vendor') current_stock=request.POST.get('current_stock') recieved_date=request.POST.get('recieved_date') bill=request.FILES.get('bill') # staff=Staff.objects.get(id=request.user.extrainfo) StockEntry.objects.create(item_id=item_id,item_name= item_name,vendor=vendor,current_stock=current_stock,dealing_assistant_id=dealing_assistant_id,bill=bill,recieved_date=recieved_date,) IndentFile.objects.filter(file_info=temp).update(purchased=True) return HttpResponseRedirect('../stock_view') @login_required(login_url = "/accounts/login") def stock_edit(request): # stocks=StockEntry.objects.get(pk=id) # return render(request,'ps1/stock_edit.html',{'StockEntry':stocks}) if request.method =="POST": id=request.POST.get('id') temp=File.objects.get(id=id) temp1=IndentFile.objects.get(file_info=temp) stocks=StockEntry.objects.get(item_id=temp1) return render(request,'ps1/stock_edit.html',{'StockEntry':stocks}) # if 'save' in request.POST: # stocks.item_name=request.POST.get('item_name') # stocks.vendor=request.POST.get('vendor') # stocks.current_stock=request.POST.get('current_stock') # stocks.recieved_date=request.POST.get('recieved_date') # stocks.bill=request.FILES.get('bill') # stocks.save() return HttpResponseRedirect('../stock_view') #else: # print("ELSE") # return render(request,'ps1/stock_edit.html',{'StockEntry':stocks}) def stock_update(request): if request.method =="POST": if 'save' in request.POST: id=request.POST.get('id') temp=File.objects.get(id=id) temp1=IndentFile.objects.get(file_info=temp) stocks=StockEntry.objects.get(item_id=temp1) stocks.item_name=request.POST.get('item_name') stocks.vendor=request.POST.get('vendor') stocks.current_stock=request.POST.get('current_stock') #stocks.recieved_date=request.POST.get('recieved_date') stocks.bill=request.FILES.get('bill') stocks.save() return HttpResponseRedirect('../stock_view') # def stock_view(request): # sto=StockEntry.objects.all() # return render(request,'ps1/stock_view.html',{'StockEntry':sto}) # @login_required(login_url = "/accounts/login") def stock_view(request): sto=StockEntry.objects.all() if sto: temp=sto.first() if temp.item_id.purchased: print("Purchase Succesful") print() print() return render(request,'ps1/stock_view.html',{'sto':sto}) @login_required(login_url = "/accounts/login") def stock_delete(request): if request.method=='POST': id=request.POST.get('id') #temp1=IndentFile.objects.get(id=id) temp=File.objects.get(id=id) 
temp1=IndentFile.objects.get(file_info=temp) stocks=StockEntry.objects.get(item_id=temp1) stocks.delete() return HttpResponseRedirect('../stock_view') @login_required(login_url = "/accounts/login") def entry(request): if request.method=='POST': id=request.POST.get('id') temp=File.objects.get(id=id) temp1=IndentFile.objects.get(file_info=temp) return render(request,'ps1/StockEntry.html',{'id':id, 'indent':temp1}) ent=IndentFile.objects.all() return render(request,'ps1/entry.html',{'ent':ent}) def dealing_assistant(request): print(request.user.extrainfo.id) print(type(request.user.extrainfo.id)) if request.user.extrainfo.id=='132' : return redirect('/ps1/entry/') else: return redirect('/ps1')
python
from pygame.mixer import Channel from pygame_menu import Menu from pygame_menu.themes import Theme from pygame_menu.baseimage import BaseImage from pygame_menu.baseimage import IMAGE_MODE_SIMPLE from pygame_menu.widgets import MENUBAR_STYLE_NONE from pygame_menu.widgets.selection.none import NoneSelection from pygame_menu.sound import Sound from pygame_menu.sound import SOUND_TYPE_CLICK_MOUSE from pygame_menu.sound import SOUND_TYPE_WIDGET_SELECTION from pygame_menu import events import serious_pysam.config as c class MainMenu(Menu): """Menu class for main menu and pause menu. This class depends on pygame_menu. For more details, see the docs: https://github.com/ppizarror/pygame-menu """ def __init__(self): """ _base_image - image for background _selection - selection mode _theme - how the menu will look event_quit - event for quit from menu event_back - event for back to previous menu _menu_sound - object for menu music """ _base_image = BaseImage(image_path=c.MENU_BACKGROUND_IMAGE, drawing_mode=IMAGE_MODE_SIMPLE) _selection = NoneSelection() _theme = Theme(background_color=_base_image, title_shadow=False, title_background_color=c.BLACK_COLOR, title_bar_style=MENUBAR_STYLE_NONE, selection_color=c.MENU_SELECTION_COLOR, widget_font=c.LABEL_FONT_NAME, widget_font_color=c.MENU_FONT_COLOR, widget_font_size=c.MENU_FONT_SIZE, widget_selection_effect=_selection ) Menu.__init__(self, c.WINDOW_HEIGHT, c.WINDOW_WIDTH, c.MENU_TITLE, mouse_motion_selection=True, theme=_theme, center_content=True) self.event_quit = events.EXIT self.event_back = events.BACK _menu_sound = MenuSound() self.set_sound(_menu_sound, recursive=True) class MenuSound(Sound): """Class for turning on music during menu.""" def __init__(self): """ _channel - pygame channel for music """ Sound.__init__(self) self._channel = Channel(3) self.set_sound(SOUND_TYPE_CLICK_MOUSE, c.MENU_SOUND_CLICK, volume=1.0) self.set_sound(SOUND_TYPE_WIDGET_SELECTION, c.MENU_SOUND_SELECT, volume=1.0)
python
import logging import tensorflow as tf import ray from replay.func import create_local_buffer from algo.apex.actor import Monitor logger = logging.getLogger(__name__) def disable_info_logging(config, display_var=False, save_code=False, logger=False, writer=False): config['display_var'] = display_var config['save_code'] = save_code config['logger'] = logger config['writer'] = writer return config def ray_remote_config(config, name, default_cpus=None, default_gpus=None): ray_config = {} if config.setdefault(f'n_{name}_cpus', default_cpus): ray_config['num_cpus'] = config[f'n_{name}_cpus'] if name.lower() == 'learner': # for learner, we set the default number of gpus # to the maximum number of gpus available if # default_gpus is not specified n_gpus = config.setdefault(f'n_{name}_gpus', default_gpus or len(tf.config.list_physical_devices('GPU'))) else: n_gpus = config.setdefault(f'n_{name}_gpus', default_gpus) if n_gpus: ray_config['num_gpus'] = n_gpus return ray_config def create_monitor(config): config = config.copy() RayMonitor = Monitor.as_remote() monitor = RayMonitor.remote(config=config) return monitor def create_learner( Learner, model_fn, replay, config, model_config, env_config, replay_config): config = config.copy() model_config = model_config.copy() env_config = env_config.copy() replay_config = replay_config.copy() config = disable_info_logging(config, display_var=True) # avoids additional workers created by RayEnvVec env_config['n_workers'] = 1 ray_config = ray_remote_config(config, 'learner') RayLearner = Learner.as_remote(**ray_config) learner = RayLearner.remote( model_fn=model_fn, replay=replay, config=config, model_config=model_config, env_config=env_config, replay_config=replay_config) ray.get(learner.save_config.remote(dict( env=env_config, model=model_config, agent=config, replay=replay_config ))) return learner def create_worker( Worker, worker_id, model_fn, config, model_config, env_config, buffer_config): config = config.copy() model_config = model_config.copy() env_config = env_config.copy() buffer_config = buffer_config.copy() config = disable_info_logging(config) buffer_fn = create_local_buffer if 'seed' in env_config: env_config['seed'] += worker_id * 100 # avoids additional workers created by RayEnvVec env_config['n_workers'] = 1 ray_config = ray_remote_config(config, 'worker') RayWorker = Worker.as_remote(**ray_config) worker = RayWorker.remote( worker_id=worker_id, config=config, model_config=model_config, env_config=env_config, buffer_config=buffer_config, model_fn=model_fn, buffer_fn=buffer_fn) return worker def create_evaluator(Evaluator, model_fn, config, model_config, env_config): config = config.copy() model_config = model_config.copy() env_config = env_config.copy() config = disable_info_logging(config) config['schedule_act_eps'] = False config['schedule_act_temp'] = False if 'seed' in env_config: env_config['seed'] += 999 env_config['n_workers'] = 1 env_config['n_envs'] = env_config.pop('n_eval_envs', 4) RayEvaluator = Evaluator.as_remote(num_cpus=1) evaluator = RayEvaluator.remote( config=config, model_config=model_config, env_config=env_config, model_fn=model_fn) return evaluator
python
from PIL import Image
import os

# 5:7 Aspect ratio that is larger than cardface pngs
CARD_SIZE = (260, 364)


# adds background to transparent card faces found in /card_faces
def add_background(path):
    img = Image.open(path)
    dimensions = img.size
    background = Image.open('card_background.png')
    bg_w, bg_h = background.size
    # centers cardface on card
    offset = ((bg_w - dimensions[0]) // 2, (bg_h - dimensions[1]) // 2)
    background.paste(img, offset, img)
    # the composited card (face pasted onto background) is what gets saved
    background.save(f'cards/{path.split("/")[-1]}')


card_faces = os.listdir('card_faces')
for card in card_faces:
    add_background(f'card_faces/{card}')
python
# -*- coding: utf-8 -*- # Copyright (c) 2012-2020, Anima Istanbul # # This module is part of anima-tools and is released under the MIT # License: http://www.opensource.org/licenses/MIT import logging import unittest import sys from anima.ui import IS_PYSIDE, IS_PYQT4, reference_editor logger = logging.getLogger('anima.ui.reference_editor') if IS_PYSIDE(): logger.debug('environment is set to pyside, importing pyside') from PySide import QtCore, QtGui elif IS_PYQT4(): logger.debug('environment is set to pyqt4, importing pyqt4') import sip sip.setapi('QString', 2) sip.setapi('QVariant', 2) from PyQt4 import QtCore, QtGui class ReferenceEditorTestCase(unittest.TestCase): def setUp(self): """set up the test environment """ if not QtGui.QApplication.instance(): logger.debug('creating a new QApplication') self.app = QtGui.QApplication(sys.argv) else: logger.debug('using the present QApplication: %s' % QtGui.qApp) # self.app = QtGui.qApp self.app = QtGui.QApplication.instance() def tearDown(self): """clean up the test environment """ pass def show_dialog(self, dialog): """show the given dialog """ dialog.show() self.app.exec_() self.app.connect( self.app, QtCore.SIGNAL("lastWindowClosed()"), self.app, QtCore.SLOT("quit()") ) def test_close_button_closes_the_UI(self): """testing if the close button is closing the UI when clicked """ dialog = reference_editor.MainDialog() self.show_dialog(dialog) #QTest.mouseClick(dialog.button_box.buttons()[0], Qt.LeftButton) self.assertFalse(dialog.isVisible())
python
#!/usr/bin/python3 # # Read multiple yaml files output one combined json file # # This source file is Copyright (c) 2021, FERMI NATIONAL # ACCELERATOR LABORATORY. All rights reserved. import os import sys import yaml import json prog = 'parseconfig.py' def efatal(msg, e, code=1): print(prog + ': ' + msg + ': ' + str(e), file=sys.stderr) sys.exit(code) def debug(msg): # print(msg) return combined = {} def merge(old, new): debug('type old: ' + str(type(old)) + ', type new: ' + str(type(new))) if old is None: return new if new is None: return old if type(new) is dict: if type(old) is not dict: raise Exception('type ' + str(type(new)) + ' does not match type ' + str(type(old))) for key in old: debug('old has key ' + key) for key in new: debug('checking new key ' + key) val = new[key] if key in old: try: old[key] = merge(old[key], new[key]) except Exception as e: raise Exception('error merging ' + key + ': ' + str(e)) else: old[key] = new[key] for key in old: debug('combined has key ' + key) return old if type(new) is list: if type(old) is not list: raise Exception('type ' + str(type(new)) + ' does not match type ' + str(type(old))) combinedlist = [] knownnames = set() for oldval in old: if type(oldval) is dict and 'name' in oldval: for newval in new: if 'name' in newval and newval['name'] == oldval['name']: knownnames.add(newval['name']) try: debug('merging ' + newval['name']) combinedlist.append(merge(oldval, newval)) except Exception as e: raise Exception('error merging ' + newval['name'] + ': ' + str(e)) if oldval['name'] not in knownnames: debug('adding unmerged ' + oldval['name']) knownnames.add(oldval['name']) combinedlist.append(oldval) else: debug('adding non-named dict') combinedlist.append(oldval) for newval in new: if type(newval) is not dict or 'name' not in newval or newval['name'] not in knownnames: debug('adding new item ' + str(newval) + ' to ' + str(knownnames)) combinedlist.append(newval) return combinedlist debug('returning non-dict non-list ' + str(new)) return new files = [] for f in sys.argv[1:]: if os.path.isdir(f): for f2 in sorted(os.listdir(f)): files.append(f + '/' + f2) else: files.append(f) for f in files: if f[-5:] != '.yaml': continue try: with open(f) as fd: data = yaml.load(fd) except Exception as e: efatal('error loading yaml in ' + f, e) debug('merging ' + f +': ' + str(json.dumps(data))) try: combined = merge(combined, data) except Exception as e: efatal('error merging data from ' + f, e) debug('combined: ' + str(json.dumps(combined))) print(str(json.dumps(combined, indent=4, sort_keys=True)))
python
#!/usr/bin/env python # -*- coding:utf-8 -*- from __future__ import print_function import os import sys from PIL import Image if __name__ == "__main__": infile = sys.argv[1] outfile = os.path.splitext(infile)[0] + ".transpose.png" if infile != outfile: try: with Image.open(infile) as im: # im = im.resize((128, 128)) # im = im.rotate(45) # im = im.transpose(Image.FLIP_LEFT_RIGHT) # im = im.transpose(Image.FLIP_TOP_BOTTOM) # im = im.transpose(Image.ROTATE_90) im = im.transpose(Image.ROTATE_180) # im = im.transpose(Image.ROTATE_270) im.save(outfile) except IOError: print("cannot convert", infile)
python
A_1101_10 = {0: {'A': 1.5, 'C': -1.0, 'E': -2.3, 'D': -2.3, 'G': 0.0, 'F': -2.4, 'I': 0.5, 'H': -1.5, 'K': -2.3, 'M': -1.4, 'L': -2.9, 'N': -2.0, 'Q': 0.6, 'P': -2.2, 'S': 1.5, 'R': -2.3, 'T': -1.8, 'W': -1.3, 'V': -2.2, 'Y': -1.9}, 1: {'A': 0.3, 'C': -1.2, 'E': -2.7, 'D': -2.6, 'G': -2.9, 'F': -2.0, 'I': 0.0, 'H': -1.8, 'K': -2.6, 'M': -1.1, 'L': -0.5, 'N': -2.3, 'Q': -2.1, 'P': -2.5, 'S': 0.4, 'R': -2.5, 'T': 1.7, 'W': -1.1, 'V': 1.0, 'Y': 1.1}, 2: {'A': -3.1, 'C': 1.2, 'E': -3.2, 'D': -3.3, 'G': -0.3, 'F': 2.0, 'I': -2.4, 'H': -1.8, 'K': -3.1, 'M': 0.9, 'L': 0.1, 'N': -2.9, 'Q': 0.9, 'P': 1.0, 'S': -3.1, 'R': -3.1, 'T': -2.9, 'W': -0.9, 'V': -2.7, 'Y': 1.2}, 3: {'A': -2.6, 'C': -1.5, 'E': 0.4, 'D': 1.0, 'G': -3.0, 'F': -1.9, 'I': -2.3, 'H': -1.5, 'K': -2.4, 'M': -1.1, 'L': 0.6, 'N': -2.0, 'Q': 0.4, 'P': -2.6, 'S': 1.0, 'R': -2.4, 'T': 0.1, 'W': -1.1, 'V': -2.4, 'Y': 1.6}, 4: {'A': -2.6, 'C': -1.8, 'E': -2.5, 'D': 0.1, 'G': 1.1, 'F': -2.8, 'I': -3.0, 'H': -1.8, 'K': 0.0, 'M': -1.8, 'L': -0.5, 'N': -2.1, 'Q': 1.0, 'P': 0.7, 'S': 0.5, 'R': 0.9, 'T': -2.4, 'W': 2.0, 'V': -3.1, 'Y': -2.4}, 5: {'A': -0.2, 'C': 1.2, 'E': -2.7, 'D': 0.6, 'G': -3.2, 'F': 0.8, 'I': -2.8, 'H': -2.0, 'K': -0.1, 'M': 0.9, 'L': -0.5, 'N': -2.3, 'Q': -2.2, 'P': 1.3, 'S': 0.3, 'R': 0.0, 'T': 0.0, 'W': 1.5, 'V': -2.9, 'Y': -2.0}, 6: {'A': -2.6, 'C': -1.5, 'E': -2.7, 'D': -2.8, 'G': -0.2, 'F': 0.9, 'I': 0.7, 'H': -1.9, 'K': 0.9, 'M': 1.1, 'L': -0.4, 'N': -2.4, 'Q': -2.1, 'P': 0.1, 'S': 0.3, 'R': -2.3, 'T': 0.0, 'W': 1.6, 'V': 0.5, 'Y': -1.8}, 7: {'A': -2.6, 'C': -1.2, 'E': -2.7, 'D': 0.0, 'G': -3.2, 'F': 0.9, 'I': 0.2, 'H': -1.8, 'K': -2.5, 'M': -0.7, 'L': 0.8, 'N': 0.8, 'Q': -2.1, 'P': -2.7, 'S': -2.6, 'R': 0.2, 'T': 0.3, 'W': -1.0, 'V': 1.3, 'Y': -1.6}, 8: {'A': 0.3, 'C': -1.2, 'E': -2.6, 'D': 0.0, 'G': -2.8, 'F': 1.3, 'I': -2.0, 'H': -1.6, 'K': 0.0, 'M': -1.0, 'L': 0.4, 'N': -2.2, 'Q': -2.0, 'P': -2.5, 'S': 0.8, 'R': -2.4, 'T': 0.6, 'W': -0.9, 'V': 0.0, 'Y': 0.9}, 9: {'A': -2.7, 'C': -1.9, 'E': -2.0, 'D': -2.3, 'G': -2.9, 'F': -3.4, 'I': -3.1, 'H': -1.4, 'K': 2.6, 'M': -1.8, 'L': -3.6, 'N': -1.9, 'Q': -1.2, 'P': -2.3, 'S': -2.4, 'R': 0.8, 'T': -2.4, 'W': -1.5, 'V': -3.2, 'Y': -2.1}}
python
# Generated by Django 1.11.3 on 2017-07-07 19:21 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): # noqa initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Order', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slug', models.SlugField()), ('coordinator', models.CharField(max_length=100)), ('restaurant_name', models.CharField(max_length=250)), ('restaurant_url', models.URLField(blank=True)), ('state', models.CharField(choices=[('preparing', 'Order is prepared, order items can be modified.'), ('ordering', 'Order is locked and sent to delivery service by coordinator.'), ('ordered', 'Order has been sent to delivery service.'), ('delivered', 'Delivery has arrived.'), ('canceled', 'Order has been canceled due to some reason.')], default='preparing', max_length=16)), ('created_at', models.DateTimeField(auto_now_add=True)), ('preparation_expires_after', models.DurationField(blank=True, help_text='How long the order is allowed to be prepared.', null=True)), ], options={ 'ordering': ('history__created_at',), }, ), migrations.CreateModel( name='OrderItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slug', models.SlugField()), ('participant', models.CharField(max_length=100)), ('description', models.CharField(max_length=250)), ('price', models.DecimalField(decimal_places=2, max_digits=5)), ('amount', models.PositiveIntegerField(default=1)), ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='orders.Order')), ], ), migrations.CreateModel( name='OrderStateChange', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('old_state', models.CharField(choices=[('preparing', 'Order is prepared, order items can be modified.'), ('ordering', 'Order is locked and sent to delivery service by coordinator.'), ('ordered', 'Order has been sent to delivery service.'), ('delivered', 'Delivery has arrived.'), ('canceled', 'Order has been canceled due to some reason.')], max_length=16)), ('new_state', models.CharField(choices=[('preparing', 'Order is prepared, order items can be modified.'), ('ordering', 'Order is locked and sent to delivery service by coordinator.'), ('ordered', 'Order has been sent to delivery service.'), ('delivered', 'Delivery has arrived.'), ('canceled', 'Order has been canceled due to some reason.')], max_length=16)), ('reason', models.CharField(max_length=1000, null=True)), ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to='orders.Order')), ], ), migrations.AlterUniqueTogether( name='order', unique_together=set([('coordinator', 'restaurant_name')]), ), migrations.AlterUniqueTogether( name='orderitem', unique_together=set([('order', 'participant', 'description')]), ), ]
python
from django.http.response import HttpResponse from django.shortcuts import render, redirect from .models import Pet from .forms import PetForm from users.models import User def All(request): if not request.user.is_authenticated: print("This is a not logged user bro:") return redirect('/accounts/login/') else: print("successfully logged") user = User.objects.all() return render(request, 'system/personal.html', {"user" : user}) def insertPets(request): user = User.objects.get(pk=request.user.id) user.save() errors = {} errors = { 'has_errors' : 0 } if(user.cpf == '' or user.cpf == None or user.full_name == '' or user.full_name == None or user.mobile_phone == '' or user.mobile_phone == None ): errors = { 'has_errors' : 1 } errors['error'] = {} if(user.cpf == '' or user.cpf == None): errors['error'].update({ 0 : 'Antes de cadastrar um pet para adoção é necessário que você insira seu CPF'}) if(user.full_name == '' or user.full_name == None ): errors['error'].update({ 1 : 'Antes de cadastrar um pet para adoção é necessário que você insira seu nome!'}) if(user.mobile_phone == '' or user.mobile_phone == None): errors['error'].update({ 2 : 'Antes de cadastrar um pet para adoção é necessário que insira seu Telefone!'}) if request.method == 'POST' and errors['has_errors'] == 0: form = PetForm(request.POST, request.FILES) if form.is_valid(): form.instance.user_id = request.user.id form.save() # return redirect('/system/pets/my') elif(request.method == 'POST' and errors['has_errors'] == 1): errors['error'].update({ 0 : 'Complete seu cadastro!'}) else: form = PetForm() return render(request, 'system/insertpets.html', {'form' : form , 'errors' : errors}) #get pets by logged user def userPets(request): pets = Pet.objects.all() pets = pets.filter(user_id=request.user.id) print(pets) for pet in pets: print(pet.name) return render(request, 'system/myPetsRecords.html', {"pets" : pets}) def editPet(request,id): if not request.user.is_authenticated: print("This is a not logged user bro:") return redirect('/accounts/login/') else: print("successfully logged") pet = Pet.objects.get(id=id) form = PetForm(instance=pet) errors = {} errors = { 'has_errors' : 0 } errors['error'] = {} if(pet.user_id != request.user.id): errors['has_errors'] = 1 errors['error'].update({ 0 : 'Esse pet nao te pertence... Ainda.'}) if request.method == 'POST': form = PetForm(request.POST,request.FILES, instance=pet) form.save() return redirect('/system/pets/my' , flag='success') return render(request, 'system/editPet.html', {'pet':pet, 'errors':errors , 'form':form}) def petDelete(request, id): Pet.objects.filter(id=id).delete() return redirect('/system/pets/my') def adopted(request,id): pet = Pet.objects.get(id=id) pet.save() if(request.user.id == pet.user_id): pet.isAdopted = True pet.save() HttpResponse('success') else: HttpResponse('Este pet nao te pertence... ainda!') return redirect('/system/pets/my') def notAdopted(request,id): pet = Pet.objects.get(id=id) pet.save() if(request.user.id == pet.user_id): pet.isAdopted = False pet.save() HttpResponse('success') else: HttpResponse('Este pet nao te pertence... ainda!') return redirect('/system/pets/my') def success(request): return HttpResponse('successfully uploaded')
python
from controller.qt.controller import QtGameController
python
from django.test import TestCase from django.contrib.auth import get_user_model class ModelTest(TestCase): def test_create_user_with_email(self): """ test creating user with email address """ email = "[email protected]" password = "testpassword" user = get_user_model().objects.create_user( email=email, password=password ) self.assertEqual(user.email, email) self.assertTrue(user.check_password(password), password) def test_normalize_user_email(self): """ test normalizing user email address """ email = "[email protected]" password = "testpassword" user = get_user_model().objects.create_user( email=email, password=password ) self.assertEqual(user.email, email.lower()) def test_invalid_email(self): """ test invalid email address """ password = "testpassword" with self.assertRaises(ValueError): get_user_model().objects.create_user(None, password) def test_create_superuser(self): """ test create super user """ email = "[email protected]" password = "testpassword" user = get_user_model().objects.create_superuser( email=email, password=password ) self.assertTrue(user.is_superuser, True) self.assertTrue(user.is_staff, True)
python
import copy
from typing import List


def selection_sort(x: List) -> List:
    """Selection sort repeatedly swaps the minimum element of a list with the
    left-most unsorted element, building up a new list that's fully sorted. It
    has an average time complexity of Θ(n^2) due to the nesting of its two
    loops. Time complexity for the worst case, when the list is sorted in
    reverse order, is O(n^2). Time complexity for the best case, when the list
    is already sorted in the correct order, is Ω(n^2).

    >>> selection_sort([4, 2, 3, 1, 0, 5])
    [0, 1, 2, 3, 4, 5]

    :param x: list to be sorted
    :return: new sorted list
    """
    a_list = copy.deepcopy(x)  # To avoid modifying the original list
    length = len(a_list)

    for i in range(length):
        unsorted_min_idx = i
        for idx, element in enumerate(a_list[i:]):
            # idx is relative to the unsorted slice a_list[i:], so the
            # absolute position of a newly found minimum is i + idx.
            if element < a_list[unsorted_min_idx]:
                unsorted_min_idx = i + idx

        a_list[i], a_list[unsorted_min_idx] = a_list[unsorted_min_idx], a_list[i]

    return a_list
python
from typing import Union, Callable, Any, Optional, Dict import os import logging import hashlib from pathlib import Path import numpy as np try: import soundfile as sf from espnet2.bin.tts_inference import Text2Speech as _Text2SpeechModel except OSError as ose: logging.exception( "`libsndfile` not found, it's probably not installed. The node will most likely crash. " "Please install soundfile's dependencies (https://python-soundfile.readthedocs.io/en/latest/)" ) from pydub import AudioSegment from haystack.errors import AudioNodeError from haystack.modeling.utils import initialize_device_settings class TextToSpeech: """ This class converts text into audio using text-to-speech models. NOTE: This is NOT a node. Use AnswerToSpeech or DocumentToSpeech. """ def __init__( self, model_name_or_path: Union[str, Path], use_gpu: bool = True, transformers_params: Optional[Dict[str, Any]] = None, ): """ :param model_name_or_path: The text to speech model, for example `espnet/kan-bayashi_ljspeech_vits`. :param use_gpu: Whether to use GPU (if available). Defaults to True. :param transformers_params: Parameters to pass over to the `Text2Speech.from_pretrained()` call. """ super().__init__() devices, _ = initialize_device_settings(use_cuda=use_gpu, multi_gpu=False) self.model = _Text2SpeechModel.from_pretrained( model_name_or_path, device=devices[0].type, **(transformers_params or {}) ) def text_to_audio_file( self, text: str, generated_audio_dir: Path, audio_format: str = "wav", subtype: str = "PCM_16", sample_width: int = 2, channels_count: int = 1, bitrate: str = "320k", normalized=True, audio_naming_function: Callable = lambda text: hashlib.md5(text.encode("utf-8")).hexdigest(), ) -> Path: """ Convert an input string into an audio file containing the same string read out loud. :param text: The text to convert into audio. :param generated_audio_dir: The folder to save the audio file to. :param audio_format: The format to save the audio into (wav, mp3, ...). Supported formats: - Uncompressed formats thanks to `soundfile` (see `libsndfile documentation <https://libsndfile.github.io/libsndfile/api.html>`_ for a list of supported formats) - Compressed formats thanks to `pydub` (uses FFMPEG: run `ffmpeg -formats` in your terminal to see the list of supported formats). :param subtype: Used only for uncompressed formats. See https://libsndfile.github.io/libsndfile/api.html for the complete list of available subtypes. :param sample_width: Used only for compressed formats. The sample width of your audio. Defaults to 2. :param channels count: Used only for compressed formats. THe number of channels your audio file has: 1 for mono, 2 for stereo. Depends on the model, but it's often mono so it defaults to 1. :param bitrate: Used only for compressed formats. The desired bitrate of your compressed audio. Defaults to '320k'. :param normalized: Used only for compressed formats. Normalizes the audio before compression (range 2^15) or leaves it untouched. :param audio_naming_function: A function mapping the input text into the audio file name. By default, the audio file gets the name from the MD5 sum of the input text. :return: The path to the generated file. """ if not os.path.exists(generated_audio_dir): os.mkdir(generated_audio_dir) filename = audio_naming_function(text) file_path = generated_audio_dir / f"{filename}.{audio_format}" # To save time, we avoid regenerating if a file with the same name is already in the folder. # This happens rather often in text from AnswerToSpeech. 
if not os.path.exists(file_path): audio_data = self.text_to_audio_data(text) if audio_format.upper() in sf.available_formats().keys(): sf.write( data=audio_data, file=file_path, format=audio_format, subtype=subtype, samplerate=self.model.fs ) else: self.compress_audio( data=audio_data, path=file_path, format=audio_format, sample_rate=self.model.fs, sample_width=sample_width, channels_count=channels_count, bitrate=bitrate, normalized=normalized, ) return file_path def text_to_audio_data(self, text: str, _models_output_key: str = "wav") -> np.array: """ Convert an input string into a numpy array representing the audio. :param text: The text to convert into audio. :param _models_output_key: The key in the prediction dictionary that contains the audio data. Defaults to 'wav'. :return: A numpy array representing the audio generated by the model. """ prediction = self.model(text) if not prediction: raise AudioNodeError( f"The model returned no predictions. Make sure you selected a valid text-to-speech model." ) output = prediction.get(_models_output_key, None) if output is None: raise AudioNodeError( f"The model returned no output under the {_models_output_key} key. The available output keys are {prediction.keys()}. Make sure you selected the right key." ) return output.cpu().numpy() def compress_audio( self, data: np.array, path: Path, format: str, sample_rate: int, sample_width: int = 2, channels_count: int = 1, bitrate: str = "320k", normalized=True, ): """ Export a numpy array into a compressed audio file of the desired format. :param data: The audio data to compress. :param path: The path to save the compressed audio at. :param format: The format to compress the data into ('mp3', 'wav', 'raw', 'ogg' or other ffmpeg/avconv supported files). :param sample_rate: The sample rate of the audio. Depends on the model. :param sample_width: The sample width of your audio. Defaults to 2. :param channels count: The number of channels your audio file has: 1 for mono, 2 for stereo. Depends on the model, but it's often mono so it defaults to 1. :param bitrate: The desired bitrate of your compressed audio. Default to '320k'. :param normalized: Normalizes the audio before compression (range 2^15) or leaves it untouched. """ data = np.int16((data * 2**15) if normalized else data) audio = AudioSegment(data.tobytes(), frame_rate=sample_rate, sample_width=sample_width, channels=channels_count) audio.export(path, format=format, bitrate=bitrate)
python
""" iorodeo-potentiostat --------------------- Python interface to LTU Electrocheminiluminescence(ECL)/Potentiometer Shield for the teensy 3.6 development board. Based upon the IO Rodeostat potentiometer (Will Dickson, http://stuff.iorodeo.com/docs/potentiostat). """ from setuptools import setup, find_packages from os import path here = path.abspath(path.dirname(__file__)) with open("README.md", "r") as fh: long_description = fh.read() setup( name='eclometer', version='0.0.3', description='ECLometer serial interface, CLI and GUI app.', long_description=__doc__, url='https://github.com/GVRX/potentiostat', author='Grant van Riessen et al.', author_email='[email protected]', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Chemistry', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7',#added gvr 'Programming Language :: Python :: 3.8',#added gvr 'Programming Language :: Python :: 3.9',#added gvr 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', ], keywords='ECL ECLometer potentiostat' , packages=find_packages(exclude=['docs', 'tests']), install_requires=['pyserial', 'progressbar33', 'argparse', 'json-python-module', 'json_tricks', 'drawnow', 'matplotlib', 'numpy', 'gooey', ], )
python
# TODO: Implement this script for as5048aencoder = Runtime.start("as5048aencoder","As5048AEncoder")...
python
# Copyright 2022 The Bazel Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A module extension to bring in the remote coverage tools under @remote_coverage_tools.""" load("//tools/build_defs/repo:http.bzl", "http_archive") def _remote_coverage_tools_extension_impl(ctx): http_archive( name = "remote_coverage_tools", sha256 = "7006375f6756819b7013ca875eab70a541cf7d89142d9c511ed78ea4fefa38af", urls = [ "https://mirror.bazel.build/bazel_coverage_output_generator/releases/coverage_output_generator-v2.6.zip", ], ) remote_coverage_tools_extension = module_extension( implementation = _remote_coverage_tools_extension_impl, )
python
from data_structures.queue.queue import Queue


# A group of people stands in a circle and one of them holds a hot potato.
# Each round the potato is passed `reps` times to the person on the left;
# whoever is holding it after the last pass leaves the circle.
# The last person remaining wins.
def play_hot_potato_game(items, reps):
    queue = Queue()
    # O(n)
    for item in items:
        queue.enqueue(item)

    # The while loop runs n - 1 times: one person is removed per round
    while queue.size() > 1:
        # O(reps) passes per round
        for i in range(reps):
            first = queue.dequeue()
            print(first)
            queue.enqueue(first)
        print('-' * 10)
        print('Removing {}'.format(queue.dequeue()))

    return queue.dequeue()


if __name__ == "__main__":
    people = ['A', 'B', 'C', 'D', 'E', 'F', 'G']
    num = 5
    print('Winner is: {}'.format(play_hot_potato_game(people, num)))

# Final complexity: O(n) for filling the queue + O((n - 1) * reps) for the
# elimination rounds = O(n * reps), which is O(n) when reps is a constant.
python
from gaia_sdk.graphql.request.type.BuildInEvaluation import BuildInEvaluation from gaia_sdk.graphql.request.type.SkillEvaluation import SkillEvaluation from typing import Callable, List from gaia_sdk.api.VariableRegistry import VariableRegistry from gaia_sdk.graphql.request.enumeration.Order import Order from gaia_sdk.graphql.request.enumeration.OrderByField import OrderByField from gaia_sdk.graphql.request.enumeration.EdgeOrderByField import EdgeOrderByField from gaia_sdk.graphql.request.enumeration.EdgeType import EdgeType class Evaluation(list): def skill(self, config: Callable[['SkillEvaluation'], None]): def callback(registry: VariableRegistry): entity = SkillEvaluation() config(entity) return "skill {" + entity.render(registry) + "}" self.append(callback) def build_in(self, config: Callable[['BuildInEvaluation'], None]): def callback(registry: VariableRegistry): entity = BuildInEvaluation() config(entity) return "build_in {" + entity.render(registry) + "}" self.append(callback) def render(self, registry: VariableRegistry): return " ".join(map(lambda e: e(registry), self))
python
""" Finds and stores the voting data for each candidate in every district in the Russia 2018 Presidential election. """ import re from os import stat import time from selenium import webdriver from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from bs4 import BeautifulSoup def get_vote_counts(page_html: str) -> str: """ Takes the html source of the page with vote counts and collects all of the votes for each candidate from that page into a string to be put into a csv file. Args: page_html: a string representing the html page source containing the vote counts. Returns: A string of data representing the vote counts for each candidate in each region and district to be put into a csv file. The data is formatted as follows: candidate, votes, region, oblast """ soup = BeautifulSoup(page_html) tables = soup.find_all("table") rows = tables[-2].find_all("tr")[13:] candidates_and_votes = [r.text.split("\n")[1][2:] for r in rows] separate_candidate_votes_regex = re.compile("([^0-9]+)([0-9]+)") candidates_and_votes = [ separate_candidate_votes_regex.match(cav).groups() for cav in candidates_and_votes ] location = tables[1].find_all("tr")[0].find("td").text.split(" > ") print(f"location: {location}") if len(location) > 2: region_oblast = ",".join([location[1], location[2][:-1]]) elif len(location) > 1: region_oblast = ",".join([location[1][:-1], location[1][:-1]]) else: region_oblast = "N/A" oblast_csv = ( "\n".join( [ re.sub( "(,[^,]*),", r"\1 ", ",".join([cav[0], cav[1], region_oblast]), ) for cav in candidates_and_votes ] ) + "\n" ) return oblast_csv def save_csv(votes_data: str, path: str, column_names: str): """ Adds a string of data to the end of a csv file. Args: votes_data: a string representing the votes data collected in the format 'candidate, votes, city, oblast'. path: a string representing the name of the path to the file to store the data column_names: a string representing the titles of each column in the csv file separated by a comma, for example 'candidate,votes,region,oblast' """ file = open(path, "a", encoding="utf-8") if stat(path).st_size == 0: file.write(f"{column_names}+\n") file.write(votes_data) file.close() def get_election_data(): """ Iterates through a website containing the election data for the Russia 2018 Presidential Election, grabs the votes for each candidate in each region, and stored that data in a csv file. 
""" url = "http://www.vybory.izbirkom.ru/region/izbirkom?action=show& \ root_a=null&vrn=100100084849062&region=0&global=true& \ type=0&prver=0&pronetvd=null" # Using Chrome version 89 and chromedriver version 89 (important that they # match) driver = webdriver.Chrome() driver.get(url) # 10 seconds to manually enter code to proceed time.sleep(10) # wait until page loads, then select the page with the table of data # only need to do this once as the configurations save table_format = WebDriverWait(driver, 10).until( EC.presence_of_element_located((By.LINK_TEXT, "Результаты выборов")) ) table_format.click() dropdown_regions = driver.find_element_by_name("gs") election_regions = dropdown_regions.find_elements_by_tag_name("option") for k in range(1, len(election_regions)): dropdown_regions = driver.find_element_by_name("gs") election_regions = dropdown_regions.find_elements_by_tag_name("option") # navigate to the page with data for the region election_regions[k].click() select_button = driver.find_element_by_name("go") select_button.click() try: dropdown_oblast = driver.find_element_by_name("gs") election_oblast = dropdown_oblast.find_elements_by_tag_name( "option" ) for i in range(1, len(election_oblast)): dropdown_oblast = driver.find_element_by_name("gs") election_oblast = dropdown_oblast.find_elements_by_tag_name( "option" ) # navigate to the page for an oblast in that city election_oblast[i].click() select_button = driver.find_element_by_name("go") select_button.click() oblast_data = get_vote_counts(driver.page_source) save_csv( oblast_data, "data/2018-Russia-election-data.csv", "candidate,votes,region,oblast", ) driver.back() except NoSuchElementException: oblast_data = get_vote_counts(driver.page_source) save_csv( oblast_data, "data/2018-Russia-election-data.csv", "candidate,votes,region,oblast", ) driver.back() driver.quit() if __name__ == "__main__": get_election_data()
python
# coding:utf-8 # @Time : 2021/6/29 # @Author : fisher yu # @File : file_hash.py """ file hash: v0.0.1 """ import argparse import hashlib import os chunkSize = 8 * 1024 def valid_file(file_path): if os.path.exists(file_path) and os.path.isfile(file_path): return True return False def file_md5(file_path, block_size=chunkSize): if not valid_file(file_path): return None, None md5tool = hashlib.md5() with open(file_path, 'rb') as fn: while True: data = fn.read(block_size) if not data: break md5tool.update(data) md5value = md5tool.hexdigest() # md5b64 = base64.b64encode(md5tool.digest()) return md5value def file_sha1(file_path, block_size=chunkSize): if not valid_file(file_path): return None, None sha1tool = hashlib.sha1() with open(file_path, 'rb') as fn: while True: data = fn.read(block_size) if not data: break sha1tool.update(data) sha1value = sha1tool.hexdigest() # sha1b64 = base64.b64encode(sha1tool.digest()) return sha1value def batch_md5(files: list): md5dict = {} for file in files: md5value = file_md5(file) # Thread here if not md5value: continue if file not in md5dict: md5dict[file] = {} # md5dict[file]['md5b64'] = md5b64 md5dict[file]['md5value'] = md5value return md5dict def batch_sha1(files: list): sha1dict = {} for file in files: sha1value = file_sha1(file) if not sha1value: continue if file not in sha1dict: sha1dict[file] = {} # sha1dict[file]['sha1b64'] = sha1b64 sha1dict[file]['sha1value'] = sha1value return sha1dict def merge_digest(sha1dict: dict, md5dict: dict): digest_dict = {} for file in sha1dict: if file not in digest_dict: digest_dict[file] = {} # digest_dict[file]['sha1b64'] = sha1dict[file]['sha1b64'] digest_dict[file]['sha1value'] = sha1dict[file]['sha1value'] for file in md5dict: if file not in digest_dict: digest_dict[file] = {} # digest_dict[file]['md5b64'] = md5dict[file]['md5b64'] digest_dict[file]['md5value'] = md5dict[file]['md5value'] return digest_dict def main(): parser = argparse.ArgumentParser(description='Compute the file digest.') parser.add_argument('paths', metavar='/path/to/file', type=str, nargs='*', help='A file path') parser.add_argument('--sha1', dest='sha1', action='store_true', help='Show sha1 digest') parser.add_argument('--md5', dest='md5', action='store_true', help='Show md5 digest') parser.add_argument('-dup', '--find-duplicate', dest='duplicate', action='store_true', help='Find Duplicate file') parser.add_argument('-i', '--input-file', dest='input', type=str, help='A file stores some file waiting hash') args = parser.parse_args() if not args.paths and not args.input: print('[-]Error: One file path at least.') exit(0) if args.input and not os.path.exists(args.input) and not os.path.isfile(args.input): print('[-]Error: input file not exists or not a file.') exit(0) paths = args.paths if args.paths else [] if args.input: with open(args.input, 'r') as fn: for line in fn.readlines(): formatted_line = line.strip('\r').strip('\n').strip('') if formatted_line: paths.append(formatted_line) sha1dict = {} if args.sha1: sha1dict = batch_sha1(paths) md5dict = batch_md5(paths) digest_dict = merge_digest(sha1dict, md5dict) if args.duplicate: hash_dict = {} for file, file_hash in digest_dict.items(): hash_key = file_hash['md5value'] if hash_key not in hash_dict: hash_dict[hash_key] = {} length = len(hash_dict[hash_key]) file_key = 'file{}'.format(str(length)) hash_dict[hash_key][file_key] = file for hash_key in hash_dict.keys(): if len(hash_dict[hash_key]) >= 2: print('file md5: {}'.format(hash_key)) for value in hash_dict[hash_key].values(): 
print('\t{}'.format(value)) if args.md5 and args.sha1: print(digest_dict) elif args.md5: print(md5dict) elif args.sha1: print(sha1dict) if __name__ == '__main__': main()
python
{ 'targets': [ { 'target_name': 'binding', 'sources': [ 'binding.cc' ], 'libraries': ['-lzmq'], 'cflags!': ['-fno-exceptions'], 'cflags_cc!': ['-fno-exceptions'], 'conditions': [ ['OS=="mac"', { 'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES' } }] ] } ] }
python
# -*- coding: utf-8 -*- def main(): s = input() t = s[::-1] n = len(s) // 2 count = 0 for i in range(n): if s[i] != t[i]: count += 1 print(count) if __name__ == '__main__': main()
python
try: import config_local as config except: import config import requests headers = {"User-Agent": "http-url-test"} response = requests.get(config.url, headers=headers) print('Response URL:', response.url) print(response.text)
python
# -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2017-05-28 23:39 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import multiselectfield.db.fields class Migration(migrations.Migration): dependencies = [ ('registration', '0004_auto_20170518_0332'), ] operations = [ migrations.AddField( model_name='member', name='involvement', field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('ac6922146d', 'General (receive email)'), ('3a5a719017', 'Volunteering'), ('0ebb0b5468', 'Events'), ('84309225e7', 'Workshops'), ('c96d389517', 'Shop')], max_length=54, null=True), ), migrations.AlterField( model_name='member', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
python
from unyt._unit_lookup_table import *
python
from datetime import datetime, timedelta from msl.qt import QtCore, QtGui, QtWidgets, Button from ...log import log from ...constants import FONTSIZE def chop_microseconds(delta): return delta - timedelta(microseconds=delta.microseconds) class WaitUntilTimeDisplay(QtWidgets.QDialog): def __init__(self, loop_delay=1000, message=None, title=None, parent=None, font_family='Helvetica'): """This widget counts down to a target time, and displays the time remaining until then. Parameters ---------- loop_delay : int update interval in ms message : str, optional message to display to explain what will happen when the countdown is reached title : str title for dialog window parent : QtWidget or app ?, optional font_family : str, optional """ super().__init__(parent=parent) if title is None: title = f"Delay Start" self.setWindowTitle(title) font = QtGui.QFont(font_family, pointSize=FONTSIZE) layout = QtWidgets.QVBoxLayout() # display a message if one has been passed if message is not None: log.info(message) msg = QtWidgets.QLabel(message) msg.setWordWrap(True) msg.setFont(font) layout.addWidget(msg) # make a date and time edit box for the target time self.intro = QtWidgets.QLabel("Waiting until:") self.intro.setFont(font) layout.addWidget(self.intro) self.dte = QtWidgets.QDateTimeEdit() self.dte.setFont(font) self.dte.setDateTime(QtCore.QDateTime.currentDateTime().addSecs(3600)) layout.addWidget(self.dte) # show how long it will wait for self.status = QtWidgets.QLabel() self.status.setFont(font) self.loop() layout.addWidget(self.status) # add an override to start the weighing now start_now = Button(text="Start now", left_click=self.start_now) start_now.setFont(font) layout.addWidget(start_now) self.setLayout(layout) self.go = False self._loop_delay = loop_delay self._loop_timer = QtCore.QTimer() self._loop_timer.timeout.connect(self.loop) self._loop_timer.start(self._loop_delay) # allow user to change the time? self.closeEvent = self._shutdown @property def target_time(self): """Return displayed time as normal datetime type""" try: # PyQt dto = self.dte.dateTime().toPyDateTime() except: # PySide dto = self.dte.dateTime().toPython() return dto @property def loop_delay(self): """:class:`int`: The time delay, in milliseconds, between successive calls to the :meth:`loop`.""" return self._loop_delay @property def loop_timer(self): """:class:`QtCore.QTimer`: The reference to the :meth:`loop`\'s timer.""" return self._loop_timer def _stop_timers(self): """Stop and delete the timers.""" if self._loop_timer: self._loop_timer.stop() self._loop_timer = None def time_remaining(self): """Work out the remaining time""" now = datetime.now() time_remaining = self.target_time - now return time_remaining def loop(self): """Update the label and determine if the target time has been reached""" tr = self.time_remaining() self.status.setText( f"Time remaining: {chop_microseconds(tr)}\n" ) if tr.total_seconds() < 0: self.start_now() def start_now(self): """Exit out of the dialog, setting the go attribute to True""" self.go = True self.close() def _shutdown(self, event): """Abort the loop""" self._stop_timers() event.accept()
python
import os
import glob
import pandas as pd
import xml.etree.ElementTree as ET


def xml_to_csv(path):
    xml_list = []
    # Read the annotation files
    for xml_file in glob.glob(path + '/*.xml'):
        tree = ET.parse(xml_file)
        root = tree.getroot()
        for member in root.findall('object'):
            value = (str(root.find('filename').text),
                     int(root.find('size')[0].text),
                     int(root.find('size')[1].text),
                     member[0].text,
                     int(member[4][0].text),
                     int(member[4][1].text),
                     int(member[4][2].text),
                     int(member[4][3].text)
                     )
            xml_list.append(value)
    column_name = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']

    # Split the data into training and evaluation sets
    train_list = xml_list[0: int(len(xml_list) * 0.67)]
    eval_list = xml_list[int(len(xml_list) * 0.67):]

    train_df = pd.DataFrame(train_list, columns=column_name)
    eval_df = pd.DataFrame(eval_list, columns=column_name)
    train_df.to_csv('data/train.csv', index=None)
    eval_df.to_csv('data/eval.csv', index=None)


def main():
    image_path = os.path.join(os.getcwd(), 'annotations')
    xml_to_csv(image_path)
    print('Successfully converted xml to csv.')


main()
python
from __future__ import absolute_import from __future__ import division from __future__ import print_function import ray from ray.rllib.evaluation.postprocessing import compute_advantages, \ Postprocessing from ray.rllib.policy.tf_policy_template import build_tf_policy from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.utils import try_import_tf tf = try_import_tf() # The basic policy gradients loss def policy_gradient_loss(policy, batch_tensors): actions = batch_tensors[SampleBatch.ACTIONS] advantages = batch_tensors[Postprocessing.ADVANTAGES] return -tf.reduce_mean(policy.action_dist.logp(actions) * advantages) # This adds the "advantages" column to the sample batch. def postprocess_advantages(policy, sample_batch, other_agent_batches=None, episode=None): return compute_advantages( sample_batch, 0.0, policy.config["gamma"], use_gae=False) PGTFPolicy = build_tf_policy( name="PGTFPolicy", get_default_config=lambda: ray.rllib.agents.pg.pg.DEFAULT_CONFIG, postprocess_fn=postprocess_advantages, loss_fn=policy_gradient_loss)
python