commit (stringlengths 40-40) | subject (stringlengths 4-1.73k) | repos (stringlengths 5-127k) | old_file (stringlengths 2-751) | new_file (stringlengths 2-751) | new_contents (stringlengths 1-8.98k) | old_contents (stringlengths 0-6.59k) | license (stringclasses 13 values) | lang (stringclasses 23 values)
---|---|---|---|---|---|---|---|---|
6aaa08a48dade981de18b117363357fdffaeb641 | add python_capstone_setup.py | techvoltage/capstone,bughoho/capstone,bughoho/capstone,techvoltage/capstone,bigendiansmalls/capstone,angelabier1/capstone,bughoho/capstone,dynm/capstone,pombredanne/capstone,AmesianX/capstone,zuloloxi/capstone,bughoho/capstone,nplanel/capstone,bigendiansmalls/capstone,bSr43/capstone,sephiroth99/capstone,capturePointer/capstone,sephiroth99/capstone,bigendiansmalls/capstone,angelabier1/capstone,dynm/capstone,code4bones/capstone,bughoho/capstone,sephiroth99/capstone,code4bones/capstone,bigendiansmalls/capstone,pranith/capstone,nplanel/capstone,AmesianX/capstone,dynm/capstone,bSr43/capstone,8l/capstone,code4bones/capstone,bowlofstew/capstone,07151129/capstone,bughoho/capstone,07151129/capstone,code4bones/capstone,bSr43/capstone,NeilBryant/capstone,NeilBryant/capstone,07151129/capstone,krytarowski/capstone,AmesianX/capstone,AmesianX/capstone,angelabier1/capstone,krytarowski/capstone,zuloloxi/capstone,AmesianX/capstone,8l/capstone,krytarowski/capstone,bowlofstew/capstone,NeilBryant/capstone,techvoltage/capstone,bowlofstew/capstone,bSr43/capstone,bowlofstew/capstone,07151129/capstone,pombredanne/capstone,sephiroth99/capstone,capturePointer/capstone,capturePointer/capstone,pranith/capstone,pranith/capstone,sephiroth99/capstone,krytarowski/capstone,techvoltage/capstone,bigendiansmalls/capstone,krytarowski/capstone,nplanel/capstone,angelabier1/capstone,07151129/capstone,8l/capstone,capturePointer/capstone,pombredanne/capstone,krytarowski/capstone,bSr43/capstone,zuloloxi/capstone,dynm/capstone,zuloloxi/capstone,8l/capstone,8l/capstone,code4bones/capstone,bowlofstew/capstone,nplanel/capstone,techvoltage/capstone,krytarowski/capstone,pombredanne/capstone,zuloloxi/capstone,pombredanne/capstone,angelabier1/capstone,07151129/capstone,techvoltage/capstone,bowlofstew/capstone,NeilBryant/capstone,bigendiansmalls/capstone,NeilBryant/capstone,bowlofstew/capstone,dynm/capstone,techvoltage/capstone,NeilBryant/capstone,bigendiansmalls/capstone,zuloloxi/capstone,sephiroth99/capstone,NeilBryant/capstone,pranith/capstone,code4bones/capstone,8l/capstone,sephiroth99/capstone,AmesianX/capstone,bSr43/capstone,pranith/capstone,bughoho/capstone,capturePointer/capstone,angelabier1/capstone,07151129/capstone,dynm/capstone,bSr43/capstone,pranith/capstone,nplanel/capstone,nplanel/capstone,zuloloxi/capstone,pombredanne/capstone,pombredanne/capstone,dynm/capstone,capturePointer/capstone,capturePointer/capstone,code4bones/capstone,nplanel/capstone,pranith/capstone,8l/capstone,angelabier1/capstone,AmesianX/capstone | suite/python_capstone_setup.py | suite/python_capstone_setup.py | #!/bin/sh
# this prints out Capstone setup & core+Python-binding versions
python -c "import capstone; print(capstone.debug())"
| bsd-3-clause | Python |
|
077cf46ab42c76bf3a854142a4f530625a377837 | Create tutorial2.py | dina-hertog/empty-app | tutorial2.py | tutorial2.py | | | mit | Python |
1bb1ececfcd548d52a28b713f4ee7eb4e710da85 | Add an example of using fchollet multi_gpu_model on InceptionV3. | rossumai/keras-multi-gpu,rossumai/keras-multi-gpu | keras_tf_multigpu/examples/fchollet_inception3_multigpu.py | keras_tf_multigpu/examples/fchollet_inception3_multigpu.py | import tensorflow as tf
from keras.applications import InceptionV3
from keras.utils import multi_gpu_model
import numpy as np
num_samples = 1000
height = 224
width = 224
num_classes = 1000
gpu_count = 2
# Instantiate the base model
# (here, we do it on CPU, which is optional).
with tf.device('/cpu:0' if gpu_count > 1 else '/gpu:0'):
model = InceptionV3(weights=None,
input_shape=(height, width, 3),
classes=num_classes)
# Replicates the model on N GPUs.
# This assumes that your machine has N available GPUs.
if gpu_count > 1:
parallel_model = multi_gpu_model(model, gpus=gpu_count)
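# multi_gpu_model splits each incoming batch into gpu_count sub-batches,
# runs one replica of the model per GPU, and concatenates the outputs back
# on the CPU, so the replicas share a single set of weights.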
else:
parallel_model = model
parallel_model.compile(loss='categorical_crossentropy',
optimizer='rmsprop')
# Generate dummy data.
x = np.random.random((num_samples, height, width, 3))
y = np.random.random((num_samples, num_classes))
# This `fit` call will be distributed on N GPUs.
# Since the batch size is N*32, each GPU will process 32 samples.
parallel_model.fit(x, y, epochs=20, batch_size=32 * gpu_count)
| mit | Python |
|
9ffafa9c11e71c176adb4056fbc780e450cc0d82 | Add experimental queries module. | ericdill/databroker,ericdill/databroker | databroker/queries.py | databroker/queries.py | """
This module is experimental.
"""
import collections.abc
import abc
class Query(collections.abc.Mapping):
"""
This represents a MongoDB query.
MongoDB queries are typically encoded as simple dicts. This object supports
the dict interface in a read-only fashion. Subclasses add a nice __repr__
and mutable attributes from which the contents of the dict are derived.
"""
@abc.abstractproperty
def _query(self):
...
def __iter__(self):
return iter(self._query)
def __getitem__(self, key):
return self._query[key]
def __len__(self):
return len(self._query)
class TimeRange(Query):
"""
A search query representing a time range.
"""
def __init__(self, since=None, until=None):
self.since = since
self.until = until
@property
def _query(self):
query = {'time': {}}
if self.since is not None:
query['time']['$gte'] = self.since
if self.until is not None:
query['time']['$lt'] = self.until
return query
def __repr__(self):
return f"{type(self).__name__}(since={self.since}, until={self.until})"
| bsd-3-clause | Python |
|
177f198a1efb99da592d96a2d5d259722b8a47ee | Add a test of switching back and forth between Decider() values (specifically 'MD5' and 'timestamp-match'), copied from back when this functionality was configured with the SourceSignatures() function. | timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons | test/Decider/switch-rebuild.py | test/Decider/switch-rebuild.py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that switching Decider() types between MD5 and timestamp-match
does not cause unnecessary rebuilds.
"""
import TestSCons
test = TestSCons.TestSCons(match=TestSCons.match_re_dotall)
base_sconstruct_contents = """\
Decider('%s')
def build(env, target, source):
open(str(target[0]), 'wt').write(open(str(source[0]), 'rt').read())
B = Builder(action=build)
env = Environment(BUILDERS = { 'B' : B })
env.B(target='switch.out', source='switch.in')
"""
def write_SConstruct(test, sig_type):
contents = base_sconstruct_contents % sig_type
test.write('SConstruct', contents)
# Build first with MD5 checksums.
write_SConstruct(test, 'MD5')
test.write('switch.in', "switch.in\n")
switch_out_switch_in = test.wrap_stdout(r'build\(\["switch.out"\], \["switch.in"\]\)\n')
test.run(arguments='switch.out', stdout=switch_out_switch_in)
test.up_to_date(arguments='switch.out')
# Now rebuild with timestamp-match. Because we always store timestamps,
# even when making the decision based on MD5 checksums, the build is
# still up to date.
write_SConstruct(test, 'timestamp-match')
test.up_to_date(arguments='switch.out')
# Now switch back to MD5 checksums. When we rebuilt with the timestamp,
# it wiped out the MD5 value (because the point of timestamps is to not
# open up and checksum the contents), so the file is considered *not*
# up to date and must be rebuilt to generate a checksum.
write_SConstruct(test, 'MD5')
test.not_up_to_date(arguments='switch.out')
# And just for good measure, make sure that we now rebuild in response
# to a content change.
test.write('switch.in', "switch.in 2\n")
test.run(arguments='switch.out', stdout=switch_out_switch_in)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | Python |
|
666cd734270f7d7487815c7805940cb8a2d99c2c | Add script for downloading and patching the TIGER corpus | ooz/Confopy,ooz/Confopy | confopy/localization/de/corpus_de/tiger_dl_patch.py | confopy/localization/de/corpus_de/tiger_dl_patch.py | #!/usr/bin/python
# coding: utf-8
'''
File: tiger_dl_patch.py
Author: Oliver Zscheyge
Description:
Fixes wrong morph values in the TIGER corpus:
tiger_release_aug07.corrected.16012013.xml
Also converts XML file to utf-8 encoding.
'''
import urllib
import tarfile
import codecs
import fileinput
import os
TIGER_URL = "http://www.ims.uni-stuttgart.de/forschung/ressourcen/korpora/TIGERCorpus/download/tigercorpus-2.2.xml.tar.gz"
TIGER_PKG_FILE = "tiger.tar.gz"
TIGER_FILE = "tiger_release_aug07.corrected.16012013.xml"
TIGER_FILE_UTF8 = "tiger_release_aug07.corrected.16012013_utf8.xml"
TIGER_FILE_UTF8_PATCHED = "tiger_release_aug07.corrected.16012013_utf8_patched.xml"
SOURCE_ENC = "iso-8859-1"
TARGET_ENC = "utf-8"
def main():
print("Downloading and extracting TIGER corpus...")
download_extract()
print("Converting the corpus to UTF-8 and fixing strings...")
convert_to_utf8()
fix_strings()
print("Cleaning up downloaded and generated files...")
cleanup()
print("Done!")
def download_extract():
urllib.urlretrieve(TIGER_URL, TIGER_PKG_FILE)
tar = tarfile.open(TIGER_PKG_FILE)
tar.extractall()
tar.close()
def convert_to_utf8():
"""Converting the TIGER_FILE to utf-8 encoding.
Taken from:
http://stackoverflow.com/questions/191359/how-to-convert-a-file-to-utf-8-in-python
"""
BLOCKSIZE = 1048576 # 1 MiB, or some other desired size in bytes
with codecs.open(TIGER_FILE, "r", SOURCE_ENC) as sourceFile:
with codecs.open(TIGER_FILE_UTF8, "w", TARGET_ENC) as targetFile:
while True:
contents = sourceFile.read(BLOCKSIZE)
if not contents:
break
targetFile.write(contents)
def fix_strings():
replacements = {
1 : [u"ISO-8859-1", u"utf-8"]
, 293648 : [u"Pl.1.Pres.Ind", u"1.Pl.Pres.Ind"]
, 543756 : [u"Pl.3.Pres.Ind", u"3.Pl.Pres.Ind"]
, 1846632 : [u"Pl.3.Pres.Ind", u"3.Pl.Pres.Ind"]
, 2634040 : [u"Pl.3.Pres.Ind", u"3.Pl.Pres.Ind"]
}
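# The table above maps 1-based line numbers in the XML to [old, new]
# substring pairs, each applied at most once per line; line 1 rewrites the
# encoding declaration to match the utf-8 conversion performed earlier.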
linenr = 1
with codecs.open(TIGER_FILE_UTF8_PATCHED, "w", TARGET_ENC) as outfile:
with codecs.open(TIGER_FILE_UTF8, "r", TARGET_ENC) as infile:
for line in infile:
line = unicode(line).replace(u"\r", u"") # Replace Windows carriage returns
replacement = replacements.get(linenr, [])
if replacement != []:
line = line.replace(replacement[0], replacement[1], 1)
linenr += 1
outfile.write(line)
# for line in fileinput.input(TIGER_FILE_FIXED, inplace=True):
# replacement = replacements.get(fileinput.filelineno(), [])
# if replacement == []:
# print line,
# else:
# print line.replace(replacement[0], replacement[1], 1),
def cleanup():
os.remove(TIGER_PKG_FILE)
os.remove(TIGER_FILE)
os.remove(TIGER_FILE_UTF8)
if __name__ == '__main__':
main()
| mit | Python |
|
8c7fc2382db0ec9c901f6c2c2b00971f3ee7c3cc | Add tests for custom authentication backend | randomic/aniauth-tdd,randomic/aniauth-tdd | logintokens/tests/test_backends.py | logintokens/tests/test_backends.py | """logintokens app unittests for backends
"""
from time import sleep
from django.test import TestCase, Client
from django.contrib.auth import get_user_model, authenticate
from logintokens.tokens import default_token_generator
USER = get_user_model()
class EmailOnlyAuthenticationBackendTest(TestCase):
"""Tests for email only authentication backend
"""
def setUp(self):
self.client = Client()
self.generator = default_token_generator
self.new_username = 'newvisitor'
self.existing_user = USER._default_manager.create_user('existinguser')
def test_different_tokens_usable(self):
"""Two differing tokens should both be usabe to authenticate.
"""
username = self.existing_user.get_username()
token1 = self.generator.make_token(username)
sleep(1)
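# a token generated a second later should differ, assuming the token
# generator embeds a timestamp component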
token2 = self.generator.make_token(username)
self.assertNotEqual(token1, token2)
self.assertEqual(authenticate(token=token1), self.existing_user)
self.assertEqual(authenticate(token=token2), self.existing_user)
def test_login_invalidates_tokens(self):
"""Tokens generated before a successful login should become invalid.
"""
username = self.existing_user.get_username()
token1 = self.generator.make_token(username)
sleep(1)
token2 = self.generator.make_token(username)
self.assertNotEqual(token1, token2)
self.client.force_login(self.existing_user)
self.assertIsNone(authenticate(token=token1))
self.assertIsNone(authenticate(token=token2))
def test_new_visitor_creates_user(self):
"""Using a token from a new visitor should create their user object.
"""
token = self.generator.make_token(self.new_username)
user = authenticate(token=token)
self.assertIsInstance(user, USER)
| mit | Python |
|
f746c2a8a59342060d404944a586b11e1f46df5a | Merge with lp:openobject-addons | hifly/OpenUpgrade,fuselock/odoo,kybriainfotech/iSocioCRM,nexiles/odoo,kirca/OpenUpgrade,oliverhr/odoo,Danisan/odoo-1,rgeleta/odoo,dezynetechnologies/odoo,OpenUpgrade-dev/OpenUpgrade,christophlsa/odoo,luistorresm/odoo,slevenhagen/odoo-npg,savoirfairelinux/odoo,cysnake4713/odoo,synconics/odoo,OSSESAC/odoopubarquiluz,rowemoore/odoo,kifcaliph/odoo,ApuliaSoftware/odoo,florian-dacosta/OpenUpgrade,pedrobaeza/OpenUpgrade,JonathanStein/odoo,agrista/odoo-saas,Danisan/odoo-1,OpenUpgrade-dev/OpenUpgrade,TRESCLOUD/odoopub,x111ong/odoo,BT-ojossen/odoo,cpyou/odoo,slevenhagen/odoo,frouty/odoogoeen,hmen89/odoo,damdam-s/OpenUpgrade,lightcn/odoo,abstract-open-solutions/OCB,0k/OpenUpgrade,eino-makitalo/odoo,shingonoide/odoo,joariasl/odoo,rubencabrera/odoo,markeTIC/OCB,papouso/odoo,tarzan0820/odoo,lgscofield/odoo,janocat/odoo,mszewczy/odoo,kittiu/odoo,andreparames/odoo,Grirrane/odoo,lsinfo/odoo,PongPi/isl-odoo,feroda/odoo,odoousers2014/odoo,RafaelTorrealba/odoo,Noviat/odoo,NeovaHealth/odoo,alqfahad/odoo,draugiskisprendimai/odoo,doomsterinc/odoo,pplatek/odoo,jusdng/odoo,jiangzhixiao/odoo,sadleader/odoo,Gitlab11/odoo,provaleks/o8,patmcb/odoo,dalegregory/odoo,jiangzhixiao/odoo,Drooids/odoo,cloud9UG/odoo,Codefans-fan/odoo,apocalypsebg/odoo,markeTIC/OCB,stephen144/odoo,ccomb/OpenUpgrade,alqfahad/odoo,gdgellatly/OCB1,takis/odoo,AuyaJackie/odoo,jusdng/odoo,mmbtba/odoo,nhomar/odoo-mirror,AuyaJackie/odoo,nexiles/odoo,ChanduERP/odoo,acshan/odoo,cedk/odoo,datenbetrieb/odoo,nitinitprof/odoo,ehirt/odoo,bealdav/OpenUpgrade,Daniel-CA/odoo,srimai/odoo,makinacorpus/odoo,ChanduERP/odoo,OSSESAC/odoopubarquiluz,mlaitinen/odoo,credativUK/OCB,lsinfo/odoo,apanju/odoo,ovnicraft/odoo,joshuajan/odoo,jpshort/odoo,VielSoft/odoo,zchking/odoo,RafaelTorrealba/odoo,mlaitinen/odoo,OpenPymeMx/OCB,avoinsystems/odoo,rahuldhote/odoo,addition-it-solutions/project-all,credativUK/OCB,JonathanStein/odoo,incaser/odoo-odoo,hubsaysnuaa/odoo,nuuuboo/odoo,JonathanStein/odoo,codekaki/odoo,aviciimaxwell/odoo,fossoult/odoo,ojengwa/odoo,laslabs/odoo,hifly/OpenUpgrade,grap/OCB,thanhacun/odoo,Maspear/odoo,tinkerthaler/odoo,funkring/fdoo,fdvarela/odoo8,stonegithubs/odoo,vnsofthe/odoo,GauravSahu/odoo,Eric-Zhong/odoo,gvb/odoo,SerpentCS/odoo,fdvarela/odoo8,jpshort/odoo,odoo-turkiye/odoo,papouso/odoo,aviciimaxwell/odoo,mvaled/OpenUpgrade,MarcosCommunity/odoo,mlaitinen/odoo,abdellatifkarroum/odoo,tvtsoft/odoo8,frouty/odoo_oph,diagramsoftware/odoo,bobisme/odoo,Nowheresly/odoo,florian-dacosta/OpenUpgrade,ThinkOpen-Solutions/odoo,luiseduardohdbackup/odoo,stephen144/odoo,CubicERP/odoo,takis/odoo,jusdng/odoo,ApuliaSoftware/odoo,Kilhog/odoo,hoatle/odoo,ojengwa/odoo,GauravSahu/odoo,rgeleta/odoo,christophlsa/odoo,tinkerthaler/odoo,agrista/odoo-saas,xzYue/odoo,incaser/odoo-odoo,kybriainfotech/iSocioCRM,fossoult/odoo,ramitalat/odoo,hbrunn/OpenUpgrade,dllsf/odootest,mustafat/odoo-1,bplancher/odoo,cysnake4713/odoo,pplatek/odoo,lombritz/odoo,VitalPet/odoo,fossoult/odoo,leorochael/odoo,feroda/odoo,0k/odoo,hopeall/odoo,papouso/odoo,laslabs/odoo,stonegithubs/odoo,synconics/odoo,OpenUpgrade/OpenUpgrade,tvibliani/odoo,nuuuboo/odoo,gdgellatly/OCB1,zchking/odoo,SAM-IT-SA/odoo,Danisan/odoo-1,synconics/odoo,dariemp/odoo,doomsterinc/odoo,damdam-s/OpenUpgrade,hifly/OpenUpgrade,Gitlab11/odoo,colinnewell/odoo,tarzan0820/odoo,hifly/OpenUpgrade,hanicker/odoo,QianBIG/odoo,odoousers2014/odoo,gavin-feng/odoo,0k/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,ojengwa/odoo,ccomb/OpenUpgrade,glovebx/odoo,nuncjo/odoo,windedge/odoo,ins
pyration/odoo,savoirfairelinux/odoo,javierTerry/odoo,shaufi10/odoo,rahuldhote/odoo,patmcb/odoo,bguillot/OpenUpgrade,pedrobaeza/odoo,csrocha/OpenUpgrade,spadae22/odoo,VitalPet/odoo,leoliujie/odoo,vrenaville/ngo-addons-backport,Daniel-CA/odoo,incaser/odoo-odoo,leorochael/odoo,kybriainfotech/iSocioCRM,lombritz/odoo,odootr/odoo,bwrsandman/OpenUpgrade,OpenPymeMx/OCB,dgzurita/odoo,fossoult/odoo,simongoffin/website_version,provaleks/o8,dezynetechnologies/odoo,shaufi/odoo,eino-makitalo/odoo,odoo-turkiye/odoo,JCA-Developpement/Odoo,dalegregory/odoo,abstract-open-solutions/OCB,apocalypsebg/odoo,joshuajan/odoo,prospwro/odoo,goliveirab/odoo,bealdav/OpenUpgrade,bplancher/odoo,odooindia/odoo,odooindia/odoo,provaleks/o8,shingonoide/odoo,nuuuboo/odoo,rubencabrera/odoo,Kilhog/odoo,fjbatresv/odoo,AuyaJackie/odoo,guewen/OpenUpgrade,avoinsystems/odoo,xzYue/odoo,fuselock/odoo,ujjwalwahi/odoo,AuyaJackie/odoo,BT-astauder/odoo,ygol/odoo,ramadhane/odoo,papouso/odoo,sergio-incaser/odoo,BT-fgarbely/odoo,Kilhog/odoo,hoatle/odoo,xzYue/odoo,florentx/OpenUpgrade,nhomar/odoo,demon-ru/iml-crm,javierTerry/odoo,bkirui/odoo,gdgellatly/OCB1,shingonoide/odoo,SAM-IT-SA/odoo,joshuajan/odoo,fossoult/odoo,thanhacun/odoo,MarcosCommunity/odoo,brijeshkesariya/odoo,windedge/odoo,Nowheresly/odoo,VitalPet/odoo,Daniel-CA/odoo,sinbazhou/odoo,camptocamp/ngo-addons-backport,CopeX/odoo,steedos/odoo,Kilhog/odoo,chiragjogi/odoo,ihsanudin/odoo,nuuuboo/odoo,dezynetechnologies/odoo,leorochael/odoo,ihsanudin/odoo,NL66278/OCB,blaggacao/OpenUpgrade,juanalfonsopr/odoo,Antiun/odoo,rschnapka/odoo,datenbetrieb/odoo,janocat/odoo,Endika/OpenUpgrade,vnsofthe/odoo,Ernesto99/odoo,srsman/odoo,microcom/odoo,nagyistoce/odoo-dev-odoo,ApuliaSoftware/odoo,ujjwalwahi/odoo,ecosoft-odoo/odoo,zchking/odoo,apocalypsebg/odoo,rahuldhote/odoo,christophlsa/odoo,patmcb/odoo,cdrooom/odoo,SerpentCS/odoo,gvb/odoo,BT-rmartin/odoo,dariemp/odoo,bobisme/odoo,ShineFan/odoo,vrenaville/ngo-addons-backport,elmerdpadilla/iv,slevenhagen/odoo,Codefans-fan/odoo,GauravSahu/odoo,SerpentCS/odoo,laslabs/odoo,charbeljc/OCB,hanicker/odoo,n0m4dz/odoo,bobisme/odoo,pedrobaeza/odoo,fuhongliang/odoo,joariasl/odoo,deKupini/erp,ccomb/OpenUpgrade,OpenPymeMx/OCB,kifcaliph/odoo,nitinitprof/odoo,hmen89/odoo,BT-fgarbely/odoo,steedos/odoo,Gitlab11/odoo,slevenhagen/odoo-npg,elmerdpadilla/iv,SerpentCS/odoo,kittiu/odoo,KontorConsulting/odoo,BT-fgarbely/odoo,savoirfairelinux/OpenUpgrade,diagramsoftware/odoo,kittiu/odoo,tangyiyong/odoo,tinkhaven-organization/odoo,acshan/odoo,Endika/odoo,pedrobaeza/OpenUpgrade,tvibliani/odoo,pplatek/odoo,storm-computers/odoo,ecosoft-odoo/odoo,jolevq/odoopub,havt/odoo,numerigraphe/odoo,CatsAndDogsbvba/odoo,poljeff/odoo,factorlibre/OCB,markeTIC/OCB,sinbazhou/odoo,bguillot/OpenUpgrade,rowemoore/odoo,nhomar/odoo-mirror,andreparames/odoo,pedrobaeza/odoo,kybriainfotech/iSocioCRM,JGarcia-Panach/odoo,minhtuancn/odoo,codekaki/odoo,damdam-s/OpenUpgrade,osvalr/odoo,pplatek/odoo,ubic135/odoo-design,prospwro/odoo,markeTIC/OCB,gsmartway/odoo,shaufi10/odoo,srsman/odoo,OpusVL/odoo,FlorianLudwig/odoo,guewen/OpenUpgrade,florentx/OpenUpgrade,feroda/odoo,credativUK/OCB,rgeleta/odoo,lgscofield/odoo,optima-ict/odoo,waytai/odoo,Ernesto99/odoo,codekaki/odoo,syci/OCB,odooindia/odoo,wangjun/odoo,cloud9UG/odoo,matrixise/odoo,aviciimaxwell/odoo,nhomar/odoo-mirror,arthru/OpenUpgrade,erkrishna9/odoo,nhomar/odoo,patmcb/odoo,sebalix/OpenUpgrade,Adel-Magebinary/odoo,OpenUpgrade-dev/OpenUpgrade,sergio-incaser/odoo,doomsterinc/odoo,NeovaHealth/odoo,abstract-open-solutions/OCB,chiragjogi/odoo,frouty/odoogoeen,sleven
hagen/odoo-npg,ramadhane/odoo,MarcosCommunity/odoo,cloud9UG/odoo,steedos/odoo,mszewczy/odoo,datenbetrieb/odoo,alexcuellar/odoo,avoinsystems/odoo,OpenUpgrade/OpenUpgrade,oasiswork/odoo,Elico-Corp/odoo_OCB,tangyiyong/odoo,TRESCLOUD/odoopub,sve-odoo/odoo,havt/odoo,demon-ru/iml-crm,ujjwalwahi/odoo,sv-dev1/odoo,lsinfo/odoo,rahuldhote/odoo,Maspear/odoo,Endika/odoo,wangjun/odoo,hmen89/odoo,hopeall/odoo,odootr/odoo,alqfahad/odoo,takis/odoo,Grirrane/odoo,mvaled/OpenUpgrade,slevenhagen/odoo,numerigraphe/odoo,ramitalat/odoo,draugiskisprendimai/odoo,mkieszek/odoo,n0m4dz/odoo,ClearCorp-dev/odoo,odoo-turkiye/odoo,acshan/odoo,Nick-OpusVL/odoo,hubsaysnuaa/odoo,addition-it-solutions/project-all,jiangzhixiao/odoo,lgscofield/odoo,gdgellatly/OCB1,ShineFan/odoo,credativUK/OCB,Endika/odoo,KontorConsulting/odoo,nhomar/odoo,ihsanudin/odoo,aviciimaxwell/odoo,storm-computers/odoo,shingonoide/odoo,ojengwa/odoo,alhashash/odoo,fdvarela/odoo8,SAM-IT-SA/odoo,Endika/OpenUpgrade,ThinkOpen-Solutions/odoo,pedrobaeza/odoo,pedrobaeza/OpenUpgrade,BT-ojossen/odoo,dariemp/odoo,csrocha/OpenUpgrade,Grirrane/odoo,mkieszek/odoo,storm-computers/odoo,OpenUpgrade-dev/OpenUpgrade,joshuajan/odoo,makinacorpus/odoo,Eric-Zhong/odoo,massot/odoo,havt/odoo,Eric-Zhong/odoo,dllsf/odootest,avoinsystems/odoo,vrenaville/ngo-addons-backport,vnsofthe/odoo,rdeheele/odoo,mmbtba/odoo,luiseduardohdbackup/odoo,Endika/odoo,rowemoore/odoo,takis/odoo,dezynetechnologies/odoo,OpenPymeMx/OCB,factorlibre/OCB,acshan/odoo,kirca/OpenUpgrade,VielSoft/odoo,SAM-IT-SA/odoo,erkrishna9/odoo,alexcuellar/odoo,sysadminmatmoz/OCB,guerrerocarlos/odoo,Ichag/odoo,blaggacao/OpenUpgrade,oliverhr/odoo,Endika/OpenUpgrade,Danisan/odoo-1,apocalypsebg/odoo,Daniel-CA/odoo,Danisan/odoo-1,jpshort/odoo,mvaled/OpenUpgrade,minhtuancn/odoo,VitalPet/odoo,Noviat/odoo,srsman/odoo,Maspear/odoo,wangjun/odoo,osvalr/odoo,glovebx/odoo,Maspear/odoo,joshuajan/odoo,tvibliani/odoo,slevenhagen/odoo-npg,apanju/GMIO_Odoo,Adel-Magebinary/odoo,ujjwalwahi/odoo,rschnapka/odoo,SerpentCS/odoo,xzYue/odoo,abdellatifkarroum/odoo,ujjwalwahi/odoo,dalegregory/odoo,abdellatifkarroum/odoo,jaxkodex/odoo,optima-ict/odoo,Drooids/odoo,nuncjo/odoo,FlorianLudwig/odoo,BT-rmartin/odoo,deKupini/erp,microcom/odoo,ecosoft-odoo/odoo,dllsf/odootest,idncom/odoo,nitinitprof/odoo,minhtuancn/odoo,gvb/odoo,gavin-feng/odoo,dariemp/odoo,odooindia/odoo,ClearCorp-dev/odoo,salaria/odoo,provaleks/o8,Nick-OpusVL/odoo,CubicERP/odoo,GauravSahu/odoo,rschnapka/odoo,datenbetrieb/odoo,osvalr/odoo,alexteodor/odoo,alqfahad/odoo,rgeleta/odoo,sysadminmatmoz/OCB,leorochael/odoo,BT-rmartin/odoo,CubicERP/odoo,TRESCLOUD/odoopub,Nick-OpusVL/odoo,FlorianLudwig/odoo,bkirui/odoo,Endika/OpenUpgrade,x111ong/odoo,guewen/OpenUpgrade,abdellatifkarroum/odoo,mlaitinen/odoo,gavin-feng/odoo,jfpla/odoo,hifly/OpenUpgrade,funkring/fdoo,mvaled/OpenUpgrade,bakhtout/odoo-educ,cpyou/odoo,bkirui/odoo,oliverhr/odoo,lsinfo/odoo,highco-groupe/odoo,realsaiko/odoo,hopeall/odoo,dfang/odoo,KontorConsulting/odoo,ramadhane/odoo,vrenaville/ngo-addons-backport,cedk/odoo,tinkhaven-organization/odoo,papouso/odoo,sv-dev1/odoo,zchking/odoo,chiragjogi/odoo,brijeshkesariya/odoo,BT-rmartin/odoo,Elico-Corp/odoo_OCB,dezynetechnologies/odoo,joariasl/odoo,tangyiyong/odoo,camptocamp/ngo-addons-backport,nexiles/odoo,erkrishna9/odoo,jolevq/odoopub,ojengwa/odoo,vnsofthe/odoo,zchking/odoo,oihane/odoo,makinacorpus/odoo,Codefans-fan/odoo,fgesora/odoo,luiseduardohdbackup/odoo,jfpla/odoo,camptocamp/ngo-addons-backport,frouty/odoo_oph,CatsAndDogsbvba/odoo,ramitalat/odoo,fjbatresv/odoo,pplatek/odoo,andreparames/
odoo,thanhacun/odoo,gsmartway/odoo,NL66278/OCB,alexcuellar/odoo,ovnicraft/odoo,Codefans-fan/odoo,jiachenning/odoo,odoo-turkiye/odoo,bguillot/OpenUpgrade,nexiles/odoo,fuhongliang/odoo,sinbazhou/odoo,jpshort/odoo,frouty/odoogoeen,guerrerocarlos/odoo,sadleader/odoo,minhtuancn/odoo,ecosoft-odoo/odoo,funkring/fdoo,tarzan0820/odoo,brijeshkesariya/odoo,Kilhog/odoo,steedos/odoo,glovebx/odoo,tinkerthaler/odoo,hbrunn/OpenUpgrade,kittiu/odoo,guerrerocarlos/odoo,nuncjo/odoo,kirca/OpenUpgrade,bkirui/odoo,ingadhoc/odoo,RafaelTorrealba/odoo,colinnewell/odoo,pedrobaeza/odoo,hanicker/odoo,gavin-feng/odoo,makinacorpus/odoo,ThinkOpen-Solutions/odoo,syci/OCB,ujjwalwahi/odoo,janocat/odoo,minhtuancn/odoo,CopeX/odoo,florentx/OpenUpgrade,draugiskisprendimai/odoo,jolevq/odoopub,lombritz/odoo,highco-groupe/odoo,arthru/OpenUpgrade,rdeheele/odoo,joariasl/odoo,prospwro/odoo,lombritz/odoo,shaufi10/odoo,colinnewell/odoo,CubicERP/odoo,dgzurita/odoo,cdrooom/odoo,guerrerocarlos/odoo,bakhtout/odoo-educ,BT-fgarbely/odoo,0k/OpenUpgrade,gvb/odoo,alhashash/odoo,demon-ru/iml-crm,OpenPymeMx/OCB,jaxkodex/odoo,BT-rmartin/odoo,PongPi/isl-odoo,ecosoft-odoo/odoo,Kilhog/odoo,jesramirez/odoo,Ichag/odoo,credativUK/OCB,Drooids/odoo,OSSESAC/odoopubarquiluz,srsman/odoo,blaggacao/OpenUpgrade,klunwebale/odoo,salaria/odoo,provaleks/o8,srsman/odoo,acshan/odoo,rgeleta/odoo,windedge/odoo,rdeheele/odoo,x111ong/odoo,jusdng/odoo,goliveirab/odoo,codekaki/odoo,slevenhagen/odoo,cpyou/odoo,salaria/odoo,leoliujie/odoo,fuhongliang/odoo,bwrsandman/OpenUpgrade,shivam1111/odoo,chiragjogi/odoo,oihane/odoo,camptocamp/ngo-addons-backport,abenzbiria/clients_odoo,PongPi/isl-odoo,luistorresm/odoo,ihsanudin/odoo,cloud9UG/odoo,kybriainfotech/iSocioCRM,ThinkOpen-Solutions/odoo,cpyou/odoo,guewen/OpenUpgrade,Adel-Magebinary/odoo,dezynetechnologies/odoo,camptocamp/ngo-addons-backport,PongPi/isl-odoo,tarzan0820/odoo,mkieszek/odoo,sysadminmatmoz/OCB,massot/odoo,credativUK/OCB,frouty/odoogoeen,factorlibre/OCB,sergio-incaser/odoo,camptocamp/ngo-addons-backport,bguillot/OpenUpgrade,nagyistoce/odoo-dev-odoo,leorochael/odoo,jiangzhixiao/odoo,JGarcia-Panach/odoo,OpenUpgrade/OpenUpgrade,codekaki/odoo,gorjuce/odoo,tangyiyong/odoo,wangjun/odoo,odooindia/odoo,odootr/odoo,juanalfonsopr/odoo,ubic135/odoo-design,addition-it-solutions/project-all,rahuldhote/odoo,spadae22/odoo,JonathanStein/odoo,omprakasha/odoo,nuncjo/odoo,ingadhoc/odoo,rowemoore/odoo,andreparames/odoo,AuyaJackie/odoo,odoousers2014/odoo,Ernesto99/odoo,CubicERP/odoo,prospwro/odoo,osvalr/odoo,guewen/OpenUpgrade,jiachenning/odoo,apanju/odoo,0k/odoo,cloud9UG/odoo,cedk/odoo,hoatle/odoo,Endika/odoo,ShineFan/odoo,apanju/GMIO_Odoo,Ichag/odoo,NeovaHealth/odoo,ehirt/odoo,highco-groupe/odoo,makinacorpus/odoo,mszewczy/odoo,leorochael/odoo,mlaitinen/odoo,spadae22/odoo,hassoon3/odoo,camptocamp/ngo-addons-backport,windedge/odoo,hip-odoo/odoo,jeasoft/odoo,ThinkOpen-Solutions/odoo,alhashash/odoo,MarcosCommunity/odoo,simongoffin/website_version,Eric-Zhong/odoo,salaria/odoo,bwrsandman/OpenUpgrade,SerpentCS/odoo,x111ong/odoo,0k/odoo,luistorresm/odoo,dllsf/odootest,andreparames/odoo,Kilhog/odoo,kittiu/odoo,dariemp/odoo,ihsanudin/odoo,JGarcia-Panach/odoo,havt/odoo,synconics/odoo,ApuliaSoftware/odoo,dkubiak789/odoo,arthru/OpenUpgrade,tangyiyong/odoo,massot/odoo,naousse/odoo,vnsofthe/odoo,0k/OpenUpgrade,charbeljc/OCB,omprakasha/odoo,ihsanudin/odoo,diagramsoftware/odoo,dkubiak789/odoo,jpshort/odoo,Nowheresly/odoo,ingadhoc/odoo,markeTIC/OCB,avoinsystems/odoo,cloud9UG/odoo,guewen/OpenUpgrade,QianBIG/odoo,KontorConsulting/odoo,lombritz/odoo,markeTI
C/OCB,nhomar/odoo,RafaelTorrealba/odoo,RafaelTorrealba/odoo,0k/odoo,jiangzhixiao/odoo,provaleks/o8,naousse/odoo,sve-odoo/odoo,rubencabrera/odoo,hoatle/odoo,apanju/GMIO_Odoo,bplancher/odoo,hoatle/odoo,elmerdpadilla/iv,savoirfairelinux/odoo,MarcosCommunity/odoo,BT-fgarbely/odoo,shivam1111/odoo,florian-dacosta/OpenUpgrade,hopeall/odoo,tvtsoft/odoo8,Adel-Magebinary/odoo,factorlibre/OCB,omprakasha/odoo,sergio-incaser/odoo,patmcb/odoo,sebalix/OpenUpgrade,ClearCorp-dev/odoo,alhashash/odoo,gorjuce/odoo,JonathanStein/odoo,tangyiyong/odoo,hoatle/odoo,fevxie/odoo,dgzurita/odoo,ClearCorp-dev/odoo,shaufi10/odoo,jaxkodex/odoo,fjbatresv/odoo,gorjuce/odoo,xujb/odoo,ubic135/odoo-design,pedrobaeza/OpenUpgrade,frouty/odoo_oph,QianBIG/odoo,shaufi/odoo,JCA-Developpement/Odoo,CatsAndDogsbvba/odoo,JonathanStein/odoo,hassoon3/odoo,shaufi10/odoo,hbrunn/OpenUpgrade,damdam-s/OpenUpgrade,gavin-feng/odoo,mmbtba/odoo,Endika/odoo,bplancher/odoo,gorjuce/odoo,Maspear/odoo,janocat/odoo,wangjun/odoo,brijeshkesariya/odoo,OpenUpgrade-dev/OpenUpgrade,CubicERP/odoo,tinkhaven-organization/odoo,hanicker/odoo,ClearCorp-dev/odoo,odootr/odoo,shivam1111/odoo,microcom/odoo,elmerdpadilla/iv,apanju/odoo,savoirfairelinux/OpenUpgrade,jeasoft/odoo,bakhtout/odoo-educ,xzYue/odoo,grap/OpenUpgrade,VielSoft/odoo,abstract-open-solutions/OCB,glovebx/odoo,alexcuellar/odoo,Ichag/odoo,optima-ict/odoo,leoliujie/odoo,addition-it-solutions/project-all,ehirt/odoo,nuncjo/odoo,doomsterinc/odoo,tvibliani/odoo,ygol/odoo,gvb/odoo,klunwebale/odoo,cedk/odoo,NL66278/OCB,BT-astauder/odoo,JonathanStein/odoo,Noviat/odoo,ingadhoc/odoo,Antiun/odoo,KontorConsulting/odoo,BT-astauder/odoo,odoo-turkiye/odoo,savoirfairelinux/OpenUpgrade,csrocha/OpenUpgrade,BT-ojossen/odoo,Bachaco-ve/odoo,florentx/OpenUpgrade,Antiun/odoo,GauravSahu/odoo,Maspear/odoo,factorlibre/OCB,Drooids/odoo,OSSESAC/odoopubarquiluz,srimai/odoo,prospwro/odoo,massot/odoo,savoirfairelinux/odoo,sergio-incaser/odoo,kittiu/odoo,Endika/OpenUpgrade,NeovaHealth/odoo,storm-computers/odoo,collex100/odoo,charbeljc/OCB,inspyration/odoo,ehirt/odoo,shivam1111/odoo,waytai/odoo,dfang/odoo,nitinitprof/odoo,matrixise/odoo,jesramirez/odoo,odootr/odoo,gdgellatly/OCB1,dfang/odoo,jeasoft/odoo,collex100/odoo,fjbatresv/odoo,OpenPymeMx/OCB,grap/OCB,dalegregory/odoo,numerigraphe/odoo,kirca/OpenUpgrade,numerigraphe/odoo,OpenUpgrade/OpenUpgrade,dkubiak789/odoo,incaser/odoo-odoo,fuselock/odoo,JGarcia-Panach/odoo,OSSESAC/odoopubarquiluz,Noviat/odoo,rdeheele/odoo,sv-dev1/odoo,AuyaJackie/odoo,SAM-IT-SA/odoo,sysadminmatmoz/OCB,fuhongliang/odoo,windedge/odoo,mszewczy/odoo,rubencabrera/odoo,rschnapka/odoo,PongPi/isl-odoo,hassoon3/odoo,havt/odoo,bguillot/OpenUpgrade,aviciimaxwell/odoo,windedge/odoo,oihane/odoo,VitalPet/odoo,cedk/odoo,grap/OCB,Elico-Corp/odoo_OCB,OpenUpgrade/OpenUpgrade,alexcuellar/odoo,chiragjogi/odoo,oliverhr/odoo,mvaled/OpenUpgrade,srimai/odoo,diagramsoftware/odoo,Daniel-CA/odoo,ChanduERP/odoo,thanhacun/odoo,TRESCLOUD/odoopub,gorjuce/odoo,dgzurita/odoo,takis/odoo,0k/OpenUpgrade,dkubiak789/odoo,blaggacao/OpenUpgrade,jfpla/odoo,savoirfairelinux/odoo,jusdng/odoo,tarzan0820/odoo,mkieszek/odoo,virgree/odoo,bakhtout/odoo-educ,nexiles/odoo,CopeX/odoo,realsaiko/odoo,oihane/odoo,chiragjogi/odoo,Antiun/odoo,provaleks/o8,ChanduERP/odoo,abstract-open-solutions/OCB,omprakasha/odoo,bwrsandman/OpenUpgrade,lgscofield/odoo,blaggacao/OpenUpgrade,kifcaliph/odoo,ojengwa/odoo,naousse/odoo,mszewczy/odoo,grap/OCB,dkubiak789/odoo,abenzbiria/clients_odoo,collex100/odoo,jaxkodex/odoo,hubsaysnuaa/odoo,javierTerry/odoo,kittiu/odoo,ThinkOpen-Solutio
ns/odoo,rowemoore/odoo,poljeff/odoo,joariasl/odoo,BT-astauder/odoo,mkieszek/odoo,nuuuboo/odoo,klunwebale/odoo,arthru/OpenUpgrade,mvaled/OpenUpgrade,alexcuellar/odoo,BT-ojossen/odoo,ovnicraft/odoo,fuhongliang/odoo,waytai/odoo,nuncjo/odoo,Adel-Magebinary/odoo,Ichag/odoo,alexteodor/odoo,hip-odoo/odoo,Bachaco-ve/odoo,ojengwa/odoo,rgeleta/odoo,sinbazhou/odoo,Drooids/odoo,synconics/odoo,MarcosCommunity/odoo,steedos/odoo,colinnewell/odoo,srimai/odoo,brijeshkesariya/odoo,havt/odoo,ehirt/odoo,odootr/odoo,andreparames/odoo,odoousers2014/odoo,lombritz/odoo,joshuajan/odoo,grap/OCB,diagramsoftware/odoo,windedge/odoo,xujb/odoo,Noviat/odoo,ramadhane/odoo,alqfahad/odoo,jiangzhixiao/odoo,erkrishna9/odoo,grap/OCB,Grirrane/odoo,alhashash/odoo,prospwro/odoo,fevxie/odoo,csrocha/OpenUpgrade,tvibliani/odoo,Drooids/odoo,doomsterinc/odoo,grap/OpenUpgrade,jeasoft/odoo,ramitalat/odoo,bguillot/OpenUpgrade,Adel-Magebinary/odoo,CopeX/odoo,apocalypsebg/odoo,naousse/odoo,cysnake4713/odoo,syci/OCB,agrista/odoo-saas,papouso/odoo,NL66278/OCB,oasiswork/odoo,Drooids/odoo,tangyiyong/odoo,jolevq/odoopub,sv-dev1/odoo,shaufi/odoo,Elico-Corp/odoo_OCB,lsinfo/odoo,fuselock/odoo,hip-odoo/odoo,steedos/odoo,fuselock/odoo,nagyistoce/odoo-dev-odoo,waytai/odoo,prospwro/odoo,charbeljc/OCB,poljeff/odoo,abstract-open-solutions/OCB,simongoffin/website_version,funkring/fdoo,collex100/odoo,tvtsoft/odoo8,guerrerocarlos/odoo,sinbazhou/odoo,hanicker/odoo,ChanduERP/odoo,oasiswork/odoo,goliveirab/odoo,sinbazhou/odoo,florian-dacosta/OpenUpgrade,QianBIG/odoo,vnsofthe/odoo,dkubiak789/odoo,sergio-incaser/odoo,incaser/odoo-odoo,syci/OCB,ShineFan/odoo,florentx/OpenUpgrade,lombritz/odoo,goliveirab/odoo,Elico-Corp/odoo_OCB,lightcn/odoo,n0m4dz/odoo,luiseduardohdbackup/odoo,microcom/odoo,fgesora/odoo,pedrobaeza/OpenUpgrade,javierTerry/odoo,jiachenning/odoo,oliverhr/odoo,apanju/odoo,hmen89/odoo,ygol/odoo,virgree/odoo,Ichag/odoo,shivam1111/odoo,feroda/odoo,feroda/odoo,VitalPet/odoo,grap/OpenUpgrade,VielSoft/odoo,nagyistoce/odoo-dev-odoo,KontorConsulting/odoo,dsfsdgsbngfggb/odoo,MarcosCommunity/odoo,gvb/odoo,Eric-Zhong/odoo,blaggacao/OpenUpgrade,idncom/odoo,rschnapka/odoo,numerigraphe/odoo,ApuliaSoftware/odoo,hanicker/odoo,javierTerry/odoo,savoirfairelinux/OpenUpgrade,juanalfonsopr/odoo,srimai/odoo,dllsf/odootest,virgree/odoo,OpenUpgrade/OpenUpgrade,oihane/odoo,ChanduERP/odoo,bobisme/odoo,hopeall/odoo,NeovaHealth/odoo,gdgellatly/OCB1,hubsaysnuaa/odoo,dariemp/odoo,tarzan0820/odoo,eino-makitalo/odoo,Ernesto99/odoo,apanju/GMIO_Odoo,jpshort/odoo,naousse/odoo,ingadhoc/odoo,fuhongliang/odoo,klunwebale/odoo,Ernesto99/odoo,agrista/odoo-saas,BT-fgarbely/odoo,RafaelTorrealba/odoo,slevenhagen/odoo-npg,stephen144/odoo,jesramirez/odoo,sysadminmatmoz/OCB,hubsaysnuaa/odoo,factorlibre/OCB,nagyistoce/odoo-dev-odoo,dsfsdgsbngfggb/odoo,Bachaco-ve/odoo,xujb/odoo,realsaiko/odoo,pedrobaeza/odoo,tvtsoft/odoo8,FlorianLudwig/odoo,eino-makitalo/odoo,doomsterinc/odoo,OpenPymeMx/OCB,oasiswork/odoo,CatsAndDogsbvba/odoo,glovebx/odoo,Codefans-fan/odoo,GauravSahu/odoo,Ernesto99/odoo,numerigraphe/odoo,fevxie/odoo,lightcn/odoo,damdam-s/OpenUpgrade,cysnake4713/odoo,lsinfo/odoo,srsman/odoo,VitalPet/odoo,thanhacun/odoo,oihane/odoo,optima-ict/odoo,bakhtout/odoo-educ,Noviat/odoo,cdrooom/odoo,Nowheresly/odoo,camptocamp/ngo-addons-backport,fuselock/odoo,lgscofield/odoo,ApuliaSoftware/odoo,hubsaysnuaa/odoo,incaser/odoo-odoo,mkieszek/odoo,syci/OCB,ecosoft-odoo/odoo,patmcb/odoo,acshan/odoo,fevxie/odoo,highco-groupe/odoo,fgesora/odoo,odoousers2014/odoo,hbrunn/OpenUpgrade,NeovaHealth/odoo,gavin-feng/odoo,msz
ewczy/odoo,sebalix/OpenUpgrade,tinkhaven-organization/odoo,goliveirab/odoo,bkirui/odoo,sebalix/OpenUpgrade,rdeheele/odoo,Antiun/odoo,lightcn/odoo,dezynetechnologies/odoo,janocat/odoo,bealdav/OpenUpgrade,luiseduardohdbackup/odoo,stephen144/odoo,FlorianLudwig/odoo,ThinkOpen-Solutions/odoo,jiangzhixiao/odoo,Maspear/odoo,Ichag/odoo,waytai/odoo,apanju/odoo,luistorresm/odoo,vnsofthe/odoo,jiachenning/odoo,idncom/odoo,Noviat/odoo,janocat/odoo,VitalPet/odoo,nuuuboo/odoo,waytai/odoo,OpenPymeMx/OCB,FlorianLudwig/odoo,Elico-Corp/odoo_OCB,ubic135/odoo-design,poljeff/odoo,mmbtba/odoo,hip-odoo/odoo,CopeX/odoo,dgzurita/odoo,synconics/odoo,rubencabrera/odoo,fevxie/odoo,christophlsa/odoo,KontorConsulting/odoo,tvtsoft/odoo8,stonegithubs/odoo,shingonoide/odoo,ygol/odoo,OSSESAC/odoopubarquiluz,goliveirab/odoo,aviciimaxwell/odoo,aviciimaxwell/odoo,vrenaville/ngo-addons-backport,christophlsa/odoo,avoinsystems/odoo,stonegithubs/odoo,shaufi10/odoo,OpusVL/odoo,ehirt/odoo,JCA-Developpement/Odoo,charbeljc/OCB,frouty/odoo_oph,dsfsdgsbngfggb/odoo,jfpla/odoo,jfpla/odoo,Nick-OpusVL/odoo,demon-ru/iml-crm,lgscofield/odoo,sinbazhou/odoo,hifly/OpenUpgrade,rubencabrera/odoo,brijeshkesariya/odoo,fdvarela/odoo8,diagramsoftware/odoo,odoousers2014/odoo,odootr/odoo,klunwebale/odoo,mmbtba/odoo,spadae22/odoo,PongPi/isl-odoo,Daniel-CA/odoo,salaria/odoo,tinkerthaler/odoo,gvb/odoo,dkubiak789/odoo,matrixise/odoo,zchking/odoo,florentx/OpenUpgrade,fuhongliang/odoo,juanalfonsopr/odoo,spadae22/odoo,omprakasha/odoo,Nowheresly/odoo,datenbetrieb/odoo,mustafat/odoo-1,incaser/odoo-odoo,abenzbiria/clients_odoo,damdam-s/OpenUpgrade,jfpla/odoo,lgscofield/odoo,ShineFan/odoo,kirca/OpenUpgrade,jeasoft/odoo,frouty/odoo_oph,Bachaco-ve/odoo,Eric-Zhong/odoo,Gitlab11/odoo,jaxkodex/odoo,nexiles/odoo,Endika/OpenUpgrade,BT-ojossen/odoo,Nowheresly/odoo,klunwebale/odoo,mustafat/odoo-1,rgeleta/odoo,Bachaco-ve/odoo,shaufi/odoo,eino-makitalo/odoo,osvalr/odoo,QianBIG/odoo,JGarcia-Panach/odoo,codekaki/odoo,Bachaco-ve/odoo,rowemoore/odoo,colinnewell/odoo,juanalfonsopr/odoo,bplancher/odoo,pedrobaeza/OpenUpgrade,shingonoide/odoo,Grirrane/odoo,fevxie/odoo,grap/OpenUpgrade,luiseduardohdbackup/odoo,jiachenning/odoo,xujb/odoo,idncom/odoo,hopeall/odoo,ecosoft-odoo/odoo,Endika/odoo,n0m4dz/odoo,GauravSahu/odoo,tinkhaven-organization/odoo,funkring/fdoo,lightcn/odoo,ihsanudin/odoo,joariasl/odoo,mszewczy/odoo,ygol/odoo,Endika/OpenUpgrade,CopeX/odoo,shaufi/odoo,gorjuce/odoo,stonegithubs/odoo,idncom/odoo,0k/odoo,dalegregory/odoo,osvalr/odoo,hip-odoo/odoo,jusdng/odoo,doomsterinc/odoo,dgzurita/odoo,abdellatifkarroum/odoo,makinacorpus/odoo,sve-odoo/odoo,ccomb/OpenUpgrade,ShineFan/odoo,ramitalat/odoo,CubicERP/odoo,tinkerthaler/odoo,0k/OpenUpgrade,ygol/odoo,nhomar/odoo-mirror,microcom/odoo,Gitlab11/odoo,alqfahad/odoo,cysnake4713/odoo,alexcuellar/odoo,slevenhagen/odoo,frouty/odoo_oph,sv-dev1/odoo,BT-ojossen/odoo,datenbetrieb/odoo,glovebx/odoo,alexteodor/odoo,hifly/OpenUpgrade,abdellatifkarroum/odoo,pplatek/odoo,kybriainfotech/iSocioCRM,javierTerry/odoo,ramadhane/odoo,dgzurita/odoo,slevenhagen/odoo,sv-dev1/odoo,nagyistoce/odoo-dev-odoo,csrocha/OpenUpgrade,sebalix/OpenUpgrade,gsmartway/odoo,jaxkodex/odoo,nagyistoce/odoo-dev-odoo,damdam-s/OpenUpgrade,ChanduERP/odoo,avoinsystems/odoo,datenbetrieb/odoo,nuncjo/odoo,sebalix/OpenUpgrade,guerrerocarlos/odoo,frouty/odoogoeen,addition-it-solutions/project-all,tinkerthaler/odoo,Ernesto99/odoo,shaufi/odoo,idncom/odoo,OpusVL/odoo,naousse/odoo,cpyou/odoo,oliverhr/odoo,n0m4dz/odoo,factorlibre/OCB,salaria/odoo,dsfsdgsbngfggb/odoo,nhomar/odoo,cedk/odoo,gd
gellatly/OCB1,SAM-IT-SA/odoo,abenzbiria/clients_odoo,colinnewell/odoo,savoirfairelinux/odoo,ingadhoc/odoo,thanhacun/odoo,bakhtout/odoo-educ,Adel-Magebinary/odoo,omprakasha/odoo,leoliujie/odoo,ubic135/odoo-design,optima-ict/odoo,rubencabrera/odoo,laslabs/odoo,feroda/odoo,bguillot/OpenUpgrade,grap/OpenUpgrade,javierTerry/odoo,leorochael/odoo,Codefans-fan/odoo,idncom/odoo,salaria/odoo,minhtuancn/odoo,draugiskisprendimai/odoo,cdrooom/odoo,xzYue/odoo,virgree/odoo,ehirt/odoo,bwrsandman/OpenUpgrade,markeTIC/OCB,luiseduardohdbackup/odoo,dalegregory/odoo,luistorresm/odoo,hbrunn/OpenUpgrade,blaggacao/OpenUpgrade,jesramirez/odoo,poljeff/odoo,slevenhagen/odoo-npg,cloud9UG/odoo,shivam1111/odoo,abstract-open-solutions/OCB,odoo-turkiye/odoo,hanicker/odoo,bkirui/odoo,bealdav/OpenUpgrade,simongoffin/website_version,lightcn/odoo,VielSoft/odoo,bealdav/OpenUpgrade,dfang/odoo,chiragjogi/odoo,guerrerocarlos/odoo,csrocha/OpenUpgrade,gsmartway/odoo,realsaiko/odoo,RafaelTorrealba/odoo,ccomb/OpenUpgrade,NL66278/OCB,jaxkodex/odoo,addition-it-solutions/project-all,Nick-OpusVL/odoo,gsmartway/odoo,apanju/odoo,jeasoft/odoo,OpusVL/odoo,fjbatresv/odoo,x111ong/odoo,sebalix/OpenUpgrade,sysadminmatmoz/OCB,BT-astauder/odoo,x111ong/odoo,frouty/odoogoeen,mustafat/odoo-1,Codefans-fan/odoo,dfang/odoo,x111ong/odoo,sv-dev1/odoo,Danisan/odoo-1,dsfsdgsbngfggb/odoo,luistorresm/odoo,grap/OCB,tinkerthaler/odoo,CatsAndDogsbvba/odoo,gsmartway/odoo,jeasoft/odoo,numerigraphe/odoo,wangjun/odoo,rahuldhote/odoo,csrocha/OpenUpgrade,nitinitprof/odoo,vrenaville/ngo-addons-backport,AuyaJackie/odoo,syci/OCB,NeovaHealth/odoo,bwrsandman/OpenUpgrade,diagramsoftware/odoo,alexteodor/odoo,andreparames/odoo,mlaitinen/odoo,feroda/odoo,juanalfonsopr/odoo,TRESCLOUD/odoopub,hassoon3/odoo,rschnapka/odoo,virgree/odoo,hoatle/odoo,fgesora/odoo,Gitlab11/odoo,bakhtout/odoo-educ,charbeljc/OCB,Antiun/odoo,CopeX/odoo,sve-odoo/odoo,makinacorpus/odoo,nhomar/odoo-mirror,fossoult/odoo,gavin-feng/odoo,n0m4dz/odoo,inspyration/odoo,vrenaville/ngo-addons-backport,minhtuancn/odoo,jfpla/odoo,dalegregory/odoo,CatsAndDogsbvba/odoo,collex100/odoo,Bachaco-ve/odoo,kifcaliph/odoo,draugiskisprendimai/odoo,sve-odoo/odoo,ramadhane/odoo,Daniel-CA/odoo,guewen/OpenUpgrade,takis/odoo,charbeljc/OCB,demon-ru/iml-crm,collex100/odoo,apanju/GMIO_Odoo,Nick-OpusVL/odoo,kirca/OpenUpgrade,virgree/odoo,nuuuboo/odoo,QianBIG/odoo,fevxie/odoo,Danisan/odoo-1,shaufi10/odoo,ovnicraft/odoo,tvibliani/odoo,lightcn/odoo,bealdav/OpenUpgrade,Gitlab11/odoo,JGarcia-Panach/odoo,hbrunn/OpenUpgrade,alexteodor/odoo,deKupini/erp,Nick-OpusVL/odoo,savoirfairelinux/OpenUpgrade,mustafat/odoo-1,spadae22/odoo,ApuliaSoftware/odoo,realsaiko/odoo,srsman/odoo,xujb/odoo,luistorresm/odoo,Nowheresly/odoo,cedk/odoo,spadae22/odoo,dsfsdgsbngfggb/odoo,Grirrane/odoo,matrixise/odoo,acshan/odoo,kirca/OpenUpgrade,arthru/OpenUpgrade,sadleader/odoo,hassoon3/odoo,apanju/GMIO_Odoo,collex100/odoo,storm-computers/odoo,ramitalat/odoo,wangjun/odoo,ShineFan/odoo,Eric-Zhong/odoo,virgree/odoo,grap/OpenUpgrade,BT-rmartin/odoo,eino-makitalo/odoo,shingonoide/odoo,JCA-Developpement/Odoo,highco-groupe/odoo,funkring/fdoo,ovnicraft/odoo,rahuldhote/odoo,Antiun/odoo,brijeshkesariya/odoo,fuselock/odoo,lsinfo/odoo,apanju/GMIO_Odoo,shaufi/odoo,florian-dacosta/OpenUpgrade,frouty/odoogoeen,poljeff/odoo,tinkhaven-organization/odoo,BT-fgarbely/odoo,mustafat/odoo-1,mmbtba/odoo,slevenhagen/odoo-npg,SAM-IT-SA/odoo,jusdng/odoo,xujb/odoo,abenzbiria/clients_odoo,fgesora/odoo,laslabs/odoo,sadleader/odoo,havt/odoo,rschnapka/odoo,ovnicraft/odoo,arthru/OpenUpgrade,deKupini/erp
,elmerdpadilla/iv,stephen144/odoo,omprakasha/odoo,funkring/fdoo,sysadminmatmoz/OCB,matrixise/odoo,ygol/odoo,oasiswork/odoo,hassoon3/odoo,xujb/odoo,eino-makitalo/odoo,pplatek/odoo,vrenaville/ngo-addons-backport,mlaitinen/odoo,savoirfairelinux/OpenUpgrade,dfang/odoo,thanhacun/odoo,sadleader/odoo,nitinitprof/odoo,storm-computers/odoo,grap/OCB,klunwebale/odoo,oasiswork/odoo,ujjwalwahi/odoo,SerpentCS/odoo,rschnapka/odoo,JCA-Developpement/Odoo,BT-ojossen/odoo,CatsAndDogsbvba/odoo,juanalfonsopr/odoo,optima-ict/odoo,poljeff/odoo,leoliujie/odoo,odoo-turkiye/odoo,FlorianLudwig/odoo,rowemoore/odoo,apocalypsebg/odoo,oihane/odoo,bobisme/odoo,xzYue/odoo,simongoffin/website_version,patmcb/odoo,ovnicraft/odoo,credativUK/OCB,dariemp/odoo,jiachenning/odoo,leoliujie/odoo,nexiles/odoo,apocalypsebg/odoo,stonegithubs/odoo,alqfahad/odoo,colinnewell/odoo,inspyration/odoo,jeasoft/odoo,nhomar/odoo,naousse/odoo,hmen89/odoo,draugiskisprendimai/odoo,florian-dacosta/OpenUpgrade,hopeall/odoo,kifcaliph/odoo,ingadhoc/odoo,fdvarela/odoo8,draugiskisprendimai/odoo,fjbatresv/odoo,jolevq/odoopub,mvaled/OpenUpgrade,osvalr/odoo,PongPi/isl-odoo,JGarcia-Panach/odoo,MarcosCommunity/odoo,tarzan0820/odoo,stonegithubs/odoo,joariasl/odoo,massot/odoo,slevenhagen/odoo,laslabs/odoo,srimai/odoo,oasiswork/odoo,credativUK/OCB,bplancher/odoo,jesramirez/odoo,leoliujie/odoo,apanju/odoo,jpshort/odoo,n0m4dz/odoo,alhashash/odoo,gdgellatly/OCB1,fgesora/odoo,agrista/odoo-saas,ccomb/OpenUpgrade,bwrsandman/OpenUpgrade,gorjuce/odoo,mustafat/odoo-1,waytai/odoo,mmbtba/odoo,srimai/odoo,codekaki/odoo,tinkhaven-organization/odoo,takis/odoo,stephen144/odoo,ccomb/OpenUpgrade,bkirui/odoo,hip-odoo/odoo,ramadhane/odoo,hubsaysnuaa/odoo,fossoult/odoo,synconics/odoo,gsmartway/odoo,grap/OpenUpgrade,dsfsdgsbngfggb/odoo,zchking/odoo,fgesora/odoo,steedos/odoo,codekaki/odoo,tvtsoft/odoo8,oliverhr/odoo,OpenUpgrade/OpenUpgrade,VielSoft/odoo,papouso/odoo,tvibliani/odoo,goliveirab/odoo,deKupini/erp,christophlsa/odoo,janocat/odoo,bobisme/odoo,shivam1111/odoo,erkrishna9/odoo,VielSoft/odoo,glovebx/odoo,BT-rmartin/odoo,abdellatifkarroum/odoo,bobisme/odoo,pedrobaeza/OpenUpgrade,frouty/odoogoeen,microcom/odoo,fjbatresv/odoo,nitinitprof/odoo,kybriainfotech/iSocioCRM,christophlsa/odoo | addons/product_visible_discount/__openerp__.py | addons/product_visible_discount/__openerp__.py | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Prices Visible Discounts',
'version': '1.0',
'author': 'OpenERP SA',
'category': 'Sales Management',
'description': """
This module lets you calculate discounts on Sale Order lines and Invoice lines based on the partner's pricelist.
===============================================================================================================
To this end, a new check box named 'Visible Discount' is added to the pricelist form.
**Example:**
For the product PC1 and the partner "Asustek": if listprice=450, and the price
calculated using Asustek's pricelist is 225. If the check box is checked, we
will have on the sale order line: Unit price=450, Discount=50,00, Net price=225.
If the check box is unchecked, we will have on Sale Order and Invoice lines:
Unit price=225, Discount=0,00, Net price=225.
""",
'depends': ["sale","purchase"],
'demo': [],
'data': ['product_visible_discount_view.xml'],
'auto_install': False,
'installable': True,
'certificate' : "001144718884654279901",
'images': ['images/pricelists_visible_discount.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Prices Visible Discounts',
'version': '1.0',
'author': 'OpenERP SA',
'category': 'Sales Management',
'description': """
This module lets you calculate discounts on Sale Order lines and Invoice lines based on the partner's pricelist.
===============================================================================================================
To this end, a new check box named 'Visible Discount' is added to the pricelist form.
**Example:**
For the product PC1 and the partner "Asustek": if listprice=450, and the price
calculated using Asustek's pricelist is 225. If the check box is checked, we
will have on the sale order line: Unit price=450, Discount=50,00, Net price=225.
If the check box is unchecked, we will have on Sale Order and Invoice lines:
Unit price=225, Discount=0,00, Net price=225.
""",
"depends": ["sale","purchase"],
"demo_xml": [],
"update_xml": ['product_visible_discount_view.xml'],
"auto_install": False,
"installable": True,
"certificate" : "001144718884654279901",
'images': ['images/pricelists_visible_discount.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
54ca48a2b8cbd53cd6506fdbce47d16f03a28a7d | Add unit tests for bubble sort | ueg1990/aids | tests/test_sorting_and_searching/test_bubble_sort.py | tests/test_sorting_and_searching/test_bubble_sort.py | import unittest
from aids.sorting_and_searching.bubble_sort import bubble_sort
class BubbleSortTestCase(unittest.TestCase):
'''
Unit tests for bubble sort
'''
def setUp(self):
self.example_1 = [2, 5, 4, 3, 1]
def test_bubble_sort(self):
bubble_sort(self.example_1)
self.assertEqual(self.example_1,[1,2,3,4,5])
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
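# For reference, a minimal in-place bubble sort consistent with the test
# above (the real aids.sorting_and_searching.bubble_sort may differ):
#
#   def bubble_sort(items):
#       for end in range(len(items) - 1, 0, -1):
#           for i in range(end):
#               if items[i] > items[i + 1]:
#                   items[i], items[i + 1] = items[i + 1], items[i]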
| mit | Python |
|
f24bdbbd0a7abc32d49f844a4b97b128a3adc299 | add tests for KnownIssues | terceiro/squad,terceiro/squad,terceiro/squad,terceiro/squad | test/core/test_known_issues.py | test/core/test_known_issues.py | from django.test import TestCase
from django.utils import timezone
from unittest.mock import patch
from squad.core.models import Group, KnownIssue
class KnownIssueTest(TestCase):
def setUp(self):
self.group = Group.objects.create(slug='mygroup')
self.project = self.group.projects.create(slug='myproject')
self.env1 = self.project.environments.create(slug='env1')
self.suite1 = self.project.suites.create(slug="suite1")
self.date = timezone.now()
def test_active_known_issue(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
test = test_run.tests.create(suite=self.suite1, name="test_foo", result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environment.add(test_run.environment)
self.assertEqual(1, len(KnownIssue.active_by_environment(test_run.environment)))
def test_inactive_known_issue(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
test = test_run.tests.create(suite=self.suite1, name="test_foo", result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environment.add(test_run.environment)
known_issue.active = False
known_issue.save()
self.assertEqual(0, len(KnownIssue.active_by_environment(self.env1)))
def test_active_by_project(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
test = test_run.tests.create(suite=self.suite1, name="test_foo", result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environment.add(test_run.environment)
self.assertEqual(1, len(KnownIssue.active_by_project_and_test(self.project, test.full_name)))
def test_inactive_by_project(self):
build = self.project.builds.create(
datetime=self.date,
version=self.date.strftime("%Y%m%d"),
)
test_run = build.test_runs.create(environment=self.env1)
# create failed test
test = test_run.tests.create(suite=self.suite1, name="test_foo", result=False)
known_issue = KnownIssue.objects.create(
title="foo",
test_name=test.full_name
)
known_issue.save()
known_issue.environment.add(test_run.environment)
known_issue.active = False
known_issue.save()
self.assertEqual(0, len(KnownIssue.active_by_project_and_test(self.project, test.full_name)))
| agpl-3.0 | Python |
|
0b047f5b6123d851916ed12114512ddebec58225 | Add 20150509 question. | fantuanmianshi/Daily,fantuanmianshi/Daily | LeetCode/add_two_numbers.py | LeetCode/add_two_numbers.py | """
You are given two linked lists representing two non-negative numbers.
The digits are stored in reverse order and each of their nodes contain a single
digit. Add the two numbers and return it as a linked list.
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
Notice: a final carry can add an extra digit.
"""
class ListNode:
"""
Definition for singly-linked list.
"""
def __init__(self, x):
self.val = x
self.next = None
class Solution:
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
def addTwoNumbers(self, l1, l2):
new_root = None
temp_l1, temp_l2 = l1, l2
temp = new_root
extra_digit = 0
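# extra_digit is the carry propagated between digit positions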
while None not in [temp_l1, temp_l2]:
value = temp_l1.val + temp_l2.val + extra_digit
if temp is None:
temp = ListNode(value)
new_root = temp
else:
new_node = ListNode(value)
temp.next = new_node
temp = new_node
if temp.val >= 10:
temp.val -= 10
extra_digit = 1
else:
extra_digit = 0
temp_l1 = temp_l1.next
temp_l2 = temp_l2.next
continue_temp = temp_l1 if temp_l1 is not None else temp_l2
while continue_temp is not None:
value = continue_temp.val + extra_digit
new_node = ListNode(value)
temp.next = new_node
temp = new_node
if temp.val >= 10:
temp.val -= 10
extra_digit = 1
else:
extra_digit = 0
continue_temp = continue_temp.next
if extra_digit >= 1:
new_node = ListNode(extra_digit)
temp.next = new_node
temp = new_node
return new_root
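# Illustrative check (not part of the submission): for the docstring example,
# 342 + 465 = 807, so adding (2 -> 4 -> 3) and (5 -> 6 -> 4) yields
# 7 -> 0 -> 8; the final carry branch covers sums such as 5 + 5 = (0 -> 1).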
| mit | Python |
|
e5502056a36507bd6d77bc814014b45cd9afc8bf | make a version bumping and version sorting system | hepix-virtualisation/vmcaster | dishpub/versioning.py | dishpub/versioning.py | import re
regdelexp = re.compile('[-,.\/]')
regnumeric = re.compile('[0-9]+')
def split_line_by_delimiter(line,regex):
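# Splits `line` into the ordered sequence of regex matches and the text
# between them, preserving every character of the input.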
splitline = []
splititr = regex.finditer(line)
lstart = 0
for i in splititr:
(mstart,mend) = i.span()
if lstart != mstart:
splitline.append(line[lstart:mstart])
splitline.append(line[mstart:mend])
lstart = mend
linelen = len(line)
if lstart != linelen:
splitline.append(line[lstart:linelen])
return splitline
def string_sort(x,y):
xsplit = split_line_by_delimiter(x,regnumeric)
ysplit = split_line_by_delimiter(y,regnumeric)
ysplitlen = len(ysplit)
xsplitlen = len(xsplit)
minsplitlen = ysplitlen
if xsplitlen < ysplitlen:
minsplitlen = xsplitlen
for i in range(minsplitlen):
if xsplit[i] == ysplit[i]:
continue
if (xsplit[i].isdigit() and ysplit[i].isdigit()):
rc = int(0)
if int(xsplit[i]) > int(ysplit[i]):
rc = -1
if int(xsplit[i]) < int(ysplit[i]):
rc = 1
return rc
if xsplit[i].isdigit():
return -1
if ysplit[i].isdigit():
return 1
if xsplit[i] > ysplit[i]:
return -1
if xsplit[i] < ysplit[i]:
return 1
if xsplitlen < ysplitlen:
return 1
if xsplitlen > ysplitlen:
return -1
return 0
def split_numeric_sort(x, y):
xsplit = split_line_by_delimiter(x,regdelexp)
ysplit = split_line_by_delimiter(y,regdelexp)
ysplitlen = len(ysplit)
xsplitlen = len(xsplit)
minsplitlen = ysplitlen
if xsplitlen < ysplitlen:
minsplitlen = xsplitlen
for i in range(minsplitlen):
if xsplit[i] == ysplit[i]:
continue
if (xsplit[i].isdigit() and ysplit[i].isdigit()):
rc = int(0)
if int(xsplit[i]) > int(ysplit[i]):
rc = -1
if int(xsplit[i]) < int(ysplit[i]):
rc = 1
return rc
if xsplit[i].isdigit():
return -1
if ysplit[i].isdigit():
return 1
rc = string_sort(xsplit[i],ysplit[i])
if rc != 0:
return rc
if xsplitlen < ysplitlen:
return 1
if xsplitlen > ysplitlen:
return -1
return 0
def bumpVersion(versionString, versionLevel = 0):
# 0 patch level
# 1 minor level
# 2 major version
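# versionLevel counts numeric fields from the right-hand end of the
# version string: 0 bumps the last number, 1 the one before it, and so on.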
split = split_line_by_delimiter(versionString,regnumeric)
length = len(split)
indexs = range(0,length )
indexs.reverse()
indexToBeBumped = -1
indexCounter = -1
output = ""
for i in indexs:
oldVal = split[i]
if split[i].isdigit():
indexCounter += 1
if indexCounter == versionLevel:
oldVal = str(int(split[i]) + 1)
output = oldVal + output
if indexCounter < versionLevel:
# We have not found the correct index to update
return None
return output
if __name__ == "__main__":
result = bumpVersion("0.0.1", 0)
if "0.0.2" != result:
print "Fail"
result = bumpVersion("0.0.1a", 0)
if "0.0.2a" != result:
print "Fail"
result = bumpVersion("0.0.1a", 1)
if "0.1.1a" != result:
print "Fail"
result = bumpVersion("0.0.1a", 2)
if "1.0.1a" != result:
print "Fail"
result = bumpVersion("0.0.1a", 3)
if None != result:
print "Fail"
| apache-2.0 | Python |
|
e46da8f316485c7c9e11ffe751108539f9254a68 | Create ClientUDP.py | shutebt01/legendary-engine | ClientUDP.py | ClientUDP.py | '''
Created on 12 Feb 2015
@author: shutebt01
'''
#!/usr/bin/env python3
'''
Packet formatting:
[type, src-name, src-group, data]
'''
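# Example packets, derived from the handlers below (illustrative only):
#   chat:      ["Message", "alice", "Global", "hello room"]
#   pm:        ["Event", "alice", "Global", "pm", "bob", "hi bob"]
#   broadcast: ["Event", "alice", "Global", "bcast", "hi everyone"]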
import socket, threading, json
name = input("Enter User Name: ")
port = 16500
#host = input("Enter host: ")
room = "Global"
showall = False
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind(('', port))
#s.connect((host, port))
class InputThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self, target=self.input, name="Thread-Input")
def input(self):
global room
while True:
inp = input()
data = None
if not(inp.startswith('!')):
#assumes its a message if not a command
data = json.dumps(["Message", name, room, inp])
else:
# Creates initial packet with data for tracking
packet = ["Event", name, room]
split = inp.split(' ', 1)
if split[0] == "!pm":
pmsplit = split[1].split(' ', 1)
#TODO implement better validation
if (len(split) == 2):
#Adds data to packet
packet.append("pm")
packet.append(pmsplit[0])
packet.append(pmsplit[1])
data = json.dumps(packet)
if split[0] == "!room":
room = split[1]
print("You changed to room:" + room)
if split[0] == "!broadcast" or split[0] == "!bcast":
msg = split[1]
packet.append("bcast")
packet.append(msg)
data = json.dumps(packet)
if data:
s.sendto(data.encode("ascii"), ("<broadcast>", port))
class OutputThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self, target=self.output, name="Thread-Output")
def output(self):
while True:
data = s.recv(2048).decode("ascii")
array = json.loads(data)
if array[0] == "Message":
if array[2] == room:
print(array[1] + " (" + array[2] + "):" + array[3])
elif array[0] == "Event":
if array[3] == "pm" and array[4] == name:
print(array[1] + " (" + array[2] + ") -> You: " + array[5])
elif array[3] == "bcast":
print(array[1] + " (" + "*" + "):" + array[4])
Inp = InputThread()
Inp.start()
Out = OutputThread()
Out.start()
| unlicense | Python |
|
bf4b4ae886d8b631c443d1d992159f1922232dca | Create code.py | amitkumarj441/DiveInDjango | Code/code.py | Code/code.py | from django.db import models
class Address(models.Model):
address=models.CharField(max_length=255,blank=True)
city=models.CharField(max_length=150,blank=True)
    state=models.CharField(max_length=2,blank=True) # Such as US for United States of America, IN for India
pin=models.CharField(max_length=15,blank=True)
class Contact(models.Model):
first_name=models.CharField(max_length=255,blank=True)
last_name=models.CharField(max_length=255,blank=True)
email=models.EmailField(blank=True)
phone=models.CharField(max_length=150,blank=True)
    birthdate=models.DateField(auto_now_add=True)
address=models.ForeignKey(Address,null=True)
| mpl-2.0 | Python |
|
67596d081059a004e5f7ab15f7972773fdf2f15e | ADD PyGrid SetupService message tests | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | tests/syft/grid/messages/setup_msg_test.py | tests/syft/grid/messages/setup_msg_test.py | # syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
content= request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
content= request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_setup_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2 | apache-2.0 | Python |
|
3608c4d3b559ba7fa1bd9629231e98196681caa4 | add package py-gdbgui (#7715) | tmerrick1/spack,krafczyk/spack,krafczyk/spack,LLNL/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,LLNL/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,tmerrick1/spack,tmerrick1/spack,EmreAtes/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,iulian787/spack,LLNL/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,tmerrick1/spack | var/spack/repos/builtin/packages/py-gdbgui/package.py | var/spack/repos/builtin/packages/py-gdbgui/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyGdbgui(PythonPackage):
"""gdbgui is a modern, free, browser-based frontend to gdb"""
homepage = "https://gdbgui.com"
url = "https://pypi.io/packages/source/g/gdbgui/gdbgui-0.11.2.1.tar.gz"
version('0.11.2.1', 'c15dd3f60fe372b2e93f705c7ee75f51')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('gdb', type='run')
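    # Usage note (assumption based on upstream gdbgui behaviour, not this
    # recipe): after `spack install py-gdbgui`, the `gdbgui` entry point
    # starts the browser-based frontend on a local port.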
| lgpl-2.1 | Python |
|
478072e8350d03655364ea9147bbe21bafabbcce | Add tests for template tags | makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek | geotrek/feedback/tests/test_template_tags.py | geotrek/feedback/tests/test_template_tags.py | from datetime import datetime
from django.test import TestCase
from geotrek.authent.tests.factories import UserFactory, UserProfileFactory
from geotrek.feedback.templatetags.feedback_tags import (
predefined_emails, resolved_intervention_info, status_ids_and_colors)
from geotrek.feedback.tests.factories import (PredefinedEmailFactory,
ReportStatusFactory)
from geotrek.maintenance.tests.factories import ReportInterventionFactory
class TestFeedbackTemplateTags(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = UserFactory(username="CCCC")
UserProfileFactory.create(user=cls.user1, extended_username="Communauté des Communes des Communautés Communataires")
cls.user2 = UserFactory(username="Kurt")
UserProfileFactory.create(user=cls.user2)
solved_status = ReportStatusFactory(identifier='solved_intervention', color="#448654")
cls.intervention_solved_1 = ReportInterventionFactory(date=datetime(year=1997, month=4, day=4).date())
cls.report_1 = cls.intervention_solved_1.target
cls.report_1.status = solved_status
cls.report_1.assigned_user = cls.user1
cls.report_1.save()
cls.intervention_solved_2 = ReportInterventionFactory(date=datetime(year=1997, month=5, day=4).date())
cls.report_2 = cls.intervention_solved_2.target
cls.report_2.status = solved_status
cls.report_2.assigned_user = cls.user2
cls.report_2.save()
cls.email1 = PredefinedEmailFactory()
cls.email2 = PredefinedEmailFactory()
def test_resolved_intervention_username(self):
self.assertEqual(
"{\"date\": \"04/04/1997\", \"username\": \"Communaut\\u00e9 des Communes des Communaut\\u00e9s Communataires\"}",
resolved_intervention_info(self.report_1)
)
self.assertEqual(
"{\"date\": \"04/05/1997\", \"username\": \"Kurt\"}",
resolved_intervention_info(self.report_2)
)
def test_status_ids_and_colors(self):
self.assertEqual(
"{\"1\": {\"id\": \"solved_intervention\", \"color\": \"#448654\"}, \"2\": {\"id\": \"ID 1\", \"color\": \"#444444\"}, \"3\": {\"id\": \"ID 2\", \"color\": \"#444444\"}}",
status_ids_and_colors()
)
def test_predefined_emails(self):
self.assertEqual(
"{\"1\": {\"label\": \"Predefined Email 0\", \"text\": \"Some email body content 0\"}, \"2\": {\"label\": \"Predefined Email 1\", \"text\": \"Some email body content 1\"}}",
predefined_emails()
)
| bsd-2-clause | Python |
|
8355cb358d14589a194926d37beeb5af7af2a591 | Increase event image url limit from 200 | sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer | falmer/events/migrations/0012_auto_20170905_1208.py | falmer/events/migrations/0012_auto_20170905_1208.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-05 11:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0011_auto_20170905_1028'),
]
operations = [
migrations.AlterField(
model_name='mslevent',
name='image_url',
field=models.URLField(max_length=2000),
),
migrations.AlterField(
model_name='mslevent',
name='url',
field=models.URLField(max_length=2000),
),
]
| mit | Python |
|
a16b4401f37f08d8cb5e1f9ec1b7d4a3221360ab | Add test case for regular extrusion | nschloe/python4gmsh | test/test_regular_extrusion.py | test/test_regular_extrusion.py | # -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.built_in.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
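    # i.e. 6 * 10 * 5 * 3 = 900 tetrahedra for the layer counts chosen above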
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
| bsd-3-clause | Python |
|
5c7b70061d6e96619c6e3a40c87aaec39b408fdf | Enable subscription admin area | praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control | subscription/admin.py | subscription/admin.py | from django.contrib import admin
from subscription.models import MessageSet, Message, Subscription
admin.site.register(MessageSet)
admin.site.register(Message)
admin.site.register(Subscription)
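# Sketch of a richer registration (field names are assumptions, not taken
# from the models, so this stays commented out):
#
# class SubscriptionAdmin(admin.ModelAdmin):
#     list_display = ('user', 'message_set', 'active')
#
# admin.site.register(Subscription, SubscriptionAdmin)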
| bsd-3-clause | Python |
|
7bd6f3e7751deecfc3cd555fc071d722c856802c | Implement division using built in library function | dawsonjon/Chips-2.0,dawsonjon/Chips-2.0,dawsonjon/Chips-2.0,dawsonjon/Chips-2.0,dawsonjon/Chips-2.0 | chips/compiler/builtins.py | chips/compiler/builtins.py | #!/usr/bin/env python
"""Support Library for builtin Functionality"""
__author__ = "Jon Dawson"
__copyright__ = "Copyright (C) 2013, Jonathan P Dawson"
__version__ = "0.1"
builtins="""
unsigned unsigned_divide_xxxx(unsigned dividend, unsigned divisor){
unsigned denom = divisor;
unsigned bit = 1;
unsigned quotient = 0;
if( denom > dividend ) return 0;
if( denom == dividend ) return 1;
while(denom <= dividend){
denom <<= 1;
bit <<= 1;
}
denom >>= 1;
bit >>= 1;
while(bit){
if(dividend >= denom){
dividend -= denom;
quotient |= bit;
}
bit >>= 1;
denom >>= 1;
}
return quotient;
}
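/* Worked example of the shift-subtract loop above: 13 / 3.
   denom,bit double 3,1 -> 6,2 -> 12,4 -> 24,8; 24 > 13, so back off to 12,4.
   13 >= 12: quotient |= 4, remainder 1; 6 and 3 no longer fit, so 13/3 = 4. */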
int divide_xxxx(int dividend, int divisor){
unsigned udividend, udivisor, uquotient;
unsigned dividend_sign, divisor_sign, quotient_sign;
dividend_sign = dividend & 0x8000u;
divisor_sign = divisor & 0x8000u;
quotient_sign = dividend_sign ^ divisor_sign;
udividend = dividend_sign ? -dividend : dividend;
udivisor = divisor_sign ? -divisor : divisor;
uquotient = unsigned_divide_xxxx(udividend, udivisor);
return quotient_sign ? -uquotient : uquotient;
}
long unsigned long_unsigned_divide_xxxx(long unsigned dividend, long unsigned divisor){
long unsigned denom = divisor;
long unsigned bit = 1;
long unsigned quotient = 0;
if( denom > dividend ) return 0;
if( denom == dividend ) return 1;
while(denom <= dividend){
denom <<= 1;
bit <<= 1;
}
denom >>= 1;
bit >>= 1;
while(bit){
if(dividend >= denom){
dividend -= denom;
quotient |= bit;
}
bit >>= 1;
denom >>= 1;
}
return quotient;
}
long int long_divide_xxxx(long int dividend, long int divisor){
long unsigned udividend, udivisor, uquotient;
long unsigned dividend_sign, divisor_sign, quotient_sign;
dividend_sign = dividend & 0x80000000ul;
divisor_sign = divisor & 0x80000000ul;
quotient_sign = dividend_sign ^ divisor_sign;
udividend = dividend_sign ? -dividend : dividend;
udivisor = divisor_sign ? -divisor : divisor;
uquotient = long_unsigned_divide_xxxx(udividend, udivisor);
return quotient_sign ? -uquotient : uquotient;
}
"""
| mit | Python |
|
6eb49ae8dcf33a7d7be9ed3c8208bc83a9a46757 | Create python_wiki_one.py | stephaneAG/Python_tests,stephaneAG/Python_tests,stephaneAG/Python_tests,stephaneAG/Python_tests | ckOLDab/python_wiki_one.py | ckOLDab/python_wiki_one.py | import time
import BaseHTTPServer
HOST_NAME = '127.0.0.1' # !!!REMEMBER TO CHANGE THIS!!!
PORT_NUMBER = 80 # Maybe set this to 9000.
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_HEAD(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
def do_GET(s):
"""Respond to a GET request."""
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><head><title>Title goes here.</title></head>")
s.wfile.write("<body><p>This is a test.</p>")
# If someone went to "http://something.somewhere.net/foo/bar/",
# then s.path equals "/foo/bar/".
s.wfile.write("<p>You accessed path: %s</p>" % s.path)
s.wfile.write("</body></html>")
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
| mit | Python |
|
4fd9984e4187a64906ca9e0b3a7ccf01486c156d | Create Fred14_LeftArmServos.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/Ray.Edgley/fred/fred01/Fred14_LeftArmServos.py | home/Ray.Edgley/fred/fred01/Fred14_LeftArmServos.py | #########################################################
# Fred14 Setup the Left Arm Servos
#########################################################
# We will be using the following services:
# Servo Service
#########################################################
# In Fred's Left Arm, we have the Bicep Rotator, the Elbow,
# the Wrist and the five fingers.
# You know, it's just like the Right Arm.
# Let's start with the Bicep Rotator.
# All services are created using the Runtime Service.
leftBicep = Runtime.createAndStart("leftBicep", "Servo")
# Next we need to attach the servo Service to a Controller Service, in this case it will be the left arm
# Adafruit16ChServoDriver. We also need to tell the Servo Service which pin on the controller
# the servo is connected to, in this case pin 15
leftBicep.attach(leftArm,15)
# Now we tell the Servo Service about our servo's limits, in some cases if the servo goes too far, things will break
leftBicep.setMinMax(0,180)
# This allows you to map the input to the Servo service to an actual servo position output
leftBicep.map(0,180,1,180)
# there is a rest command that can be issued to the servo,
# when that happens, this is the position that the servo will go to
leftBicep.setRest(90)
# if your servo runs backwards, then set this to true in order to reverse it.
leftBicep.setInverted(False)
# degrees per second rotational velocity, setting -1 will set the speed to the servo's default
leftBicep.setVelocity(60)
# this allows the Servo Service to turn off the motor when it has reached the target position.
# the major advantage to this is the servos will use less power and have a lower chance of burning out.
leftBicep.setAutoDisable(True)
# Ok now that we have fully defined the leftBicep servo let's make sure it is in the rest position.
leftBicep.rest()
# commands not used here but will be in other parts of the program are the following:
# leftBicep.moveTo(x) where x is the position you want to move to.
# leftBicep.moveToBlocking(x) as above except execution of the program will pause until the position is reached.
# leftBicep.disable() will turn off the servo without unloading the service.
# leftBicep.enable() the opposite of disable will turn the servo back on after being disabled.
# disable and enable are not required if setAutoDisable is set to True
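# For example (illustrative target position): leftBicep.moveTo(120) would
# rotate the bicep to position 120 at the 60 deg/sec velocity set above.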
# For each servo that we have, we need to create a Servo Service, so this will be a process
# of repeating what we did above for each servo just using a different object name.
leftElbow = Runtime.createAndStart("leftElbow", "Servo")
leftElbow.attach(leftArm,14)
leftElbow.setMinMax(0,180)
leftElbow.map(0,180,1,180)
leftElbow.setRest(90)
leftElbow.setInverted(False)
leftElbow.setVelocity(60)
leftElbow.setAutoDisable(True)
leftElbow.rest()
leftWrist = Runtime.createAndStart("leftWrist", "Servo")
leftWrist.attach(leftArm,0)
leftWrist.setMinMax(0,180)
leftWrist.map(0,180,1,180)
leftWrist.setRest(90)
leftWrist.setInverted(False)
leftWrist.setVelocity(60)
leftWrist.setAutoDisable(True)
leftWrist.rest()
leftThumb = Runtime.createAndStart("leftThumb", "Servo")
leftThumb.attach(leftArm,1)
leftThumb.setMinMax(0,180)
leftThumb.map(0,180,1,180)
leftThumb.setRest(90)
leftThumb.setInverted(False)
leftThumb.setVelocity(60)
leftThumb.setAutoDisable(True)
leftThumb.rest()
leftIndex = Runtime.createAndStart("leftIndex", "Servo")
leftIndex.attach(leftArm,2)
leftIndex.setMinMax(0,180)
leftIndex.map(0,180,1,180)
leftIndex.setRest(90)
leftIndex.setInverted(False)
leftIndex.setVelocity(60)
leftIndex.setAutoDisable(True)
leftIndex.rest()
leftMajure = Runtime.createAndStart("leftMajure", "Servo")
leftMajure.attach(leftArm,3)
leftMajure.setMinMax(0,180)
leftMajure.map(0,180,1,180)
leftMajure.setRest(90)
leftMajure.setInverted(False)
leftMajure.setVelocity(60)
leftMajure.setAutoDisable(True)
leftMajure.rest()
leftRing = Runtime.createAndStart("leftRing", "Servo")
leftRing.attach(leftArm,4)
leftRing.setMinMax(0,180)
leftRing.map(0,180,1,180)
leftRing.setRest(90)
leftRing.setInverted(False)
leftRing.setVelocity(60)
leftRing.setAutoDisable(True)
leftRing.rest()
leftLittle = Runtime.createAndStart("leftLittle", "Servo")
leftLittle.attach(leftArm,5)
leftLittle.setMinMax(0,180)
leftLittle.map(0,180,1,180)
leftLittle.setRest(90)
leftLittle.setInverted(False)
leftLittle.setVelocity(60)
leftLittle.setAutoDisable(True)
leftLittle.rest()
| apache-2.0 | Python |
|
53cdd6e7bcb37567382f3b3688b6a55f1b2968be | Add test_binaryclassifier | Neurita/darwin | tests/test_binaryclassifier.py | tests/test_binaryclassifier.py |
import numpy as np
from sklearn import svm, datasets
from darwin.pipeline import ClassificationPipeline
def test_binary_classification_with_classification_pipeline():
# generate the dataset
n_samples=100
n_features=20
x, y = datasets.make_gaussian_quantiles(mean=None, cov=1.0, n_samples=n_samples,
n_features=n_features, n_classes=2,
shuffle=True, random_state=1)
# another way to generate the data
# x, y = datasets.make_hastie_10_2(n_samples=10, random_state=1)
# -- test with darwin
classifier_name='linsvm'
cvmethod='10'
n_feats = x.shape[1]
pipe = ClassificationPipeline(n_feats=n_feats, clfmethod=classifier_name,
cvmethod=cvmethod)
results, metrics = pipe.cross_validation(x, y)
assert(results is not None)
| bsd-3-clause | Python |
|
bb80025f3ed8169a2558e9c5c6bc4db5a862d7ae | Integrate LLVM at llvm/llvm-project@529a3d87a799 | yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "529a3d87a799a2cba29bc1d0f426a00d5bb4c88f"
LLVM_SHA256 = "3d2ca52bd36ad3904f2f5d0e43935b0e82f3d1ac137e0a89025141e36735944f"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:macos_build_fix.patch",
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "6139626d738fd03b968e07698f5cd26924e3cd65"
LLVM_SHA256 = "b9581ac44a7d9dd3a8470497cddb63c387ab6520d82d077fb609bc29b4e7b887"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:macos_build_fix.patch",
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
fee6e923b27947721ba1ea4ce5005f54eabba421 | Integrate LLVM at llvm/llvm-project@4504e1134c91 | yongtang/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,paolodedios/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tenso
rflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "4504e1134c9118f3c322685f8a90129e09bab92c"
LLVM_SHA256 = "2b1d7a96ff37600cae12d2ed51b9f0554b1bbc6511ffe51ac7525928b29bab44"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "0ad1d9fdf22dad41312e02b8bc990bf58ce1744c"
LLVM_SHA256 = "517db6d771cf24d9f0aea6d4fdd59591347c7eb9d86ef58521fe8cb929fbe82b"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
a868b0d057b34dbd487a1e3d2b08d5489651b3ff | Integrate LLVM at llvm/llvm-project@fe611b1da84b | Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,yongtang/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,yongtang/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the 
repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "fe611b1da84b9442c093739394d336af9e99c1a1"
LLVM_SHA256 = "52edc892b020736f4c53e52b63687ee7caab93c90a0062b4026f4d49fc18266f"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:BUILD.bazel",
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "bd7ece4e063e7afd08cbaa311878c09aadf5ec21"
LLVM_SHA256 = "e9390dfa94c1143f35437bea8a011b030194e047bc3df45e2627cff88f83d2ed"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:BUILD.bazel",
)
| apache-2.0 | Python |
d8d87a0cdcce8c8ec1770aa722a17a40d25409e6 | Add migration script to migrate all "extra" entries to lists | chennan47/osf.io,Nesiehr/osf.io,adlius/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,Nesiehr/osf.io,acshi/osf.io,laurenrevere/osf.io,cslzchen/osf.io,acshi/osf.io,caneruguz/osf.io,caseyrollins/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,adlius/osf.io,sloria/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,hmoco/osf.io,cwisecarver/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,aaxelb/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,crcresearch/osf.io,aaxelb/osf.io,caseyrollins/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,sloria/osf.io,cslzchen/osf.io,acshi/osf.io,icereval/osf.io,sloria/osf.io,leb2dg/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,erinspace/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,adlius/osf.io,icereval/osf.io,felliott/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,chrisseto/osf.io,leb2dg/osf.io,felliott/osf.io,leb2dg/osf.io,felliott/osf.io,crcresearch/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,erinspace/osf.io,pattisdr/osf.io,mattclark/osf.io,mfraezz/osf.io,binoculars/osf.io,chrisseto/osf.io,Nesiehr/osf.io,binoculars/osf.io,cslzchen/osf.io,laurenrevere/osf.io,adlius/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,acshi/osf.io,hmoco/osf.io,aaxelb/osf.io,chrisseto/osf.io,baylee-d/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,chennan47/osf.io,binoculars/osf.io,TomBaxter/osf.io,hmoco/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,TomBaxter/osf.io,icereval/osf.io,acshi/osf.io,caneruguz/osf.io,chennan47/osf.io,baylee-d/osf.io,mattclark/osf.io,Johnetordoff/osf.io | scripts/migration/migrate_registration_extra_again.py | scripts/migration/migrate_registration_extra_again.py | """
Changes existing question.extra on all registrations and draft registrations
to a list. Required for multiple files attached to a question.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as scripts_utils
from website.models import Node, DraftRegistration
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def migrate_extras(queryset, dry=True):
migrated = []
errored = set()
model_name = 'Node'
for obj in queryset:
# 1 transaction per obj, to prevent locking errors
with TokuTransaction():
changed = False
if isinstance(obj, DraftRegistration):
meta = [obj.registration_metadata]
model_name = 'DraftRegistration'
if obj.registered_node: # Skip over drafts that have been completed
continue
else:
meta = obj.registered_meta.values()
model_name = 'Node'
if not meta:
continue
for data in meta:
for question, answer in data.items():
if isinstance(answer.get('extra'), dict):
if not answer.get('extra'):
logger.info('Migrating extra for question {!r} on {} {}'.format(question, model_name, obj._id))
answer['extra'] = []
changed = True
else: # We don't expect to get here
logger.error('Found non-empty "extra" on {} {} for question {!r}'.format(model_name, obj._id, question))
errored.add(obj)
for value in answer.values():
if isinstance(value, dict):
for k, v in value.items():
if isinstance(v, dict) and isinstance(v.get('extra'), dict):
if not v.get('extra'):
logger.info('Migrating {}/extra for question {} on {} {}'.format(k, question, model_name, obj._id))
v['extra'] = []
changed = True
else: # We don't expect to get here
logger.error('Found non-empty "{}/extra" on {} {} for question {}'.format(k, model_name, obj._id, question))
errored.add(obj)
if changed:
migrated.append(obj._id)
if model_name == 'DraftRegistration':
# Prevent datetime_updated from being updated on save
obj._fields['datetime_updated']._auto_now = False
if not dry:
changed = obj.save()
if model_name == 'DraftRegistration':
assert changed == {'registration_metadata'}, 'Expected only registration_metadata to change. Got: {}'.format(changed)
return migrated, errored
def migrate(dry=True):
registrations = Node.find(
Q('is_registration', 'eq', True) &
Q('registered_meta', 'ne', None)
)
regs_migrated, reg_errored = migrate_extras(registrations, dry=dry)
drafts = DraftRegistration.find(Q('registration_metadata', 'ne', {}))
drafts_migrated, drafts_errored = migrate_extras(drafts, dry=dry)
logger.info('Migrated registered_meta for {} registrations'.format(len(regs_migrated)))
if reg_errored:
logger.error('{} errored: {}'.format(len(reg_errored), reg_errored))
logger.info('Migrated registered_meta for {} draft registrations'.format(len(drafts_migrated)))
if drafts_errored:
logger.error('{} errored: {}'.format(len(drafts_errored), drafts_errored))
if __name__ == '__main__':
dry_run = '--dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
migrate(dry=dry_run)
| apache-2.0 | Python |
|
a973c3b5d1683cd27f8b88c4c38daa934dae75c1 | refactor storeRegiser | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | usermanage/views/storeRegister.py | usermanage/views/storeRegister.py | from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.models import User, Group
from django.contrib.auth.decorators import login_required, user_passes_test, permission_required
from django.contrib.auth.forms import UserCreationForm
from customermanage.models import Coupon, Wallet
from storemanage.models import Ticket
# Create your views here.
from usermanage import models
def storeRegister(request):
if request.user.is_authenticated:
return redirect('index:index')
if request.method == 'GET':
return render(request,'usermanage/register-store.html')
data = request.POST
    # check whether the user already exists
if User.objects.filter(username=data['username']).exists():
return render(request,'usermanage/register-store.html', {
'user_error' : True,
})
user = User.objects.create_user(data['username'], password = data['password'])
g = Group.objects.get(name='store')
g.user_set.add(user)
user.save()
g.save()
storeprofile = models.Store(user = user, store_name=data['storename'], profile_image_url=data['profile_image_url'])
storeprofile.save()
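    # redirect_after_login is assumed to be provided by a sibling module in
    # this views package; it is not imported or defined here.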
return redirect_after_login(user)
| mit | Python |
|
9316bc07c77e2f51332a40bf430cef117f4d89e1 | Add script to check for Dockerfile coverage | rue89-tech/configuration,stvstnfrd/configuration,michaelsteiner19/open-edx-configuration,armaan/edx-configuration,hastexo/edx-configuration,armaan/edx-configuration,stvstnfrd/configuration,gsehub/configuration,stvstnfrd/configuration,hks-epod/configuration,open-craft/configuration,edx/configuration,open-craft/configuration,armaan/edx-configuration,proversity-org/configuration,stvstnfrd/configuration,hks-epod/configuration,michaelsteiner19/open-edx-configuration,edx/configuration,gsehub/configuration,edx/configuration,Stanford-Online/configuration,hastexo/edx-configuration,gsehub/configuration,hastexo/edx-configuration,open-craft/configuration,appsembler/configuration,rue89-tech/configuration,arbrandes/edx-configuration,michaelsteiner19/open-edx-configuration,open-craft/configuration,EDUlib/configuration,nunpa/configuration,nunpa/configuration,arbrandes/edx-configuration,mitodl/configuration,mitodl/configuration,Stanford-Online/configuration,rue89-tech/configuration,hks-epod/configuration,Stanford-Online/configuration,jorgeomarmh/configuration,appsembler/configuration,jorgeomarmh/configuration,EDUlib/configuration,armaan/edx-configuration,EDUlib/configuration,nunpa/configuration,stvstnfrd/configuration,hastexo/edx-configuration,gsehub/configuration,proversity-org/configuration,michaelsteiner19/open-edx-configuration,proversity-org/configuration,arbrandes/edx-configuration,arbrandes/edx-configuration,Stanford-Online/configuration,edx/configuration,hks-epod/configuration,jorgeomarmh/configuration,proversity-org/configuration,appsembler/configuration,appsembler/configuration,nunpa/configuration,mitodl/configuration,EDUlib/configuration,mitodl/configuration,jorgeomarmh/configuration,rue89-tech/configuration,rue89-tech/configuration,EDUlib/configuration,hks-epod/configuration,Stanford-Online/configuration,proversity-org/configuration,gsehub/configuration,hastexo/edx-configuration,arbrandes/edx-configuration | util/check_dockerfile_coverage.py | util/check_dockerfile_coverage.py | import yaml
import os
import pathlib2
import itertools
import argparse
import logging
import sys
TRAVIS_BUILD_DIR = os.environ.get("TRAVIS_BUILD_DIR")
CONFIG_FILE_PATH = pathlib2.Path(TRAVIS_BUILD_DIR, "util", "parsefiles_config.yml")
LOGGER = logging.getLogger(__name__)
def check_coverage(containers):
# open config file containing container weights
config_file_path = pathlib2.Path(CONFIG_FILE_PATH)
with (config_file_path.open(mode='r')) as file:
try:
config = yaml.load(file)
except yaml.YAMLError, exc:
LOGGER.error("error in configuration file: %s" % str(exc))
sys.exit(1)
# get container weights
weights = config.get("weights")
# convert all containers in config file to a list of tuples (<container>, <weight>)
weights_list = [x.items() for x in weights]
weights_list = list(itertools.chain.from_iterable(weights_list))
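    # e.g. (illustrative container names) a config of
    #   weights:
    #     - edxapp: 15
    #     - forum: 2
    # yields weights_list = [('edxapp', 15), ('forum', 2)]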
# performs intersection between weighted containers and input containers
used_containers = [x for x in weights_list if x[0] in containers]
    # determine which Dockerfiles are not covered; i.e. the set difference of the Dockerfiles to build
    # minus the Dockerfiles available to be built is non-empty
uncovered = set(containers) - set([x[0] for x in used_containers])
# exit with error code if uncovered Dockerfiles exist
if uncovered:
LOGGER.error("The following Dockerfiles are not described in the parsefiles_config.yml file: {}. Please see the following documentation on how to add Dockerfile ranks to the configuration file: {}".format(uncovered, "https://github.com/edx/configuration/blob/master/util/README.md"))
sys.exit(1)
def arg_parse():
    parser = argparse.ArgumentParser(description = 'Given a list of Dockerfiles that need to be built as the '
        'result of some commit change, checks that each one has a weight entry in the hard-coded '
        'parsefiles_config.yml, and exits with an error when any Dockerfile is uncovered.')
parser.add_argument('containers', help = "the Dockerfiles that need to be built as the result of some commit change and whose coverage is checked")
return parser.parse_args()
if __name__ == '__main__':
args = arg_parse()
# configure logging
logging.basicConfig()
containers = []
for word in args.containers.split():
containers.append(word)
check_coverage(containers)
| agpl-3.0 | Python |
|
c78c82987feb4dbe91b750ab90a3d163fc1340c6 | Add datatables filter mixin | allanlei/rhinocloud-utils | rhinocloud/contrib/jquery/views.py | rhinocloud/contrib/jquery/views.py | from django.views import generic
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
class DataTablesServerDataMixin(object):
query_class = Q
initial_query_kwargs = {}
searchable_fields = ()
def get_searchable_fields(self):
if self.searchable_fields is not None:
fields = self.searchable_fields
else:
raise ImproperlyConfigured('Provide searchable_fields or override get_searchable_fields().')
return fields
def get_query_class(self):
if self.query_class:
qc = self.query_class
else:
raise ImproperlyConfigured('Provide query_class or override get_query_class().')
return qc
def get_initial_query_kwargs(self):
if self.initial_query_kwargs is not None:
kwargs = self.initial_query_kwargs
else:
raise ImproperlyConfigured('Provide initial_query_kwargs or override get_initial_query_kwargs().')
return kwargs
def get_initial_query(self):
return self.get_query_class()(**self.get_initial_query_kwargs())
def get_searchterm_query(self, field, value):
return self.get_query_class()(**{'%s__contains' % field: value})
def get_queryset(self, **kwargs):
queryset = super(DataTablesServerDataMixin, self).get_queryset(**kwargs)
iSortingCols = int(self.request.GET.get('iSortingCols', -1))
sSearch = self.request.GET.get('sSearch', None)
if sSearch is not None:
query = self.get_initial_query()
for field in self.get_searchable_fields():
query.add(self.get_searchterm_query(field, sSearch), Q.OR)
queryset = queryset.filter(query)
ordering = []
for i in range(iSortingCols):
            sSortDir = self.request.GET['sSortDir_%s' % i]
            # DataTables sends 'asc'/'desc'; Django marks descending with a '-' prefix
            iSortCol = int(self.request.GET['iSortCol_%s' % i])
            ordering.append('%s%s' % (sSortDir == 'desc' and '-' or '', self.get_searchable_fields()[iSortCol]))
queryset = queryset.order_by(*ordering)
return queryset
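# Minimal usage sketch (model and field names are assumptions):
#
# class BookListView(DataTablesServerDataMixin, generic.ListView):
#     model = Book
#     searchable_fields = ('title', 'author')
#
# The mixin then filters and orders the queryset from the DataTables
# sSearch / iSortingCols request parameters.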
| bsd-3-clause | Python |
|
4aeec42ca745db546b66f0a708f1c13f806164f8 | Add commmand to re-assign person cases to facilities | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/enikshay/management/commands/reassign_from_facility.py | custom/enikshay/management/commands/reassign_from_facility.py | from __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from casexml.apps.case.util import get_all_changes_to_case_property
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
iter_all_active_person_episode_cases,
)
from dimagi.utils.chunked import chunked
MJK = 'df661f7aaf384e9c98d88beeedb83050'
ALERT_INDIA = 'af50474dd6b747b29a2934b7b0359bdf'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument('--commit', action='store_true')
def handle(self, domain, **options):
commit = options['commit']
filename = "reassign_from_facility-{}.csv".format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
columns = ['case_id', 'facility_assigned_to', 'owner_id',
'last_owner_id_changed', 'last_facility_assigned_to_changed', 'note']
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_cases = []
to_update = []
for person, _ in with_progress_bar(cases, length=len(case_ids)):
facility_assigned_to = person.get_case_property('facility_assigned_to')
owner_id = person.owner_id
if facility_assigned_to == owner_id:
continue
if not facility_assigned_to and owner_id in [MJK, ALERT_INDIA]:
# cases with a blank facility and owned by MJK or Alert-India are known about already
continue
owner_id_changes = sorted(get_all_changes_to_case_property(person, 'owner_id'),
key=lambda c: c.modified_on, reverse=True)
facility_id_changes = sorted(get_all_changes_to_case_property(person, 'facility_assigned_to'),
key=lambda c: c.modified_on, reverse=True)
case_dict = {
'case_id': person.case_id,
'facility_assigned_to': facility_assigned_to,
'owner_id': owner_id,
}
try:
case_dict['last_owner_id_changed'] = owner_id_changes[0].modified_on
case_dict['last_facility_assigned_to_changed'] = facility_id_changes[0].modified_on
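                # the newer edit wins: re-point the owner at the facility only
                # when facility_assigned_to changed more recently than owner_id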
if owner_id_changes[0].modified_on < facility_id_changes[0].modified_on:
case_dict['note'] = 'updated'
to_update.append((person.case_id, {"owner_id": facility_assigned_to}, False))
else:
case_dict['note'] = 'not updated'
except IndexError as e:
case_dict['last_owner_id_changed'] = None
case_dict['last_facility_assigned_to_changed'] = None
case_dict['note'] = 'no changes found: {}'.format(six.text_type(e))
bad_cases.append(case_dict)
if commit:
print("Updating: ", len(to_update), " cases")
for update in chunked(to_update, 100):
bulk_update_cases(domain, update, self.__module__)
else:
print("Would have updated: ", len(to_update), " cases")
with open(filename, 'w') as f:
writer = csv.DictWriter(f, fieldnames=columns)
writer.writeheader()
for case in bad_cases:
writer.writerow(case)
| bsd-3-clause | Python |
|
5e9a66fb07b8071a8ac3d2130c2796a7cda03739 | Create ConfigurationPIN.py | Timothee38/pythonScriptsAVC | ConfigurationPIN.py | ConfigurationPIN.py | # -*- coding: utf8 -*-
# Necessary imports
import re
import sys
import os
import random
name = os.popen('whoami').read()
cutName = name.split("\n")
name = cutName[0]
sys.path.append(os.path.expanduser('~/AndroidViewClient/src/'))
from com.dtmilano.android.viewclient import ViewClient
from com.dtmilano.android.adb.adbclient import AdbClient
device, serialno = ViewClient.connectToDeviceOrExit()
if not device:
    raise Exception("Unable to connect to the device.")
# go back to the phone's home screen to start the manipulations (press the device's home key)
device.press('KEYCODE_HOME')
# Launch the Settings app
package = 'com.android.settings'
activity = '.Settings'
componentName = package + "/" + activity
device.startActivity(componentName)
# Create a ViewClient object
vc = ViewClient(device=device, serialno=serialno)
# scroll the screen until "Sécurité" (Security) appears
device.drag((100,600),(100,100),1,20)
vc.sleep(0.01) # pause so the display can load
vc.dump() # dump fetches the UI tree into ViewClient
# Tap the "Sécurité" (Security) entry
security = vc.findViewWithText(u"Ecran de verrouillage") # utf-8; "Ecran de verrouillage" = Lock screen
security.touch() # trigger the tap
vc.sleep(0.01) # pause
vc.dump()
# Tap the "Verrouillage de l'écran" (Screen lock) entry
mode = vc.findViewWithText(u"Glissement") # "Glissement" = Swipe (the current lock mode)
if mode is not None:
    mode.touch() # "click" here by touching the screen
vc.sleep(0.02)
vc.dump()
    # Set up the PIN code
    pin = vc.findViewWithText('Code PIN')
    pin.touch() # tap the "PIN" button
vc.sleep(0.02)
vc.dump()
    # Select the text input field
    # chosen by id because the EditText has no name
password = vc.findViewByIdOrRaise("id/no_id/18")
    # Generate a random PIN between 1000 & 9999
rand = str(random.randint(1000,9999))
    print rand # print the generated PIN to the console
    # Create/open a txt file
fichier = open("/home/"+name+"/AVCData/CodesPIN.txt", "a")
    # Record the PIN in that txt file
fichier.write("PIN"+serialno+" : "+rand+"\n")
fichier.close()
    password.type(rand) # type the PIN into the field
vc.sleep(0.01)
vc.dump()
ctn = vc.findViewWithText('Continuer')
    ctn.touch() # press "Continuer" (Continue)
vc.sleep(0.01)
vc.dump()
password = vc.findViewByIdOrRaise("id/no_id/18")
    password.type(rand) # enter the PIN a second time to confirm
vc.sleep(0.01)
vc.dump()
ok = vc.findViewWithText('OK')
ok.touch() # confirmation
else:
print "Ce Smartphone déjà sécurisé par code PIN"
| mit | Python |
|
805708048f493ca538a9e0b8d9d40ae1d4baf2c3 | Add a tool to reproduce HTTP KeepAlive races in OpenStack gate jobs. | JordanP/openstack-snippets,JordanP/openstack-snippets | keepalive-race/keep-alive-race.py | keepalive-race/keep-alive-race.py | #!/usr/bin/python3
"""
This script demonstrates a race condition with HTTP/1.1 keepalive
"""
import decimal
import json
import subprocess
import time
import threading
import requests
requests.packages.urllib3.disable_warnings()
CREDS = json.loads(subprocess.check_output(
"openstack --os-cloud devstack token issue -f json".split(),
).decode())
URL = 'https://10.0.1.44:8774/v2/%s/servers/detail' % (CREDS['project_id'])
def decimal_range(x, y, jump):
x = decimal.Decimal(x)
y = decimal.Decimal(y)
jump = decimal.Decimal(jump)
while x < y:
yield float(x)
x += jump
def get(exit):
for delay in decimal_range(4.95, 4.96, 0.005):
session = requests.Session()
if exit.is_set():
return
for i in range(10):
if exit.is_set():
return
time.sleep(delay)
headers = {
'User-Agent': 'timeout-race/%s' % i,
'X-Auth-Token': CREDS['id']
}
try:
session.get(URL, verify=False, headers=headers)
except Exception as e:
print(e)
exit.set()
threads = []
exit = threading.Event()
for i in range(50):
threads.append(threading.Thread(target=get,args=(exit,)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
| apache-2.0 | Python |
|
e18c1d6a62d8a896e853f1bd16d4ea5bf6f67401 | add statistics script | nikken1/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor | statistics.py | statistics.py | #!/usr/bin/env python
"""
Takes the database from the output of integrate.py (e.g. after the disambiguated
inventors have been merged into the database) and computes statistics on top of it
"""
import uuid
from datetime import datetime
from lib import alchemy
from collections import Counter, defaultdict
def compute_future_citation_rank():
"""
Ranks each patent by number of future citations in a given year
Returns nested dictionary:
years[YEAR][PATENT_ID] = number of times PATENT_ID was cited in YEAR
"""
citations = (c for c in alchemy.session.query(alchemy.USPatentCitation).yield_per(1))
years = defaultdict(Counter)
print "Counting citations...", datetime.now()
for cit in citations:
if cit.date:
year = cit.date.year
patid = cit.patent_id
years[year][patid] += 1
print "Finished counting citations", datetime.now()
return years
def insert_future_citation_rank(years):
"""
Accepts as input the dictionary returned from compute_future_citation_rank:
years[YEAR][PATENT_ID] = number of times PATENT_ID was cited in YEAR
Inserts rows into the correct table:
"""
# remove old rows to make way for new rankings
deleted = alchemy.session.query(alchemy.FutureCitationRank).delete()
print 'Removed {0} rows from FutureCitationRank'.format(deleted)
print 'Inserting records in order...', datetime.now()
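    # Dense ranking: records tied on num_citations share a rank and the next
    # distinct count gets rank + 1 (e.g. counts 9, 9, 7 -> ranks 1, 1, 2).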
for year in years.iterkeys():
rank = 0
prev_num_cits = float('inf')
commit_counter = 0
for i, record in enumerate(years[year].most_common()):
if record[1] < prev_num_cits:
prev_num_cits = record[1]
rank += 1
row = {'uuid': str(uuid.uuid1()),
'patent_id': record[0],
'num_citations': record[1],
'citation_year': year,
'rank': rank}
dbrow = alchemy.FutureCitationRank(**row)
alchemy.session.merge(dbrow)
if (i+1) % 1000 == 0:
alchemy.commit()
alchemy.commit()
print 'Finished inserting records', datetime.now()
def compute_inventor_rank():
"""
Ranks each inventor by number of granted patents in a given year
Returns nested dictionary:
years[YEAR][INVENTOR_ID] = number of patents granted in YEAR to INVENTOR_ID
"""
patents = (p for p in alchemy.session.query(alchemy.Patent).yield_per(1))
years = defaultdict(Counter)
print 'Counting granted patents...', datetime.now()
for pat in patents:
year = pat.date.year
inventors = pat.inventors
for inventor in inventors:
years[year][inventor.id] += 1
print 'Finished counting', datetime.now()
return years
def insert_inventor_rank(years):
"""
Accepts as input the dictionary returned from compute_inventor_rank:
years[YEAR][INVENTOR_ID] = number of patents granted in YEAR to INVENTOR_ID
Inserts rows into the correct table:
"""
deleted = alchemy.session.query(alchemy.InventorRank).delete()
print 'removed {0} rows'.format(deleted)
print 'Inserting records in order...', datetime.now()
for year in years.iterkeys():
rank = 0
prev_num_cits = float('inf')
commit_counter = 0
for i, record in enumerate(years[year].most_common()):
if record[1] < prev_num_cits:
prev_num_cits = record[1]
rank += 1
row = {'uuid': str(uuid.uuid1()),
'inventor_id': record[0],
'num_patents': record[1],
'patent_year': year,
'rank': rank}
dbrow = alchemy.InventorRank(**row)
alchemy.session.merge(dbrow)
if (i+1) % 1000 == 0:
alchemy.commit()
alchemy.commit()
print 'Finished inserting records', datetime.now()
if __name__=='__main__':
years = compute_future_citation_rank()
insert_future_citation_rank(years)
years = compute_inventor_rank()
insert_inventor_rank(years)
| bsd-2-clause | Python |
|
64314d580fac2cf0e0434527437b106a94e8ded0 | Add setup.py build for kivy-launcher reboot | kronenpj/python-for-android,germn/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,kivy/python-for-android,kivy/python-for-android,germn/python-for-android,germn/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,kivy/python-for-android,rnixx/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,kivy/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android | testapps/testlauncherreboot_setup/sdl2.py | testapps/testlauncherreboot_setup/sdl2.py | '''
Clone Python implementation of Kivy Launcher from kivy/kivy-launcher repo,
install deps specified in the OPTIONS['apk']['requirements'] and put it
to a dist named OPTIONS['apk']['dist-name'].
Tested with P4A Dockerfile at 5fc5241e01fbbc2b23b3749f53ab48f22239f4fc,
kivy-launcher at ad5c5c6e886a310bf6dd187e992df972864d1148 on Windows 8.1
with Docker for Windows and running on Samsung Galaxy Note 9, Android 8.1.
docker run \
--interactive \
--tty \
-v "/c/Users/.../python-for-android/testapps":/home/user/testapps \
-v ".../python-for-android/pythonforandroid":/home/user/pythonforandroid \
p4a sh -c '\
. venv/bin/activate \
&& cd testapps/testlauncherreboot_setup \
&& python sdl2.py apk \
--sdk-dir $ANDROID_SDK_HOME \
--ndk-dir $ANDROID_NDK_HOME'
'''
# pylint: disable=import-error,no-name-in-module
from subprocess import Popen
from distutils.core import setup
from os import listdir
from os.path import join, dirname, abspath, exists
from pprint import pprint
from setuptools import find_packages
ROOT = dirname(abspath(__file__))
LAUNCHER = join(ROOT, 'launcherapp')
if not exists(LAUNCHER):
PROC = Popen([
'git', 'clone',
'https://github.com/kivy/kivy-launcher',
LAUNCHER
])
PROC.communicate()
assert PROC.returncode == 0, PROC.returncode
pprint(listdir(LAUNCHER))
pprint(listdir(ROOT))
OPTIONS = {
'apk': {
'debug': None,
'bootstrap': 'sdl2',
'requirements': (
'python3,sdl2,kivy,android,pyjnius,plyer'
),
# 'sqlite3,docutils,pygments,'
# 'cymunk,lxml,pil,openssl,pyopenssl,'
# 'twisted,audiostream,ffmpeg,numpy'
'android-api': 27,
'ndk-api': 21,
'dist-name': 'bdisttest_python3launcher_sdl2_googlendk',
'name': 'TestLauncherPy3-sdl2',
'package': 'org.kivy.testlauncherpy3_sdl2_googlendk',
'ndk-version': '10.3.2',
'arch': 'armeabi-v7a',
'permissions': [
'ACCESS_COARSE_LOCATION', 'ACCESS_FINE_LOCATION',
'BLUETOOTH', 'BODY_SENSORS', 'CAMERA', 'INTERNET',
'NFC', 'READ_EXTERNAL_STORAGE', 'RECORD_AUDIO',
'USE_FINGERPRINT', 'VIBRATE', 'WAKE_LOCK',
'WRITE_EXTERNAL_STORAGE'
]
}
}
PACKAGE_DATA = {
'launcherapp': [
'*.py', '*.png', '*.ttf', '*.eot', '*.svg', '*.woff',
],
'launcherapp/art': [
'*.py', '*.png', '*.ttf', '*.eot', '*.svg', '*.woff',
],
'launcherapp/art/fontello': [
'*.py', '*.png', '*.ttf', '*.eot', '*.svg', '*.woff',
],
'launcherapp/data': [
'*.py', '*.png', '*.ttf', '*.eot', '*.svg', '*.woff',
],
'launcherapp/launcher': [
'*.py', '*.png', '*.ttf', '*.eot', '*.svg', '*.woff',
]
}
PACKAGES = find_packages()
print('packages are', PACKAGES)
setup(
name='testlauncherpy3_sdl2_googlendk',
version='1.0',
description='p4a sdl2.py apk',
author='Peter Badida',
author_email='[email protected]',
packages=find_packages(),
options=OPTIONS,
package_data=PACKAGE_DATA
)
| mit | Python |
|
e11de6b814da4e5ade9fadaa035d6141ab3c113f | add test that features can be read and written ok | salilab/rmf,salilab/rmf,salilab/rmf,salilab/rmf | test/test_features.py | test/test_features.py | #!/usr/bin/python
import unittest
import RMF
class GenericTest(unittest.TestCase):
def _create(self, path):
fh= RMF.create_rmf_file(path)
rt= fh.get_root_node()
reps=[rt.add_child("rep"+str(i), RMF.REPRESENTATION) for i in range(0,5)]
sf= RMF.ScoreFactory(fh)
fn= rt.add_child("feature", RMF.FEATURE)
sd= sf.get(fn)
sd.set_score(10.0)
sd.set_representation(reps)
def _test(self, path):
fh= RMF.open_rmf_file_read_only(path)
rt= fh.get_root_node()
ch= rt.get_children()
fn= ch[-1]
reps= ch[:-1]
sf= RMF.ScoreConstFactory(fh)
sd= sf.get(fn)
print sd.get_score()
print reps
print sd.get_representation()
self.assert_(sd.get_score()==10)
self.assert_(sd.get_representation() == reps)
def test_multiparent(self):
"""Test that feature nodes work right"""
for suffix in RMF.suffixes:
path=RMF._get_temporary_file_path("alias2."+suffix)
print path
self._create(path)
self._test(path)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
|
48da7ceb86387d3cb6fd53f50110232813123ecc | Add tests for ansible roster virtual | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/pytests/unit/roster/test_ansible.py | tests/pytests/unit/roster/test_ansible.py | import pytest
import salt.roster.ansible as ansible
from tests.support.mock import patch
@pytest.mark.xfail
@pytest.mark.parametrize(
"which_value",
[False, None],
)
def test_virtual_returns_False_if_ansible_inventory_doesnt_exist(which_value):
with patch("salt.utils.path.which", autospec=True, return_value=which_value):
assert ansible.__virtual__() == (False, "Install `ansible` to use inventory")
| apache-2.0 | Python |
|
f3e3ab4fea1d367578adffbefd072616beaee65e | Create word_a10n.py | Kunalpod/codewars,Kunalpod/codewars | word_a10n.py | word_a10n.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Word a10n (abbreviation)
#Problem level: 6 kyu
import re
def abbreviate(s):
    # Substitute in a single pass so that replacing a short word cannot
    # corrupt a longer word containing it (e.g. "sand" inside "sandwich",
    # which the original replace()-in-a-loop version mangled).
    return re.sub(r'[A-Za-z]{4,}',
                  lambda m: m.group(0)[0] + str(len(m.group(0)) - 2) + m.group(0)[-1],
                  s)
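# Quick sanity check (illustrative, not part of the original kata solution):
# words of 4+ letters keep their first and last letter with the count of
# letters in between, e.g. "elephant" -> "e6t".
if __name__ == '__main__':
    print(abbreviate("elephant-rides are really fun!"))  # e6t-r3s are r4y fun!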
| mit | Python |
|
ccb2c0c2c2fac64b88248819b00feddeaafd252c | Create evernote_test.py | py-in-the-sky/challenges,py-in-the-sky/challenges,py-in-the-sky/challenges | evernote_test.py | evernote_test.py | from evernote import *
NOTE1 = """<note>
<guid>6BA8DC47-EB38-40D9-BE32-5D5DD82E9EC7</guid>
<created>2013-07-12T19:22:13Z</created>
<tag>poetry</tag>
<tag>whitman</tag>
<content>
Gliding o'er all, through all,
Through Nature, Time, and Space,
As a ship on the waters advancing,
The voyage of the soul - not life alone,
Death, many deaths I'll sing.
And some other words for indexing.
</content>
</note>"""
NOTE2 = """<note>
<guid>450E1369-9D9D-4168-8969-2A4DCC8DDEC4</guid>
<created>2014-04-29T08:37:16Z</created>
<tag>poetry</tag>
<tag>byron</tag>
<content>
Famed for their civil and domestic quarrels
See heartless Henry lies by headless Charles;
Between them stands another sceptred thing,
It lives, it reigns - "aye, every inch a king."
Charles to his people, Henry to his wife,
In him the double tyrant starts to life:
Justice and Death have mixed their dust in vain.
The royal Vampires join and rise again.
What now can tombs avail, since these disgorge
The blood and dirt of both to mould a George!
</content>
</note>"""
def test():
note1, note2 = parse_note(NOTE1), parse_note(NOTE2)
assert note1['guid'] == '6BA8DC47-EB38-40D9-BE32-5D5DD82E9EC7'
assert 'poetry' in note2['tag']
assert 'for' in set.intersection(note1['content'], note2['content'])
trie1 = tree()
trie_delete('blah', 'blah', trie1) # just make sure no error thrown
assert get_word('none', trie1) is NULL_GUIDS
assert find_trie('none', trie1) == tree()
assert 'n' in trie1 and 'e' in find_trie('non', trie1)
trie_put('to', 'to', trie1)
trie_put('toes', 'toes', trie1)
assert 'to' in get_word('to', trie1) and 'toes' not in get_word('to', trie1)
assert 'to' in get_prefix('to', trie1) and 'toes' in get_prefix('to', trie1)
trie_delete('to', 'to', trie1)
assert 'to' not in get_prefix('to', trie1) and 'toes' in get_prefix('to', trie1)
trie_put('toes', 'toes2', trie1)
assert 'toes' in get_word('toes', trie1) and 'toes2' in get_word('toes', trie1)
trie2 = tree()
words = 'aaa aab aac aaa abb abc acc acb'.split()
pivot = 'aac'
for w in words:
trie_put(w, w, trie2)
gte = get_gte(pivot, trie2)
assert 'aac' in gte and 'abb' in gte and 'acb' in gte and 'aab' not in gte
gte2 = get_gte('aaaa', trie2)
assert 'aac' in gte2 and 'aaa' not in gte2 and 'aaaa' not in gte2
create(note1)
create(note2)
content = indexes['content']
assert '6BA8DC47-EB38-40D9-BE32-5D5DD82E9EC7' in get_word('for', content)
assert '450E1369-9D9D-4168-8969-2A4DCC8DDEC4' in get_word('for', content)
query = 'civil fo*'
queries = (parse_query(qs) for qs in query.split())
intersection = search_intersection(queries)
assert '450E1369-9D9D-4168-8969-2A4DCC8DDEC4' in intersection
assert '6BA8DC47-EB38-40D9-BE32-5D5DD82E9EC7' not in intersection
queries2 = (parse_query(qs) for qs in 'tag:poetry'.split())
intersection2 = search_intersection(queries2)
assert '450E1369-9D9D-4168-8969-2A4DCC8DDEC4' in intersection2
assert '6BA8DC47-EB38-40D9-BE32-5D5DD82E9EC7' in intersection2
queries3 = (parse_query(qs) for qs in 'tag:blah'.split())
assert len(search_intersection(queries3)) == 0
queries4 = (parse_query(qs) for qs in 'tag:Poetry'.split())
assert len(search_intersection(queries4)) == 2
return 'tests pass!'
if __name__ == '__main__':
print test()
| mit | Python |
|
8dd3207298e7d81f5d4abdfa62604d5849d132fd | Add Python hello client | lizenn/erlang-dbus,lizenn/erlang-dbus | example/hello.py | example/hello.py | #!/usr/bin/env python
import dbus
bus = dbus.SessionBus()
remote_object = bus.get_object("org.za.hem.DBus", "/Root")
dbus_interface = "org.designfu.SampleInterface"
iface = dbus.Interface(remote_object, dbus_interface)
hello_reply_list = remote_object.HelloWorld(17, "Hello from example-client.py!", dbus_interface=dbus_interface)
print (hello_reply_list)
remote_object.HelloWorld(1, {"php":"Rasmus Lerdorf",\
"perl":"Larry Wall",\
"python":"Guido van Rossum"})
hello_reply_tuple = iface.GetTuple()
print str(hello_reply_tuple)
hello_reply_dict = iface.GetDict()
print str(hello_reply_dict)
| apache-2.0 | Python |
|
5836eb513b244a21e33e111cd3c3d6f33530aeae | Add a simple widget that sets up basic layout for common controls. | 4degrees/harmony | source/harmony/ui/widget/simple.py | source/harmony/ui/widget/simple.py | # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from PySide import QtGui
from .base import Widget
class Simple(Widget):
'''Simple widget that wraps a single control.'''
def _construct(self):
'''Construct widget.'''
super(Simple, self)._construct()
self.setLayout(QtGui.QHBoxLayout())
        # The required indicator and title live in a prefix frame; adding them
        # to the main layout first (as the original did) was redundant, since
        # Qt reparents a widget when it is added to a second layout.
        self._prefix = QtGui.QFrame()
        self._prefix.setLayout(QtGui.QHBoxLayout())
        self._prefix.layout().addWidget(self._requiredIndicator)
        self._prefix.layout().addWidget(self._titleLabel)
self.layout().addWidget(self._prefix, stretch=0)
self._control = self._constructControl()
self.layout().addWidget(self._control, stretch=1)
self.layout().addWidget(self._errorIndicator, stretch=0)
def _constructControl(self):
'''Return the control widget.
Subclasses should override this to return an appropriate control
widget.
'''
raise NotImplementedError()
def value(self):
'''Return current value.'''
raise NotImplementedError()
def setValue(self, value):
'''Set current *value*.'''
raise NotImplementedError()
| apache-2.0 | Python |
|
6b5850d70a1c52b617fc7daeefa57023f6ef63c7 | Add some paramaterized tests for skolemization and de-skolemization | RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib | test/test_graph/test_skolemization.py | test/test_graph/test_skolemization.py | import logging
import re
from test.testutils import GraphHelper
from typing import Pattern, Union
import pytest
from rdflib import Graph
from rdflib.namespace import Namespace
from rdflib.term import BNode, Literal, URIRef
EG = Namespace("http://example.com/")
base_triples = {
(EG.subject, EG.predicate, EG.object0),
(EG.subject, EG.predicate, EG.object1),
}
@pytest.mark.parametrize(
["node", "expected_uri"],
[
(URIRef("http://example.com"), None),
(Literal("some string in here ..."), None),
(BNode("GMeng4V7"), "http://rdlib.net/.well-known/genid/rdflib/GMeng4V7"),
(
BNode(),
re.compile("^" + re.escape("http://rdlib.net/.well-known/genid/rdflib/")),
),
],
)
def test_skolemization(
node: Union[BNode, URIRef, Literal], expected_uri: Union[Pattern[str], str, None]
) -> None:
g = Graph()
for triple in base_triples:
g.add(triple)
g.add((EG.scheck, EG.pcheck, node))
assert len(g) == 3
dsg = g.skolemize()
if expected_uri is None:
GraphHelper.assert_sets_equals(g, dsg)
else:
assert len(dsg) == len(g)
iset = GraphHelper.triple_or_quad_set(dsg)
logging.debug("iset = %s", iset)
assert iset.issuperset(base_triples)
check_triples = list(dsg.triples((EG.scheck, EG.pcheck, None)))
assert len(check_triples) == 1
sbnode = check_triples[0][2]
logging.debug("sbnode = %s, sbnode_value = %s", sbnode, f"{sbnode}")
assert isinstance(sbnode, URIRef)
if isinstance(expected_uri, str):
assert expected_uri == f"{sbnode}"
else:
assert expected_uri.match(f"{sbnode}") is not None
@pytest.mark.parametrize(
["iri", "expected_bnode_value"],
[
("http://example.com", None),
("http://example.com/not/.well-known/genid/1", None),
("http://rdlib.net/not/.well-known/genid/1", None),
("http://example.com/.well-known/genid/1", re.compile("^N")),
("http://rdlib.net/.well-known/genid/rdflib/GMeng4V7", "GMeng4V7"),
],
)
def test_deskolemization(
iri: str, expected_bnode_value: Union[str, None, Pattern[str]]
) -> None:
g = Graph()
for triple in base_triples:
g.add(triple)
g.add((EG.scheck, EG.pcheck, URIRef(iri)))
assert len(g) == 3
dsg = g.de_skolemize()
if expected_bnode_value is None:
GraphHelper.assert_sets_equals(g, dsg)
else:
assert len(dsg) == len(g)
iset = GraphHelper.triple_or_quad_set(dsg)
logging.debug("iset = %s", iset)
assert iset.issuperset(base_triples)
check_triples = list(dsg.triples((EG.scheck, EG.pcheck, None)))
assert len(check_triples) == 1
bnode = check_triples[0][2]
logging.debug("bnode = %s, bnode_value = %s", bnode, f"{bnode}")
assert isinstance(bnode, BNode)
if isinstance(expected_bnode_value, str):
assert expected_bnode_value == f"{bnode}"
else:
assert expected_bnode_value.match(f"{bnode}") is not None
| bsd-3-clause | Python |
|
a35be4a666c26ec13d61ee30639d9c3894d129e1 | add py script for profiling time | mandiant/capa,mandiant/capa | scripts/profile-time.py | scripts/profile-time.py | import sys
import timeit
import logging
import argparse
import subprocess
import tqdm
import tabulate
import capa.main
import capa.perf
import capa.rules
import capa.engine
import capa.helpers
import capa.features
import capa.features.common
import capa.features.freeze
logger = logging.getLogger("capa.profile")
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
label = subprocess.run(
"git show --pretty=oneline --abbrev-commit | head -n 1", shell=True, capture_output=True, text=True
).stdout.strip()
is_dirty = (
subprocess.run(
"git status | grep 'modified: ' | grep -v 'rules' | grep -v 'tests/data'",
shell=True,
capture_output=True,
text=True,
).stdout
!= ""
)
if is_dirty:
label += " (dirty)"
parser = argparse.ArgumentParser(description="Profile capa performance")
capa.main.install_common_args(parser, wanted={"format", "sample", "signatures", "rules"})
parser.add_argument("--number", type=int, default=3, help="batch size of profile collection")
parser.add_argument("--repeat", type=int, default=30, help="batch count of profile collection")
parser.add_argument("--label", type=str, default=label, help="description of the profile collection")
args = parser.parse_args(args=argv)
capa.main.handle_common_args(args)
try:
taste = capa.helpers.get_file_taste(args.sample)
except IOError as e:
logger.error("%s", str(e))
return -1
try:
with capa.main.timing("load rules"):
rules = capa.rules.RuleSet(capa.main.get_rules(args.rules, disable_progress=True))
except (IOError) as e:
logger.error("%s", str(e))
return -1
try:
sig_paths = capa.main.get_signatures(args.signatures)
except (IOError) as e:
logger.error("%s", str(e))
return -1
if (args.format == "freeze") or (args.format == "auto" and capa.features.freeze.is_freeze(taste)):
with open(args.sample, "rb") as f:
extractor = capa.features.freeze.load(f.read())
else:
extractor = capa.main.get_extractor(
args.sample, args.format, capa.main.BACKEND_VIV, sig_paths, should_save_workspace=False
)
with tqdm.tqdm(total=args.number * args.repeat) as pbar:
def do_iteration():
capa.perf.reset()
capa.main.find_capabilities(rules, extractor, disable_progress=True)
pbar.update(1)
samples = timeit.repeat(do_iteration, number=args.number, repeat=args.repeat)
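    # timeit.repeat returns one total elapsed time (seconds) per batch of
    # `number` runs, hence the divisions by args.number below for per-run times.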
logger.debug("perf: find capabilities: min: %0.2fs" % (min(samples) / float(args.number)))
logger.debug("perf: find capabilities: avg: %0.2fs" % (sum(samples) / float(args.repeat) / float(args.number)))
logger.debug("perf: find capabilities: max: %0.2fs" % (max(samples) / float(args.number)))
for (counter, count) in capa.perf.counters.most_common():
logger.debug("perf: counter: {:}: {:,}".format(counter, count))
print(
tabulate.tabulate(
[
(
args.label,
"{:,}".format(capa.perf.counters["evaluate.feature"]),
"%0.2fs" % (sum(samples) / float(args.repeat) / float(args.number)),
"%0.2fs" % (min(samples) / float(args.number)),
"%0.2fs" % (max(samples) / float(args.number)),
)
],
headers=["label", "count(evaluations)", "avg(time)", "min(time)", "max(time)"],
tablefmt="github",
)
)
return 0
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | Python |
|
8628ab8cbcb185826e97af9148ec7d07861e29e7 | Add setup_pfiles.py to setup parameter files for CIAO tools | liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis | scripts/setup_pfiles.py | scripts/setup_pfiles.py | # Copyright (c) 2017 Weitian LI <[email protected]>
# MIT license
#
# Weitian LI
# 2017-02-06
"""
Prepare the CIAO parameter files and setup the PFILES environment
variable to keep the pfiles locally, in order to avoid the conflicts
between multiple instance of the same CIAO tools.
"""
import os
import subprocess
import shutil
def setup_pfiles(tools):
"""
Copy the parameter files of the specified tools to the current
working directory, and setup the ``PFILES`` environment variable.
Parameters
----------
tools : list[str]
Name list of the tools to be set up
"""
for tool in tools:
pfile = subprocess.check_output([
"paccess", tool
]).decode("utf-8").strip()
subprocess.check_call(["punlearn", tool])
shutil.copy(pfile, ".")
# Setup the ``PFILES`` environment variable
os.environ["PFILES"] = "./:" + os.environ["PFILES"]
| mit | Python |
|
6c8966e0e299c12e95d41009a8dde7519946b432 | add db level constraint for one active subscription per domain | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/accounting/migrations/0006_unique_active_domain_subscription.py | corehq/apps/accounting/migrations/0006_unique_active_domain_subscription.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-04-22 17:18
from __future__ import unicode_literals
from django.db import migrations
from corehq.sql_db.operations import HqRunSQL
class Migration(migrations.Migration):
dependencies = [
('accounting', '0005_automatic_downgrade_adjustment_method'),
]
operations = [
HqRunSQL(
"""
CREATE UNIQUE INDEX accounting_subscription_active_subscriber
ON accounting_subscription(subscriber_id) WHERE (is_active = TRUE and is_hidden_to_ops = FALSE);
""",
reverse_sql=
"""
DROP INDEX accounting_subscription_active_subscriber;
""",
)
]
| bsd-3-clause | Python |
|
faaa206923b99f4d986a32ddfd854b234377e988 | Add ena2fasta script | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | bin/ena2fasta.py | bin/ena2fasta.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from Bio import SeqIO
@click.command()
@click.argument('filename', type=click.File('r'))
@click.argument('output', type=click.File('w'))
def main(filename, output):
"""
    Convert an ENA EMBL file into a fasta file suitable for ribotyper analysis.
"""
SeqIO.convert(filename, "embl", output, "fasta")
if __name__ == '__main__':
main()
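# Example invocation (file names illustrative):
#   python ena2fasta.py sequences.embl sequences.fasta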
| apache-2.0 | Python |
|
c6af972ca6dfd6396b3f16e2e218263faffe16ab | Add run_mraic.py | mgharvey/misc_python | bin/run_mraic.py | bin/run_mraic.py | #!/usr/bin/env python
"""
Name: run_mraic.py
Author: Michael G. Harvey
Date: 5 July 2013
Description: Run mraic.pl (Nylander 2004) on a folder of alignments in phylip/phyml format.
Usage: python run_mraic.py mraic_dir in_dir out_dir
python run_mraic.py /Users/michaelharvey/Applications/mraic /Users/michaelharvey/Desktop/pic/beast/deep_UCEs/77_loci_phylip ~/Desktop/mraic_out
python run_mraic.py /Users/michaelharvey/Applications/mraic /Users/michaelharvey/Desktop/pic/beast/shallow_UCEs/Xm/orthologs/phylip ~/Desktop/mraic_UCE_shallow_out
"""
import os
import sys
import argparse
def get_args():
parser = argparse.ArgumentParser(
description="""Program description""")
parser.add_argument(
"mraic_dir",
type=str,
help="""The directory for mraic.pl"""
)
parser.add_argument(
"in_dir",
type=str,
help="""The output directory"""
)
parser.add_argument(
"out_dir",
type=str,
help="""The output directory"""
)
return parser.parse_args()
def main():
args = get_args()
outfile = open("{0}/mraic_out.txt".format(args.out_dir), 'wb')
files = list()
prefiles = os.listdir("{0}".format(args.in_dir))
for prefile in prefiles: # Remove hidden files
if not prefile.startswith('.'):
files.append(prefile)
os.chdir("{0}".format(args.mraic_dir))
for file in files:
os.system("perl mraic.pl {0}/{1}".format(args.in_dir, file))
infile = open("{0}/{1}.MrAIC.txt".format(args.in_dir, file), 'r')
for line in infile:
if line.startswith("Minimum AICc model:"):
parts = line.split()
outfile.write("{0}\t{1}\n".format(file, parts[3]))
infile.close()
outfile.flush()
outfile.close()
if __name__ == '__main__':
main() | bsd-3-clause | Python |
|
2e43441e43942b2f24a89209cfdc56652523901d | Create test_pir.py | somchaisomph/RPI.GPIO.TH | test/test_pir.py | test/test_pir.py | from ...
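# NOTE: the bare "from ..." above is left unfinished in the original commit;
# PIR has to be imported from this repo's sensor module before this test can
# run (exact path unknown; "from RPI_GPIO_TH.pir import PIR" would be a
# hypothetical guess).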
if __name__ == "__main__":
try :
pir = PIR()
pir.start()
while 1 :
print(pir.result())
except KeyboardInterrupt :
pir.stop()
pir.cleanup()
| mit | Python |
|
ed2548ca027b4fd062a10ddf2ce359d9115f40a4 | add a __main__.py to nuitka works | edgewood/borg,RonnyPfannschmidt/borg,raxenak/borg,RonnyPfannschmidt/borg,edgewood/borg,ionelmc/borg,edgewood/borg,ionelmc/borg,raxenak/borg,edgewood/borg,edgimar/borg,edgimar/borg,raxenak/borg,level323/borg,level323/borg,mhubig/borg,ionelmc/borg,edgimar/borg,edgimar/borg,RonnyPfannschmidt/borg,level323/borg,RonnyPfannschmidt/borg,mhubig/borg,mhubig/borg,RonnyPfannschmidt/borg,raxenak/borg | borg/__main__.py | borg/__main__.py | from borg.archiver import main
main()
| bsd-3-clause | Python |
|
8195278aa5044371c8fa7963be15169209e1b92a | Add new test to call neutron API and check results in dragonflow db. | FrankDuan/df_code,FrankDuan/df_code,openstack/dragonflow,openstack/dragonflow,FrankDuan/df_code,openstack/dragonflow | dragonflow/tests/fullstack/test_neutron_api.py | dragonflow/tests/fullstack/test_neutron_api.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dragonflow.common import common_params
from dragonflow.common import exceptions as df_exceptions
from neutron.common import config as common_config
from neutron.tests import base
from neutronclient.neutron import client
import os_client_config
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import importutils
cfg.CONF.register_opts(common_params.df_opts, 'df')
def get_cloud_config(cloud='devstack-admin'):
return os_client_config.OpenStackConfig().get_one_cloud(cloud=cloud)
def credentials(cloud='devstack-admin'):
"""Retrieves credentials to run functional tests"""
return get_cloud_config(cloud=cloud).get_auth_args()
class TestNeutronAPIandDB(base.BaseTestCase):
def setUp(self):
super(TestNeutronAPIandDB, self).setUp()
creds = credentials()
tenant_name = creds['project_name']
auth_url = creds['auth_url'] + "/v2.0"
self.neutron = client.Client('2.0', username=creds['username'],
password=creds['password'], auth_url=auth_url,
tenant_name=tenant_name)
self.neutron.format = 'json'
common_config.init(['--config-file', '/etc/neutron/neutron.conf'])
db_driver_class = importutils.import_class(cfg.CONF.df.nb_db_class)
self.db_driver = db_driver_class()
self.db_driver.initialize(db_ip=cfg.CONF.df.remote_db_ip,
db_port=cfg.CONF.df.remote_db_port)
def test_create_network(self):
test_network = 'mynetwork1'
network = {'name': test_network, 'admin_state_up': True}
network = self.neutron.create_network({'network': network})
if not network or not network['network']:
self.fail("Failed to create network using neutron API")
network_id = network['network']['id']
table = 'lswitch'
try:
value = self.db_driver.get_key(table, network_id)
except df_exceptions.DBKeyNotFound:
            self.fail("Network not found in Dragonflow DB after create")
return
value2 = jsonutils.loads(value)
if 'external_ids' in value2:
if (value2['external_ids']['neutron:network_name'] ==
test_network):
self.neutron.delete_network(network_id)
return
self.fail("Failed to find newly created network in Dragonflow DB")
| apache-2.0 | Python |
|
fefe47a03337d072e47b439a940d7b6eeef56b93 | Add OpenTracing example | instana/python-sensor,instana/python-sensor | example/opentracing.py | example/opentracing.py | # encoding=utf-8
import opentracing
import instana
import time
# Loop continuously with a 2 second sleep to generate traces
while True:
entry_span = opentracing.tracer.start_span('universe')
entry_span.set_tag('http.method', 'GET')
entry_span.set_tag('http.url', '/users')
entry_span.set_tag('span.kind', 'entry')
intermediate_span = opentracing.tracer.start_span('nebula', child_of=entry_span)
intermediate_span.finish()
db_span = opentracing.tracer.start_span('black-hole', child_of=entry_span)
db_span.set_tag('db.instance', 'users')
db_span.set_tag('db.statement', 'SELECT * FROM user_table')
db_span.set_tag('db.type', 'mysql')
db_span.set_tag('db.user', 'mysql_login')
db_span.set_tag('span.kind', 'exit')
db_span.finish()
intermediate_span = opentracing.tracer.start_span('space-dust', child_of=entry_span)
intermediate_span.log_kv({'message': 'All seems ok'})
intermediate_span.finish()
entry_span.set_tag('http.status_code', 200)
entry_span.finish()
time.sleep(2)
| mit | Python |
|
f3448d0b37fdac8976a2cc8e4604a6cb2ea7a4ed | add vsmlib.utils which should have been part of commit 6238f235 | treitter/vehicle_signal_manager,GENIVI/vehicle_signal_manager | vsmlib/utils.py | vsmlib/utils.py | import sys  # needed for sys.stderr below; missing from the original file


def parse_signal_num_file(filename):
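    """Parse a signal-name to signal-number mapping file.

    Layout (inferred from the parsing below): the first line carries the
    VSI file version as a float; every later line is
    '<signal-name> <signal-number>'.  Returns (mapping, version).
    """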
signal_to_num = {}
vsi_version = -1
try:
with open(filename) as signal_to_num_file:
lines = signal_to_num_file.readlines()
for line in lines:
line_stripped = line.strip()
if vsi_version < 0:
try:
vsi_version = float(line_stripped)
except ValueError as err:
print("failed to parse VSI file version number from " \
"line: {}: {}".format(line, err),
file=sys.stderr)
exit(1)
else:
try:
signal, signum_str = line_stripped.split(" ")
signal = signal.strip()
signum = int(signum_str.strip())
signal_to_num[signal] = signum
except ValueError as err:
print("malformed signal number file line: line: {}: " \
"{}".format(line, err), file=sys.stderr)
exit(1)
except Exception as file_err:
print("failed to open signal number file: {}".format(file_err),
file=sys.stderr)
exit(1)
return signal_to_num, vsi_version
| mpl-2.0 | Python |
|
8bd66387ba5cd50dc0b545dc7b627792ed601faa | Add test | samedhi/gaend,samedhi/gaend,talkiq/gaend,talkiq/gaend | tests/context.py | tests/context.py | import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import gaend
| mit | Python |
|
59e546ae5afe22aab967e5376c8799e29ccbd86a | Add the basic version of my file comparison script | kirkchambe/random_scripts | directoryFileContentCmp.py | directoryFileContentCmp.py | #! /usr/env/python
import os
import hashlib
import sys
bufsize = 65536
# Path1 = '/Users/kirkchambers/Desktop'
# Path2 = '/Users/kirkchambers/DataSets'
def generate_file_digests_for(path):
path_set = set()
for item in os.walk(path):
(directory, _subdirectories, files) = item
for file in files:
if (file[0] == '.'):
continue
else:
fqFilename = os.path.join(directory, file)
path_set.add(generate_file_digest(fqFilename, file))
return path_set
def generate_file_digest(fqFilename, shortFilename):
hasher = hashlib.md5()
with open(fqFilename, 'rb') as filestream:
fileBuffer = filestream.read(bufsize)
while len(fileBuffer) > 0:
hasher.update(fileBuffer)
fileBuffer = filestream.read(bufsize)
# return "Filename:{file}\nHash:{hash}\nSize:{size}\n".format(file=fqFilename, hash=hasher.hexdigest(), size=os.path.getsize(fqFilename))
return (hasher.hexdigest(), fqFilename, os.path.getsize(fqFilename))
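    # NOTE: the full path is part of the returned tuple, so the set difference
    # in __main__ distinguishes files by location as well as by content; two
    # identical files under different roots will not match each other.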
def usage():
print "file_list.py directory1 directory2"
print "Prints out the files present in directory1 which are NOT present in directory2"
if __name__ == "__main__":
try:
(_command, Path1, Path2) = sys.argv
except:
usage()
exit(1)
path_set_1 = generate_file_digests_for(Path1)
path_set_2 = generate_file_digests_for(Path2)
# union = path_set_1 | path_set_2
set_1_exclusives = path_set_1 - path_set_2
# set_2_exclusives = path_set_2 - path_set_1
# print "length of 1: {}".format(len(path_set_1))
# print "length of 2: {}".format(len(path_set_2))
# print "length of union: {}".format(len(union))
# print "length of set1 uniqs: {}".format(len(set_1_exclusives))
# print "length of set2 uniqs: {}".format(len(set_2_exclusives))
print "Files present in {path1} and not in {path2}:".format(path1=Path1, path2=Path2)
for item in set_1_exclusives:
print item[1] | mit | Python |
|
345758259d9ee80826758373c3970db1c28a870b | Bump development version | sephii/djangocms-blog,marty3d/djangocms-blog,dapeng0802/djangocms-blog,mistalaba/djangocms-blog,vnavascues/djangocms-blog,kriwil/djangocms-blog,DjangoBeer/djangocms-blog,EnglishConnection/djangocms-blog,britny/djangocms-blog,mistalaba/djangocms-blog,creimers/djangocms-blog,jedie/djangocms-blog,nephila/djangocms-blog,kriwil/djangocms-blog,marty3d/djangocms-blog,motleytech/djangocms-blog,creimers/djangocms-blog,skirsdeda/djangocms-blog,skirsdeda/djangocms-blog,skirsdeda/djangocms-blog,jedie/djangocms-blog,sephii/djangocms-blog,ImaginaryLandscape/djangocms-blog,nephila/djangocms-blog,DjangoBeer/djangocms-blog,DjangoBeer/djangocms-blog,EnglishConnection/djangocms-blog,ImaginaryLandscape/djangocms-blog,dapeng0802/djangocms-blog,britny/djangocms-blog,nephila/djangocms-blog,vnavascues/djangocms-blog,motleytech/djangocms-blog | djangocms_blog/__init__.py | djangocms_blog/__init__.py | __version__ = '0.3.a3'
| __version__ = '0.3.a2'
| bsd-3-clause | Python |
4af2a6a62e4be78bd20550c3ae5089c51b4fec62 | add separate function for pagination | samitnuk/studentsdb,samitnuk/studentsdb,samitnuk/studentsdb | students/utils.py | students/utils.py | from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
def paginate(objects, size, request, context, var_name='object_list'):
"""Paginate objects provided by view.
This function takes:
* list of elements;
* number of objects per page;
* request object to get url parameters from;
* context to set new variables into;
* var_name - variable name for list of objects.
It returns updated context object.
"""
# apply pagination
paginator = Paginator(objects, size)
# try to get page number from request
page = request.GET.get('page', '1')
try:
object_list = paginator.page(page)
except PageNotAnInteger:
# if page is not an integer, deliver first page
object_list = paginator.page(1)
except EmptyPage:
# if page is out of range (e.g. 9999),
# deliver last page of results
object_list = paginator.page(paginator.num_pages)
# set variables into context
context[var_name] = object_list
context['is_paginated'] = object_list.has_other_pages()
context['page_obj'] = object_list
context['paginator'] = paginator
return context
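# Minimal sketch of the intended call pattern (outside a real view; requires
# Django to be importable, which this module already assumes). `FakeRequest`
# stands in for an HttpRequest and range(25) fakes a queryset:
if __name__ == '__main__':
    class FakeRequest(object):
        GET = {'page': '2'}
    ctx = paginate(range(25), 10, FakeRequest(), {}, var_name='students')
    print(ctx['students'].object_list)
    print(ctx['is_paginated'])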
| mit | Python |
|
0ee1150e1f8f0c8cae7b906c4d349b8867bbe0b4 | Add dmrg example | sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf | examples/dmrg/30-dmrg_casscf_nevpt2_for_Cr2.py | examples/dmrg/30-dmrg_casscf_nevpt2_for_Cr2.py | #!/usr/bin/env python
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf.mrpt.nevpt2 import sc_nevpt
from pyscf.dmrgscf.dmrgci import DMRGSCF
#
# This calculation requires about 10 GB memory per processor.
#
b = 1.5
mol = gto.Mole()
mol.verbose = 5
mol.output = 'cr2-%3.2f.out' % b
mol.max_memory = 70000
mol.atom = [
['Cr',( 0.000000, 0.000000, -b/2)],
['Cr',( 0.000000, 0.000000, b/2)],
]
mol.basis = {'Cr': 'ccpvdz-dk'}
mol.symmetry = True
mol.build()
m = scf.sfx2c1e(scf.RHF(mol))
m.conv_tol = 1e-9
m.chkfile = 'hf_chk-%s'%b
m.level_shift = 0.5
m.kernel()
dm = m.make_rdm1()
m.level_shift = 0
m.scf(dm)
mc = DMRGSCF(m, 20, 28) # 20o, 28e
mc.fcisolver.maxM = 1000
mc.fcisolver.tol = 1e-6
mc.chkfile = 'mc_chk_18o-%s'%b
cas_occ = {'A1g':4, 'A1u':4,
'E1ux':2, 'E1uy':2, 'E1gx':2, 'E1gy':2,
'E2ux':1, 'E2uy':1, 'E2gx':1, 'E2gy':1}
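# Sanity check: the per-irrep counts above sum to 20 orbitals, matching the
# 20-orbital active space requested in DMRGSCF(m, 20, 28).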
mo = mc.sort_mo_by_irrep(cas_occ)
mc.kernel(mo)
#
# DMRG-NEVPT2
#
sc_nevpt(mc)
| apache-2.0 | Python |
|
1830c24988fccd7069bb4f9d4c66940ce623425f | add execute apcupsd cgi sample | thinkAmi-sandbox/apcupsd_python-sample | execute_apcupsd_cgi.py | execute_apcupsd_cgi.py | from http.server import CGIHTTPRequestHandler, test
import os
def main():
# http://stackoverflow.com/questions/11419572/how-to-set-the-documentroot-while-using-pythons-httpserver
os.chdir(r"C:\apcupsd")
    # If you forget the leading '/' before the directory name, it will not work correctly
CGIHTTPRequestHandler.cgi_directories = ["/cgi"]
test(HandlerClass=CGIHTTPRequestHandler, port=8080)
if __name__ == "__main__":
main() | unlicense | Python |
|
c3afc6c28530c3dfc3bd57d9a1841a60bf92ba4f | Fix bug which caused page cyclers to always clear cache before load. | mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,littlstar/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Chilledheart/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,ltilve/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,M4sse/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,patrickm/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,Just-D/chromium-1,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,chuan9/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-cr
osswalk-efl,jaruba/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,patrickm/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,Just-D/chromium-1,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,littlstar/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,Chilledheart/chromium,littlstar/chromium.src,dednal/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,jaruba/chromium.src,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,markYoungH/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,jaruba/chromium.src,dednal/chromium.src,dushu1203/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,anirudhSK/chromium,dushu1203/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,ondra-nova
k/chromium.src,anirudhSK/chromium,dednal/chromium.src,Just-D/chromium-1,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl | tools/perf/benchmarks/netsim_top25.py | tools/perf/benchmarks/netsim_top25.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
def __init__(self):
super(NetsimTop25, self).__init__()
self.test.clear_cache_before_each_run = True
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from perf_tools import page_cycler
class NetsimTop25(test.Test):
"""Measures load time of the top 25 sites under simulated cable network."""
test = page_cycler.PageCycler
test.clear_cache_before_each_run = True
page_set = 'tools/perf/page_sets/top_25.json'
options = {
'extra_wpr_args': [
'--shaping_type=proxy',
'--net=cable'
],
'pageset_repeat': '5',
}
| bsd-3-clause | Python |
00a99f80f555ae23aeefa8b8fe284f74c28f4252 | add update example | kisel/trex-core,dimagol/trex-core,kisel/trex-core,dimagol/trex-core,kisel/trex-core,dimagol/trex-core,dimagol/trex-core,kisel/trex-core,dimagol/trex-core,kisel/trex-core,dimagol/trex-core,kisel/trex-core | scripts/automation/trex_control_plane/stl/examples/stl_imix_bidir_update.py | scripts/automation/trex_control_plane/stl/examples/stl_imix_bidir_update.py | import stl_path
from trex_stl_lib.api import *
import imp
import time
import json
from pprint import pprint
import argparse
# IMIX test
# it maps the ports to sides
# then it loads a predefined profile 'IMIX'
# and attaches it to both sides and injects
# at a certain rate for some time
# finally it checks that all packets arrived
def imix_test (server):
# create client
c = STLClient(server = server)
passed = True
try:
# connect to server
c.connect()
# take all the ports
c.reset()
dir_0 = [0]
dir_1 = [1]
print "Mapped ports to sides {0} <--> {1}".format(dir_0, dir_1)
# load IMIX profile
profile_file = os.path.join(stl_path.STL_PROFILES_PATH, 'imix.py')
profile1 = STLProfile.load_py(profile_file, direction=0)
profile2 = STLProfile.load_py(profile_file, direction=1)
stream1 = profile1.get_streams()
stream2 = profile2.get_streams()
# add both streams to ports
c.add_streams(stream1, ports = dir_0)
c.add_streams(stream2, ports = dir_1)
# clear the stats before injecting
c.clear_stats()
c.start(ports = (dir_0 + dir_1), mult = "100kpps", total = True)
while True:
for rate in range(200,1000,10):
# choose rate and start traffic for 10 seconds on 5 mpps
#mult = "30%"
my_mult = ("%dkpps"%rate)
print "Injecting {0} <--> {1} on total rate of '{2}' ".format(dir_0, dir_1, my_mult)
c.clear_stats()
c.update(ports = (dir_0 + dir_1), mult = my_mult)
time.sleep(2);
# block until done
#c.wait_on_traffic(ports = (dir_0 + dir_1))
# read the stats after the test
stats = c.get_stats()
# use this for debug info on all the stats
pprint(stats)
# sum dir 0
dir_0_opackets = sum([stats[i]["opackets"] for i in dir_0])
dir_0_ipackets = sum([stats[i]["ipackets"] for i in dir_0])
# sum dir 1
dir_1_opackets = sum([stats[i]["opackets"] for i in dir_1])
dir_1_ipackets = sum([stats[i]["ipackets"] for i in dir_1])
lost_0 = dir_0_opackets - dir_1_ipackets
lost_1 = dir_1_opackets - dir_0_ipackets
print "\nPackets injected from {0}: {1:,}".format(dir_0, dir_0_opackets)
print "Packets injected from {0}: {1:,}".format(dir_1, dir_1_opackets)
print "\npackets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_1, lost_0)
print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_1, dir_0, lost_1)
if (lost_0 <= 0) and (lost_1 <= 0): # less or equal because we might have incoming arps etc.
passed = True
else:
passed = False
except STLError as e:
passed = False
print e
finally:
c.disconnect()
if passed:
print "\nTest has passed :-)\n"
else:
print "\nTest has failed :-(\n"
parser = argparse.ArgumentParser(description="Example for TRex Stateless, sending IMIX traffic")
parser.add_argument('-s', '--server',
dest='server',
help='Remote trex address',
default='127.0.0.1',
type = str)
args = parser.parse_args()
# run the tests
imix_test(args.server)
| apache-2.0 | Python |
|
b5c21a5eeb8894ae93290c4c78fa23e5207bc0b3 | Create Flaskapp.wsgi | DreamN/Pic-storage,DreamN/Pic-storage,DreamN/Pic-storage | Flaskapp.wsgi | Flaskapp.wsgi | #!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/FlaskApp/")
from FlaskApp import app as application
application.secret_key = 'Add your secret key'
| mit | Python |
|
3c2f3baa1a76d386d5604c0c1dc8d4f3a33b11ad | Create Helloworld.py | codyrad/test,codyrad/test,codyrad/test,codyrad/test,codyrad/test,codyrad/test | Helloworld.py | Helloworld.py | print('Hello World!')
| cc0-1.0 | Python |
|
13e4fbdf81c9012ff82081d6030191be226d716c | Add script for verifying commit message format. | pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython | tools/verifygitlog.py | tools/verifygitlog.py | #!/usr/bin/env python3
import re
import subprocess
import sys
verbosity = 0 # Show what's going on, 0 1 or 2.
suggestions = 1 # Set to 0 to not include lengthy suggestions in error messages.
def verbose(*args):
if verbosity:
print(*args)
def very_verbose(*args):
if verbosity > 1:
print(*args)
def git_log(pretty_format, *args):
# Delete pretty argument from user args so it doesn't interfere with what we do.
args = ["git", "log"] + [arg for arg in args if "--pretty" not in args]
args.append("--pretty=format:" + pretty_format)
very_verbose("git_log", *args)
# Generator yielding each output line.
for line in subprocess.Popen(args, stdout=subprocess.PIPE).stdout:
yield line.decode().rstrip("\r\n")
def verify(sha):
verbose("verify", sha)
errors = []
warnings = []
def error_text(err):
return "commit " + sha + ": " + err
def error(err):
errors.append(error_text(err))
def warning(err):
warnings.append(error_text(err))
# Author and committer email.
for line in git_log("%ae%n%ce", sha, "-n1"):
very_verbose("email", line)
if "noreply" in line:
error("Unwanted email address: " + line)
# Message body.
raw_body = list(git_log("%B", sha, "-n1"))
if not raw_body:
error("Message is empty")
return errors, warnings
# Subject line.
subject_line = raw_body[0]
very_verbose("subject_line", subject_line)
if not re.match(r"^[^!]+: [A-Z]+.+ .+\.$", subject_line):
error("Subject line should contain ': ' and end in '.': " + subject_line)
if len(subject_line) >= 73:
error("Subject line should be 72 or less characters: " + subject_line)
# Second one divides subject and body.
if len(raw_body) > 1 and raw_body[1]:
error("Second message line should be empty: " + raw_body[1])
# Message body lines.
for line in raw_body[2:]:
if len(line) >= 76:
error("Message lines should be 75 or less characters: " + line)
if not raw_body[-1].startswith("Signed-off-by: ") or "@" not in raw_body[-1]:
warning("Message should be signed-off")
return errors, warnings
def run(args):
verbose("run", *args)
has_errors = False
has_warnings = False
for sha in git_log("%h", *args):
errors, warnings = verify(sha)
has_errors |= any(errors)
has_warnings |= any(warnings)
for err in errors:
print("error:", err)
for err in warnings:
print("warning:", err)
if has_errors or has_warnings:
if suggestions:
print("See https://github.com/micropython/micropython/blob/master/CODECONVENTIONS.md")
else:
print("ok")
if has_errors:
sys.exit(1)
def show_help():
print("usage: verifygitlog.py [-v -n -h] ...")
print("-v : increase verbosity, can be speficied multiple times")
print("-n : do not print multi-line suggestions")
print("-h : print this help message and exit")
print("... : arguments passed to git log to retrieve commits to verify")
print(" see https://www.git-scm.com/docs/git-log")
print(" passing no arguments at all will verify all commits")
print("examples:")
print("verifygitlog.py -n10 # Check last 10 commits")
print("verifygitlog.py -v master..HEAD # Check commits since master")
if __name__ == "__main__":
args = sys.argv[1:]
verbosity = args.count("-v")
suggestions = args.count("-n") == 0
if "-h" in args:
show_help()
else:
args = [arg for arg in args if arg not in ["-v", "-n", "-h"]]
run(args)
| mit | Python |
|
ee614036b45e9f10f680cef56a5eaa2d86c424fb | Create cybercrimeatmtracker.py | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | plugins/feeds/public/cybercrimeatmtracker.py | plugins/feeds/public/cybercrimeatmtracker.py | import re
import logging
from dateutil import parser
from datetime import timedelta
from core.observables import Hash
from core.feed import Feed
from core.errors import ObservableValidationError
class CybercrimeAtmTracker(Feed):
default_values = {
'frequency': timedelta(hours=1),
'name': 'CybercrimeAtmTracker',
'source': 'http://atm.cybercrime-tracker.net/rss.php',
'description': 'CyberCrime ATM Tracker - Latest 40 CnC URLS',
}
def update(self):
for item in self.update_xml(
'item', ['title', 'link', 'pubDate', 'description']):
self.analyze(item)
def analyze(self, item):
observable_sample = item['title']
context_sample = {}
context_sample['description'] = 'ATM sample'
context_sample['date_added'] = parser.parse(item['pubDate'])
context_sample['source'] = self.name
family = False
if ' - ' in observable_sample:
family, observable_sample = observable_sample.split(' - ')
try:
sample = Hash.get_or_create(value=observable_sample)
sample.add_context(context_sample)
sample.add_source('feed')
sample_tags = ['atm']
if family:
sample_tags.append(family)
sample.tag(sample_tags)
except ObservableValidationError as e:
logging.error(e)
return
| apache-2.0 | Python |
|
ece838042acd75ba7edde833856ac02e4efe9977 | Create PPTconnect.py | cadavrege0733/YammerPlus | PPTconnect.py | PPTconnect.py | from TwitterAPI import TwitterAPI
import win32com.client
from MSO import *

# Open PowerPoint
Application = win32com.client.Dispatch("PowerPoint.Application")
# Add a presentation
Presentation = Application.Presentations.Add()

# Go to http://dev.twitter.com and create an app.
# The consumer key and secret will be generated for you after
consumer_key = "e1WhbINIG0betPfLmm16g"
consumer_secret = "JVU8Rhrq9QANJX8rybNhWhEKhqMrU4yqC7yvU2Gxh0"

# After the step above, you will be redirected to your app's page.
# Create an access token under the "Your access token" section
access_token_key = "14888261-5JLox5DCiHe7iQRPdJaTb93syK9W8DqZotMy8V5OF"
access_token_secret = "Ws1dUSp5eApbtPggPtOn276t5fM1LgnHiFyVWaylbKsKP"

# Create a Twitter client
twitter = TwitterAPI(consumer_key, consumer_secret, access_token_key, access_token_secret)

##for tweet in results.get_iterator():
##    print (tweet['id'], tweet['text'])


def draw_tweet(Base, item, pos):
    y = 40 + (pos % 4) * 120
    image = Base.Shapes.AddPicture(
        # To get the larger resolution image, just remove _normal from the URL
        item['user']['profile_image_url'].replace('_normal', ''),
        LinkToFile=True,
        SaveWithDocument=False,
        Left=20, Top=y,
        Width=100, Height=100)
    try:
        status = item['text'].encode('cp1252')
    except UnicodeEncodeError:
        status = item['text']
    text = Base.Shapes.AddShape(1, 130, y, 460, 100)
    text.Fill.ForeColor.ObjectThemeColor = 2
    text.Fill.ForeColor.Brightness = +0.95
    text.Line.Visible = False
    text.TextFrame.TextRange.Text = status
    text.TextFrame.TextRange.Font.Color.ObjectThemeColor = 3
    text.TextFrame.TextRange.ParagraphFormat.Alignment = 1
    user = Base.Shapes.AddShape(9, 600, y, 100, 100)
    user.Fill.ForeColor.ObjectThemeColor = 4
    user.Line.Visible = False
    user.TextFrame.TextRange.Text = '@' + item['user']['screen_name']


Base = Presentation.Slides.Add(1, 12)
#query = {'q' : 'Top Chef', 'lang' : 'es', 'count': 100}
results = twitter.request('statuses/filter', {'track': 'blue'})

##for tweet in results.get_iterator():
##    print (tweet['id'], tweet['text'])

for pos, item in enumerate(results.get_iterator()):
    draw_tweet(Base, item, pos)
    if pos > 20:
        break
| apache-2.0 | Python |
|
65029a09af9dcafc156a5a0632a63e3cf4b6c50d | add benchmark to compare to lasagne | diogo149/treeano,diogo149/treeano,diogo149/treeano | benchmarks/lag_task_lasgne.py | benchmarks/lag_task_lasgne.py | from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals

import numpy as np
import theano
import theano.tensor as T
import lasagne

fX = theano.config.floatX

# ################################## config ##################################

N_TRAIN = 1000
LAG = 10
LENGTH = 50
HIDDEN_STATE_SIZE = 10
BATCH_SIZE = 64

# ############################### prepare data ###############################


def binary_toy_data(lag=1, length=20):
    inputs = np.random.randint(0, 2, length).astype(fX)
    outputs = np.array(lag * [0] + list(inputs), dtype=fX)[:length]
    return inputs, outputs


def minibatch(lag, length, batch_size):
    inputs = []
    outputs = []
    for _ in range(batch_size):
        i, o = binary_toy_data(lag, length)
        inputs.append(i)
        outputs.append(o)
    return np.array(inputs)[..., np.newaxis], np.array(outputs)[..., np.newaxis]

# ############################## prepare model ##############################

l = lasagne.layers.InputLayer(shape=(None, None, 1))
l = lasagne.layers.LSTMLayer(l,
                             num_units=HIDDEN_STATE_SIZE,
                             grad_clipping=1,
                             learn_init=True)
l = lasagne.layers.ReshapeLayer(l, shape=(-1, HIDDEN_STATE_SIZE))
l = lasagne.layers.DenseLayer(l,
                              num_units=1,
                              nonlinearity=lasagne.nonlinearities.sigmoid)

in_var = T.tensor3()
targets = T.tensor3()
outputs = lasagne.layers.get_output(l, in_var).reshape(in_var.shape)
loss = T.mean((targets - outputs) ** 2)
all_params = lasagne.layers.get_all_params(l)
updates = lasagne.updates.adam(loss, all_params)
train_fn = theano.function([in_var, targets], [loss], updates=updates)
valid_fn = theano.function([in_var], [outputs])

# ################################# training #################################

print("Starting training...")
import time
st = time.time()
for i in range(N_TRAIN):
    inputs, outputs = minibatch(lag=LAG, length=LENGTH, batch_size=BATCH_SIZE)
    loss = train_fn(inputs, outputs)[0]
    print(loss)
print("total_time: %s" % (time.time() - st))

inputs, outputs = minibatch(lag=LAG, length=LENGTH, batch_size=BATCH_SIZE)
pred = valid_fn(inputs)[0]
pred_accuracies = (np.round(pred) == outputs).mean(axis=0)[LAG:]
print(pred_accuracies)
print(pred_accuracies.mean())
| apache-2.0 | Python |
|
33abec38e82e132a6e192d5ae0535b84d8aa47f4 | add import script for Poole | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_poole.py | polling_stations/apps/data_collection/management/commands/import_poole.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E06000029'
    addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.CSV'
    stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.CSV'
    elections = ['parl.2017-06-08']
| bsd-3-clause | Python |
|
b6b92e278202c27b124909aa5352726799d8d162 | add stack with max python solution | Liuqian0501/elements-of-programming-interview,Liuqian0501/elements-of-programming-interview,Liuqian0501/elements-of-programming-interview | 08-stack-n-queue/8.1-stack-with-max/python/stackMax.py | 08-stack-n-queue/8.1-stack-with-max/python/stackMax.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 18 10:08:25 2017

@author: LiuQianKevin
"""


class Stack:
    class cache:
        def __init__(self, _max=-float('inf'), count=0):
            self.max = _max;
            self.count = count;

    def __init__(self):
        self._element = [];
        self._maxCache = [];

    def push(self, x):
        # update element
        self._element.append(x);
        # update cache:
        # if x is larger than maxCache[-1].max, or maxCache is empty, add a new entry;
        if(not self._maxCache or x > self._maxCache[-1].max):
            self._maxCache.append(self.cache(x, 1));
        # if x is equal to maxCache[-1].max, count += 1;
        elif(x == self._maxCache[-1].max):
            self._maxCache[-1].count += 1;
        # if x is smaller than maxCache[-1].max, do nothing

    def pop(self):
        # update element
        result = self._element.pop();
        # update cache:
        # if result < maxCache[-1].max, no update;
        # if result == maxCache[-1].max, count -= 1, and if count == 0, pop it
        if(result == self.max()):
            self._maxCache[-1].count -= 1;
            if(self._maxCache[-1].count == 0):
                self._maxCache.pop();
        return result;

    def empty(self):
        return not self._element;

    def max(self):
        return self._maxCache[-1].max;


def main():
    s = Stack()
    s.push(1)
    s.push(2)
    assert s.max() == 2
    print(s.max())  # 2
    print(s.pop())  # 2
    assert s.max() == 1
    print(s.max())  # 1

    s.push(3)
    s.push(2)
    assert s.max() == 3
    print(s.max())  # 3
    s.pop()
    assert s.max() == 3
    print(s.max())  # 3
    s.pop()
    assert s.max() == 1
    print(s.max())  # 1
    s.pop()

    try:
        s.max()
        s.pop()
        s.pop()
        s.pop()
        s.pop()
    except IndexError as e:
        print(e)


if __name__ == '__main__':
    main()
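
# Design note on the cache layout above: storing (max, count) pairs instead of
# one cache entry per pushed element keeps the auxiliary stack short when the
# current maximum repeats, while push, pop and max all stay O(1).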
|
f78f74d836d2eca1cafe3b6401b5c8d13e6d139b | Fix type1/type2 | GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek | geotrek/tourism/migrations/0004_auto_20190328_1339.py | geotrek/tourism/migrations/0004_auto_20190328_1339.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-03-28 12:39
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tourism', '0003_auto_20190306_1417'),
    ]

    operations = [
        migrations.AlterField(
            model_name='touristiccontent',
            name='type1',
            field=models.ManyToManyField(blank=True, db_table=b't_r_contenu_touristique_type1', related_name='contents1', to='tourism.TouristicContentType1', verbose_name='Type 1'),
        ),
        migrations.AlterField(
            model_name='touristiccontent',
            name='type2',
            field=models.ManyToManyField(blank=True, db_table=b't_r_contenu_touristique_type2', related_name='contents2', to='tourism.TouristicContentType2', verbose_name='Type 2'),
        ),
    ]
| bsd-2-clause | Python |
|
ef751116c597dfbd12eac849dd0cb3ca02eb6315 | Add SettingsFromCategoryModel that lists settings in a certain category | onitake/Uranium,onitake/Uranium | UM/Qt/Bindings/SettingsFromCategoryModel.py | UM/Qt/Bindings/SettingsFromCategoryModel.py | from PyQt5.QtCore import Qt, QCoreApplication, pyqtSlot
from UM.Qt.ListModel import ListModel

from UM.Settings.Setting import Setting
from UM.Resources import Resources
from UM.Application import Application


class SettingsFromCategoryModel(ListModel):
    NameRole = Qt.UserRole + 1
    TypeRole = Qt.UserRole + 2
    ValueRole = Qt.UserRole + 3
    ValidRole = Qt.UserRole + 4
    KeyRole = Qt.UserRole + 5
    OptionsRole = Qt.UserRole + 6
    UnitRole = Qt.UserRole + 7
    DescriptionRole = Qt.UserRole + 8

    def __init__(self, category, parent = None):
        super().__init__(parent)
        self._category = category
        self._updateSettings()

        self.addRoleName(self.NameRole, "name")
        self.addRoleName(self.TypeRole, "type")
        self.addRoleName(self.ValueRole, "value")
        self.addRoleName(self.ValidRole, "valid")
        self.addRoleName(self.KeyRole, "key")
        self.addRoleName(self.OptionsRole, "options")
        self.addRoleName(self.UnitRole, "unit")
        self.addRoleName(self.DescriptionRole, "description")

    ## Triggered by setting if it has a conditional activation
    #def handleActiveChanged(self, key):
    #    temp_setting = self._machine_settings.getSettingByKey(key)
    #    if temp_setting is not None:
    #        index = self._find(self.items, "key", temp_setting.getKey())
    #        if index != -1:
    #            self.setProperty(index, 'disabled', (temp_setting.checkAllChildrenVisible() or not temp_setting.isActive()))
    #            self.setProperty(index, 'visibility', (temp_setting.isVisible() and temp_setting.isActive()))
    #        for child_setting in temp_setting.getAllChildren():
    #            index = self._find(self.items, "key", child_setting.getKey())
    #            if index != -1:
    #                self.setProperty(index, 'disabled', (child_setting.checkAllChildrenVisible() or not child_setting.isActive()))
    #                self.setProperty(index, 'visibility', (child_setting.isVisible() and child_setting.isActive()))

    ## Notification that setting has changed.
    @pyqtSlot(int, str, str)
    def setSettingValue(self, index, key, value):
        if self._category.getSettingByKey(key) is not None:
            self._category.getSettingByKey(key).setValue(value)
            self.setProperty(index, 'valid', self.isValid(key))

    ## Check if the entered value of the setting is valid (warning/error)
    #  \returns error key.
    @pyqtSlot(str, result=int)
    def isValid(self, key):
        if self._category.getSettingByKey(key) is not None:
            return self._category.getSettingByKey(key).validate()
        return 5

    ## Create model for combo box (used by enum type setting)
    #  \param options List of strings
    #  \return ListModel with "text":value pairs
    def createOptionsModel(self, options):
        model = ListModel()
        model.addRoleName(self.NameRole, "text")
        for option in options:
            model.appendItem({"text": str(option)})
        return model

    ## Set the visibility of a setting.
    #  Note that this might or might not affect the disabled property as well!
    #  \param key Key of the setting that is affected
    #  \param visibility Visibility of the setting.
    @pyqtSlot(str, bool)
    def setVisibility(self, key, visibility):
        setting = self._machine_settings.getSettingByKey(key)
        if setting is not None:
            setting.setVisible(visibility)

        for index in range(0, len(self.items)):
            temp_setting = self._machine_settings.getSettingByKey(self.items[index]["key"])
            if temp_setting is not None:
                self.setProperty(index, 'disabled', temp_setting.checkAllChildrenVisible())
                self.setProperty(index, 'visibility', temp_setting.isVisible())
                self.setProperty(index, 'value', temp_setting.getValue())

    # Convenience function that finds the index in a list of dicts based on key value pair
    def _find(self, lst, key, value):
        for i, dic in enumerate(lst):
            if dic[key] == value:
                return i
        return -1

    def _updateSettings(self):
        for setting in self._category.getAllSettings():
            if setting.isVisible() and setting.isActive():
                self.appendItem({
                    "name": setting.getLabel(),
                    "description": setting.getDescription(),
                    "type": setting.getType(),
                    "value": setting.getValue(),
                    "valid": setting.validate(),
                    "key": setting.getKey(),
                    "options": self.createOptionsModel(setting.getOptions()),
                    "unit": setting.getUnit()
                })
                #setting.visibleChanged.connect(self._onSettingVisibleChanged)
                #setting.activeChanged.connect(self.handleActiveChanged)
|
f29a0845bc0983e18ce6484543b206dfb3091818 | Add easier way to import cv2 | OSURoboticsClub/underwater,OSURoboticsClub/underwater,OSURoboticsClub/underwater,OSURoboticsClub/underwater | vision/opencv.py | vision/opencv.py | import sys
sys.path.append('lib/opencv/build/lib')
import cv2
| mit | Python |
|
2848955e59b5106ffe48c4ebfa05095a6be460e5 | Add visual script | Bob-King/WifiTrafficAnalyzer,Bob-King/WifiTrafficAnalyzer,Bob-King/WifiTrafficAnalyzer | visual/visual.py | visual/visual.py | #!/usr/bin/env python3
import re
import numpy as np
import matplotlib.pyplot as plt
import unittest


class Parser:
    '''Wta log parser'''

    def __init__(self):
        '''Compile the log-line pattern'''
        self._pattern = re.compile(
            r'^ra=((?:[0-9a-fA-F]{2}:){5}(?:[0-9a-fA-F]{2}))[ \t]+' +
            r'ta=((?:[0-9a-fA-F]{2}:){5}(?:[0-9a-fA-F]{2}))[ \t]+' +
            r'tsf=([0-9]+)[ \t]+' +
            r'seq=([0-9]+)[ \t]+' +
            r'rssi=(-[0-9]+)$')

    def _match(self, line, ra, ta):
        match = self._pattern.match(line)
        if not match:
            return None
        if ra == match.group(1) and ta == match.group(2):
            return (match.group(1), match.group(2), int(match.group(3)),
                    int(match.group(5)))

    def getRecords(self, path, ra, ta):
        records = []
        with open(path) as f:
            for line in f:
                r = self._match(line, ra, ta)
                if r:
                    records.append(r)
        return records


class ParserTest(unittest.TestCase):
    '''Parser's unit test class'''

    def test_match(self):
        line = "ra=00:4b:69:6e:73:30 ta=c8:93:46:a3:8e:74 tsf=1473507516 seq=28769 rssi=-60"
        ra = "00:4b:69:6e:73:30"
        ta = "c8:93:46:a3:8e:74"
        tsf = 1473507516
        rssi = -60
        p = Parser()
        r = p._match(line, ra, ta)
        self.assertTrue(r is not None)
        self.assertEqual(r[0], ra)
        self.assertEqual(r[1], ta)
        self.assertEqual(r[2], tsf)
        self.assertEqual(r[3], rssi)


def main():
    pass


if __name__ == "__main__":
    main()
| bsd-3-clause | Python |
|
b5cc83a705eaa22872d304b92c7b6e57b5581604 | Add unit-test for "readbytes_multiple" | exhuma/puresnmp,exhuma/puresnmp | puresnmp/test/test/test_helpers.py | puresnmp/test/test/test_helpers.py | '''
Tests for unit-test helpers
'''
from textwrap import dedent
from binascii import hexlify
import puresnmp.test as th
from io import StringIO
def test_readbytes_multiple():
    data = StringIO(dedent(
        '''\
        #
        # This is a comment
        #
        30 2d 02 01 01 04 07 70 72 69 76 61 74 65 a2 1f 0-.....private..
        ----
        30 2d 02 01 01 04 07 70 72 69 76 61 74 65 a2 1f 0-.....private..
        ----
        30 2e 02 01 01 04 07 70 72 69 76 61 74 65 a2 20 0......private.
        '''
    ))
    expected = [
        b'\x30\x2d\x02\x01\x01\x04\x07\x70\x72\x69\x76\x61\x74\x65\xa2\x1f',
        b'\x30\x2d\x02\x01\x01\x04\x07\x70\x72\x69\x76\x61\x74\x65\xa2\x1f',
        b'\x30\x2e\x02\x01\x01\x04\x07\x70\x72\x69\x76\x61\x74\x65\xa2\x20',
    ]
    result = list(th.readbytes_multiple(data))
    assert result == expected
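
# The fixture above mimics a hexdump: each line carries hex byte columns plus a
# trailing ASCII column, and '----' lines separate consecutive packets, which
# is why three byte strings come back from the helper.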
| mit | Python |
|
f434e45b58bfa7001d21d1920a65903f941df833 | Add __main__.py so that the package can be executed by `python -m jiebarpc` [ciskip] | messense/jieba-rpc | jiebarpc/__main__.py | jiebarpc/__main__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import sys
import argparse

from jiebarpc import JiebaRPCServer, JiebaRPCDispatcher


def main(host, port, processnum=1):
    server = JiebaRPCServer(JiebaRPCDispatcher(processnum))
    server.listen(host, port)
    server.start()
    return 0


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        'python -m jiebarpc',
        description='Run jiebarpc server'
    )
    parser.add_argument('-n', '--processnum', type=int, default=1,
                        help='How many processes to use.')
    parser.add_argument('address',
                        help='Server listen address like localhost:8888',)
    ns = parser.parse_args()
    address = ns.address.split(':')
    host = address[0]
    port = int(address[1])
    sys.exit(main(host, port, ns.processnum))
| mit | Python |
|
5d297710416ebaea3a79e1ded0604d53178c493a | add python solution for Project Euler problem 1 | bruckhaus/challenges,bruckhaus/challenges,bruckhaus/challenges,bruckhaus/challenges | python_challenges/project_euler/problem_1.py | python_challenges/project_euler/problem_1.py | __author__ = 'tilmannbruckhaus'
def divisible_by_3_or_5(i):
    divisible = i % 3 == 0 or i % 5 == 0
    # print("natural number:", i, "is divisible:", divisible)
    return divisible


def sum_of_multiples_of_3_or_5(limit):
    # If we list all the natural numbers below 10 that are multiples of 3 or 5,
    # we get 3, 5, 6 and 9. The sum of these multiples is 23.
    # Find the sum of all the multiples of 3 or 5 below 1000.
    multi_sum = 0
    for number in range(limit):
        if divisible_by_3_or_5(number):
            multi_sum += number
    return multi_sum


for test_limit in [10, 1000]:
    print sum_of_multiples_of_3_or_5(test_limit)
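
# Expected output: 23 for a limit of 10 (3 + 5 + 6 + 9) and 233168 for 1000.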
| mit | Python |
|
1db14473edff479f97703fb68cb1aa8d65c25023 | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/exp/benchmark/python/benchmark.py | lib/node_modules/@stdlib/math/base/special/exp/benchmark/python/benchmark.py | #!/usr/bin/env python
"""Benchmark exp."""
import timeit
name = "exp"
repeats = 3
iterations = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = iterations / elapsed
print(" ---")
print(" iterations: " + str(iterations))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from math import exp; from random import random;"
stmt = "y = exp(100.0*random() - 50.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in xrange(3):
print("# python::" + name)
elapsed = t.timeit(number=iterations)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(repeats, repeats)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python |
|
d1f4e257b449b6993e0cdc87055113018b6efabb | Create promoter_bin.py | ShaopengLiu1/Atac-seq_Quality_Control_pipe,ShaopengLiu1/Atac-seq_Quality_Control_pipe,ShaopengLiu1/Atac-seq_Quality_Control_pipe | code_collection/promoter_bin.py | code_collection/promoter_bin.py | import sys
peak = []
with open(sys.argv[1], 'r') as f:
    for line in f:
        line = line.strip('\n').split('\t')
        peak.append(int(line[3]))

num = int(len(peak)/100.0)
bin = []
for i in range(99):
    bin.append(str(i+1)+'\t'+str(sum(peak[num*i:num*(i+1)])/(num*1.0))+'\n')
bin.append('100'+'\t'+str(sum(peak[num*99:])/(num*1.0))+'\n')

with open('bin.txt', 'w') as f:
    f.writelines(bin)
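
# Assumed input format: a tab-separated file whose fourth column holds a
# numeric value per row; the script averages that column over 100 equal-sized
# bins and writes "bin<TAB>mean" rows to bin.txt.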
| mit | Python |
|
93a3b7d61877e9350ea2b32ade918755fc874bb8 | Create run_test.py | ceholden/staged-recipes,basnijholt/staged-recipes,patricksnape/staged-recipes,chrisburr/staged-recipes,sannykr/staged-recipes,ReimarBauer/staged-recipes,hadim/staged-recipes,stuertz/staged-recipes,pmlandwehr/staged-recipes,rvalieris/staged-recipes,jakirkham/staged-recipes,jakirkham/staged-recipes,isuruf/staged-recipes,rvalieris/staged-recipes,goanpeca/staged-recipes,petrushy/staged-recipes,dschreij/staged-recipes,NOAA-ORR-ERD/staged-recipes,synapticarbors/staged-recipes,kwilcox/staged-recipes,rmcgibbo/staged-recipes,sodre/staged-recipes,birdsarah/staged-recipes,igortg/staged-recipes,chohner/staged-recipes,scopatz/staged-recipes,johanneskoester/staged-recipes,larray-project/staged-recipes,chohner/staged-recipes,Cashalow/staged-recipes,shadowwalkersb/staged-recipes,mariusvniekerk/staged-recipes,patricksnape/staged-recipes,kwilcox/staged-recipes,birdsarah/staged-recipes,dschreij/staged-recipes,guillochon/staged-recipes,sodre/staged-recipes,shadowwalkersb/staged-recipes,conda-forge/staged-recipes,SylvainCorlay/staged-recipes,mariusvniekerk/staged-recipes,barkls/staged-recipes,petrushy/staged-recipes,chrisburr/staged-recipes,hadim/staged-recipes,igortg/staged-recipes,glemaitre/staged-recipes,johanneskoester/staged-recipes,Juanlu001/staged-recipes,grlee77/staged-recipes,glemaitre/staged-recipes,basnijholt/staged-recipes,sannykr/staged-recipes,larray-project/staged-recipes,pmlandwehr/staged-recipes,mcs07/staged-recipes,scopatz/staged-recipes,sodre/staged-recipes,jochym/staged-recipes,jjhelmus/staged-recipes,Juanlu001/staged-recipes,stuertz/staged-recipes,ReimarBauer/staged-recipes,Cashalow/staged-recipes,conda-forge/staged-recipes,grlee77/staged-recipes,mcs07/staged-recipes,barkls/staged-recipes,asmeurer/staged-recipes,ceholden/staged-recipes,NOAA-ORR-ERD/staged-recipes,guillochon/staged-recipes,synapticarbors/staged-recipes,jochym/staged-recipes,ocefpaf/staged-recipes,jjhelmus/staged-recipes,isuruf/staged-recipes,ocefpaf/staged-recipes,asmeurer/staged-recipes,cpaulik/staged-recipes,goanpeca/staged-recipes,cpaulik/staged-recipes,SylvainCorlay/staged-recipes,rmcgibbo/staged-recipes | recipes/django-environ/run_test.py | recipes/django-environ/run_test.py | import django
from django.conf import settings
settings.configure(INSTALLED_APPS=['environ', 'django.contrib.contenttypes', 'django.contrib.auth'])
django.setup()
import environ
| bsd-3-clause | Python |
|
19186f44b1ed4c4b60ffc1ef796fa0894b25da68 | Add garage.partdefs.sockets | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | py/garage/garage/partdefs/sockets.py | py/garage/garage/partdefs/sockets.py | from garage import parameters
from garage import parts
from garage import sockets

PARTS = parts.Parts(sockets.__name__)
PARTS.patch_getaddrinfo = parts.AUTO

PARAMS = parameters.define_namespace(sockets.__name__, 'socket utils')
PARAMS.patch_getaddrinfo = parameters.create(
    False, 'enable patching getaddrinfo for caching query results')


@parts.define_maker
def make() -> PARTS.patch_getaddrinfo:
    if PARAMS.patch_getaddrinfo.get():
        sockets.patch_getaddrinfo()
| mit | Python |
|
a25e2c0e5d8466e449641fb4c1c74a6e9872da75 | add connection module for pyRpc | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | python/proto/pyRpc/tcp_connection.py | python/proto/pyRpc/tcp_connection.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
#    distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import asyncore
import socket

import const
from pyRpc.logger import LoggerMgr


class TcpConnection(asyncore.dispatcher):
    ST_INIT = 0
    ST_ESTABLISHED = 1
    ST_DISCONNECTED = 2

    def __init__(self, fd, peername):
        super(TcpConnection, self).__init__(fd)
        self.logger = LoggerMgr.getLogger('pyRpc.TcpConnection')
        self.peername = peername
        self.writebuf = ''
        self.readbuf_len = const.RPC_READBUF_LEN
        self.status = TcpConnection.ST_INIT
        if fd:
            self.status = TcpConnection.ST_DISCONNECTED
            self.set_socket_option()
        self.rpc_channel = None

    def set_socket_option(self):
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    def set_rpc_channel(self, rpc_channel):
        self.rpc_channel = rpc_channel

    def get_rpc_channel(self):
        return self.rpc_channel

    def is_established(self):
        return self.status == TcpConnection.ST_ESTABLISHED

    def set_readbuf_len(self, readbuf_len):
        self.readbuf_len = readbuf_len

    def get_peername(self):
        return self.peername

    def writable(self):
        return len(self.writebuf) > 0

    def write_data(self, data):
        self.writebuf += data

    def disconnect(self):
        if self.status == TcpConnection.ST_DISCONNECTED:
            return
        if self.rpc_channel:
            self.rpc_channel.on_disconnected()
            self.rpc_channel = None
        if self.socket:
            super(TcpConnection, self).close()
        self.status = TcpConnection.ST_DISCONNECTED

    def handle_read(self):
        self.logger.debug('TcpConnection.handle_read')
        data = self.recv(self.readbuf_len)
        if data:
            if not self.rpc_channel:
                return
            self.rpc_channel.on_read(data)

    def handle_write(self):
        self.logger.debug('TcpConnection.handle_write')
        if self.writebuf:
            size = self.send(self.writebuf)
            self.writebuf = self.writebuf[size:]

    def handle_close(self):
        self.logger.debug('TcpConnection.handle_close')
        super(TcpConnection, self).handle_close()
        self.disconnect()

    def handle_error(self):
        self.logger.debug('TcpConnection.handle_error')
        super(TcpConnection, self).handle_error()
        self.disconnect()

    def handle_expt(self):
        self.logger.debug('TcpConnection.handle_expt')
        super(TcpConnection, self).handle_expt()
        self.disconnect()
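
# Note: asyncore polls writable() before invoking handle_write(), so appending
# to self.writebuf via write_data() is what schedules the next send on the
# event loop.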
| bsd-2-clause | Python |
|
b7d23a337ad121a032a8aa2c395c3705bad12b28 | add migration to grandfather in all existing plans to have Case Sharing via Groups and Child Cases privileges | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/accounting/migrations/0043_grandfather_case_privs.py | corehq/apps/accounting/migrations/0043_grandfather_case_privs.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-07-23 16:43
from __future__ import unicode_literals
from __future__ import absolute_import

from django.core.management import call_command
from django.db import migrations

from corehq.apps.hqadmin.management.commands.cchq_prbac_bootstrap import (
    cchq_prbac_bootstrap,
)
from corehq.privileges import (
    CASE_SHARING_GROUPS,
    CHILD_CASES,
)


def _grandfather_case_privs(apps, schema_editor):
    call_command(
        'cchq_prbac_grandfather_privs',
        CASE_SHARING_GROUPS,
        CHILD_CASES,
        noinput=True,
    )


class Migration(migrations.Migration):

    dependencies = [
        ('accounting', '0042_domain_user_history__unique__and__nonnullable'),
    ]

    operations = [
        migrations.RunPython(cchq_prbac_bootstrap),
        migrations.RunPython(_grandfather_case_privs),
    ]
| bsd-3-clause | Python |
|
304826205804e3972968b16fbf9bb9021eaf9acd | add FieldOfStudyHierarchy class | citationfinder/scholarly_citation_finder | scholarly_citation_finder/apps/core/migrations/0015_fieldofstudyhierarchy.py | scholarly_citation_finder/apps/core/migrations/0015_fieldofstudyhierarchy.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-11 13:19
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0014_publicationreference_source'),
    ]

    operations = [
        migrations.CreateModel(
            name='FieldOfStudyHierarchy',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('child_level', models.SmallIntegerField()),
                ('parent_level', models.SmallIntegerField()),
                ('confidence', models.FloatField()),
                ('child', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fieldofstudyhierarchy_child', to='core.FieldOfStudy')),
                ('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fieldofstudyhierarchy_parent', to='core.FieldOfStudy')),
            ],
        ),
    ]
| mit | Python |
|
ce4bcc19e61518273e054553494288364ab4f677 | Add lc085_maximal_rectangle.py | bowen0701/algorithms_data_structures | lc085_maximal_rectangle.py | lc085_maximal_rectangle.py | """Leetcode 85. Maximal Rectangle
Hard

URL: https://leetcode.com/problems/maximal-rectangle/

Given a 2D binary matrix filled with 0's and 1's, find the largest rectangle
containing only 1's and return its area.

Example:

Input:
[
  ["1","0","1","0","0"],
  ["1","0","1","1","1"],
  ["1","1","1","1","1"],
  ["1","0","0","1","0"]
]
Output: 6
"""


class Solution(object):
    def maximalRectangle(self, matrix):
        """
        :type matrix: List[List[str]]
        :rtype: int
        """
        pass


def main():
    pass


if __name__ == '__main__':
    main()
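
# A sketch of one standard way to fill in the stub above (an assumption, not
# part of the original commit): treat each row as the base of a histogram and
# reuse the largest-rectangle-in-histogram stack algorithm per row.
#
#     def maximalRectangle(self, matrix):
#         if not matrix or not matrix[0]:
#             return 0
#         n = len(matrix[0])
#         heights = [0] * n
#         best = 0
#         for row in matrix:
#             for j in range(n):
#                 heights[j] = heights[j] + 1 if row[j] == '1' else 0
#             stack = []  # indices of bars with increasing heights
#             for j, h in enumerate(heights + [0]):  # 0-sentinel flushes stack
#                 while stack and heights[stack[-1]] >= h:
#                     top = stack.pop()
#                     width = j if not stack else j - stack[-1] - 1
#                     best = max(best, heights[top] * width)
#                 stack.append(j)
#         return best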
| bsd-2-clause | Python |
|
5d8af7dec1806e7f897a89d1a54ff5f2dc5bfec0 | Add 'merge-json.py' script to make the final annotations file. | mtholder/propinquity,OpenTreeOfLife/propinquity,mtholder/propinquity,OpenTreeOfLife/propinquity,OpenTreeOfLife/propinquity,mtholder/propinquity | bin/merge-json.py | bin/merge-json.py | #!/usr/bin/env python
from collections import Mapping
import json
import sys
filename1 = sys.argv[1]
filename2 = sys.argv[2]
json_data1=open(filename1).read()
dictA = json.loads(json_data1)
json_data2=open(filename2).read()
dictB = json.loads(json_data2)
merged_dict = {key: value for (key, value) in (dictA.items() + dictB.items())}
# string dump of the merged dict
print json.dumps(merged_dict)
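
# Note: when both files define the same key, the value from the second file
# wins, e.g. {"a": 1} merged with {"a": 2} yields {"a": 2}, because dictB's
# items come last in the comprehension above.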
| bsd-2-clause | Python |
|
7f319b9f84e441cbe893fd2cc68ecd77cfcfd987 | create perl-file-which package (#6800) | LLNL/spack,krafczyk/spack,EmreAtes/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,EmreAtes/spack,tmerrick1/spack,LLNL/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack | var/spack/repos/builtin/packages/perl-file-which/package.py | var/spack/repos/builtin/packages/perl-file-which/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlFileWhich(PerlPackage):
"""Perl implementation of the which utility as an API"""
homepage = "http://cpansearch.perl.org/src/PLICEASE/File-Which-1.22/lib/File/Which.pm"
url = "http://search.cpan.org/CPAN/authors/id/P/PL/PLICEASE/File-Which-1.22.tar.gz"
version('1.22', 'face60fafd220dc83fa581ef6f96d480')
| lgpl-2.1 | Python |
|
a2b4389db17759086c4cd804b6cbfb1b658d547e | Create equal_sides_of_an_array.py | Kunalpod/codewars,Kunalpod/codewars | equal_sides_of_an_array.py | equal_sides_of_an_array.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Equal Sides Of An Array
#Problem level: 6 kyu
def find_even_index(arr):
    if not sum(arr[1:]): return 0
    if not sum(arr[:len(arr)-1]): return len(arr)-1
    for i in range(1, len(arr)-1):
        if sum(arr[:i]) == sum(arr[i+1:]):
            return i
    return -1
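
# Example: find_even_index([1, 2, 3, 4, 3, 2, 1]) == 3, since 1+2+3 == 3+2+1.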
| mit | Python |
|
d2978eae5b502cc5bc4b020044b88f02522f90cd | Add jobs.utils module | virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool | virtool/jobs/utils.py | virtool/jobs/utils.py | def is_running_or_waiting(document):
latest_state = document["status"][-1]["state"]
return latest_state != "waiting" and latest_state != "running"
| mit | Python |
|
3bae93629c81cc33e565912e4b9bafeff536ec22 | Create hostgroup_info.py | linearregression/platform-python-lsf-api,PlatformLSF/platform-python-lsf-api,xlyang0211/platform-python-lsf-api | examples/hostgroup_info.py | examples/hostgroup_info.py | def queryHostGroupInfo():
"""
"query host group info"
"""
if lsf.lsb_init("queryHostGroupInfo") > 0:
return -1;
strArr = lsf.new_stringArray(2);
lsf.stringArray_setitem(strArr, 0, "hg1");
lsf.stringArray_setitem(strArr, 1, "hg2");
for hgroupInfo in lsf.get_hostgroup_info_by_name(strArr,2):
if hgroupInfo != None:
print 'hgroup name = %s' % hgroupInfo.group;
print 'hgroup list = %s' % hgroupInfo.memberList;
else:
print 'hgroupInfo is null'
return -1;
return 0;
if __name__ == '__main__':
queryHostGroupInfo();
| epl-1.0 | Python |
|
986b20363cc84be1822588dd7cc935fca7ef7f48 | add test for get_genofile_samplelist in marker_regression/run_mapping.py | pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2 | wqflask/tests/wqflask/marker_regression/test_run_mapping.py | wqflask/tests/wqflask/marker_regression/test_run_mapping.py | import unittest
from unittest import mock

from wqflask.marker_regression.run_mapping import get_genofile_samplelist


class AttributeSetter:
    def __init__(self, obj):
        for k, v in obj.items():
            setattr(self, k, v)


class MockDataSetGroup(AttributeSetter):
    def get_genofiles(self):
        return [{"location": "~/genofiles/g1_file", "sample_list": ["S1", "S2", "S3", "S4"]}]


class TestRunMapping(unittest.TestCase):
    def setUp(self):
        self.group = MockDataSetGroup({"genofile": "~/genofiles/g1_file"})
        self.dataset = AttributeSetter({"group": self.group})

    def tearDown(self):
        self.dataset = AttributeSetter({"group": {"location": "~/genofiles/g1_file"}})

    def test_get_genofile_samplelist(self):
        # location true and sample list true
        results_1 = get_genofile_samplelist(self.dataset)
        self.assertEqual(results_1, ["S1", "S2", "S3", "S4"])

        # return empty array
        self.group.genofile = "~/genofiles/g2_file"
        result_2 = get_genofile_samplelist(self.dataset)
        self.assertEqual(result_2, [])
| agpl-3.0 | Python |
|
fe145fd87db777d9eeb361688d502b1b3ec4b2e1 | Add a new Model-View-Projection matrix tool. | microy/MeshToolkit,microy/PyMeshToolkit,microy/MeshToolkit,microy/PyMeshToolkit | Transformation.py | Transformation.py | # -*- coding:utf-8 -*-
# ***************************************************************************
# Transformation.py
# -------------------
# update : 2013-11-13
# copyright : (C) 2013 by Michaël Roy
# email : [email protected]
# ***************************************************************************
# ***************************************************************************
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU General Public License as published by *
# * the Free Software Foundation; either version 2 of the License, or *
# * (at your option) any later version. *
# * *
# ***************************************************************************
#
# External dependencies
#
from numpy import *
| mit | Python |
|
c2089b3ed549d89942f57075d0b6d573d980bc30 | make app load in worker in uwsgi.ini, pass db configuration dynamically to docker image as env variable | jacohend/flask-restless-security,jacohend/flask-restless-security | app/config.py | app/config.py | from datetime import timedelta
class Config(object):
    DEBUG = False
    TESTING = False
    SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://{}:{}@{}/{}'
    APP_NAME = '{} Server'
    SECRET_KEY = '{}'
    JWT_EXPIRATION_DELTA = timedelta(days=30)
    JWT_AUTH_URL_RULE = '/api/v1/auth'
    SECURITY_REGISTERABLE = True
    SECURITY_RECOVERABLE = True
    SECURITY_TRACKABLE = True
    SECURITY_PASSWORD_HASH = 'sha512_crypt'
    SECURITY_PASSWORD_SALT = '{}'
    SQLALCHEMY_TRACK_MODIFICATIONS = False


class ProductionConfig(Config):
    APP_NAME = '{} Production Server'
    DEBUG = False


class DevelopmentConfig(Config):
    DEBUG = True
    MAIL_SUPPRESS_SEND = False


class TestingConfig(Config):
    TESTING = True
|
2f0700093141643bd66e99d271f9e74087e148e6 | Add Message model migration file. | qubs/climate-data-api,qubs/climate-data-api,qubs/data-centre,qubs/data-centre | core/migrations/0002_message.py | core/migrations/0002_message.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-05 19:19
from __future__ import unicode_literals

import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('goes_id', models.CharField(max_length=8)),
                ('goes_channel', models.PositiveSmallIntegerField()),
                ('goes_spacecraft', models.CharField(choices=[('E', 'East'), ('W', 'West')], default='E', max_length=1)),
                ('arrival_time', models.DateTimeField()),
                ('failure_code', models.CharField(max_length=1)),
                ('signal_strength', models.PositiveSmallIntegerField()),
                ('frequency_offset', models.CharField(max_length=2)),
                ('modulation_index', models.CharField(choices=[('N', 'Normal (60 degrees +/- 5)'), ('L', 'Low (50 degrees)'), ('H', 'High (70 degrees)')], default='N', max_length=1)),
                ('data_quality', models.CharField(choices=[('N', 'Normal (error rate < 10^-6)'), ('F', 'Fair (10^-6 < error rate < 10^-4)'), ('P', 'Poor (error rate > 10^-4)')], default='N', max_length=1)),
                ('data_source', models.CharField(choices=[('LE', 'Cincinnati East; USACE LRD Cincinnati'), ('d1', 'NIFC West Boise ID - Unit 1; NIFC Boise'), ('d2', 'NIFC West Boise ID - Unit 2; NIFC Boise'), ('OW', 'Omaha West; USACE NWO'), ('RE', 'Rock Island East; USACE MVR'), ('RW', 'Rock Island West; USACE MVR'), ('SF', 'West Palm Beach East; SFWMD'), ('UB', 'Ucom Backup @ WCDA; NOAA Wallops CDA'), ('UP', 'Ucom Primary @ WCDA; NOAA Wallops CDA'), ('XE', 'Sioux Falls, East; USGS EROS'), ('XW', 'Sioux Falls, West; USGS EROS'), ('XL', 'Sioux Falls, LRIT; USGS EROS'), ('RL', 'Reston, LRIT; Reston, Virginia')], max_length=2)),
                ('recorded_message_length', models.PositiveSmallIntegerField()),
                ('values', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)),
                ('message_text', models.TextField()),
                ('station', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.Station')),
            ],
        ),
    ]
| apache-2.0 | Python |
|
68dbfedf90fb9e6c922971deaeccad148a258a70 | Add tests for PyEcore extension (EClass/EModelElement tests) | aranega/pyecore,pyecore/pyecore | tests/test_dynamic_ecore_extension.py | tests/test_dynamic_ecore_extension.py | import pytest
from pyecore.ecore import *
import pyecore.ecore as ecore
from ordered_set import OrderedSet


def test__EModelElement_extension():
    A = EClass('A', superclass=(EModelElement.eClass))
    a = A()
    assert a.eAnnotations == OrderedSet()

    annotation = EAnnotation(source='testAnnot')
    annotation.details['test'] = 'value'
    a.eAnnotations.append(annotation)
    assert len(a.eAnnotations) == 1
    assert a.getEAnnotation('testAnnot') is annotation
    assert a.getEAnnotation('testAnnot').details['test'] == 'value'


def test__EClass_extension():
    SuperEClass = EClass('SuperEClass', superclass=(EClass.eClass,))
    A = SuperEClass(name='A')
    assert isinstance(A, EClass)

    a = A()
    assert isinstance(a, EObject)
    assert a.eClass is A


def test__EClass_modification():
    EClass.new_feature = EAttribute('new_feature', EInt)
    A = EClass('A')
    assert A.new_feature == 0

    A.new_feature = 5
    assert A.new_feature == 5

    with pytest.raises(BadValueError):
        A.new_feature = 'a'
| bsd-3-clause | Python |
|
40431228c8535f325b005bb52485cae87a8be714 | Add test module for napalm_acl | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/unit/modules/test_napalm_acl.py | tests/unit/modules/test_napalm_acl.py | # -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Anthony Shaw <[email protected]>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_acl as napalm_acl  # NOQA


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmAclModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash
            }
        }
        return {napalm_acl: module_globals}

    def test_load_term_config(self):
        ret = napalm_acl.load_term_config("test_filter", "test_term")
        assert ret['out'] is napalm_test_support.TEST_TERM_CONFIG
| apache-2.0 | Python |
|
f987b39bb43301c735f30169010832665953efe6 | Add a sample permission plugin for illustrating the check on realm resources, related to #6211. | pkdevbox/trac,pkdevbox/trac,pkdevbox/trac,pkdevbox/trac | sample-plugins/public_wiki_policy.py | sample-plugins/public_wiki_policy.py | from fnmatch import fnmatchcase
from trac.config import Option
from trac.core import *
from trac.perm import IPermissionPolicy


class PublicWikiPolicy(Component):
    """Sample permission policy plugin illustrating how to check
    permission on realms.

    Don't forget to integrate that plugin in the appropriate place in the
    list of permission policies:
    {{{
    [trac]
    permission_policies = PublicWikiPolicy, DefaultPermissionPolicy
    }}}

    Then you can configure which pages you want to make public:
    {{{
    [public_wiki]
    view = Public*
    modify = PublicSandbox/*
    }}}
    """

    implements(IPermissionPolicy)

    view = Option('public_wiki', 'view', 'Public*',
        """Case-sensitive glob pattern used for granting view permission on
        all Wiki pages matching it.""")

    modify = Option('public_wiki', 'modify', 'Public*',
        """Case-sensitive glob pattern used for granting modify permissions
        on all Wiki pages matching it.""")

    def check_permission(self, action, username, resource, perm):
        if resource:  # fine-grained permission check
            if resource.realm == 'wiki':  # wiki realm or resource
                if resource.id:  # ... it's a resource
                    if action == 'WIKI_VIEW':  # (think 'VIEW' here)
                        pattern = self.view
                    else:
                        pattern = self.modify
                    if fnmatchcase(resource.id, pattern):
                        return True
                else:  # ... it's a realm
                    return True
                    # this policy ''may'' grant permissions on some wiki pages
        else:  # coarse-grained permission check
            #
            # support for the legacy permission checks: no resource specified
            # and realm information in the action name itself.
            #
            if action.startswith('WIKI_'):
                return True
                # this policy ''may'' grant permissions on some wiki pages
| bsd-3-clause | Python |
|
784cd71fe24b1f5ce57a1982186dabc768892883 | Fix discount calculation logic | KenMutemi/saleor,maferelo/saleor,car3oon/saleor,tfroehlich82/saleor,laosunhust/saleor,rchav/vinerack,laosunhust/saleor,UITools/saleor,spartonia/saleor,itbabu/saleor,UITools/saleor,mociepka/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,jreigel/saleor,tfroehlich82/saleor,UITools/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,rchav/vinerack,rodrigozn/CW-Shop,jreigel/saleor,rodrigozn/CW-Shop,itbabu/saleor,spartonia/saleor,spartonia/saleor,tfroehlich82/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,maferelo/saleor,spartonia/saleor,rodrigozn/CW-Shop,mociepka/saleor,car3oon/saleor,KenMutemi/saleor,car3oon/saleor,laosunhust/saleor,mociepka/saleor,rchav/vinerack,KenMutemi/saleor,laosunhust/saleor | saleor/product/models/discounts.py | saleor/product/models/discounts.py | from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
from django_prices.models import PriceField
from prices import FixedDiscount


class NotApplicable(ValueError):
    pass


@python_2_unicode_compatible
class FixedProductDiscount(models.Model):
    name = models.CharField(max_length=255)
    products = models.ManyToManyField('Product', blank=True)
    discount = PriceField(pgettext_lazy('Discount field', 'discount value'),
                          currency=settings.DEFAULT_CURRENCY,
                          max_digits=12, decimal_places=2)

    class Meta:
        app_label = 'product'

    def __repr__(self):
        return 'FixedProductDiscount(name=%r, discount=%r)' % (
            str(self.discount), self.name)

    def __str__(self):
        return self.name

    def modifier_for_product(self, variant):
        from ...product.models import ProductVariant
        if isinstance(variant, ProductVariant):
            pk = variant.product.pk
            check_price = variant.get_price_per_item()
        else:
            pk = variant.pk
            check_price = variant.get_price_per_item(variant)
        if not self.products.filter(pk=pk).exists():
            raise NotApplicable('Discount not applicable for this product')
        if self.discount > check_price:
            raise NotApplicable('Discount too high for this product')
        return FixedDiscount(self.discount, name=self.name)


def get_product_discounts(variant, discounts, **kwargs):
    for discount in discounts:
        try:
            yield discount.modifier_for_product(variant, **kwargs)
        except NotApplicable:
            pass
| from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
from django_prices.models import PriceField
from prices import FixedDiscount


class NotApplicable(ValueError):
    pass


@python_2_unicode_compatible
class FixedProductDiscount(models.Model):
    name = models.CharField(max_length=255)
    products = models.ManyToManyField('Product', blank=True)
    discount = PriceField(pgettext_lazy('Discount field', 'discount value'),
                          currency=settings.DEFAULT_CURRENCY,
                          max_digits=12, decimal_places=2)

    class Meta:
        app_label = 'product'

    def __repr__(self):
        return 'FixedProductDiscount(name=%r, discount=%r)' % (
            str(self.discount), self.name)

    def __str__(self):
        return self.name

    def modifier_for_product(self, variant):
        if not self.products.filter(pk=variant.product.pk).exists():
            raise NotApplicable('Discount not applicable for this product')
        if self.discount > variant.get_price(discounted=False):
            raise NotApplicable('Discount too high for this product')
        return FixedDiscount(self.discount, name=self.name)


def get_product_discounts(variant, discounts, **kwargs):
    for discount in discounts:
        try:
            yield discount.modifier_for_product(variant, **kwargs)
        except NotApplicable:
            pass
| bsd-3-clause | Python |
c35fa6caa631b03c3b5b3c9ea5bf494254ed9b1f | add script for usb backend to receive tracing data | nashif/zephyr,Vudentz/zephyr,Vudentz/zephyr,nashif/zephyr,finikorg/zephyr,galak/zephyr,zephyrproject-rtos/zephyr,zephyrproject-rtos/zephyr,Vudentz/zephyr,finikorg/zephyr,zephyrproject-rtos/zephyr,galak/zephyr,Vudentz/zephyr,zephyrproject-rtos/zephyr,finikorg/zephyr,nashif/zephyr,finikorg/zephyr,nashif/zephyr,galak/zephyr,zephyrproject-rtos/zephyr,galak/zephyr,galak/zephyr,Vudentz/zephyr,nashif/zephyr,finikorg/zephyr,Vudentz/zephyr | scripts/tracing/trace_capture_usb.py | scripts/tracing/trace_capture_usb.py | #!/usr/bin/env python3
#
# Copyright (c) 2019 Intel Corporation.
#
# SPDX-License-Identifier: Apache-2.0

"""
Script to capture tracing data with USB backend.
"""

import usb.core
import usb.util
import argparse
import sys


def parse_args():
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-v", "--vendor_id", required=True,
                        help="usb device vendor id")
    parser.add_argument("-p", "--product_id", required=True,
                        help="usb device product id")
    parser.add_argument("-o", "--output", default='channel0_0',
                        required=False, help="tracing data output file")
    args = parser.parse_args()


def main():
    parse_args()

    if args.vendor_id.isdecimal():
        vendor_id = int(args.vendor_id)
    else:
        vendor_id = int(args.vendor_id, 16)

    if args.product_id.isdecimal():
        product_id = int(args.product_id)
    else:
        product_id = int(args.product_id, 16)

    output_file = args.output

    try:
        usb_device = usb.core.find(idVendor=vendor_id, idProduct=product_id)
    except Exception as e:
        sys.exit("{}".format(e))

    if usb_device is None:
        sys.exit("No device found, check vendor_id and product_id")

    if usb_device.is_kernel_driver_active(0):
        try:
            usb_device.detach_kernel_driver(0)
        except usb.core.USBError as e:
            sys.exit("{}".format(e))

    # set the active configuration. With no arguments, the first
    # configuration will be the active one
    try:
        usb_device.set_configuration()
    except usb.core.USBError as e:
        sys.exit("{}".format(e))

    configuration = usb_device[0]
    interface = configuration[(0, 0)]

    # match the only IN endpoint
    read_endpoint = usb.util.find_descriptor(
        interface,
        custom_match=lambda e: usb.util.endpoint_direction(
            e.bEndpointAddress) == usb.util.ENDPOINT_IN)

    # match the only OUT endpoint
    write_endpoint = usb.util.find_descriptor(
        interface,
        custom_match=lambda e: usb.util.endpoint_direction(
            e.bEndpointAddress) == usb.util.ENDPOINT_OUT)

    usb.util.claim_interface(usb_device, interface)

    # enable device tracing
    write_endpoint.write('enable')

    # try to read to avoid garbage mixed to useful stream data
    buff = usb.util.create_buffer(8192)
    read_endpoint.read(buff, 10000)

    with open(output_file, "wb") as file_desc:
        while True:
            buff = usb.util.create_buffer(8192)
            length = read_endpoint.read(buff, 100000)
            for index in range(length):
                file_desc.write(chr(buff[index]).encode('latin1'))

    usb.util.release_interface(usb_device, interface)


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        print('Data capture interrupted, data saved into {}'.format(args.output))
        sys.exit(0)
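
# Hypothetical invocation (the vendor/product IDs depend on the device and may
# be given in decimal or hex):
#   python3 trace_capture_usb.py -v 0x1234 -p 0x5678 -o channel0_0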
| apache-2.0 | Python |
|
964d01fd9a730d02aac85740bce0ef9dace6517b | add migrations | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | molo/core/migrations/0054_merged_cms_models.py | molo/core/migrations/0054_merged_cms_models.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-02-21 12:13
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields


class Migration(migrations.Migration):

    dependencies = [
        ('wagtailcore', '0032_add_bulk_delete_page_permission'),
        ('core', '0053_add_next_and_recommended_functionality'),
    ]

    operations = [
        migrations.CreateModel(
            name='Languages',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='SiteLanguageRelation',
            fields=[
                ('sitelanguage_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.SiteLanguage')),
                ('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
                ('language_setting', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='languages', to='core.Languages')),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
            bases=('core.sitelanguage', models.Model),
        ),
    ]
| bsd-2-clause | Python |
|
17b4efb401d36060f51e07da5ace83c008d421c5 | Create table charge_observation. | jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools | problem/charge_state/alembic/versions/2154afa58ba0_create_table_charge_observation.py | problem/charge_state/alembic/versions/2154afa58ba0_create_table_charge_observation.py | """Create table charge_observation.
Revision ID: 2154afa58ba0
Revises:
Create Date: 2020-01-05 12:18:25.331846

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '2154afa58ba0'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'charge_observation',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('stamp', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('charge_observation')
    # ### end Alembic commands ###
| mit | Python |