commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
d764a483497afc5d029a82db14cc5cc88f45f4c0 | Add an extension to allow for an addFixedIp action on instances | saleemjaveds/https-github.com-openstack-nova,bigswitch/nova,virtualopensystems/nova,leilihh/nova,cyx1231st/nova,rajalokan/nova,whitepages/nova,eneabio/nova,devendermishrajio/nova,virtualopensystems/nova,luogangyi/bcec-nova,tanglei528/nova,cloudbau/nova,sileht/deb-openstack-nova,tianweizhang/nova,mandeepdhami/nova,salv-orlando/MyRepo,cloudbase/nova,devoid/nova,sridevikoushik31/openstack,JioCloud/nova_test_latest,savi-dev/nova,yosshy/nova,klmitch/nova,cernops/nova,dstroppa/openstack-smartos-nova-grizzly,psiwczak/openstack,BeyondTheClouds/nova,CiscoSystems/nova,kimjaejoong/nova,mikalstill/nova,SUSE-Cloud/nova,takeshineshiro/nova,hanlind/nova,redhat-openstack/nova,adelina-t/nova,viggates/nova,shahar-stratoscale/nova,gspilio/nova,maoy/zknova,maheshp/novatest,LoHChina/nova,DirectXMan12/nova-hacking,edulramirez/nova,rahulunair/nova,double12gzh/nova,MountainWei/nova,leilihh/nova,j-carpentier/nova,yrobla/nova,NewpTone/stacklab-nova,eneabio/nova,viggates/nova,imsplitbit/nova,zhimin711/nova,JianyuWang/nova,houshengbo/nova_vmware_compute_driver,vmturbo/nova,Triv90/Nova,gooddata/openstack-nova,JianyuWang/nova,double12gzh/nova,mmnelemane/nova,jianghuaw/nova,Francis-Liu/animated-broccoli,devendermishrajio/nova_test_latest,dawnpower/nova,usc-isi/extra-specs,JioCloud/nova_test_latest,savi-dev/nova,mikalstill/nova,rajalokan/nova,psiwczak/openstack,Yusuke1987/openstack_template,usc-isi/nova,rrader/nova-docker-plugin,citrix-openstack-build/nova,maoy/zknova,yrobla/nova,psiwczak/openstack,ntt-sic/nova,Yuriy-Leonov/nova,kimjaejoong/nova,rrader/nova-docker-plugin,cloudbase/nova,barnsnake351/nova,eayunstack/nova,tianweizhang/nova,orbitfp7/nova,phenoxim/nova,maelnor/nova,nikesh-mahalka/nova,orbitfp7/nova,maelnor/nova,tudorvio/nova,shahar-stratoscale/nova,cloudbau/nova,mahak/nova,gooddata/openstack-nova,joker946/nova,CCI-MOC/nova,mmnelemane/nova,zaina/nova,mgagne/nova,j-carpentier/nova,plumgrid/plumgrid-nova,gooddata/openstack-nova,openstack/nova,salv-orlando/MyRepo,adelina-t/nova,watonyweng/nova,berrange/nova,OpenAcademy-OpenStack/nova-scheduler,NewpTone/stacklab-nova,rajalokan/nova,belmiromoreira/nova,plumgrid/plumgrid-nova,LoHChina/nova,eonpatapon/nova,devoid/nova,usc-isi/nova,iuliat/nova,tanglei528/nova,leilihh/novaha,akash1808/nova_test_latest,barnsnake351/nova,JioCloud/nova,mikalstill/nova,fajoy/nova,edulramirez/nova,ewindisch/nova,russellb/nova,Yusuke1987/openstack_template,sacharya/nova,yatinkumbhare/openstack-nova,jeffrey4l/nova,ntt-sic/nova,isyippee/nova,sridevikoushik31/nova,NoBodyCam/TftpPxeBootBareMetal,ted-gould/nova,shootstar/novatest,bclau/nova,whitepages/nova,petrutlucian94/nova,Stavitsky/nova,josephsuh/extra-specs,fajoy/nova,sridevikoushik31/openstack,maheshp/novatest,jianghuaw/nova,belmiromoreira/nova,dims/nova,devendermishrajio/nova_test_latest,klmitch/nova,badock/nova,luogangyi/bcec-nova,josephsuh/extra-specs,openstack/nova,tangfeixiong/nova,mgagne/nova,mahak/nova,alvarolopez/nova,fajoy/nova,spring-week-topos/nova-week,hanlind/nova,saleemjaveds/https-github.com-openstack-nova,JioCloud/nova,cloudbase/nova-virtualbox,KarimAllah/nova,qwefi/nova,redhat-openstack/nova,mahak/nova,berrange/nova,alexandrucoman/vbox-nova-driver,thomasem/nova,maheshp/novatest,NeCTAR-RC/nova,scripnichenko/nova,vladikr/nova_drafts,yosshy/nova,angdraug/nova,Metaswitch/calico-nova,hanlind/nova,zzicewind/nova,vmturbo/nova,gooddata/openstack-nova,blueboxgroup/nova,rahulunair/nova,sridevikoushik31/openstack,CCI-MOC/nova,MountainWei/nova,alaski/nova,savi-dev/nova,NoBodyCam/TftpPxeBootBareMetal,leilihh/novaha,paulmathews/nova,CiscoSystems/nova,BeyondTheClouds/nova,rickerc/nova_audit,raildo/nova,silenceli/nova,petrutlucian94/nova_dev,cyx1231st/nova,CloudServer/nova,russellb/nova,usc-isi/nova,salv-orlando/MyRepo,eonpatapon/nova,eharney/nova,apporc/nova,Triv90/Nova,iuliat/nova,cernops/nova,jianghuaw/nova,bclau/nova,eharney/nova,imsplitbit/nova,klmitch/nova,NoBodyCam/TftpPxeBootBareMetal,nikesh-mahalka/nova,petrutlucian94/nova,mandeepdhami/nova,felixma/nova,watonyweng/nova,DirectXMan12/nova-hacking,Triv90/Nova,eayunstack/nova,takeshineshiro/nova,felixma/nova,paulmathews/nova,jeffrey4l/nova,sebrandon1/nova,affo/nova,vmturbo/nova,usc-isi/extra-specs,qwefi/nova,alvarolopez/nova,apporc/nova,gspilio/nova,bgxavier/nova,zzicewind/nova,projectcalico/calico-nova,Metaswitch/calico-nova,alexandrucoman/vbox-nova-driver,blueboxgroup/nova,TieWei/nova,Tehsmash/nova,sileht/deb-openstack-nova,rajalokan/nova,affo/nova,Juniper/nova,bigswitch/nova,dstroppa/openstack-smartos-nova-grizzly,aristanetworks/arista-ovs-nova,aristanetworks/arista-ovs-nova,sebrandon1/nova,sridevikoushik31/nova,paulmathews/nova,eneabio/nova,TieWei/nova,Juniper/nova,yrobla/nova,NewpTone/stacklab-nova,sebrandon1/nova,CEG-FYP-OpenStack/scheduler,KarimAllah/nova,Tehsmash/nova,TwinkleChawla/nova,KarimAllah/nova,usc-isi/extra-specs,TwinkleChawla/nova,OpenAcademy-OpenStack/nova-scheduler,scripnichenko/nova,houshengbo/nova_vmware_compute_driver,maoy/zknova,ewindisch/nova,ruslanloman/nova,silenceli/nova,akash1808/nova,citrix-openstack-build/nova,sileht/deb-openstack-nova,sacharya/nova,houshengbo/nova_vmware_compute_driver,tudorvio/nova,gspilio/nova,projectcalico/calico-nova,zhimin711/nova,yatinkumbhare/openstack-nova,Brocade-OpenSource/OpenStack-DNRM-Nova,Yuriy-Leonov/nova,tangfeixiong/nova,CloudServer/nova,thomasem/nova,SUSE-Cloud/nova,rickerc/nova_audit,Juniper/nova,cloudbase/nova-virtualbox,badock/nova,aristanetworks/arista-ovs-nova,fnordahl/nova,phenoxim/nova,cernops/nova,spring-week-topos/nova-week,BeyondTheClouds/nova,shootstar/novatest,tealover/nova,alaski/nova,sridevikoushik31/nova,devendermishrajio/nova,russellb/nova,dstroppa/openstack-smartos-nova-grizzly,akash1808/nova,shail2810/nova,tealover/nova,raildo/nova,NeCTAR-RC/nova,fnordahl/nova,klmitch/nova,vladikr/nova_drafts,DirectXMan12/nova-hacking,cloudbase/nova,zaina/nova,rahulunair/nova,ted-gould/nova,jianghuaw/nova,josephsuh/extra-specs,sridevikoushik31/nova,petrutlucian94/nova_dev,isyippee/nova,noironetworks/nova,shail2810/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,Francis-Liu/animated-broccoli,ruslanloman/nova,dims/nova,bgxavier/nova,noironetworks/nova,varunarya10/nova_test_latest,dawnpower/nova,varunarya10/nova_test_latest,vmturbo/nova,Juniper/nova,Stavitsky/nova,angdraug/nova,openstack/nova,akash1808/nova_test_latest,CEG-FYP-OpenStack/scheduler,joker946/nova | nova/api/openstack/contrib/multinic.py | nova/api/openstack/contrib/multinic.py | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The multinic extension."""
from webob import exc
from nova import compute
from nova import log as logging
from nova.api.openstack import extensions
from nova.api.openstack import faults
LOG = logging.getLogger("nova.api.multinic")
class Multinic(extensions.ExtensionDescriptor):
def __init__(self, *args, **kwargs):
super(Multinic, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
def get_name(self):
return "Multinic"
def get_alias(self):
return "NMN"
def get_description(self):
return "Multiple network support"
def get_namespace(self):
return "http://docs.openstack.org/ext/multinic/api/v1.1"
def get_updated(self):
return "2011-06-09T00:00:00+00:00"
def get_actions(self):
actions = []
# Add the add_fixed_ip action
act = extensions.ActionExtension("servers", "addFixedIp",
self._add_fixed_ip)
actions.append(act)
# Add the remove_fixed_ip action
act = extensions.ActionExtension("servers", "removeFixedIp",
self._remove_fixed_ip)
actions.append(act)
return actions
def _add_fixed_ip(self, input_dict, req, id):
"""Adds an IP on a given network to an instance."""
try:
# Validate the input entity
if 'networkId' not in input_dict['addFixedIp']:
LOG.exception(_("Missing 'networkId' argument for addFixedIp"))
return faults.Fault(exc.HTTPUnprocessableEntity())
# Add the fixed IP
network_id = input_dict['addFixedIp']['networkId']
self.compute_api.add_fixed_ip(req.environ['nova.context'], id,
network_id)
except Exception, e:
LOG.exception(_("Error in addFixedIp %s"), e)
return faults.Fault(exc.HTTPBadRequest())
return exc.HTTPAccepted()
def _remove_fixed_ip(self, input_dict, req, id):
# Not yet implemented
raise faults.Fault(exc.HTTPNotImplemented())
| apache-2.0 | Python |
|
c13d1347889cf574d3e6b9b835dadbca5fdc2d6c | Add wheel module for the salt key system | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/wheel/key.py | salt/wheel/key.py | '''
Wheel system wrapper for key system
'''
import salt.key
def list_all():
'''
List the keys under a named status
'''
skey = salt.key.Key(__opts__)
return skey.list_all()
def accept(match):
'''
Accept keys based on a glob match
'''
skey = salt.key.Key(__opts__)
return skey.accept(match)
def delete(match):
'''
Delete keys based on a glob match
'''
skey = salt.key.Key(__opts__)
return skey.delete(match)
def reject(match):
'''
Delete keys based on a glob match
'''
skey = salt.key.Key(__opts__)
return skey.reject(match)
def key_str(match):
'''
Return the key strings
'''
skey = salt.key.Key(__opts__)
return skey.key_str(match)
def finger(match):
'''
Return the matching key fingerprints
'''
skey = salt.key.Key(__opts__)
return skey.finger(match)
| apache-2.0 | Python |
|
c0ebb74ad0ee2eb210266e3610e0b44474628872 | add ismount function from python Lib/posixpath.py | thaim/ansible,thaim/ansible | lib/ansible/module_utils/ismount.py | lib/ansible/module_utils/ismount.py | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is based on
# Lib/posixpath.py of cpython
# It is licensed under the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
# are retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
import os
def ismount(path):
"""Test whether a path is a mount point
clone of os.path.ismount (from cpython Lib/posixpath.py)
fixed to solve https://github.com/ansible/ansible-modules-core/issues/2186
and workaround non-fixed http://bugs.python.org/issue2466
this should be rewritten as soon as python issue 2466 is fixed
probably check for python version and use os.path.ismount if fixed
to remove replace in this file ismount( -> os.path.ismount( and remove this
function"""
try:
s1 = os.lstat(path)
except OSError:
# the OSError should be handled with more care
# it could be a "permission denied" but path is still a mount
return False
else:
# A symlink can never be a mount point
if os.stat.S_ISLNK(s1.st_mode):
return False
parent = os.path.join(path, os.path.pardir)
parent = os.path.realpath(parent)
try:
s2 = os.lstat(parent)
except OSError:
# one should handle the returned OSError with more care to figure
# out whether this is still a mount
return False
if s1.st_dev != s2.st_dev:
return True # path/.. on a different device as path
if s1.st_ino == s2.st_ino:
return True # path/.. is the same i-node as path, i.e. path=='/'
return False
| mit | Python |
|
95a8ed6dcb19f322c9a14957da207efb8be10f5d | Customize makemessages to support ignoring fuzzy | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | hqscripts/management/commands/makemessages.py | hqscripts/management/commands/makemessages.py | from django.core.management.commands import makemessages
class Command(makemessages.Command):
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument('--no-fuzzy', action='store_true', help='Remove fuzzy strings.')
def handle(self, *args, **options):
no_fuzzy = options['no_fuzzy']
if no_fuzzy:
# The underlying parser only passes custom msgattrib_options if '--no-obsolete' is true,
# so we have to do a bit of hacking here
no_obsolete = options['no_obsolete']
if no_obsolete:
# If we are removing obsolete messages already, just add in removing fuzzy messages
self.msgattrib_options += ['--no-fuzzy']
else:
# Otherwise, we need to fake obsolete messages while only actually removing fuzzy messages
options['no_obsolete'] = True
self.msgattrib_options = ['--no-fuzzy']
super().handle(*args, **options)
| bsd-3-clause | Python |
|
1bd6d53c7ab8d7b2c2fdfbb8eb2fab2e1cfa1537 | Implement statistics & logger class | reinikai/mugloar | mugloar/logger.py | mugloar/logger.py | from datetime import datetime
from tabulate import tabulate
import sys
RED = "\033[1;31m"
BLUE = "\033[1;34m"
CYAN = "\033[1;36m"
GREEN = "\033[0;32m"
RESET = "\033[0;0m"
BOLD = "\033[;1m"
class Logger:
stats = {'NMR': {'win': 0, 'lose': 0},
'FUNDEFINEDG': {'win': 0, 'lose': 0},
'HVA': {'win': 0, 'lose': 0},
'SRO': {'win': 0, 'lose': 0},
'T E': {'win': 0, 'lose': 0}}
weather_code = ""
def new_game(self, params):
text = '------------------------------------------\n' + \
time() + 'Started game id ' + str(params['gameId']) + ' against ' + params['knight']['name'] + \
' (\u2694: ' + str(params['knight']['attack']) + ', ' + \
'\u26E8: ' + str(params['knight']['armor']) + ', ' + \
'\N{RUNNER}: ' + str(params['knight']['agility']) + ', ' + \
'\N{RAM}: ' + str(params['knight']['endurance']) + ')\n'
sys.stdout.buffer.write(text.encode('utf8'))
def dragon(self, dragon, weather):
self.weather_code = weather['code']
text = time() + 'Sending dragon (\u26E8: ' + str(dragon.scaleThickness) + ', ' + \
'\u2694: ' + str(dragon.clawSharpness) + ', ' + \
'\N{DRAGON}: ' + str(dragon.wingStrength) + ', ' + \
'\N{FIRE}: ' + str(dragon.fireBreath) + \
') in ' + weather['code'] + ' weather.\n'
sys.stdout.buffer.write(text.encode('utf8'))
def result(self, result):
print(time(), end='')
if result['status'] == 'Victory':
sys.stdout.write(GREEN)
self.stats[self.weather_code]['win'] += 1
else:
sys.stdout.write(RED)
self.stats[self.weather_code]['lose'] += 1
print(result['status'], end='')
sys.stdout.write(RESET)
print(': ' + result['message'])
def print_stats(self):
print('------------------------------------------\n' +
'STATISTICS\n' +
'Iterations: ' + '\n\n' +
'By weather type:')
table = []
for weather_code, stat in self.stats.items():
table.append([weather_code, str(stat['win'] + stat['lose']), str(stat['win']), str(stat['lose']), str(survival_rate(stat['win'], stat['lose']))])
print(tabulate(table, headers=['Weather', 'Battles', 'Wins', 'Losses', 'Survival rate']))
print('\n\nOVERALL SUCCESS RATE: ', end='')
success_rate = 0
if success_rate < 60:
sys.stdout.write(RED)
else:
sys.stdout.write(GREEN)
print(str(success_rate) + '%')
def survival_rate(wins, losses):
total = wins + losses
if total == 0:
return '-'
if wins == 0:
return '0%'
rate = wins/total
return '{0:g}'.format(rate*100) + '%'
def time():
return '[' + datetime.now().strftime('%d.%m.%Y %H:%m:%S') + '] ' | mit | Python |
|
597a1c12223fec5deefcd31b3a00b06d1095b32d | Add check replication step | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/workflow/steps/util/region_migration/check_replication.py | dbaas/workflow/steps/util/region_migration/check_replication.py | # -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from time import sleep
LOG = logging.getLogger(__name__)
class CheckReplication(BaseStep):
def __unicode__(self):
return "Checking replication..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
instance = workflow_dict['source_instances'][0].future_instance
for attempt in range(0, 21):
LOG.info("Waiting 10s to check replication...")
sleep(10)
if driver.is_replication_ok(instance):
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
return True
| bsd-3-clause | Python |
|
fb6c84e7703092f495324fe57041717403803e7f | Add scrape_symbols.py placeholder. | scraperwiki/stock-tool,scraperwiki/stock-tool | scrape_symbols.py | scrape_symbols.py | #!/usr/bin/env python
# encoding: utf-8
def main():
pass
if __name__ == '__main__':
main()
| agpl-3.0 | Python |
|
fe479bf2a8ec547922c6643bbdf0ba768eb79c9d | Add script to simulate multiple games | risteon/ludo_python | ludo/simulator.py | ludo/simulator.py | #!/usr/bin/env python3
from game import Game
print("Welcome to a game of ludo!")
average_throw_counter = 0
min_throws_per_game = 10000000
max_throws_per_game = 0
NUM_GAMES = 100
for i in range(0, NUM_GAMES):
game = Game()
throw_counter = 0
while game.next_move():
throw_counter += 1
average_throw_counter += throw_counter
if throw_counter < min_throws_per_game:
min_throws_per_game = throw_counter
if throw_counter > max_throws_per_game:
max_throws_per_game = throw_counter
print("Game:", i+1)
print("Average throws:", average_throw_counter/NUM_GAMES)
print("Min", min_throws_per_game)
print("Max", max_throws_per_game)
| mit | Python |
|
a8f172752a72d93537820322b9ce62b601be6c5f | Fix cpplint warning. | Gerhut/electron,bitemyapp/electron,thompsonemerson/electron,nekuz0r/electron,felixrieseberg/electron,jcblw/electron,brenca/electron,rhencke/electron,adamjgray/electron,lrlna/electron,soulteary/electron,vaginessa/electron,joaomoreno/atom-shell,ianscrivener/electron,soulteary/electron,setzer777/electron,baiwyc119/electron,neutrous/electron,kokdemo/electron,leolujuyi/electron,etiktin/electron,d-salas/electron,dkfiresky/electron,JussMee15/electron,seanchas116/electron,wolfflow/electron,deed02392/electron,oiledCode/electron,mubassirhayat/electron,zhakui/electron,thomsonreuters/electron,gabriel/electron,mjaniszew/electron,beni55/electron,jjz/electron,yalexx/electron,neutrous/electron,adcentury/electron,rprichard/electron,smczk/electron,tincan24/electron,Andrey-Pavlov/electron,deed02392/electron,RIAEvangelist/electron,bruce/electron,tincan24/electron,setzer777/electron,jacksondc/electron,IonicaBizauKitchen/electron,tomashanacek/electron,thingsinjars/electron,icattlecoder/electron,John-Lin/electron,jhen0409/electron,farmisen/electron,jjz/electron,tincan24/electron,trankmichael/electron,Andrey-Pavlov/electron,joneit/electron,kazupon/electron,bright-sparks/electron,BionicClick/electron,subblue/electron,bright-sparks/electron,bitemyapp/electron,ankitaggarwal011/electron,preco21/electron,leethomas/electron,pirafrank/electron,Neron-X5/electron,medixdev/electron,thingsinjars/electron,christian-bromann/electron,greyhwndz/electron,aichingm/electron,mattotodd/electron,RobertJGabriel/electron,aliib/electron,bright-sparks/electron,tonyganch/electron,Floato/electron,MaxWhere/electron,leftstick/electron,yan-foto/electron,DivyaKMenon/electron,RIAEvangelist/electron,gbn972/electron,jlhbaseball15/electron,timruffles/electron,baiwyc119/electron,iftekeriba/electron,howmuchcomputer/electron,mjaniszew/electron,Floato/electron,fireball-x/atom-shell,fffej/electron,matiasinsaurralde/electron,evgenyzinoviev/electron,voidbridge/electron,rsvip/electron,noikiy/electron,oiledCode/electron,tomashanacek/electron,Ivshti/electron,pirafrank/electron,soulteary/electron,sshiting/electron,leftstick/electron,aaron-goshine/electron,thingsinjars/electron,JesselJohn/electron,tomashanacek/electron,MaxWhere/electron,oiledCode/electron,kokdemo/electron,digideskio/electron,Ivshti/electron,iftekeriba/electron,shockone/electron,electron/electron,jannishuebl/electron,chrisswk/electron,matiasinsaurralde/electron,deed02392/electron,fomojola/electron,pandoraui/electron,sky7sea/electron,etiktin/electron,arturts/electron,Gerhut/electron,renaesop/electron,jlord/electron,jtburke/electron,the-ress/electron,voidbridge/electron,nicholasess/electron,tonyganch/electron,trigrass2/electron,noikiy/electron,matiasinsaurralde/electron,evgenyzinoviev/electron,smczk/electron,lrlna/electron,mattdesl/electron,eric-seekas/electron,farmisen/electron,davazp/electron,jaanus/electron,Faiz7412/electron,vipulroxx/electron,Andrey-Pavlov/electron,benweissmann/electron,Ivshti/electron,JesselJohn/electron,preco21/electron,xfstudio/electron,Rokt33r/electron,electron/electron,d-salas/electron,kikong/electron,voidbridge/electron,tylergibson/electron,nicobot/electron,takashi/electron,ervinb/electron,maxogden/atom-shell,kenmozi/electron,anko/electron,thompsonemerson/electron,dongjoon-hyun/electron,tincan24/electron,kazupon/electron,fomojola/electron,mrwizard82d1/electron,JesselJohn/electron,tonyganch/electron,Zagorakiss/electron,greyhwndz/electron,pombredanne/electron,minggo/electron,sky7sea/electron,wolfflow/electron,destan/electron,natgolov/electron,matiasinsaurralde/electron,fritx/electron,rsvip/electron,kostia/electron,Jonekee/electron,xfstudio/electron,Zagorakiss/electron,mirrh/electron,fritx/electron,sshiting/electron,natgolov/electron,fffej/electron,minggo/electron,iftekeriba/electron,greyhwndz/electron,ervinb/electron,wolfflow/electron,nicobot/electron,noikiy/electron,egoist/electron,jhen0409/electron,gabrielPeart/electron,bobwol/electron,shennushi/electron,xfstudio/electron,kostia/electron,jlhbaseball15/electron,jlhbaseball15/electron,bbondy/electron,jtburke/electron,joaomoreno/atom-shell,GoooIce/electron,leolujuyi/electron,shennushi/electron,systembugtj/electron,thomsonreuters/electron,tinydew4/electron,tinydew4/electron,kostia/electron,JussMee15/electron,kikong/electron,stevemao/electron,mrwizard82d1/electron,shiftkey/electron,pandoraui/electron,faizalpribadi/electron,rprichard/electron,darwin/electron,kokdemo/electron,IonicaBizauKitchen/electron,jacksondc/electron,aichingm/electron,rhencke/electron,stevekinney/electron,evgenyzinoviev/electron,davazp/electron,meowlab/electron,shiftkey/electron,MaxGraey/electron,systembugtj/electron,trankmichael/electron,micalan/electron,micalan/electron,robinvandernoord/electron,BionicClick/electron,tomashanacek/electron,vipulroxx/electron,ianscrivener/electron,jannishuebl/electron,jlord/electron,systembugtj/electron,RobertJGabriel/electron,gamedevsam/electron,rsvip/electron,ervinb/electron,darwin/electron,cqqccqc/electron,astoilkov/electron,pandoraui/electron,soulteary/electron,benweissmann/electron,deepak1556/atom-shell,noikiy/electron,abhishekgahlot/electron,arusakov/electron,leolujuyi/electron,leftstick/electron,beni55/electron,Jacobichou/electron,matiasinsaurralde/electron,maxogden/atom-shell,icattlecoder/electron,bbondy/electron,kazupon/electron,christian-bromann/electron,lzpfmh/electron,Gerhut/electron,howmuchcomputer/electron,simonfork/electron,biblerule/UMCTelnetHub,John-Lin/electron,minggo/electron,fireball-x/atom-shell,Ivshti/electron,ankitaggarwal011/electron,yan-foto/electron,RIAEvangelist/electron,renaesop/electron,pirafrank/electron,tinydew4/electron,kcrt/electron,robinvandernoord/electron,leethomas/electron,synaptek/electron,gamedevsam/electron,aaron-goshine/electron,seanchas116/electron,thingsinjars/electron,twolfson/electron,astoilkov/electron,micalan/electron,Evercoder/electron,howmuchcomputer/electron,dkfiresky/electron,timruffles/electron,gerhardberger/electron,jcblw/electron,maxogden/atom-shell,simonfork/electron,edulan/electron,posix4e/electron,kostia/electron,digideskio/electron,bright-sparks/electron,miniak/electron,electron/electron,arusakov/electron,d-salas/electron,trigrass2/electron,gerhardberger/electron,Faiz7412/electron,simongregory/electron,jhen0409/electron,the-ress/electron,webmechanicx/electron,Andrey-Pavlov/electron,systembugtj/electron,Rokt33r/electron,egoist/electron,Jacobichou/electron,shockone/electron,MaxWhere/electron,gabrielPeart/electron,anko/electron,stevekinney/electron,jacksondc/electron,carsonmcdonald/electron,Rokt33r/electron,dahal/electron,aliib/electron,jhen0409/electron,ianscrivener/electron,kenmozi/electron,anko/electron,christian-bromann/electron,subblue/electron,Evercoder/electron,deepak1556/atom-shell,electron/electron,preco21/electron,cqqccqc/electron,nicholasess/electron,beni55/electron,jlhbaseball15/electron,soulteary/electron,Evercoder/electron,fabien-d/electron,Faiz7412/electron,tylergibson/electron,coderhaoxin/electron,DivyaKMenon/electron,mattotodd/electron,shaundunne/electron,LadyNaggaga/electron,miniak/electron,jannishuebl/electron,aichingm/electron,chriskdon/electron,lzpfmh/electron,medixdev/electron,edulan/electron,gabrielPeart/electron,jlord/electron,yan-foto/electron,leftstick/electron,cos2004/electron,baiwyc119/electron,trigrass2/electron,xfstudio/electron,lrlna/electron,timruffles/electron,pombredanne/electron,rprichard/electron,brenca/electron,stevekinney/electron,webmechanicx/electron,fabien-d/electron,Neron-X5/electron,jlord/electron,darwin/electron,Neron-X5/electron,jiaz/electron,timruffles/electron,kostia/electron,shennushi/electron,felixrieseberg/electron,gerhardberger/electron,edulan/electron,soulteary/electron,mrwizard82d1/electron,gbn972/electron,astoilkov/electron,cos2004/electron,mhkeller/electron,oiledCode/electron,ervinb/electron,thomsonreuters/electron,adcentury/electron,felixrieseberg/electron,tinydew4/electron,sshiting/electron,abhishekgahlot/electron,bobwol/electron,RIAEvangelist/electron,trigrass2/electron,minggo/electron,yan-foto/electron,evgenyzinoviev/electron,the-ress/electron,coderhaoxin/electron,posix4e/electron,jhen0409/electron,vHanda/electron,jcblw/electron,synaptek/electron,gabrielPeart/electron,Neron-X5/electron,vipulroxx/electron,bruce/electron,cos2004/electron,eric-seekas/electron,coderhaoxin/electron,nekuz0r/electron,egoist/electron,Jacobichou/electron,Jonekee/electron,arusakov/electron,saronwei/electron,etiktin/electron,Rokt33r/electron,Neron-X5/electron,simongregory/electron,matiasinsaurralde/electron,Neron-X5/electron,zhakui/electron,Evercoder/electron,jiaz/electron,gbn972/electron,wan-qy/electron,chrisswk/electron,simonfork/electron,brenca/electron,posix4e/electron,jannishuebl/electron,davazp/electron,felixrieseberg/electron,jcblw/electron,wolfflow/electron,bwiggs/electron,yalexx/electron,bpasero/electron,bbondy/electron,digideskio/electron,hokein/atom-shell,electron/electron,kazupon/electron,mattdesl/electron,ianscrivener/electron,dongjoon-hyun/electron,brave/electron,kcrt/electron,thomsonreuters/electron,mhkeller/electron,GoooIce/electron,vaginessa/electron,minggo/electron,trankmichael/electron,mattdesl/electron,brenca/electron,BionicClick/electron,darwin/electron,dahal/electron,MaxWhere/electron,jiaz/electron,biblerule/UMCTelnetHub,rsvip/electron,davazp/electron,shaundunne/electron,joneit/electron,tomashanacek/electron,SufianHassan/electron,rreimann/electron,vipulroxx/electron,aecca/electron,smczk/electron,coderhaoxin/electron,tinydew4/electron,nagyistoce/electron-atom-shell,fabien-d/electron,jaanus/electron,gamedevsam/electron,setzer777/electron,davazp/electron,trankmichael/electron,Gerhut/electron,faizalpribadi/electron,twolfson/electron,wan-qy/electron,mirrh/electron,tylergibson/electron,icattlecoder/electron,rreimann/electron,beni55/electron,kokdemo/electron,bpasero/electron,shennushi/electron,digideskio/electron,bruce/electron,nicobot/electron,vaginessa/electron,rhencke/electron,rajatsingla28/electron,JussMee15/electron,felixrieseberg/electron,kokdemo/electron,nicholasess/electron,Gerhut/electron,simongregory/electron,synaptek/electron,jaanus/electron,trankmichael/electron,bwiggs/electron,arturts/electron,meowlab/electron,bwiggs/electron,cqqccqc/electron,farmisen/electron,pombredanne/electron,nicobot/electron,aichingm/electron,nicholasess/electron,dongjoon-hyun/electron,mubassirhayat/electron,mrwizard82d1/electron,kenmozi/electron,RobertJGabriel/electron,BionicClick/electron,arturts/electron,adcentury/electron,icattlecoder/electron,aecca/electron,greyhwndz/electron,kikong/electron,LadyNaggaga/electron,jjz/electron,fireball-x/atom-shell,miniak/electron,vaginessa/electron,twolfson/electron,xiruibing/electron,jonatasfreitasv/electron,maxogden/atom-shell,deepak1556/atom-shell,micalan/electron,digideskio/electron,MaxGraey/electron,joaomoreno/atom-shell,DivyaKMenon/electron,seanchas116/electron,egoist/electron,nagyistoce/electron-atom-shell,deed02392/electron,saronwei/electron,yalexx/electron,lzpfmh/electron,jtburke/electron,Faiz7412/electron,takashi/electron,greyhwndz/electron,mjaniszew/electron,kcrt/electron,leolujuyi/electron,meowlab/electron,gstack/infinium-shell,MaxGraey/electron,neutrous/electron,shaundunne/electron,jsutcodes/electron,fritx/electron,RobertJGabriel/electron,gstack/infinium-shell,systembugtj/electron,bitemyapp/electron,mattotodd/electron,mhkeller/electron,Zagorakiss/electron,bobwol/electron,christian-bromann/electron,deed02392/electron,kazupon/electron,ervinb/electron,electron/electron,takashi/electron,fomojola/electron,thingsinjars/electron,bpasero/electron,xfstudio/electron,vHanda/electron,tomashanacek/electron,darwin/electron,zhakui/electron,chriskdon/electron,bpasero/electron,shockone/electron,rajatsingla28/electron,anko/electron,mjaniszew/electron,MaxWhere/electron,nagyistoce/electron-atom-shell,Jacobichou/electron,joaomoreno/atom-shell,brenca/electron,brave/electron,abhishekgahlot/electron,voidbridge/electron,rhencke/electron,GoooIce/electron,vHanda/electron,seanchas116/electron,neutrous/electron,stevekinney/electron,arusakov/electron,mirrh/electron,jannishuebl/electron,mubassirhayat/electron,maxogden/atom-shell,xfstudio/electron,kazupon/electron,roadev/electron,ankitaggarwal011/electron,stevemao/electron,thingsinjars/electron,medixdev/electron,natgolov/electron,shaundunne/electron,astoilkov/electron,RobertJGabriel/electron,bbondy/electron,pirafrank/electron,John-Lin/electron,Gerhut/electron,aecca/electron,yan-foto/electron,d-salas/electron,shiftkey/electron,wan-qy/electron,webmechanicx/electron,subblue/electron,mattdesl/electron,arusakov/electron,Andrey-Pavlov/electron,simongregory/electron,kcrt/electron,michaelchiche/electron,joaomoreno/atom-shell,arturts/electron,electron/electron,brenca/electron,mattotodd/electron,webmechanicx/electron,brave/muon,leftstick/electron,bruce/electron,roadev/electron,saronwei/electron,eriser/electron,voidbridge/electron,Floato/electron,kenmozi/electron,eric-seekas/electron,roadev/electron,gabriel/electron,coderhaoxin/electron,simongregory/electron,thomsonreuters/electron,ankitaggarwal011/electron,thompsonemerson/electron,mirrh/electron,adamjgray/electron,Floato/electron,etiktin/electron,wolfflow/electron,sky7sea/electron,cqqccqc/electron,robinvandernoord/electron,brave/electron,chriskdon/electron,rsvip/electron,icattlecoder/electron,shaundunne/electron,fomojola/electron,d-salas/electron,edulan/electron,fffej/electron,gabriel/electron,christian-bromann/electron,rreimann/electron,pombredanne/electron,hokein/atom-shell,rajatsingla28/electron,rajatsingla28/electron,SufianHassan/electron,IonicaBizauKitchen/electron,ervinb/electron,mhkeller/electron,brave/electron,gbn972/electron,kcrt/electron,renaesop/electron,miniak/electron,michaelchiche/electron,eriser/electron,etiktin/electron,Floato/electron,lrlna/electron,LadyNaggaga/electron,vipulroxx/electron,synaptek/electron,Jonekee/electron,pandoraui/electron,gerhardberger/electron,ianscrivener/electron,baiwyc119/electron,gerhardberger/electron,rreimann/electron,astoilkov/electron,LadyNaggaga/electron,biblerule/UMCTelnetHub,SufianHassan/electron,kostia/electron,brave/muon,micalan/electron,the-ress/electron,bpasero/electron,michaelchiche/electron,leftstick/electron,cqqccqc/electron,jjz/electron,jaanus/electron,bbondy/electron,Andrey-Pavlov/electron,jsutcodes/electron,neutrous/electron,kokdemo/electron,shennushi/electron,eric-seekas/electron,digideskio/electron,tonyganch/electron,benweissmann/electron,sircharleswatson/electron,icattlecoder/electron,nagyistoce/electron-atom-shell,cqqccqc/electron,jlhbaseball15/electron,christian-bromann/electron,dkfiresky/electron,leethomas/electron,jtburke/electron,gabrielPeart/electron,stevekinney/electron,noikiy/electron,adamjgray/electron,JussMee15/electron,xiruibing/electron,fffej/electron,oiledCode/electron,twolfson/electron,mubassirhayat/electron,chrisswk/electron,JussMee15/electron,gbn972/electron,joneit/electron,bruce/electron,destan/electron,leethomas/electron,faizalpribadi/electron,nekuz0r/electron,saronwei/electron,felixrieseberg/electron,subblue/electron,JussMee15/electron,mjaniszew/electron,John-Lin/electron,pombredanne/electron,IonicaBizauKitchen/electron,tonyganch/electron,bbondy/electron,jsutcodes/electron,nicobot/electron,coderhaoxin/electron,tylergibson/electron,adcentury/electron,shiftkey/electron,destan/electron,aichingm/electron,Floato/electron,mhkeller/electron,eric-seekas/electron,sircharleswatson/electron,ankitaggarwal011/electron,michaelchiche/electron,dahal/electron,RIAEvangelist/electron,eriser/electron,JesselJohn/electron,IonicaBizauKitchen/electron,aecca/electron,jacksondc/electron,baiwyc119/electron,adamjgray/electron,eriser/electron,shockone/electron,robinvandernoord/electron,iftekeriba/electron,nicobot/electron,dongjoon-hyun/electron,saronwei/electron,farmisen/electron,abhishekgahlot/electron,stevekinney/electron,bitemyapp/electron,fritx/electron,trigrass2/electron,fireball-x/atom-shell,deepak1556/atom-shell,gamedevsam/electron,preco21/electron,michaelchiche/electron,setzer777/electron,fritx/electron,fomojola/electron,fabien-d/electron,thompsonemerson/electron,aaron-goshine/electron,benweissmann/electron,pirafrank/electron,bright-sparks/electron,adcentury/electron,xiruibing/electron,brave/muon,benweissmann/electron,iftekeriba/electron,ankitaggarwal011/electron,gstack/infinium-shell,pirafrank/electron,chriskdon/electron,jannishuebl/electron,LadyNaggaga/electron,sshiting/electron,joneit/electron,mirrh/electron,posix4e/electron,jonatasfreitasv/electron,aaron-goshine/electron,destan/electron,RIAEvangelist/electron,preco21/electron,shaundunne/electron,bitemyapp/electron,jonatasfreitasv/electron,cos2004/electron,lrlna/electron,yan-foto/electron,howmuchcomputer/electron,brave/electron,beni55/electron,tonyganch/electron,smczk/electron,chriskdon/electron,miniak/electron,jsutcodes/electron,LadyNaggaga/electron,bwiggs/electron,jonatasfreitasv/electron,beni55/electron,jaanus/electron,nagyistoce/electron-atom-shell,systembugtj/electron,jaanus/electron,sircharleswatson/electron,simonfork/electron,tylergibson/electron,arturts/electron,egoist/electron,bpasero/electron,bobwol/electron,DivyaKMenon/electron,arusakov/electron,JesselJohn/electron,nekuz0r/electron,takashi/electron,natgolov/electron,faizalpribadi/electron,nekuz0r/electron,wan-qy/electron,renaesop/electron,jtburke/electron,RobertJGabriel/electron,meowlab/electron,dongjoon-hyun/electron,DivyaKMenon/electron,tincan24/electron,cos2004/electron,IonicaBizauKitchen/electron,medixdev/electron,vHanda/electron,roadev/electron,mattdesl/electron,zhakui/electron,dahal/electron,jhen0409/electron,stevemao/electron,aliib/electron,Rokt33r/electron,rajatsingla28/electron,SufianHassan/electron,brave/muon,shiftkey/electron,benweissmann/electron,Zagorakiss/electron,vHanda/electron,kenmozi/electron,zhakui/electron,smczk/electron,aichingm/electron,SufianHassan/electron,shiftkey/electron,aaron-goshine/electron,natgolov/electron,brave/electron,rhencke/electron,the-ress/electron,nicholasess/electron,shockone/electron,fritx/electron,thompsonemerson/electron,twolfson/electron,aecca/electron,BionicClick/electron,yalexx/electron,MaxGraey/electron,fireball-x/atom-shell,iftekeriba/electron,chriskdon/electron,carsonmcdonald/electron,Ivshti/electron,gbn972/electron,smczk/electron,ianscrivener/electron,tincan24/electron,jsutcodes/electron,DivyaKMenon/electron,simonfork/electron,dahal/electron,chrisswk/electron,synaptek/electron,Jonekee/electron,jacksondc/electron,adamjgray/electron,jlord/electron,sshiting/electron,gabrielPeart/electron,leethomas/electron,Evercoder/electron,trankmichael/electron,yalexx/electron,joneit/electron,SufianHassan/electron,egoist/electron,kenmozi/electron,mrwizard82d1/electron,pandoraui/electron,carsonmcdonald/electron,aecca/electron,shockone/electron,rprichard/electron,trigrass2/electron,Evercoder/electron,aliib/electron,mirrh/electron,xiruibing/electron,destan/electron,MaxWhere/electron,gerhardberger/electron,jlhbaseball15/electron,tinydew4/electron,biblerule/UMCTelnetHub,Jacobichou/electron,fffej/electron,brave/muon,dkfiresky/electron,etiktin/electron,thomsonreuters/electron,preco21/electron,cos2004/electron,aliib/electron,the-ress/electron,John-Lin/electron,pombredanne/electron,kikong/electron,renaesop/electron,edulan/electron,mhkeller/electron,meowlab/electron,sky7sea/electron,gstack/infinium-shell,GoooIce/electron,gabriel/electron,faizalpribadi/electron,seanchas116/electron,noikiy/electron,tylergibson/electron,jiaz/electron,adamjgray/electron,leethomas/electron,kikong/electron,Zagorakiss/electron,leolujuyi/electron,sshiting/electron,faizalpribadi/electron,robinvandernoord/electron,carsonmcdonald/electron,carsonmcdonald/electron,anko/electron,neutrous/electron,fabien-d/electron,GoooIce/electron,hokein/atom-shell,brave/muon,natgolov/electron,saronwei/electron,dkfiresky/electron,greyhwndz/electron,sky7sea/electron,baiwyc119/electron,lzpfmh/electron,bobwol/electron,joaomoreno/atom-shell,edulan/electron,roadev/electron,xiruibing/electron,eric-seekas/electron,sircharleswatson/electron,xiruibing/electron,synaptek/electron,Zagorakiss/electron,twolfson/electron,vHanda/electron,takashi/electron,bpasero/electron,jiaz/electron,posix4e/electron,jtburke/electron,fffej/electron,simonfork/electron,aaron-goshine/electron,GoooIce/electron,MaxGraey/electron,Rokt33r/electron,medixdev/electron,gerhardberger/electron,yalexx/electron,posix4e/electron,bruce/electron,vipulroxx/electron,jcblw/electron,jiaz/electron,farmisen/electron,setzer777/electron,rhencke/electron,simongregory/electron,pandoraui/electron,seanchas116/electron,minggo/electron,mjaniszew/electron,jcblw/electron,howmuchcomputer/electron,mattotodd/electron,mattotodd/electron,aliib/electron,carsonmcdonald/electron,leolujuyi/electron,Jacobichou/electron,jonatasfreitasv/electron,Faiz7412/electron,hokein/atom-shell,bobwol/electron,gabriel/electron,hokein/atom-shell,abhishekgahlot/electron,micalan/electron,farmisen/electron,wan-qy/electron,chrisswk/electron,stevemao/electron,eriser/electron,nicholasess/electron,jsutcodes/electron,jjz/electron,JesselJohn/electron,medixdev/electron,voidbridge/electron,d-salas/electron,bright-sparks/electron,subblue/electron,mattdesl/electron,eriser/electron,jjz/electron,zhakui/electron,timruffles/electron,nekuz0r/electron,dongjoon-hyun/electron,evgenyzinoviev/electron,wolfflow/electron,sky7sea/electron,BionicClick/electron,meowlab/electron,setzer777/electron,deed02392/electron,renaesop/electron,thompsonemerson/electron,John-Lin/electron,bwiggs/electron,miniak/electron,michaelchiche/electron,vaginessa/electron,biblerule/UMCTelnetHub,mrwizard82d1/electron,Jonekee/electron,roadev/electron,subblue/electron,stevemao/electron,gamedevsam/electron,fomojola/electron,takashi/electron,webmechanicx/electron,jacksondc/electron,arturts/electron,webmechanicx/electron,rajatsingla28/electron,dkfiresky/electron,astoilkov/electron,evgenyzinoviev/electron,rreimann/electron,wan-qy/electron,gstack/infinium-shell,shennushi/electron,sircharleswatson/electron,biblerule/UMCTelnetHub,sircharleswatson/electron,oiledCode/electron,lrlna/electron,robinvandernoord/electron,lzpfmh/electron,rreimann/electron,Jonekee/electron,bwiggs/electron,the-ress/electron,vaginessa/electron,kcrt/electron,mubassirhayat/electron,jonatasfreitasv/electron,howmuchcomputer/electron,adcentury/electron,stevemao/electron,abhishekgahlot/electron,gabriel/electron,joneit/electron,dahal/electron,destan/electron,bitemyapp/electron,davazp/electron,gamedevsam/electron,anko/electron,deepak1556/atom-shell,lzpfmh/electron | script/cpplint.py | script/cpplint.py | #!/usr/bin/env python
import fnmatch
import os
import subprocess
import sys
IGNORE_FILES = [
'browser/atom_application_mac.h',
'browser/atom_application_delegate_mac.h',
'browser/native_window_mac.h',
'browser/resources/win/resource.h',
'browser/ui/cocoa/event_processing_window.h',
'browser/ui/cocoa/atom_menu_controller.h',
'browser/ui/cocoa/nsalert_synchronous_sheet.h',
'common/api/api_messages.cc',
'common/api/api_messages.h',
'common/atom_version.h',
'common/swap_or_assign.h',
]
SOURCE_ROOT = os.path.dirname(os.path.dirname(__file__))
def main():
os.chdir(SOURCE_ROOT)
files = list_files(['app', 'browser', 'common', 'renderer'],
['*.cc', '*.h'])
call_cpplint(list(set(files) - set(IGNORE_FILES)))
def list_files(directories, filters):
matches = []
for directory in directories:
for root, _, filenames, in os.walk(directory):
for f in filters:
for filename in fnmatch.filter(filenames, f):
matches.append(os.path.join(root, filename))
return matches
def call_cpplint(files):
cpplint = os.path.join(SOURCE_ROOT, 'vendor', 'depot_tools', 'cpplint.py')
rules = '--filter=-build/header_guard,-build/include_what_you_use'
subprocess.check_call([sys.executable, cpplint, rules] + files)
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
import fnmatch
import os
import subprocess
import sys
IGNORE_FILES = [
'app/win/resource.h',
'browser/atom_application_mac.h',
'browser/atom_application_delegate_mac.h',
'browser/native_window_mac.h',
'browser/ui/cocoa/event_processing_window.h',
'browser/ui/cocoa/atom_menu_controller.h',
'browser/ui/cocoa/nsalert_synchronous_sheet.h',
'common/api/api_messages.cc',
'common/api/api_messages.h',
'common/atom_version.h',
'common/swap_or_assign.h',
]
SOURCE_ROOT = os.path.dirname(os.path.dirname(__file__))
def main():
os.chdir(SOURCE_ROOT)
files = list_files(['app', 'browser', 'common', 'renderer'],
['*.cc', '*.h'])
call_cpplint(list(set(files) - set(IGNORE_FILES)))
def list_files(directories, filters):
matches = []
for directory in directories:
for root, _, filenames, in os.walk(directory):
for f in filters:
for filename in fnmatch.filter(filenames, f):
matches.append(os.path.join(root, filename))
return matches
def call_cpplint(files):
cpplint = os.path.join(SOURCE_ROOT, 'vendor', 'depot_tools', 'cpplint.py')
rules = '--filter=-build/header_guard,-build/include_what_you_use'
subprocess.check_call([sys.executable, cpplint, rules] + files)
if __name__ == '__main__':
sys.exit(main())
| mit | Python |
334aa288fc38636f10e25b0d8ab4ecb91d198c9b | Add example SNP analysis script. | MikkelSchubert/paleomix,MikkelSchubert/paleomix,MikkelSchubert/paleomix | examples/nature_protocols/phylogeny/summarize_heterozygosity.py | examples/nature_protocols/phylogeny/summarize_heterozygosity.py | #!/usr/bin/python
#
# Copyright (c) 2012 Mikkel Schubert <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import sys
import pysam
from pypeline.common.vcfwrap import \
get_ml_genotype
import pypeline.common.timer as timer
def read_bed_records(filename):
"""Reads a bed-file (i.e. for a set of regions of interest), and returns
a sorted list containing each line as a tuple containing the contig name,
the start position, and the end position."""
regions = []
bed_parser = pysam.asBed()
with open(filename) as bed_file:
for line in bed_file:
line = line.strip()
if not line or line.startswith('#'):
continue
regions.append(bed_parser(line, len(line)))
return regions
def select_vcf_records(bed_records, vcf_records):
"""Returns an iterable of VCF records, corresponding to the contents of each
region specified by the BED records. Records are returned at most once, even
if covered by multiple BED records."""
contigs = frozenset(vcf_records.contigs)
vcf_parser = pysam.asVCF()
# Timer class used processing progress; meant primarily for BAM files
progress = timer.BAMTimer(None)
# Cache of positions observed for this contig, to prevent returning
# positions in overlapping regions multiple times
contig_cache = None
contig_cache_name = None
for bed in sorted(bed_records):
if bed.contig not in contigs:
# Skip contigs for which no calls have been made (e.g. due to
# low coverage. Otherwise Pysam raises an exception.
continue
elif contig_cache_name != bed.contig:
# Reset cache per contig, to save memory
contig_cache = set()
contig_cache_name = bed.contig
for record in vcf_records.fetch(bed.contig, bed.start, bed.end, parser = vcf_parser):
progress.increment()
if record.pos in contig_cache:
# We've already reported this VCF record
continue
contig_cache.add(record.pos)
# Skip records filtered by VCF_filter
if record.filter in ('.', "PASS"):
yield record
progress.finalize()
def main(argv):
if len(argv) != 2:
sys.stderr.write("Usage: %s <BED-file> <VCF.bgz>\n")
return 1
sites = 0
sites_non_ref = 0
sites_homo_non_ref = 0
sites_het_one_non_ref = 0
sites_het_two_non_ref = 0
vcf_records = pysam.Tabixfile(argv[1])
bed_records = read_bed_records(argv[0])
for record in select_vcf_records(bed_records, vcf_records):
if record.alt != '.':
# Get the most likely diploid genotype
nt_a, nt_b = get_ml_genotype(record)
if (nt_a, nt_b) == ('N', 'N'):
# Skip sites with no most likely genotype
continue
sites += 1
sites_non_ref += 1
if nt_a == nt_b:
sites_homo_non_ref += 1
elif record.ref not in (nt_a, nt_b):
sites_het_two_non_ref += 1
else:
sites_het_one_non_ref += 1
else:
# Heterozygous for the reference allele
sites += 1
print
print "%i sites kept after filtering:" % (sites,)
print " % 10i homozygous sites containing the reference allele (%.2f%%)" % (sites - sites_non_ref, 100.0 * (sites - sites_non_ref) / float(sites))
print " % 10i heterozygous sites containing the reference and a non-reference allele (%.2f%%)" % (sites_het_one_non_ref, (100.0 * sites_het_one_non_ref) / sites)
print " % 10i homozygous sites containing a single non-reference allele (%.2f%%)" % (sites_homo_non_ref, (100.0 * sites_homo_non_ref) / sites)
print " % 10i heterozygous sites containing two different non-reference alleles (%.2f%%)" % (sites_het_two_non_ref, (100.0 * sites_het_two_non_ref) / sites)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit | Python |
|
4c5e4cb960a266482dac21eaeb0b568359c58b39 | Add py-backcall (#8701) | krafczyk/spack,iulian787/spack,mfherbst/spack,LLNL/spack,iulian787/spack,krafczyk/spack,krafczyk/spack,LLNL/spack,mfherbst/spack,iulian787/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-backcall/package.py | var/spack/repos/builtin/packages/py-backcall/package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyBackcall(PythonPackage):
"""Specifications for callback functions passed in to an API"""
homepage = "https://github.com/takluyver/backcall"
url = "https://pypi.io/packages/source/b/backcall/backcall-0.1.0.tar.gz"
version('0.1.0', '87ce0c7839808e6a3427d57df6a792e7')
| lgpl-2.1 | Python |
|
7975ef9f34cc578de968e1a1c8e6f731c164641a | Create 1.5_countstrings.py | HeyIamJames/CodingInterviewPractice,HeyIamJames/CodingInterviewPractice | CrackingCodingInterview/1.5_countstrings.py | CrackingCodingInterview/1.5_countstrings.py | """
given a string, return a string counting all the occurences
of each character if the count > 1
"""
def compress(string_to_compress):
if len(string_to_compress) < 2
return string_to_compress
groups = []
previous_character = string_to_compress[0]
counter = 1
for c in string_to_compress[1:]:
if c == previous_character:
counter += 1
else:
groups.append(previous_character + str(counter))
previous_character = c
counter = 1
groups.append(c + str(counter))
result = ''.join(groups)
if len(result) < len(string_to_compress):
return result
else:
return string_to_compress
| mit | Python |
|
c89cce1a47c1e379958d7cced624ec0317cd3407 | Add demo for non-blocking with poll(). | easies/xmpp2 | examples/demo3.py | examples/demo3.py | import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import logging
import threading
import xmpp2
import time
import select
from xmpp2 import XML
# non-blocking, poll example.
USERNAME = 'yourusername'
PASSWORD = 'yourpassword'
SERVER = 'example.com'
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('xmpp2.xml.handler').setLevel(logging.INFO)
c = xmpp2.Client(SERVER, stream_log_level=xmpp2.LOG_NONE)
c.connect()
c.auth(USERNAME, password=PASSWORD)
c.write(XML.presence.add(XML.priority.add(1)))
c.setblocking(False)
po = select.poll()
po.register(c, select.POLLIN)
while True:
for fd, event in po.poll():
msg = c.gen.next()
if msg:
sys.stdout.write(msg.pretty_print() + '\n')
| mit | Python |
|
bb1ce480184d4e78f121f9e473e58f47b80de53a | Create FirstLinuxFile.py | diejia/python-study | FirstLinuxFile.py | FirstLinuxFile.py | #!/usr/bin
| unlicense | Python |
|
be5db45702c01aadb5ac323cbb6b0ef53c5d1d4c | add mobility/debug.py | melon-li/netem,melon-li/netem | mobility/debug.py | mobility/debug.py | #!/usr/bin/python
#coding:utf-8
import numpy as np
import math
import sys
import os
import time
import matplotlib.pyplot as plt
from pprint import pprint
import matplotlib.animation as animation
import cPickle as pickle
from copy import deepcopy
def load_coordiantes(file_path):
with open(file_path, 'rb') as f:
coordiantes_l = pickle.load(f)
for coordiantes in coordiantes_l:
yield coordiantes
def init2():
cmd = ""
cmd = cmd + "/usr/bin/ntpq -c peers & "
cmd = cmd + "/usr/bin/ntpq -c assoc & "
os.system(cmd)
flag = 0
def main():
try:
start_time = float(sys.argv[1])
#default time_unit 997
time_unit = int(sys.argv[2])
except:
help_info = "Usage:%s <start_time> <time_unit(ms)>\n" % sys.argv[0]
colors = ['b', 'g', 'r', 'c', 'm' , 'y', 'k', 'slategrey', 'orange',
'mediumblue', 'brown', 'orchid']
# print(len(colors))
file_path = './coordiantes_l'
init2()
coordiantes_iter = load_coordiantes(file_path)
patchs = []
fig = plt.figure()
fig.set_dpi(100)
fig.set_size_inches(7, 6.5)
axes = plt.axes(xlim=(0, 3000), ylim=(0, 1500))
axes.grid(True)
# axes.set_xticks([0, 750, 1500, 2250, 3000])
# axes.set_xticks(range(0, 3750, 375))
# axes.set_yticks(range(0, 1750, 250))
axes.set_xticks(range(0, 3750, 750))
axes.set_yticks(range(0, 1750, 500))
def init():
global flag
if flag == 0:
flag = 1
else:
return tuple(patchs)
coordiantes = coordiantes_iter.next()
print "只执行一次"
print time.time()
for index, coordiante in enumerate(coordiantes):
i = index/5
# if i >0: break
# coordiante = [int(e) for e in coordiante]
patch = plt.Circle(coordiante, radius=50, color=colors[i],alpha=0.5)
patchs.append(patch)
axes.add_patch(patch)
axes.axis('equal')
axes.margins(0)
return tuple(patchs)
def animate(i):
try:
coordiantes = coordiantes_iter.next()
except:
sys.exit(0)
for index,coordiante in enumerate(coordiantes):
patchs[index].center = coordiante
return tuple(patchs)
print "Waiting for starting"
while time.time() < (start_time - 5):
time.sleep(0.1)
print "Start now!!!"
print time.time()
#anim = animation.FuncAnimation(fig, animate,
# init_func=init,
# frames=360,
# interval=time_unit,
# blit=True)
#plt.title('IPLAB Community Mobility Models', fontsize=10)
init()
plt.xlabel('1500m')
plt.ylabel('3000m')
plt.show()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
f724f5b488f23a6ceb2314aa18933b5fac3f5aab | Add courseware migration. | edx-solutions/edx-platform,cpennington/edx-platform,edx/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,msegado/edx-platform,cpennington/edx-platform,mitocw/edx-platform,edx/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,appsembler/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,mitocw/edx-platform,cpennington/edx-platform,msegado/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,appsembler/edx-platform,angelapper/edx-platform,edx/edx-platform,mitocw/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,mitocw/edx-platform,appsembler/edx-platform,edx-solutions/edx-platform,stvstnfrd/edx-platform,edx-solutions/edx-platform,eduNEXT/edunext-platform,edx/edx-platform,edx-solutions/edx-platform,msegado/edx-platform,cpennington/edx-platform | lms/djangoapps/courseware/migrations/0013_auto_20191001_1858.py | lms/djangoapps/courseware/migrations/0013_auto_20191001_1858.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-01 18:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('courseware', '0012_adjust_fields'),
]
operations = [
migrations.AlterModelOptions(
name='coursedynamicupgradedeadlineconfiguration',
options={},
),
migrations.AlterModelOptions(
name='orgdynamicupgradedeadlineconfiguration',
options={},
),
]
| agpl-3.0 | Python |
|
1e65555a08ff3ee1a06e92d9dd054abf3cfaf711 | Add a migration to update to final tree fields | samluescher/django-media-tree,samluescher/django-media-tree,samluescher/django-media-tree | media_tree/migrations/0003_alter_tree_fields.py | media_tree/migrations/0003_alter_tree_fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('media_tree', '0002_mptt_to_treebeard'),
]
operations = [
migrations.AlterField(
model_name='filenode',
name='depth',
field=models.PositiveIntegerField(db_index=True),
),
migrations.AlterField(
model_name='filenode',
name='lft',
field=models.PositiveIntegerField(db_index=True),
),
migrations.AlterField(
model_name='filenode',
name='rgt',
field=models.PositiveIntegerField(db_index=True),
),
migrations.AlterField(
model_name='filenode',
name='tree_id',
field=models.PositiveIntegerField(db_index=True),
),
]
| bsd-3-clause | Python |
|
fb5f6bf999b2cd8b674bc2c89f74f1413fc8ee1e | Add command line interface to play | rickerbh/tictactoe_py | command_line_tic_tac_toe.py | command_line_tic_tac_toe.py | #!/usr/bin/env python3
import cmd
from tictactoe.ai_player import AIPlayer
from tictactoe.human_player import HumanPlayer
from tictactoe.game_controller import GameController
from tictactoe.board_stringification import BoardStringification
class CommandLineTicTacToe(cmd.Cmd):
def __init__(self,
intro="Tic Tac Toe CLI. Type help for help.\n\nHuman. You are X. Good luck. Your move\n\n",
prompt="→ "):
cmd.Cmd.__init__(self)
self.intro = intro
self.prompt = prompt
self._human = HumanPlayer("X", self._notify_move)
self._ai = AIPlayer("O", "X")
self._controller = GameController(self._human, self._ai, self._won_notification, self._draw_notification)
def _won_notification(self):
print("Game over. It was won\n\n")
self._print_board()
self.do_reset(None)
def _draw_notification(self):
print("Game over. It was a draw\n\n")
self._print_board()
self.do_reset(None)
def do_end(self, args):
return True
def help_end(self):
print("End session")
do_EOF = do_end
help_EOF = help_end
def do_reset(self, args):
self.do_human_start(None)
def help_reset(self):
print("Reset the current game")
def do_move(self, args):
print("Move passed in is: {0}".format(args))
try:
self._controller.place_move(self._human, int(args))
except ValueError as e:
print("Sorry, can't make that move: {0}".format(e.args[0]))
def help_move(self):
print("move x: Make a move at position x on the board")
def do_show_board(self, args):
print("Current game state\n")
self._print_board()
def help_show_board(self):
print("Shows the current state of the game")
def do_ai_start(self, args):
self._controller = GameController(self._ai, self._human, self._won_notification, self._draw_notification)
self._controller.notify_play()
def help_ai_start(self):
print("Initiate a new game where the AI starts")
def do_human_start(self, args):
self._controller = GameController(self._human, self._ai, self._won_notification, self._draw_notification)
self._controller.notify_play()
def help_human_start(self):
print("Initiate a new game where the AI starts")
def _notify_move(self):
print("Human, your move:\n")
self._print_board()
def _print_board(self):
print(BoardStringification().print_game_positions(self._controller._board))
if __name__ == '__main__':
cli = CommandLineTicTacToe()
cli.cmdloop()
| mit | Python |
|
f91db461b5745689ed356dd740ed7ff3b27524e4 | Add page base classes | matthiask/feincms3,matthiask/feincms3,matthiask/feincms3 | feincms3/pages.py | feincms3/pages.py | from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import signals
from django.dispatch import receiver
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from mptt.models import MPTTModel, TreeForeignKey
from mptt.signals import node_moved
@python_2_unicode_compatible
class AbstractPage(MPTTModel):
parent = TreeForeignKey(
'self',
on_delete=models.CASCADE,
null=True, blank=True, related_name='children', db_index=True)
is_active = models.BooleanField(_('is active'), default=True)
title = models.CharField(_('title'), max_length=200)
slug = models.SlugField(_('slug'))
# Who even cares about MySQL
path = models.CharField(
_('path'), max_length=1000, blank=True, unique=True,
help_text=_('Generated automatically if \'static path\' is unset.'),
validators=[RegexValidator(
regex=r'^/(|.+/)$',
message=_('Path must start and end with a slash (/).'),
)])
static_path = models.BooleanField(_('static path'), default=False)
class Meta:
abstract = True
verbose_name = _('page')
verbose_name_plural = _('pages')
def __str__(self):
return self.title
def save(self, *args, **kwargs):
save_descendants = kwargs.pop('save_descendants', True)
if not self.static_path:
self.path = '{0}{1}/'.format(
self.parent.path if self.parent else '/',
self.slug)
super(AbstractPage, self).save(*args, **kwargs)
if save_descendants:
nodes = {self.pk: self}
for node in self.get_descendants():
# Assign already-saved instance
node.parent = nodes[node.parent_id]
# Descendants of inactive nodes cannot be active themselves.
if not node.parent.is_active:
node.is_active = False
node.save(save_descendants=False)
nodes[node.id] = node
save.alters_data = True
def get_absolute_url(self):
if self.path == '/':
return reverse('pages:root')
return reverse('pages:page', kwargs={'path': self.path.strip('/')})
@receiver(node_moved)
def handle_node_moved(instance, **kwargs):
print(instance, kwargs)
if not instance._meta.abstract and 'position' in kwargs:
# We were called from move_node, not from save()
instance.save()
class MenuMixin(models.Model):
menu = models.CharField(
_('menu'),
max_length=20,
blank=True,
)
class Meta:
abstract = True
@receiver(signals.class_prepared)
def _fill_in_menu_choices(sender, **kwargs):
if issubclass(sender, MenuMixin) and not sender._meta.abstract:
field = sender._meta.get_field('menu')
field.choices = sender.MENUS
field.default = field.choices[0][0]
class TemplatesMixin(models.Model):
template_key = models.CharField(_('template'), max_length=100)
class Meta:
abstract = True
@property
def template(self):
for t in self.TEMPLATES:
if t.key == self.template_key:
return t
else:
return None
@property
def regions(self):
return self.template.regions if self.template else []
@receiver(signals.class_prepared)
def _fill_template_choices(sender, **kwargs):
if issubclass(sender, TemplatesMixin) and not sender._meta.abstract:
field = sender._meta.get_field('template_key')
field.choices = [
(t.key, t.title) for t in sender.TEMPLATES
]
field.default = sender.TEMPLATES[0].key
| bsd-3-clause | Python |
|
6d59e6d37d6f33f3513a1c6b1cb7d0d9062f391e | Create ClassesandInstances.py | madhurilalitha/Python-Projects | EmployeeManagementSystem/Findings/ClassesandInstances.py | EmployeeManagementSystem/Findings/ClassesandInstances.py | #Creating and instantiating python classes
#classes - they allow us to logically group data(attributes) and functions (methods)
'''class Employee:
pass
print ("Class (Blueprint) vs Instance")
emp1 = Employee()
emp2 = Employee()
print (emp1)
print (emp2)
print ("instance variables contains data unique to each instance")
emp1.first ='Manoj'
emp1.last = 'Putchala'
emp1.email = '[email protected]'
emp1.pay = 5000
emp2.first ='Lalitha'
emp2.last = 'Putchala'
emp2.email = '[email protected]'
emp2.pay = 6000
print (emp1.email)
print (emp2.email)
'''
class Employee:
#Constructor or Initializer #Instance is called as self(by default should be used)
#Name, email and pay are attributes
def __init__(self,first,last, pay):
self.fname = first
self.lname = last
self.epay = pay
self.email = first+'.'+last+'@company.com'
def fullname(self): #method for code reuse #self should not be forgotten (one common mistake)
return '{} {}'.format(emp1.fname,self.lname)
emp1 = Employee('Manoj','Kumar',100000)
print (emp1.epay)
print (emp1.fullname())
print (Employee.fullname(emp1)) # another way of calling the instance using class
| mit | Python |
|
86618e2e30aa4a129041bd2b6b8c312b00de9ce5 | use separate modules for netlink | tijko/shadow,tijko/shadow | shadow/netlink.py | shadow/netlink.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import struct
import os
# Flag values
NLM_F_REQUEST = 1
NLM_F_MULTI = 2
NLM_F_ACK = 4
NLM_F_ECHO = 8
NLM_F_DUMP_INTR = 16
# Modifiers to GET request
NLM_F_ROOT = 0x100
NLM_F_MATCH = 0x200
NLM_F_ATOMIC = 0x400
NLM_F_DUMP = (NLM_F_ROOT | NLM_F_MATCH)
# Modifiers to NEW requests
NLM_F_REPLACE = 0x100
NLM_F_EXCL = 0x200
NLM_F_CREATE = 0x400
NLM_F_APPEND = 0x800
NETLINK_GENERIC = 16
NLMSG_ALIGNTO = 4
NLMSG_MIN_TYPE = 0x10
NLMSG_ERROR = 0x2
GENL_ID_CTRL = NLMSG_MIN_TYPE
class Nlmsghdr(object):
'''
The NetlinkMessage class handles the assembly of netlink headers.
'''
def __init__(self):
super(NetlinkMessage, self).__init__()
self.pid = os.getpid()
self.flags = NLM_F_REQUEST
self.genl_version = 0
def build_nlmsghdr(self, nlmsg_type, nlmsg_len):
seq = 0
nlmsg_len += struct.calcsize('IHHII')
hdr = [nlmsg_len, nlmsg_type, self.flags, seq, self.pid]
nlmsghdr = struct.pack('IHHII', *hdr)
return nlmsghdr
# Genetlink Controller command and attribute values
CTRL_CMD_UNSPEC = 0
CTRL_CMD_NEWFAMILY = 1
CTRL_CMD_DELFAMILY = 2
CTRL_CMD_GETFAMILY = 3
CTRL_CMD_NEWOPS = 4
CTRL_CMD_DELOPS = 5
CTRL_CMD_GETOPS = 6
CTRL_CMD_NEWMCAST_GRP = 7
CTRL_CMD_DELMCAST_GRP = 8
CTRL_CMD_GETMCAST_GRP = 9
__CTRL_CMD_MAX = 10
CTRL_ATTR_UNSPEC = 0
CTRL_ATTR_FAMILY_ID = 1
CTRL_ATTR_FAMILY_NAME = 2
CTRL_ATTR_VERSION = 3
CTRL_ATTR_HDRSIZE = 4
CTRL_ATTR_MAXATTR = 5
CTRL_ATTR_OPS = 6
CTRL_ATTR_MCAST_GROUPS = 7
__CTRL_ATTR_MAX = 8
CTRL_ATTR_OP_UNSPEC = 0
CTRL_ATTR_OP_ID = 1
CTRL_ATTR_OP_FLAGS = 2
__CTRL_ATTR_OP_MAX = 3
class Genlmsghdr(object):
def __init__(self):
pass
def build_genlmsghdr(self, cmd):
genlhdr = struct.pack('BBxx', cmd, self.genl_version)
genl_len = struct.calcsize('BBxx')
return genlhdr, genl_len
class Nlattr(object):
def __init__(self):
pass
def build_nlattr(self, nla_type, nla_data):
if isinstance(nla_data, str):
padding = self.calc_alignment(nla_data)
nla_len = struct.calcsize('HH') + padding
nla_hdr = struct.pack('HH', nla_len, nla_type)
data = struct.pack('%ds' % padding, nla_data)
nlattr = b''.join([nla_hdr, data])
elif isinstance(nla_data, int):
nla_len = struct.calcsize('HHI')
nla = [nla_len, nla_type, nla_data]
nlattr = struct.pack('HHI', *nla)
else:
return [], 0
return nlattr, nla_len
| mit | Python |
|
ec22c2d82ff4f045b992014d17ada850359c2ab6 | change folder layout | abhishek-sehgal954/SVG_Digital_Halftoning | patterning_algorithm/color_halftone.py | patterning_algorithm/color_halftone.py | # This program takes a raster color image and produces its raster color halftone using patterning algorithm .
# Split the image into C, M, Y, K.
# Rotate each separated image by 0, 15, 30, and 45 degrees respectively.
# Take the half-tone of each image (dot size will be proportional to the intensity).
# Rotate back each half-toned image.
# Now you have your colour separated images. The rotation step reduces
# dot alignment issues (which would mess everything up), and things like Moire pattern
# effects will be reasonably minimized.
import numpy as np
from PIL import Image
from patterning_clustered_dot import intensity, patterning
def gcr(im, percentage):
# basic "Gray Component Replacement" function. Returns a CMYK image with
# percentage gray component removed from the CMY halftones and put in the
# K halftone, ie. for percentage=100, (41, 100, 255, 0) >> (0, 59, 214, 41)
cmyk_im = im.convert('CMYK')
if not percentage:
return cmyk_im
cmyk_im = cmyk_im.split()
cmyk = []
for i in range(4):
cmyk.append(cmyk_im[i].load())
for x in range(im.size[0]):
for y in range(im.size[1]):
gray = min(cmyk[0][x,y], cmyk[1][x,y], cmyk[2][x,y]) * percentage / 100
for i in range(3):
cmyk[i][x,y] = cmyk[i][x,y] - gray
cmyk[3][x,y] = gray
return Image.merge('CMYK', cmyk_im)
def color_halftoning_with_rotation(cmyk,increment_in_angle):
dots=[]
angle=0
for i in range(4):
channel = Image.fromarray(patterning(cmyk[i].rotate(angle,expand=1))).convert('L')
channel = channel.rotate(-angle,expand=1)
width_half, height_half = channel.size
xx = (width_half-cmyk[i].size[0]*3) / 2
yy = (height_half-cmyk[i].size[1]*3) / 2
channel = channel.crop((xx, yy, xx + cmyk[i].size[0]*3, yy + cmyk[i].size[1]*3))
dots.append(channel)
angle += increment_in_angle
return dots
def main():
fname = 'tree.jpg'
image = Image.open(fname)
image = gcr(image,100)
cmyk = image.split()
dots = color_halftoning_with_rotation(cmyk,15)
new_cmyk = Image.merge('CMYK',dots)
new_cmyk.save("output.jpg")
new_cmyk.show()
if __name__=="__main__":
main()
| mit | Python |
|
eced1499c4b82ce83f954a0364b02f2116a11326 | Add quick verification checker. | danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel | src/Scripts/verify.py | src/Scripts/verify.py | # Take a ground truth file produced by the verifier and a match file and compare them.
# Output is in fully normalized format, the same as VerifyCommand.cpp produces.
#
# TODO: remove hardcoded paths.
# file format:
# term,docId,[0-3]
# 0: true positive
# 1: false postive
# 2: false negative
# 3: unverified
from collections import defaultdict
import csv
true_matches = defaultdict(set)
with open("/tmp/groundTruth.csv") as f:
reader = csv.reader(f)
for row in reader:
if (len(row) == 3 and (row[2] == '0' or row[2] == '2')):
true_matches[row[0]].add(row[1])
with open("/tmp/unknowns.csv") as f:
reader = csv.reader(f)
for row in reader:
# TODO: assert that value is '3'
#
# TODO: handle false negatives. Could keep a counter of how many matches
# we've seen and compare, then iterate over the set in the rare instance
# we see a false negative.
if (len(row) == 3):
if row[1] in true_matches[row[0]]:
print(row[0] + "," + row[1] + ",0")
else:
print(row[0] + "," + row[1] + ",1")
| mit | Python |
|
d16d66e520c5f80870957c63694708118d6f9f69 | Add module for MOC (music on console) | ncoop/i3pystatus,yang-ling/i3pystatus,m45t3r/i3pystatus,fmarchenko/i3pystatus,drwahl/i3pystatus,richese/i3pystatus,schroeji/i3pystatus,Arvedui/i3pystatus,richese/i3pystatus,schroeji/i3pystatus,facetoe/i3pystatus,teto/i3pystatus,drwahl/i3pystatus,enkore/i3pystatus,m45t3r/i3pystatus,teto/i3pystatus,ncoop/i3pystatus,facetoe/i3pystatus,enkore/i3pystatus,fmarchenko/i3pystatus,Arvedui/i3pystatus,yang-ling/i3pystatus | i3pystatus/moc.py | i3pystatus/moc.py | import re
from i3pystatus import IntervalModule
from i3pystatus import formatp
from i3pystatus.core.command import run_through_shell
from i3pystatus.core.util import TimeWrapper
class Moc(IntervalModule):
"""
Display various information from MOC (musci on console)
.. rubric:: Available formatters
* `{status}` — current status icon (paused/playing/stopped)
* `{song_elapsed}` — song elapsed time (mm:ss format)
* `{song_length}` — total song duration (mm:ss format)
* `{artist}` — artist
* `{title}` — title
* `{album}` — album
* `{tracknumber}` — tracknumber
* `{file}` — file or url name
"""
settings = (
('format', 'formatp string'),
('format_not_running', 'Text to show if cmus is not running'),
('color', 'The color of the text'),
('color_not_running', 'The color of the text, when cmus is not running'),
('status', 'Dictionary mapping status to output'),
)
color = '#ffffff'
color_not_running = '#ffffff'
format = '{status} {song_elapsed}/{song_length} {artist} - {title}'
format_not_running = 'Not running'
interval = 1
status = {
'pause': '▷',
'play': '▶',
'stop': '◾',
}
on_leftclick = 'toggle_pause'
on_rightclick = 'next_song'
on_upscroll = 'next_song'
on_downscroll = 'previous_song'
def _moc_command(self, command):
cmdline = 'mocp --{command}'.format(command=command)
return run_through_shell(cmdline, enable_shell=True)
def _query_cmus(self):
response = {}
# Get raw information
cmd = self._moc_command('info')
# Now we make it useful
if not cmd.rc:
for line in cmd.out.splitlines():
key, _, value = line.partition(': ')
response[key] = value
return response
def run(self):
response = self._query_cmus()
if response:
fdict = {
'album': response.get('Album', ''),
'artist': response.get('Artist', ''),
'file': response.get('File', ''),
'song_elapsed': TimeWrapper(response.get('CurrentSec', 0)),
'song_length': TimeWrapper(response.get('TotalSec', 0)),
'status': self.status[response['State'].lower()],
'title': response.get('SongTitle', ''),
'tracknumber': re.match(r'(\d*).*', response.get('Title', '')).group(1) or 0,
}
self.output = {
'full_text': formatp(self.format, **fdict),
'color': self.color,
}
else:
self.output = {
'full_text': self.format_not_running,
'color': self.color_not_running,
}
def toggle_pause(self):
self._moc_command('toggle-pause')
def next_song(self):
self._moc_command('next')
def previous_song(self):
self._moc_command('previous')
| mit | Python |
|
b1ef133904540b7f49e22ac52a0f844963be829e | Add basic test for discovery loader | ojengwa/nose2,ezigman/nose2,little-dude/nose2,ojengwa/nose2,leth/nose2,ptthiem/nose2,ptthiem/nose2,little-dude/nose2,leth/nose2,ezigman/nose2 | nose2/tests/functional/test_discovery_loader.py | nose2/tests/functional/test_discovery_loader.py | from nose2.tests._common import FunctionalTestCase, support_file
from nose2 import events, loader, session
from nose2.plugins.loader.discovery import DiscoveryLoader
class Watcher(events.Plugin):
def __init__(self):
self.called = []
def loadTestsFromModule(self, event):
self.called.append(event)
class DiscoveryFunctionalTest(FunctionalTestCase):
def setUp(self):
self.session = session.Session()
self.plug = DiscoveryLoader(session=self.session)
self.loader = loader.PluggableTestLoader(self.session)
def test_createTests_hook(self):
self.plug.start_dir = support_file('scenario/tests_in_package')
watcher = Watcher(session=self.session)
watcher.register()
event = events.CreateTestsEvent(self.loader, None, None)
result = self.session.hooks.createTests(event)
assert isinstance(result, self.loader.suiteClass)
assert watcher.called
| bsd-2-clause | Python |
|
2de3ab69c0725312663ecd94378c5b267a6c5ab1 | Add graph_data.py with a graph_ratings function | jvanbrug/netflix,jvanbrug/netflix | graph_data.py | graph_data.py | """Graph properties and patterns of the raw data
.. moduleauthor:: Jan Van Bruggen <[email protected]>
"""
import matplotlib.pyplot as plt
def graph_ratings():
num_points = 1e5
ratings = rating_counts('data/mu/all.dta', num_points)
rating_numbers = sorted(ratings.keys())
x = [i - 0.4 for i in rating_numbers]
y = [ratings[i] for i in rating_numbers]
plt.bar(x, y, width=0.8)
plt.title('Number of Ratings by Rating ({:n} points)'.format(num_points))
plt.xlabel('Rating')
plt.xlim(-0.4, 5.4)
plt.ylabel('Number of Ratings')
plt.show()
def rating_counts(data_file_name, num_points=float('inf'), rating_column=3):
ratings = {}
count = 0
with open(data_file_name, 'r') as data_file:
for line in data_file:
count += 1
if count > num_points:
break
values = line.split()
rating = int(values[rating_column])
try:
ratings[rating] += 1
except KeyError:
ratings[rating] = 1
return ratings
def run():
graph_ratings()
if __name__ == '__main__':
run()
| mit | Python |
|
7a861623987225bd786301dfe6dea78173ddaf1a | Create generator.py | wolfdale/Hadoop-BDA,wolfdale/Hadoop-BDA | Testing_Hadoop/generator.py | Testing_Hadoop/generator.py | import time
start_time = time.time()
fo = open("hadoop_test_data.txt", "wb")
for i in range(0,9):
for i in range(0,10000000):
fo.write("Hadoop ");
fo.close()
print("--- %s seconds ---" % (time.time() - start_time))
| mit | Python |
|
963aa3fd9830d1a4817a26a2e8a5676174e30d19 | Add new migration | jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip | planner/migrations/0005_auto_20150711_1117.py | planner/migrations/0005_auto_20150711_1117.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('planner', '0004_auto_20150616_1926'),
]
operations = [
migrations.RenameField(
model_name='route',
old_name='destination',
new_name='end',
),
migrations.RenameField(
model_name='route',
old_name='origin',
new_name='start',
),
]
| apache-2.0 | Python |
|
ea9b6920c88ac40a72aadd70199a52f27a1c097e | Create RespostaListar.py | AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb | backend/Models/Predio/RespostaListar.py | backend/Models/Predio/RespostaListar.py | from Framework.Resposta import Resposta
from Models.Predio.Predio import Predio as ModelPredio
class RespostaListar(Resposta):
def __init__(self,predios):
self.corpo = []
for predio in predios:
self.corpo.append(ModelPredio(predio))
| mit | Python |
|
87804aef17874339e7b58df0c3bcb29338fa412a | add country regions include Minsk | opendataby/osm-geodata | belarus_region_borders_include_minsk.py | belarus_region_borders_include_minsk.py | from _helpers import cursor_wrap, dump
@cursor_wrap
def main(cursor):
sql = """
SELECT r.osm_id, c.name AS country, r.name AS region, ST_AsGeoJSON(r.way)
FROM osm_polygon c
LEFT JOIN osm_polygon r ON ST_Contains(c.way, r.way)
WHERE c.osm_id = -59065 AND r.admin_level = '4'
AND r.osm_id IN (-59189, -59506, -59161, -59275, -59162)
UNION
SELECT -59752, FIRST(c.name) AS country, FIRST(r.name) AS region, ST_AsGeoJSON(ST_Union(r.way))
FROM osm_polygon c
LEFT JOIN osm_polygon r ON ST_Contains(c.way, r.way)
WHERE c.osm_id = -59065 AND r.admin_level = '4'
AND r.osm_id IN (-59752, -59195)
"""
cursor.execute(sql)
dump(__file__, sorted(cursor.fetchall(), key=lambda item: item[1:3]),
('osmid', 'country', 'region', 'geojson'))
if __name__ == '__main__':
main()
| mit | Python |
|
b5bc7827fb2452e82789129b918861157010c58e | Create pokebot.py | LukeBob/teamspeak-tools | pokebot.py | pokebot.py | #!/usr/bin/python3
#
# Author: Luke
import time, ts3, sys, traceback
USER = 'serveradmin' # Query user
PASS = '' # Query Password
HOST = 'localhost' # Query Server-host
PORT = '10011' # Query Server-Port
SID = 1 # Serveradmin sid (dont touch)
def usage():
print ('\n./Poke-bot.py <Name> <how many times> <Message>\n')
sys.exit(0)
def Poke(ts3conn,target,timesMany,msg):
try:
clientlist = ts3conn.clientlist()
clientlist = [client for client in clientlist \
if client["client_type"] != "1"]
for client in clientlist:
clid = client['clid']
nickname = client['client_nickname']
if str(nickname) == str(target):
print (' \nFound target',target,'\n\nPoking now!...\n')
for i in range(int(timesMany)):
time.sleep(0.5)
ts3conn.clientpoke(clid=clid, msg=msg)
sys.exit(0)
sys.exit(0)
except KeyboardInterrupt:
print (' \nExiting...\n')
except Exception:
traceback.print_exc(file=sys.stdout)
sys.exit(0)
def main(target,timesMany,message):
with ts3.query.TS3Connection(HOST,PORT) as ts3conn:
try:
ts3conn.login(client_login_name=USER, client_login_password=PASS)
ts3conn.use(sid=SID)
Poke(ts3conn,target,timesMany,message)
except ts3.query.TS3QueryError as err:
if err.resp.error["id"] == "520":
print ('\nWrong Username Or Password!\n')
sys.exit(0)
if __name__ == '__main__':
try:
if len(sys.argv) != 4:
usage()
int(sys.argv[2])
main(sys.argv[1],sys.argv[2],sys.argv[3])
except ValueError:
print ('\nSecond Arg \''+sys.argv[2]+'\' Must Be Integer Value!\n')
usage()
| mit | Python |
|
4f87a0e144bf738e523cd1f8d914f39090275fee | add review status to individuals | ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/seqr,ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,ssadedin/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/xbrowse,ssadedin/seqr,ssadedin/seqr,macarthur-lab/seqr | xbrowse_server/base/migrations/0008_individual_review_status.py | xbrowse_server/base/migrations/0008_individual_review_status.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-10-05 09:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0007_auto_20160826_1327'),
]
operations = [
migrations.AddField(
model_name='individual',
name='review_status',
field=models.CharField(blank=True, choices=[(b'A', b'Accepted'), (b'E', b'Accepted - Exome'), (b'G', b'Accepted - Genome'), (b'R', b'Not Accepted'), (b'N', b'See Notes'), (b'H', b'Hold')], default=b'', max_length=1, null=True),
),
]
| agpl-3.0 | Python |
|
34bc4b9e5731c94ae4655deb338d67aa3f9a1f63 | Create project.py | eddie-jeon/empty-app | project.py | project.py | from ggame import App, RectangleAsset, ImageAsset, SoundAsset, Sprite, Sound
from ggame import LineStyle, Color
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
green = Color(0x00ff00, 1)
black = Color(0, 1)
noline = LineStyle(0, black)
bg_asset = RectangleAsset(SCREEN_WIDTH, SCREEN_HEIGHT, noline, green)
bg = Sprite(bg_asset, (0,0))
# Sounds
pew1_asset = SoundAsset("sounds/pew1.mp3")
pew1 = Sound(pew1_asset)
pop_asset = SoundAsset("sounds/reappear.mp3")
pop = Sound(pop_asset)
# A ball! This is already in the ggame-tutorials repository
ball_asset = ImageAsset("images/orb-150545_640.png")
ball = Sprite(ball_asset, (0, 0))
# Original image is too big. Scale it to 1/10 its original size
ball.scale = 0.1
ball.y = 200
# custom attributes
ball.dir = 1
ball.go = True
# Sounds
pew1_asset = SoundAsset("sounds/pew1.mp3")
pew1 = Sound(pew1_asset)
pop_asset = SoundAsset("sounds/reappear.mp3")
pop = Sound(pop_asset)
def reverse(b):
b.dir *= -1
pop.play()
# Set up function for handling screen refresh
def step():
if ball.go:
ball.x += ball.dir
if ball.x + ball.width > SCREEN_WIDTH or ball.x < 0:
ball.x -= ball.dir
reverse(ball)
# Handle the space key
def spaceKey(event):
ball.go = not ball.go
# Handle the "reverse" key
def reverseKey(event):
reverse(ball)
# Handle the mouse click
def mouseClick(event):
ball.x = event.x
ball.y = event.y
pew1.play()
myapp = App(SCREEN_WIDTH, SCREEN_HEIGHT)
# Set up event handlers for the app
myapp.listenKeyEvent('keydown', 'space', spaceKey)
myapp.listenKeyEvent('keydown', 'r', reverseKey)
myapp.listenMouseEvent('click', mouseClick)
myapp.run(step)
| mit | Python |
|
080df88609ac25eff0b4379e31acb63654d3314c | Create randfor.py | FelBalazard/Prediction-with-Haplotypes,FelBalazard/Prediction-with-Haplotypes,FelBalazard/Prediction-with-Haplotypes | randfor.py | randfor.py | #!/usr/bin/env python
#This script performs randomforests on the blocks for the three variation of the method.
import sys
blocn=sys.argv[1]
min_samples_leaf=int(sys.argv[2])
import math
#The function evi for evidence is meant to make the result homogeneous to
#logistic regression. The if loop avoids having any infinity in the output as
#evidence is not defined for 0 or 1.
def evi(x):
if x<0.001:
return -7
elif x>0.999:
return 7
else:
return math.log(x/(1-x))
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
dischaps=pd.read_csv("discovery.phased."+str(blocn)+".haps",sep=" ",header=None)
validhaps=pd.read_csv("validation.phased."+str(blocn)+".haps",sep=" ",header=None)
dischaps=dischaps.ix[:,5:]
validhaps=validhaps.ix[:,5:]
dischaps=dischaps.T
validhaps=validhaps.T
dischaps=np.array(dischaps)
validhaps=np.array(validhaps)
label=pd.read_csv("discovery.double.label",header=None)
label=np.array(label)
#c refers to PH. Each haplotype is treated as an observation and then the evidence is combined
#to create a new variable.
rfc = RandomForestClassifier(n_estimators=500, max_features="auto",min_samples_leaf=min_samples_leaf,oob_score=True)
rfc.fit(dischaps,np.ravel(label))
predc=rfc.oob_decision_function_
predc=predc[:,1]
predc=map(evi,predc)
predc=np.array([predc[i] for i in range(0,len(predc),2)]) +np.array([predc[i] for i in range(1,len(predc),2)])
predc=pd.DataFrame(predc)
predc.to_csv("c.disc.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
validc=rfc.predict_proba(validhaps)
validc=validc[:,1]
validc=map(evi,validc)
validc=np.array([validc[i] for i in range(0,len(validc),2)])+np.array([validc[i] for i in range(1,len(validc),2)])
validc=pd.DataFrame(validc)
validc.to_csv("c.valid.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
#To see the interest of using haplotypes, we take this information out and see what happens.
disc=np.array([dischaps[i,:] for i in range(0,len(dischaps),2)])+np.array([dischaps[i,:] for i in range(1,len(dischaps),2)])
valid=np.array([validhaps[i,:] for i in range(0,len(validhaps),2)])+np.array([validhaps[i,:] for i in range(1,len(validhaps),2)])
labelsimple=[label[i] for i in range(0,len(label),2)]
#b refers to PwoH
rfc = RandomForestClassifier(n_estimators=500, max_features="auto", min_samples_leaf=min_samples_leaf,oob_score=True)
rfc.fit(disc,np.ravel(labelsimple))
predb=rfc.oob_decision_function_
predb=predb[:,1]
predb=map(evi,predb)
predb=pd.DataFrame(predb)
predb.to_csv("b.disc.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
validb=rfc.predict_proba(valid)
validb=validb[:,1]
validb=map(evi,validb)
validb=pd.DataFrame(validb)
validb.to_csv("b.valid.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
#This is to try and capture dominace effect. We concatenate the two haplotypes twice (in the two possible orders)
#and we take the mean of the prediction. d refers to PHd
swch=[1,-1]*len(disc)
swch=np.array(range(len(dischaps)))+swch
discd=np.concatenate((dischaps,dischaps[swch,:]),axis=1)
swch=[1,-1]*len(valid)
swch=np.array(range(len(validhaps)))+swch
validd=np.concatenate((validhaps,validhaps[swch,:]),axis=1)
rfc = RandomForestClassifier(n_estimators=500, max_features="auto", min_samples_leaf=min_samples_leaf,oob_score=True)
rfc.fit(dischaps,np.ravel(label))
predd=rfc.oob_decision_function_
predd=predd[:,1]
predd=map(evi,predd)
predd=np.array([predd[i] for i in range(0,len(predd),2)]) +np.array([predd[i] for i in range(1,len(predd),2)])
predd=pd.DataFrame(predd)
predd.to_csv("d.disc.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
validd=rfc.predict_proba(validhaps)
validd=validd[:,1]
validd=map(evi,validd)
validd=np.array([validd[i] for i in range(0,len(validd),2)])+np.array([validd[i] for i in range(1,len(validd),2)])
validd=pd.DataFrame(validd)
validd.to_csv("d.valid.bloc"+str(blocn),na_rep='NA',sep=" ",line_terminator=" ",header=False,index=False)
| mit | Python |
|
681cc0a4160373fe82de59946b52e0e21611af84 | Print out all links on a page | NilanjanaLodh/PyScripts,NilanjanaLodh/PyScripts | linkLister.py | linkLister.py | import requests
import re
url = raw_input("Enter URL with http or https prefix : " )
print url
website= requests.get(url)
html = website.text
print html
linklist = re.findall('"((http|ftp)s?://.*?)"',html)
print linklist
for link in linklist:
print link[0]
| mit | Python |
|
51f8b228ff1096769a06b47d026e81a166503a82 | add missing unit tests for previous commit | Bismarrck/pymatgen,ctoher/pymatgen,rousseab/pymatgen,ctoher/pymatgen,ctoher/pymatgen,yanikou19/pymatgen,sonium0/pymatgen,Bismarrck/pymatgen,rousseab/pymatgen,rousseab/pymatgen,migueldiascosta/pymatgen,Dioptas/pymatgen,migueldiascosta/pymatgen,sonium0/pymatgen,Bismarrck/pymatgen,yanikou19/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,migueldiascosta/pymatgen,Dioptas/pymatgen,yanikou19/pymatgen,sonium0/pymatgen | pymatgen/util/tests/test_decorators.py | pymatgen/util/tests/test_decorators.py | import unittest
from pymatgen.util.decorators import lru_cache
class TestLRUCache(unittest.TestCase):
def test_function(self):
@lru_cache(2)
def cached_func(a, b):
return a + b
#call a few times to get some stats
self.assertEqual(cached_func(1, 2), 3)
self.assertEqual(cached_func(3, 2), 5)
self.assertEqual(cached_func(3, 2), 5)
self.assertEqual(cached_func(1, 2), 3)
self.assertEqual(cached_func(4, 2), 6)
self.assertEqual(cached_func(4, 2), 6)
self.assertEqual(cached_func(3, 2), 5)
self.assertEqual(cached_func(1, 2), 3)
self.assertEqual(cached_func.cache_info().hits, 3)
self.assertEqual(cached_func.cache_info().misses, 5)
def test_class_method(self):
class TestClass():
@lru_cache(10)
def cached_func(self, x):
return x
a = TestClass()
b = TestClass()
self.assertEqual(a.cached_func(1), 1)
self.assertEqual(b.cached_func(2), 2)
self.assertEqual(b.cached_func(3), 3)
self.assertEqual(a.cached_func(3), 3)
self.assertEqual(a.cached_func(1), 1)
self.assertEqual(a.cached_func.cache_info().hits, 1)
self.assertEqual(a.cached_func.cache_info().misses, 4)
| mit | Python |
|
3554160654a1cb8e7000ebeea06aecdabc91af8e | Create JustPremium.py | vuolter/pyload,vuolter/pyload,vuolter/pyload | module/plugins/hooks/JustPremium.py | module/plugins/hooks/JustPremium.py | # -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: mazleu
"""
from module.plugins.Hook import Hook
from module.plugins.Account import Account
from module.plugins.Hoster import Hoster
class JustPremium(Hook):
__name__ = "JustPremium"
__version__ = "0.15"
__description__ = "If you add multiple links with at least one premium hoster link, all non premium links get removed"
__config__ = [("activated", "bool", "Activated", "False"),
("freehosters","bool", "Allow all freehosters and other unknown sites", "false"),
("nicehoster", "str", "unblock this hosters (comma seperated)", "Zippyshare.com")]
__author_name__ = ("mazleu")
__author_mail__ = ("[email protected]")
event_list = ["linksAdded"]
def coreReady(self) :
accs=str(self.core.accountManager.getAccountInfos())
global badhosts
global hosts
hosts = ""
while "[{" in accs:
startid=accs.rfind("[], ", 0, accs.find("[{"))+2
endid=accs.find("}]",startid)+2
hosts=hosts+","+accs[startid+3:accs.find("'",startid+3)]
accs=accs[0:startid]+accs[endid:]
badhosts=accs.replace("': [], '",",")[2:-6]
hosts=hosts[1:]
hosts=hosts+","+self.getConfig("nicehoster")
self.logDebug("good hosts:",hosts)
self.logDebug("bad hosts:",badhosts)
def filterLinks(self, t):
links = self.core.api.checkURLs(t)
hosterlist =""
bhosters = [x.strip() for x in badhosts.split(",")]
ghosters = [x.strip() for x in hosts.split(",")]
premhoster = False
for hoster in links:
self.logDebug(hoster)
if hoster in ghosters:
premhoster = True
if premhoster :
for hoster in links:
if self.getConfig("freehosters"):
if hoster in bhosters:
for link in links[hoster]:
t.remove(link)
self.logDebug("removed link '%s'because hoster was: '%s' " % (link,hoster))
else:
if not hoster in ghosters:
for link in links[hoster]:
t.remove(link)
self.logDebug("removed link '%s'because hoster was: '%s' " % (link,hoster))
def linksAdded(self, links, pid):
self.filterLinks(links)
| agpl-3.0 | Python |
|
61a6f6468462ed5db6c8e6c55bf29f0c503ff899 | add solution for H-Index | zhyu/leetcode,zhyu/leetcode | algorithms/hIndex/hIndex.py | algorithms/hIndex/hIndex.py | class Solution(object):
def hIndex(self, citations):
"""
:type citations: List[int]
:rtype: int
"""
n = len(citations)
c = collections.Counter([min(x, n) for x in citations])
s = reduce(lambda a, x: a + [a[-1] + c[x]], reversed(range(n)), [c[n]])
return next((n-i for i, v in enumerate(s) if v >= n-i), 0)
| mit | Python |
|
c2f0f5184665250949c32d16db0b521c357e3aa7 | Add solution to linkedListCycle problem. | TheGhostHuCodes/leetCode | python/src/linkedListCycle/linkedListCycle.py | python/src/linkedListCycle/linkedListCycle.py | # Given a linked list, determine if it has a cycle in it.
# Follow up:
# Can you solve it without using extra space?
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
# @param head, a ListNode
# @return a boolean
def hasCycle0(self, head):
"""Solving the problem with a visited array: O(n^2) run time and
O(n) memory."""
visited = []
while head is not None:
if head in visited:
return True
visited.append(head)
head = head.next
return False
def hasCycle1(self, head):
"""Solving the problem iteratively with the tortise and the hare
pointers: O(n) run time and O(1) memory."""
if head is None or head.next is None:
return False
tortise = head.next
hare = head.next.next
while hare is not None and hare.next is not None:
if tortise == hare:
return True
else:
tortise = tortise.next
hare = hare.next.next
return False
def hasCycle(self, head):
"""Solving the problem recursively with the tortise and the hare
pointers: O(n) run time and O(1) memory."""
if head is None or head.next is None:
return False
else:
return self.hasCycleRecurse(head.next, head.next.next)
def hasCycleRecurse(self, tortise, hare):
"""Used in above recursive solution."""
if hare is None or hare.next is None:
return False
elif tortise == hare:
return True
else:
return self.hasCycleRecurse(tortise.next, hare.next.next)
| mit | Python |
|
2ab5d0bfdfe90279f3fffeeb51882cdbcb4e9135 | test genesis tests | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/unit/modules/genesis_test.py | tests/unit/modules/genesis_test.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Rupesh Tare <[email protected]>`
'''
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
from salt.modules import genesis
# Globals
genesis.__grains__ = {}
genesis.__salt__ = {}
genesis.__context__ = {}
genesis.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class GenesisTestCase(TestCase):
'''
Test cases for salt.modules.genesis
'''
def test_bootstrap(self):
'''
Test for Create an image for a specific platform.
'''
mock = MagicMock(return_value=False)
with patch.dict(genesis.__salt__, {'file.directory_exists': mock}):
mock = MagicMock(side_effect=Exception('foo'))
with patch.dict(genesis.__salt__, {'file.mkdir': mock}):
self.assertEqual(genesis.bootstrap('platform', 'root'),
{'Error': "Exception('foo',)"})
with patch.object(genesis, '_bootstrap_yum', return_value='A'):
self.assertEqual(genesis.bootstrap('rpm', 'root', 'dir1'), 'A')
with patch.object(genesis, '_bootstrap_deb', return_value='A'):
self.assertEqual(genesis.bootstrap('deb', 'root', 'dir1'), 'A')
with patch.object(genesis, '_bootstrap_pacman', return_value='A'):
self.assertEqual(genesis.bootstrap('pacman', 'root', 'dir1'), 'A')
def test_avail_platforms(self):
'''
Test for Return which platforms are available
'''
self.assertFalse(genesis.avail_platforms()['deb'])
def test_pack(self):
'''
Test for Pack up a directory structure, into a specific format
'''
with patch.object(genesis, '_tar', return_value='tar'):
self.assertEqual(genesis.pack('name', 'root'), None)
def test_unpack(self):
'''
Test for Unpack an image into a directory structure
'''
with patch.object(genesis, '_untar', return_value='untar'):
self.assertEqual(genesis.unpack('name', 'root'), None)
if __name__ == '__main__':
from integration import run_tests
run_tests(GenesisTestCase, needs_daemon=False)
| apache-2.0 | Python |
|
4ec7abe5df2bdd4a68528fc9af14288b57fd72cc | add integration utest on Session | anybox/anybox.recipe.odoo | tests_with_openerp/test_session.py | tests_with_openerp/test_session.py | from unittest import TestCase
from anybox.recipe.odoo.runtime.session import Session
from openerp.tests.common import get_db_name
class SessionTestCase(TestCase):
def setUp(self):
super(SessionTestCase, self).setUp()
self.session = Session(None, None, parse_config=False)
def open_session(self):
self.session.open(db=get_db_name())
def test_env_after_install_module(self):
self.open_session()
self.assertAdminPresentWithV8API()
self.session.install_modules(['decimal_precision'])
self.assertAdminPresentWithV8API()
def assertAdminPresentWithV8API(self):
self.assertEqual(
u"Administrator",
self.session.env['res.users'].search([('login', '=', 'admin')]).name
)
| agpl-3.0 | Python |
|
da1bda146b4762bc572cb28da30cfb09b1d083aa | add hikvision (#243) | balloob/netdisco | netdisco/discoverables/hikvision.py | netdisco/discoverables/hikvision.py | """Discover Hikvision cameras."""
from . import MDNSDiscoverable
class Discoverable(MDNSDiscoverable):
"""Add support for discovering Hikvision cameras."""
def __init__(self, nd):
"""Initialize Hikvision camera discovery."""
super(Discoverable, self).__init__(nd, '_http._tcp.local.')
def get_entries(self):
return self.find_by_device_name('HIKVISION')
| mit | Python |
|
f7b2b511bd6cca122782b39c9eb75ed4a4736717 | add benchmark | daxlab/Play-Store-API,daxlab/Play-Store-API | test/benchmark.py | test/benchmark.py | import urllib2
import json
url = "http://localhost:3000/api?package=com.whatsapp"
for i in range(5):
print 'Downloading '+ str(i)
res = urllib2.urlopen(url).read()
file = "data-"+str(i)+".json"
with open(file, 'w') as outfile:
json.dump(res, outfile)
| mit | Python |
|
27622185e04bb652284597783287262e23bafa7d | Add minimal test case (failing) | evernym/zeno,evernym/plenum | plenum/test/node_request/test_apply_stashed_partially_ordered.py | plenum/test/node_request/test_apply_stashed_partially_ordered.py | import pytest
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.startable import Mode
from plenum.common.txn_util import reqToTxn
from plenum.test.delayers import cDelay
from plenum.test.helper import sdk_get_and_check_replies, sdk_send_random_requests, logger
from plenum.test.node_catchup.helper import ensure_all_nodes_have_same_data
from plenum.test.stasher import delay_rules
from plenum.test.test_node import getNonPrimaryReplicas
from stp_core.loop.eventually import eventually
TOTAL_REQUESTS = 10
@pytest.fixture(scope="module")
def tconf(tconf):
old_max_batch_wait = tconf.Max3PCBatchWait
old_max_batch_size = tconf.Max3PCBatchSize
tconf.Max3PCBatchWait = 1000
tconf.Max3PCBatchSize = TOTAL_REQUESTS
yield tconf
tconf.Max3PCBatchWait = old_max_batch_wait
tconf.Max3PCBatchSize = old_max_batch_size
def test_apply_stashed_partially_ordered(looper,
txnPoolNodeSet,
sdk_pool_handle,
sdk_wallet_client):
test_node = getNonPrimaryReplicas(txnPoolNodeSet)[0].node
test_stasher = test_node.nodeIbStasher
ledger_size = max(node.domainLedger.size for node in txnPoolNodeSet)
def check_pool_ordered_some_requests():
assert max(node.domainLedger.size for node in txnPoolNodeSet) > ledger_size
def check_test_node_has_stashed_ordered_requests():
assert len(test_node.stashedOrderedReqs) > 0
with delay_rules(test_stasher, cDelay()):
reqs = sdk_send_random_requests(looper, sdk_pool_handle, sdk_wallet_client, TOTAL_REQUESTS)
looper.run(eventually(check_pool_ordered_some_requests))
test_node.mode = Mode.syncing
looper.run(eventually(check_test_node_has_stashed_ordered_requests))
req_idr = test_node.stashedOrderedReqs[0].reqIdr
req_idr = req_idr[:len(req_idr) // 2]
assert len(req_idr) > 1
ledger_info = test_node.ledgerManager.getLedgerInfoByType(DOMAIN_LEDGER_ID)
for id in req_idr:
txn = reqToTxn(test_node.requests[id].finalised)
ledger_info.ledger.add(txn)
ledger_info.postTxnAddedToLedgerClbk(DOMAIN_LEDGER_ID, txn)
test_node.mode = Mode.participating
test_node.processStashedOrderedReqs()
ensure_all_nodes_have_same_data(looper, txnPoolNodeSet)
sdk_get_and_check_replies(looper, reqs)
| apache-2.0 | Python |
|
95da7f3b6c03d3d8e711aea4195017a17cb63d5f | Add another version of write libsvm data format. | Wayne82/libsvm-practice,Wayne82/libsvm-practice,Wayne82/libsvm-practice | scripts/python/write_libsvm_data_format_v2.py | scripts/python/write_libsvm_data_format_v2.py | """
A script to write out lib svm expected data format from my collecting data
"""
import os
import sys
import csv
import json
import getopt
import subprocess
CMD_USAGE = """
usage: write_libsvm_data_format.py --inputs="/inputs/folder/" --output="/output/lib_svm_data" <options>
<options>:
-f, --features specify the feature space size, default is 10
-v, --verify the tool to verify output data no format error
"""
# by default feature space to be 10
FEATURE_SPACE = 10
LIMIT_SAMPLE_SIZE = 1000
def write_libsvm_data(input_files, output_file):
"""
:param input_files:
:param output_file:
"""
with open(output_file, 'wb') as output_csv_file:
output_writer = csv.writer(output_csv_file, delimiter=' ')
for input_file in input_files:
print "Write file: ", input_file
with open(input_file, 'rb') as signal_file:
json_data = json.load(signal_file)
beacons = json_data['interestedBeacons']
for beacon in beacons:
rssis = beacon['rssis']
length = len(rssis)
if length < FEATURE_SPACE:
continue
rows = 0
ending = FEATURE_SPACE
while ending <= length:
line = [beacon['realDistance']]
for idx, val in enumerate(rssis[(ending - FEATURE_SPACE):ending]):
if val != 0:
line.append(':'.join([str(idx + 1), str(val)]))
output_writer.writerow(line)
ending += FEATURE_SPACE
rows += 1
if rows >= LIMIT_SAMPLE_SIZE:
break
def check_data(check_tool, data_file):
"""
:param data_file: the input lib svm format data, to be verified.
"""
check_py = check_tool
if not os.path.exists(check_py):
print("checkdata.py not exist.")
return
subprocess.call([os.path.abspath(check_py), data_file], shell=True)
def main(argv):
"""
:param argv: command line arguments
:rtype : error code, success 0 and fail 1
"""
try:
optlist, _ = getopt.getopt(argv[1:], "hi:o:f:v:",
["help", "inputs=", "output=", "features=", "verify"])
except getopt.GetoptError:
print("Command line arguments error, please try --help for help")
return 1
inputs = ''
output_file = ''
check_tool = ''
for opt, opt_arg in optlist:
if opt in ("-h", "--help"):
print CMD_USAGE
return 0
if opt in ("-i", "--inputs"):
inputs = opt_arg
if not os.path.exists(inputs):
print("Input files folder not exist")
return 1
elif opt in ("-o", "--output"):
output_file = opt_arg
elif opt in ("-f", "--features"):
global FEATURE_SPACE
FEATURE_SPACE = int(opt_arg)
elif opt in ("-v", "--verify"):
check_tool = opt_arg
# print the messages
print("Inputs folder: " + inputs)
print("Output file: " + output_file)
print("Feature space size: " + str(FEATURE_SPACE))
print("Check tool: " + check_tool)
assert isinstance(output_file, str)
assert isinstance(inputs, str)
input_files = []
for root, _, files in os.walk(inputs):
for name in files:
if name.endswith('.raw_filter'):
input_files.append(os.path.abspath(os.path.join(root, name)))
if len(input_files) == 0:
print("No input files.")
return 1
write_libsvm_data(input_files, output_file)
check_data(check_tool, output_file)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv)) | bsd-3-clause | Python |
|
cc7eb329a7d132947861ca1f2d4713cba1e4274a | Add tests! | mplewis/iverilog-netlist-processing | test_processor.py | test_processor.py | from ivl_enums import IvlElabType, IvlPortType, IvlDataDirection
from parsers import parse_modules_and_elabs
from utils import IvlNetManager
import pytest
import sure # noqa
@pytest.yield_fixture
def read_netlist():
# Read a netlist and parse it into modules and elabs.
# Create a new net manager.
with open('test.netlist') as f:
test_netlist = f.read()
net_manager = IvlNetManager()
modules, elabs = parse_modules_and_elabs(test_netlist, net_manager)
yield (modules, elabs, net_manager)
def test_counts(read_netlist):
# Make sure the right number of things are produced
modules, elabs, net_manager = read_netlist
len(modules).should.be.equal(6)
len(elabs).should.be.equal(27)
def test_types(read_netlist):
modules, elabs, net_manager = read_netlist
# Make sure the right types appear
len([m for m in modules if m.xtype == 'tff']).should.be.equal(3)
net_part_selects = [e for e in elabs if
e.xtype is IvlElabType.net_part_select]
len(net_part_selects).should.be.equal(18)
posedges = [e for e in elabs if e.xtype is IvlElabType.posedge]
len(posedges).should.be.equal(3)
logics = [e for e in elabs if e.xtype is IvlElabType.logic]
len(logics).should.be.equal(6)
def test_ports(read_netlist):
# Make sure ports are generated properly
modules, elabs, net_manager = read_netlist
tb = [m for m in modules if m.xtype == 'bargraph_testbench'][0]
len(tb.ports).should.be.equal(3)
regs = [p for p in tb.ports if p.xtype is IvlPortType.reg]
len(regs).should.be.equal(1)
wires = [p for p in tb.ports if p.xtype is IvlPortType.wire]
len(wires).should.be.equal(2)
def test_local_ports(read_netlist):
# Check for generation of local wire-type ports
modules, elabs, net_manager = read_netlist
bg = [m for m in modules if m.xtype == 'bargraph3'][0]
local_ports = [p for p in bg.ports if p.is_local]
len(local_ports).should.be.equal(15)
def test_port_types(read_netlist):
# Check for proper port typing
modules, elabs, net_manager = read_netlist
tff = [m for m in modules if m.xtype == 'tff'][0]
inputs = [p for p in tff.ports if
p.direction is IvlDataDirection.input]
len(inputs).should.be.equal(2)
outputs = [p for p in tff.ports if
p.direction is IvlDataDirection.output]
len(outputs).should.be.equal(2)
def test_nets(read_netlist):
# Check for proper net generation
modules, elabs, net_manager = read_netlist
to_bg = net_manager.get_net('0x7fbd08d0a950')
len(to_bg.members).should.be.equal(3)
| mit | Python |
|
823d10795b22b751647e79e77eecd381cf7a809d | create test file | mkolodny/3taps | test_threetaps.py | test_threetaps.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for threetaps."""
import unittest
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
f9fd2e3dcc4c25fd7561f8898e3845992553a8a8 | add wrapper script to launch tests | paradoxon82/qdjango,ericLemanissier/qdjango,ericLemanissier/qdjango,paradoxon82/qdjango,ericLemanissier/qdjango,jlaine/qdjango,jlaine/qdjango,paradoxon82/qdjango,paradoxon82/qdjango,jlaine/qdjango,jlaine/qdjango,ericLemanissier/qdjango,ericLemanissier/qdjango,jlaine/qdjango,paradoxon82/qdjango | tests/run.py | tests/run.py | #!/usr/bin/python
import os
root = os.path.join(os.path.dirname(__file__), '..')
prog = os.path.join(os.path.dirname(__file__), 'qdjango-tests')
path = []
for component in ['db', 'http', 'script']:
path.append(os.path.join(root, 'src', component))
os.system("LD_LIBRARY_PATH=%s %s" % (':'.join(path), prog))
| lgpl-2.1 | Python |
|
e8a6c0adc3aa77f8e0b1399fe076b43720acb823 | Test the API can run | antoinearnoud/openfisca-france,antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france | tests/test_api.py | tests/test_api.py | # -*- coding: utf-8 -*-
import subprocess
import requests
from unittest import TestCase
from nose.tools import assert_equal
class Test(TestCase):
def setUp(self):
self.process = subprocess.Popen("openfisca-serve")
def tearDown(self):
self.process.terminate()
def test_response(self):
assert_equal(
requests.get("http://localhost:2000").status_code,
200
)
| agpl-3.0 | Python |
|
690c08b2b35df2d81dc0977d8bd593c45806e1c2 | Add dumb log view test cases | bosondata/badwolf,bosondata/badwolf,bosondata/badwolf | tests/test_log.py | tests/test_log.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import url_for
def test_view_build_log(test_client):
test_client.get(url_for('log.build_log', sha='123456'))
def test_view_lint_log(test_client):
test_client.get(url_for('log.lint_log', sha='123456'))
| mit | Python |
|
4d500d9abe2da28cdd9bd95019048de445aac265 | Add a history demo in documentation. | tantale/deprecated | docs/source/tutorial/v5/history_demo.py | docs/source/tutorial/v5/history_demo.py | # coding: utf-8
from deprecated.history import deprecated
from deprecated.history import versionadded
from deprecated.history import versionchanged
@deprecated(
reason="""
This is deprecated, really. So you need to use another function.
But I don\'t know which one.
- The first,
- The second.
Just guess!
""",
version='0.3.0')
@versionchanged(
reason='Well, I add a new feature in this function. '
'It is very useful as you can see in the example below, so try it. '
'This is a very very very very very long sentence.',
version='0.2.0')
@versionadded(
reason='Here is my new function.',
version='0.1.0')
def successor(n):
"""
Calculate the successor of a number.
:param n: a number
:return: number + 1
"""
return n + 1
help(successor)
| mit | Python |
|
361333f8b214097469389d0219f339fc59ea469b | Add permissions.py | migonzalvar/teamroulette,mfernandezmsistemas/phyton1 | teams/permisssions.py | teams/permisssions.py | from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_permission(self, request, view):
return request.user.is_authenticated()
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
| mit | Python |
|
20ac8a830ef59abc51afe13ac102521767d47c22 | test uffd bad socket path scenarios | firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker | tests/integration_tests/functional/test_uffd.py | tests/integration_tests/functional/test_uffd.py | # Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test UFFD related functionality when resuming from snapshot."""
import logging
import os
from framework.artifacts import SnapshotMemBackendType
from framework.builder import MicrovmBuilder, SnapshotBuilder
from framework.utils import run_cmd
import host_tools.network as net_tools
def create_snapshot(bin_cloner_path):
"""Create a snapshot of a microVM."""
vm_builder = MicrovmBuilder(bin_cloner_path)
vm_instance = vm_builder.build_vm_nano()
basevm = vm_instance.vm
root_disk = vm_instance.disks[0]
ssh_key = vm_instance.ssh_key
basevm.start()
ssh_connection = net_tools.SSHConnection(basevm.ssh_config)
# Verify if guest can run commands.
exit_code, _, _ = ssh_connection.execute_command("sync")
assert exit_code == 0
# Create a snapshot builder from a microvm.
snapshot_builder = SnapshotBuilder(basevm)
# Create base snapshot.
snapshot = snapshot_builder.create([root_disk.local_path()],
ssh_key)
basevm.kill()
return snapshot
def test_bad_socket_path(bin_cloner_path, test_microvm_with_api):
"""
Test error scenario when socket path does not exist.
@type: negative
"""
logger = logging.getLogger("uffd_bad_socket_path")
logger.info("Create snapshot")
snapshot = create_snapshot(bin_cloner_path)
logger.info("Load snapshot, mem %s", snapshot.mem)
vm = test_microvm_with_api
vm.spawn()
jailed_vmstate = vm.create_jailed_resource(snapshot.vmstate)
response = vm.snapshot.load(
mem_backend={
'type': SnapshotMemBackendType.UFFD,
'path': 'inexsistent'
},
snapshot_path=jailed_vmstate
)
assert vm.api_session.is_status_bad_request(response.status_code)
assert "Load microVM snapshot error: Cannot connect to UDS in order to " \
"send information on handling guest memory page-faults due to: " \
"No such file or directory (os error 2)" in response.text
def test_unbinded_socket(bin_cloner_path, test_microvm_with_api):
"""
Test error scenario when PF handler has not yet called bind on socket.
@type: negative
"""
logger = logging.getLogger("uffd_unbinded_socket")
logger.info("Create snapshot")
snapshot = create_snapshot(bin_cloner_path)
logger.info("Load snapshot, mem %s", snapshot.mem)
vm = test_microvm_with_api
vm.spawn()
jailed_vmstate = vm.create_jailed_resource(snapshot.vmstate)
socket_path = os.path.join(vm.path, "firecracker-uffd.sock")
run_cmd("touch {}".format(socket_path))
jailed_sock_path = vm.create_jailed_resource(socket_path)
response = vm.snapshot.load(
mem_backend={
'type': SnapshotMemBackendType.UFFD,
'path': jailed_sock_path
},
snapshot_path=jailed_vmstate
)
assert vm.api_session.is_status_bad_request(response.status_code)
assert "Load microVM snapshot error: Cannot connect to UDS in order to" \
" send information on handling guest memory page-faults due to: " \
"Connection refused (os error 111)" in response.text
| apache-2.0 | Python |
|
7cb839279bc62b95eb7367814ef71c046d4b2184 | Add 'examples' module which contains some examplary function examples. | mansenfranzen/tssim | tssim/examples.py | tssim/examples.py | """This module contains example time functions"""
import numpy as np
def rand_lin_noise():
beta = np.random.normal()
return lambda x: beta * x + np.random.random(size=len(x))
def const_lin_noise(x):
beta = np.random.normal()
return beta * x + np.random.random(size=len(x))
def random_walk(x):
return np.cumsum(np.random.normal(size=x.shape[0]))
def random_walk_limit(limit=2):
vals = {"current": 0}
def walk(value):
new_val = np.random.normal()
if vals["current"] >= limit:
new_val = -abs(new_val)
elif vals["current"] <= -limit:
new_val = abs(new_val)
vals["current"] += new_val
return vals["current"]
return walk
| mit | Python |
|
c156ad1379d842924b928c6c80f668f9875e840a | Remove page-filter flag. (which is now user-filter) | TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1 | tools/telemetry/telemetry/story/story_filter.py | tools/telemetry/telemetry/story/story_filter.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import re
from telemetry.internal.util import command_line
class _StoryMatcher(object):
def __init__(self, pattern):
self._regex = None
self.has_compile_error = False
if pattern:
try:
self._regex = re.compile(pattern)
except re.error:
self.has_compile_error = True
def __nonzero__(self):
return self._regex is not None
def HasMatch(self, story):
return self and bool(
self._regex.search(story.display_name) or
(story.name and self._regex.search(story.name)))
class _StoryLabelMatcher(object):
def __init__(self, labels_str):
self._labels = labels_str.split(',') if labels_str else None
def __nonzero__(self):
return self._labels is not None
def HasLabelIn(self, story):
return self and bool(story.labels.intersection(self._labels))
class StoryFilter(command_line.ArgumentHandlerMixIn):
"""Filters stories in the story set based on command-line flags."""
@classmethod
def AddCommandLineArgs(cls, parser):
group = optparse.OptionGroup(parser, 'User story filtering options')
group.add_option('--story-filter',
help='Use only stories whose names match the given filter regexp.')
group.add_option('--story-filter-exclude',
help='Exclude stories whose names match the given filter regexp.')
group.add_option('--story-label-filter',
help='Use only stories that have any of these labels')
group.add_option('--story-label-filter-exclude',
help='Exclude stories that have any of these labels')
parser.add_option_group(group)
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
cls._include_regex = _StoryMatcher(args.story_filter)
cls._exclude_regex = _StoryMatcher(args.story_filter_exclude)
cls._include_labels = _StoryLabelMatcher(args.story_label_filter)
cls._exclude_labels = _StoryLabelMatcher(args.story_label_filter_exclude)
if cls._include_regex.has_compile_error:
raise parser.error('--story-filter: Invalid regex.')
if cls._exclude_regex.has_compile_error:
raise parser.error('--story-filter-exclude: Invalid regex.')
@classmethod
def IsSelected(cls, story):
# Exclude filters take priority.
if cls._exclude_labels.HasLabelIn(story):
return False
if cls._exclude_regex.HasMatch(story):
return False
if cls._include_labels and not cls._include_labels.HasLabelIn(story):
return False
if cls._include_regex and not cls._include_regex.HasMatch(story):
return False
return True
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import re
from telemetry.internal.util import command_line
class _StoryMatcher(object):
def __init__(self, pattern):
self._regex = None
self.has_compile_error = False
if pattern:
try:
self._regex = re.compile(pattern)
except re.error:
self.has_compile_error = True
def __nonzero__(self):
return self._regex is not None
def HasMatch(self, story):
return self and bool(
self._regex.search(story.display_name) or
(story.name and self._regex.search(story.name)))
class _StoryLabelMatcher(object):
def __init__(self, labels_str):
self._labels = labels_str.split(',') if labels_str else None
def __nonzero__(self):
return self._labels is not None
def HasLabelIn(self, story):
return self and bool(story.labels.intersection(self._labels))
class StoryFilter(command_line.ArgumentHandlerMixIn):
"""Filters stories in the story set based on command-line flags."""
@classmethod
def AddCommandLineArgs(cls, parser):
group = optparse.OptionGroup(parser, 'User story filtering options')
group.add_option('--story-filter',
help='Use only stories whose names match the given filter regexp.')
group.add_option('--page-filter', dest='story_filter',
help='Deprecated. Use --story-filter instead.')
group.add_option('--story-filter-exclude',
help='Exclude stories whose names match the given filter regexp.')
group.add_option('--story-label-filter',
help='Use only stories that have any of these labels')
group.add_option('--story-label-filter-exclude',
help='Exclude stories that have any of these labels')
parser.add_option_group(group)
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
cls._include_regex = _StoryMatcher(args.story_filter)
cls._exclude_regex = _StoryMatcher(args.story_filter_exclude)
cls._include_labels = _StoryLabelMatcher(args.story_label_filter)
cls._exclude_labels = _StoryLabelMatcher(args.story_label_filter_exclude)
if cls._include_regex.has_compile_error:
raise parser.error('--story-filter: Invalid regex.')
if cls._exclude_regex.has_compile_error:
raise parser.error('--story-filter-exclude: Invalid regex.')
@classmethod
def IsSelected(cls, story):
# Exclude filters take priority.
if cls._exclude_labels.HasLabelIn(story):
return False
if cls._exclude_regex.HasMatch(story):
return False
if cls._include_labels and not cls._include_labels.HasLabelIn(story):
return False
if cls._include_regex and not cls._include_regex.HasMatch(story):
return False
return True
| bsd-3-clause | Python |
c39c086f51963678769c1066637ca573c721e827 | Create a simple static gallery script. | strickyak/aphid,strickyak/aphid,strickyak/aphid,strickyak/aphid,strickyak/aphid,strickyak/aphid,strickyak/aphid | static_gallery.py | static_gallery.py | from . import flag
#from go import html
from go import os
from go import path/filepath
def ReadAlbumDirs(input_dir):
f = os.Open(input_dir)
with defer f.Close():
names = f.Readdirnames(-1)
for name in names:
stat = os.Stat(filepath.Join(input_dir, name))
if stat.IsDir():
yield name
def RenderDir(album_names, output_dir):
index = filepath.Join(output_dir, 'index.html')
f = os.Create(index)
with defer f.Close():
f.Write('<html><body><h3>Gallery %s</h3> <ul>\n' % output_dir)
for name in album_names:
f.Write('<li><a href="%s">%q</a></li>\n' % (name, name))
def ReadPhotosInDir(input_dir):
f = os.Open(input_dir)
with defer f.Close():
names = f.Readdirnames(-1)
for name in names:
stat = os.Stat(filepath.Join(input_dir, name))
if stat.IsDir() == False:
yield name
def RenderAlbum(photo_names, output_dir):
index = filepath.Join(output_dir, 'index.html')
f = os.Create(index)
with defer f.Close():
f.Write('<html><body><h3>Album %s</h3> <ul>\n' % output_dir)
for name in photo_names:
f.Write('<li><a href="%s"><img src="%s" /></a></li>\n' % (name, name))
def LinkPhotos(photo_names, input_dir, output_dir):
for photo in photo_names:
photo_orig = filepath.Join(input_dir, photo)
photo_dest = filepath.Join(output_dir, photo)
os.Link(photo_orig, photo_dest)
input_dir = flag.String('input', '', 'The input directory.')
output_dir = flag.String('output', '', 'The output directory.')
def main(argv):
argv = flag.Munch(argv)
album_dirs = list(ReadAlbumDirs(input_dir.X))
RenderDir(album_dirs, output_dir.X)
for dir in album_dirs:
photo_dir = filepath.Join(input_dir.X, dir)
output_dir = filepath.Join(output_dir.X, dir)
photos = list(ReadPhotosInDir(photo_dir))
os.MkdirAll(output_dir, os.ModePerm)
RenderAlbum(photos, output_dir)
LinkPhotos(photos, photo_dir, output_dir)
| mit | Python |
|
f083789e5615d15715f49a7dbdb25505aa5efae2 | Initialize P1_assignChores | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P1_assignChores.py | books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P1_assignChores.py | # Write a program that takes a list of people’s email addresses and a list of chores
# that need to be done and randomly assigns chores to people. Email each person their
# assigned chores.
#
# If you’re feeling ambitious, keep a record of each person’s previously assigned
# chores so that you can make sure the program avoids assigning anyone the same chore
# they did last time.
#
# For another possible feature, schedule the program to run once a week automatically.
| mit | Python |
|
54a9b637aad85a20f3e865185ffed0abfd4192cd | Create tutorial4.py | danielwilson2017/ggame-tutorials | tutorial4.py | tutorial4.py | from ggame import App, RectangleAsset, ImageAsset, Sprite, LineStyle, Color, Frame
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
class SpaceShip(Sprite):
"""
Animated space ship
"""
asset = ImageAsset("images/four_spaceship_by_albertov_with_thrust.png",
Frame(227,0,292-227,125), 4, 'vertical')
def __init__(self, position):
super().__init__(SpaceShip.asset, position)
class SpaceGame(App):
"""
Tutorial4 space game example.
"""
def __init__(self, width, height):
super().__init__(width, height)
# Background
black = Color(0, 1)
noline = LineStyle(0, black)
bg_asset = RectangleAsset(SCREEN_WIDTH, SCREEN_HEIGHT, noline, black)
bg = Sprite(bg_asset, (0,0))
myapp = SpaceGame(SCREEN_WIDTH, SCREEN_HEIGHT)
myapp.run()
| mit | Python |
|
8d1ef1f33cc6f10a58cdeacc0fd840dea245e7a6 | Create typecheck.py with all the code | jacopofar/runtime_typecheck | typecheck.py | typecheck.py | #!/usr/bin/env python3
from typing import (Union,
Tuple,
Any,
TypeVar,
Type,
List)
def check_type(obj, candidate_type, reltype='invariant') -> bool:
if reltype not in ['invariant', 'covariant', 'contravariant']:
raise ValueError(f' Variadic type {reltype} is unknown')
# builtin type like str, or a class
if type(candidate_type) == type and reltype in ['invariant']:
return isinstance(obj, candidate_type)
if type(candidate_type) == type and reltype in ['covariant']:
return issubclass(obj.__class__, candidate_type)
if type(candidate_type) == type and reltype in ['contravariant']:
return issubclass(candidate_type, obj.__class__)
# Any accepts everything
if type(candidate_type) == type(Any):
return True
# Union, at least one match in __args__
if type(candidate_type) == type(Union):
return any(check_type(obj, t, reltype) for t in candidate_type.__args__)
# Tuple, each element matches the corresponding type in __args__
if type(candidate_type) == type(Tuple):
if not hasattr(obj, '__len__'):
return False
if len(candidate_type.__args__) != len(obj):
return False
return all(check_type(o, t, reltype) for (o, t) in zip(obj, candidate_type.__args__))
# List, each element matches the type in __args__
if type(candidate_type) == type(List):
if not hasattr(obj, '__len__'):
return False
return all(check_type(o, candidate_type.__args__[0], reltype) for o in obj)
# TypeVar, this is tricky
if type(candidate_type) == type(TypeVar):
# TODO consider contravariant, variant and bound
# invariant with a list of constraints, acts like a Tuple
if not (candidate_type.__covariant__ or candidate_type.__contracovariant__) and len(candidate_type.__constraints__) > 0:
return any(check_type(obj, t) for t in candidate_type.__constraints__)
if type(candidate_type) == type(Type):
return check_type(obj, candidate_type.__args__[0], reltype='covariant')
raise ValueError(f'Cannot check against {reltype} type {candidate_type}')
assert check_type(3, Any)
assert check_type([5, "hi"], Any)
assert check_type(3, int)
assert not check_type(3, float)
assert check_type(3, Union[int, str])
assert check_type("hello", Union[int, str])
assert not check_type(4.78, Union[int, str])
assert check_type((1, 67), Tuple[int, int])
assert not check_type((1, "new york"), Tuple[int, int])
# NOTE not a tuple, but the whole object is immutable being a JSON received from HTTP
assert check_type([1, "new york"], Tuple[int, str])
assert check_type((1, 67, "Amsterdam"), Tuple[int, int, str])
assert not check_type(("Amsterdam", 1, 67), Tuple[int, int, str])
assert check_type([1, 27, 33, 1956], List[int])
assert not check_type([1.11, 27, 33, 1956], List[int])
assert not check_type([1, 27, 33, 1956, "h", 42], List[int])
assert check_type([1, 27, 33, 1956], List[Union[str, int]])
assert check_type([(12, "Texas"), (-5, "Particle")], List[Tuple[int, str]])
assert not check_type([(1.9, "Texas"), (-5, "Particle")], List[Tuple[int, str]])
assert not check_type([1.11, 27, 33, 1956], List[Tuple[int, str]])
| mit | Python |
|
1a9302d984e8fd0e467a04c87428b64d874e5f04 | refactor customerWallet | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | usermanage/views/customerWallet.py | usermanage/views/customerWallet.py | from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.models import User, Group
from django.contrib.auth.decorators import login_required, user_passes_test, permission_required
from django.contrib.auth.forms import UserCreationForm
from customermanage.models import Coupon, Wallet
from storemanage.models import Ticket
# Create your views here.
from usermanage import models
@login_required()
@permission_required('usermanage.customer_rights',raise_exception=True)
def customerWallet(request):
user = request.user
wallets = [{'name':w.currency.name,'amount':w.amount} for w in Wallet.objects.filter(user=user)]
print(wallets)
return render(request, 'index/wallet.html',{'wallets':wallets})
| mit | Python |
|
b9f28570ba619db5adacb05a7eadab77f140e876 | Create __init__.py | Kalimaha/fake_data_crud_service | fake_data_crud_service/rest/__init__.py | fake_data_crud_service/rest/__init__.py | __package__ = 'rest'
__author__ = 'Barbaglia, Guido'
__email__ = '[email protected];'
__license__ = 'MIT'
| mit | Python |
|
192e60955051f8ffb34f6cc1f1e3f226acb1b5fb | add missing primary key constraints (#7129) | pypa/warehouse,pypa/warehouse,pypa/warehouse,pypa/warehouse | warehouse/migrations/versions/b5bb5d08543d_create_missing_primary_key_constraints.py | warehouse/migrations/versions/b5bb5d08543d_create_missing_primary_key_constraints.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
create missing primary key constraints
Revision ID: b5bb5d08543d
Revises: 08aedc089eaf
Create Date: 2019-12-19 14:27:47.230249
"""
from alembic import op
revision = "b5bb5d08543d"
down_revision = "08aedc089eaf"
def upgrade():
op.create_primary_key(None, "release_files", ["id"])
op.create_primary_key(None, "release_dependencies", ["id"])
op.create_primary_key(None, "roles", ["id"])
def downgrade():
raise RuntimeError("Order No. 227 - Ни шагу назад!")
| apache-2.0 | Python |
|
a3d3040f16a604b534406d2f59a841d7ef6cebfa | Test HTTPMediaWikiAPI.get_content() | Lodifice/mfnf-pdf-export,Lodifice/mfnf-pdf-export,Lodifice/mfnf-pdf-export | tests/test_api.py | tests/test_api.py | import requests
from unittest import TestCase
from mfnf.api import HTTPMediaWikiAPI
class TestHTTPMediaWikiAPI(TestCase):
def setUp(self):
self.api = HTTPMediaWikiAPI(requests.Session())
def test_get_content(self):
content = self.api.get_content("Mathe für Nicht-Freaks: Epsilon-Delta-Kriterium der Stetigkeit")
self.assertTrue(content.startswith("{{#invoke:Mathe für Nicht-Freaks"))
| apache-2.0 | Python |
|
8139dc9e04025da001323122521951f5ed2c391b | Fix mysql encoding for users.profile.reason | sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap | users/migrations/0010_users-profile-encoding.py | users/migrations/0010_users-profile-encoding.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-25 01:43
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0009_remove_profile_active'),
]
operations = [
migrations.RunSQL("ALTER DATABASE default CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci;"),
migrations.RunSQL("ALTER TABLE users_profile CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"),
migrations.RunSQL("ALTER TABLE users_profile MODIFY reason LONGTEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"),
]
| mit | Python |
|
328901c74d1ee103a1ee5b2f26aa391ddeda465b | Add unit test for webpage creation and description | appeltel/AutoCMS,appeltel/AutoCMS,appeltel/AutoCMS | tests/test_web.py | tests/test_web.py | """Test the AutoCMS web reporting functionality."""
import os
import shutil
import unittest
import re
from autocms.core import load_configuration
from autocms.web import (
produce_default_webpage
)
class TestWebPageCreation(unittest.TestCase):
"""Test the accurate creation of test webpages."""
def setUp(self):
self.config = load_configuration('autocms.cfg.example')
self.config['AUTOCMS_WEBDIR'] = self.config['AUTOCMS_BASEDIR']
# call the scratch directory 'uscratch' instead of 'scratch'
# so that in pathological cases one does not resolve to
# /scratch which is often used.
self.testdir = os.path.join(self.config['AUTOCMS_BASEDIR'],
'uscratch')
os.makedirs(self.testdir)
self.page_description = 'AutoCMS Web Unit Test Description'
description_file = os.path.join(self.testdir, 'description.html')
with open(description_file, 'w') as description_filehandle:
description_filehandle.write(self.page_description)
def tearDown(self):
shutil.rmtree(os.path.join(self.config['AUTOCMS_BASEDIR'],
'uscratch'))
def test_create_webpage_with_description(self):
"""Test that a default webpage is created with description."""
records = []
produce_default_webpage(records, 'uscratch', self.config)
webpage_path = os.path.join(self.config['AUTOCMS_WEBDIR'],
'uscratch/index.html')
self.assertTrue(os.path.isfile(webpage_path))
with open(webpage_path) as webpage:
webpage_contents = webpage.read()
self.assertTrue(re.search(self.page_description, webpage_contents))
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
e6b086f3baef34cf1e5278e930a034a92f4eee76 | Add test for DirectionalGridCRF | wattlebird/pystruct,amueller/pystruct,massmutual/pystruct,d-mittal/pystruct,massmutual/pystruct,pystruct/pystruct,wattlebird/pystruct,d-mittal/pystruct,amueller/pystruct,pystruct/pystruct | tests/test_directional_crf.py | tests/test_directional_crf.py | import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
#from nose.tools import assert_almost_equal
import pystruct.toy_datasets as toy
from pystruct.lp_new import lp_general_graph
from pystruct.inference_methods import _make_grid_edges
from pystruct.crf import DirectionalGridCRF
def test_inference():
# Test inference with different weights in different directions
X, Y = toy.generate_blocks_multinomial(noise=2, n_samples=1, seed=1)
x, y = X[0], Y[0]
n_states = x.shape[-1]
edges = _make_grid_edges(x, neighborhood=4)
edge_list = _make_grid_edges(x, 4, return_lists=True)
edges = np.vstack(edge_list)
pw_horz = -1 * np.eye(n_states)
xx, yy = np.indices(pw_horz.shape)
# linear ordering constraint horizontally
pw_horz[xx > yy] = 1
# high cost for unequal labels vertically
pw_vert = -1 * np.eye(n_states)
pw_vert[xx != yy] = 1
pw_vert *= 10
# generate edge weights
edge_weights_horizontal = np.repeat(pw_horz[np.newaxis, :, :],
edge_list[0].shape[0], axis=0)
edge_weights_vertical = np.repeat(pw_vert[np.newaxis, :, :],
edge_list[1].shape[0], axis=0)
edge_weights = np.vstack([edge_weights_horizontal, edge_weights_vertical])
# do inference
res = lp_general_graph(-x.reshape(-1, n_states), edges, edge_weights,
exact=False)
# sam inference through CRF inferface
crf = DirectionalGridCRF(n_states=3, inference_method='lp')
w = np.hstack([np.ones(3), -pw_horz.ravel(), -pw_vert.ravel()])
y_pred = crf.inference(x, w, relaxed=True)
assert_array_almost_equal(res[0], y_pred[0].reshape(-1, n_states))
assert_array_almost_equal(res[1], y_pred[1])
assert_array_equal(y, np.argmax(y_pred[0], axis=-1))
| bsd-2-clause | Python |
|
439e4b740f6903341e81e158e6591c9cbd242a4c | Check in a tool that dumps graphviz output. | csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp | tools/graphviz.py | tools/graphviz.py | #!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Using the JSON dumped by the dump-dependency-json generator,
generate input suitable for graphviz to render a dependency graph of
targets."""
import collections
import json
import sys
def ParseTarget(target):
target, _, suffix = target.partition('#')
filename, _, target = target.partition(':')
return filename, target, suffix
def LoadEdges(filename, targets):
"""Load the edges map from the dump file, and filter it to only
show targets in |targets| and their depedendents."""
file = open('dump.json')
edges = json.load(file)
file.close()
# Copy out only the edges we're interested in from the full edge list.
target_edges = {}
to_visit = targets[:]
while to_visit:
src = to_visit.pop()
if src in target_edges:
continue
target_edges[src] = edges[src]
to_visit.extend(edges[src])
return target_edges
def WriteGraph(edges):
"""Print a graphviz graph to stdout.
|edges| is a map of target to a list of other targets it depends on."""
# Bucket targets by file.
files = collections.defaultdict(list)
for src, dst in edges.items():
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)
print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
# to those nodes.
for filename, targets in files.items():
if len(targets) == 1:
# If there's only one node for this file, simplify
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print '}'
if __name__ == '__main__':
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
sys.exit(1)
edges = LoadEdges('dump.json', sys.argv[1:])
WriteGraph(edges)
| bsd-3-clause | Python |
|
80d9a407d76f11573af5ccb6783f837b939b5466 | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/erfinv/benchmark/python/benchmark.scipy.py | lib/node_modules/@stdlib/math/base/special/erfinv/benchmark/python/benchmark.scipy.py | #!/usr/bin/env python
"""Benchmark scipy.special.erfinv."""
import timeit
name = "erfinv"
repeats = 3
iterations = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = iterations / elapsed
print(" ---")
print(" iterations: " + str(iterations))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from scipy.special import erfinv; from random import random;"
stmt = "y = erfinv(2.0*random() - 1.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in xrange(3):
print("# python::" + name)
elapsed = t.timeit(number=iterations)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(repeats, repeats)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python |
|
3133bbfcb5ee56c88ea20be21778519bffe77299 | Add another different type of book | palfrey/book-blog | literotica.py | literotica.py | from common import *
from sys import argv
from urlgrab import Cache
from re import compile, DOTALL, MULTILINE
cache = Cache()
url = argv[1]
titlePattern = compile("<h1>([^<]+)</h1>")
contentPattern = compile("<div class=\"b-story-body-x x-r15\">(.+?)</div><div class=\"b-story-stats-block\">" , DOTALL|MULTILINE)
nextPattern = compile("\"([^\"]+)\">Next</a>")
page = cache.get(url, max_age = -1)
data = page.read()
open("dump", "wb").write(data)
title = titlePattern.findall(data)
print title
title = title[0]
content = u""
while True:
contentMatch = contentPattern.findall(data)
print page.headers.headers
print type(data)
content += contentMatch[0]
#print content
nextMatch = nextPattern.findall(data)
if nextMatch == []:
break
nextURL = nextMatch[0]
print nextURL
page = cache.get(nextURL, max_age=-1)
data = page.read()
toc = tocStart(title)
generatePage(url, title, content, title, toc)
tocEnd(toc)
| agpl-3.0 | Python |
|
f31d6730a0cfbc50c55e9260391f399e77c3d631 | access the repository from console | vipints/genomeutils,vipints/genomeutils,vipints/genomeutils | utils/__init__.py | utils/__init__.py | __version__="0.1"
| bsd-3-clause | Python |
|
893679baff0367538bdf3b52b04f8bae72732be8 | Add migration to remove system avatar source. | sharmaeklavya2/zulip,isht3/zulip,joyhchen/zulip,susansls/zulip,zulip/zulip,punchagan/zulip,blaze225/zulip,PhilSk/zulip,samatdav/zulip,hackerkid/zulip,christi3k/zulip,brainwane/zulip,mahim97/zulip,kou/zulip,jackrzhang/zulip,amyliu345/zulip,paxapy/zulip,niftynei/zulip,samatdav/zulip,vikas-parashar/zulip,shubhamdhama/zulip,vabs22/zulip,AZtheAsian/zulip,vabs22/zulip,SmartPeople/zulip,Galexrt/zulip,jainayush975/zulip,j831/zulip,jrowan/zulip,susansls/zulip,dattatreya303/zulip,aakash-cr7/zulip,jackrzhang/zulip,paxapy/zulip,reyha/zulip,Jianchun1/zulip,ryanbackman/zulip,rht/zulip,susansls/zulip,zulip/zulip,vikas-parashar/zulip,souravbadami/zulip,verma-varsha/zulip,TigorC/zulip,amanharitsh123/zulip,niftynei/zulip,brockwhittaker/zulip,jphilipsen05/zulip,christi3k/zulip,rht/zulip,isht3/zulip,j831/zulip,zacps/zulip,brockwhittaker/zulip,amanharitsh123/zulip,jphilipsen05/zulip,sonali0901/zulip,AZtheAsian/zulip,zulip/zulip,cosmicAsymmetry/zulip,dhcrzf/zulip,tommyip/zulip,souravbadami/zulip,j831/zulip,jackrzhang/zulip,SmartPeople/zulip,blaze225/zulip,sharmaeklavya2/zulip,eeshangarg/zulip,vaidap/zulip,SmartPeople/zulip,ryanbackman/zulip,Jianchun1/zulip,susansls/zulip,rishig/zulip,kou/zulip,dawran6/zulip,zulip/zulip,rishig/zulip,peguin40/zulip,ryanbackman/zulip,tommyip/zulip,TigorC/zulip,eeshangarg/zulip,hackerkid/zulip,AZtheAsian/zulip,aakash-cr7/zulip,timabbott/zulip,zulip/zulip,Diptanshu8/zulip,synicalsyntax/zulip,calvinleenyc/zulip,blaze225/zulip,verma-varsha/zulip,peguin40/zulip,zacps/zulip,grave-w-grave/zulip,christi3k/zulip,j831/zulip,andersk/zulip,synicalsyntax/zulip,arpith/zulip,zacps/zulip,brainwane/zulip,JPJPJPOPOP/zulip,aakash-cr7/zulip,christi3k/zulip,JPJPJPOPOP/zulip,brainwane/zulip,arpith/zulip,PhilSk/zulip,susansls/zulip,andersk/zulip,amyliu345/zulip,brainwane/zulip,synicalsyntax/zulip,TigorC/zulip,AZtheAsian/zulip,jainayush975/zulip,calvinleenyc/zulip,samatdav/zulip,JPJPJPOPOP/zulip,zacps/zulip,dattatreya303/zulip,brockwhittaker/zulip,amyliu345/zulip,Juanvulcano/zulip,cosmicAsymmetry/zulip,SmartPeople/zulip,hackerkid/zulip,verma-varsha/zulip,reyha/zulip,reyha/zulip,jainayush975/zulip,punchagan/zulip,j831/zulip,PhilSk/zulip,dawran6/zulip,souravbadami/zulip,calvinleenyc/zulip,jrowan/zulip,reyha/zulip,Diptanshu8/zulip,timabbott/zulip,eeshangarg/zulip,KingxBanana/zulip,eeshangarg/zulip,sonali0901/zulip,aakash-cr7/zulip,punchagan/zulip,vabs22/zulip,TigorC/zulip,rht/zulip,andersk/zulip,KingxBanana/zulip,peguin40/zulip,kou/zulip,Diptanshu8/zulip,cosmicAsymmetry/zulip,timabbott/zulip,brainwane/zulip,souravbadami/zulip,paxapy/zulip,shubhamdhama/zulip,niftynei/zulip,ryanbackman/zulip,dattatreya303/zulip,arpith/zulip,vaidap/zulip,cosmicAsymmetry/zulip,tommyip/zulip,KingxBanana/zulip,tommyip/zulip,amyliu345/zulip,verma-varsha/zulip,mahim97/zulip,brockwhittaker/zulip,punchagan/zulip,dhcrzf/zulip,jainayush975/zulip,jphilipsen05/zulip,vaidap/zulip,samatdav/zulip,SmartPeople/zulip,JPJPJPOPOP/zulip,vabs22/zulip,grave-w-grave/zulip,kou/zulip,dawran6/zulip,joyhchen/zulip,dhcrzf/zulip,mahim97/zulip,vaidap/zulip,peguin40/zulip,susansls/zulip,brockwhittaker/zulip,showell/zulip,grave-w-grave/zulip,TigorC/zulip,synicalsyntax/zulip,rishig/zulip,eeshangarg/zulip,showell/zulip,isht3/zulip,andersk/zulip,dattatreya303/zulip,jackrzhang/zulip,j831/zulip,joyhchen/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,sonali0901/zulip,zacps/zulip,sharmaeklavya2/zulip,Galexrt/zulip,paxapy/zulip,joyhchen/zulip,jrowan/zulip,aakash-cr7/zulip,samatdav/zulip,arpith/zulip,shubhamdhama/zulip,souravbadami/zulip,jphilipsen05/zulip,hackerkid/zulip,joyhchen/zulip,rht/zulip,vaidap/zulip,niftynei/zulip,rht/zulip,synicalsyntax/zulip,Diptanshu8/zulip,jackrzhang/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,shubhamdhama/zulip,rishig/zulip,showell/zulip,vaidap/zulip,jainayush975/zulip,blaze225/zulip,rht/zulip,verma-varsha/zulip,cosmicAsymmetry/zulip,sharmaeklavya2/zulip,Diptanshu8/zulip,Juanvulcano/zulip,calvinleenyc/zulip,vabs22/zulip,hackerkid/zulip,paxapy/zulip,grave-w-grave/zulip,shubhamdhama/zulip,brainwane/zulip,JPJPJPOPOP/zulip,calvinleenyc/zulip,zulip/zulip,showell/zulip,aakash-cr7/zulip,arpith/zulip,andersk/zulip,Juanvulcano/zulip,showell/zulip,jphilipsen05/zulip,showell/zulip,zacps/zulip,vikas-parashar/zulip,christi3k/zulip,dhcrzf/zulip,KingxBanana/zulip,eeshangarg/zulip,blaze225/zulip,hackerkid/zulip,dattatreya303/zulip,Galexrt/zulip,Jianchun1/zulip,Juanvulcano/zulip,arpith/zulip,PhilSk/zulip,andersk/zulip,dhcrzf/zulip,grave-w-grave/zulip,sonali0901/zulip,rishig/zulip,PhilSk/zulip,synicalsyntax/zulip,jrowan/zulip,mahim97/zulip,amanharitsh123/zulip,jackrzhang/zulip,punchagan/zulip,PhilSk/zulip,jrowan/zulip,jainayush975/zulip,Jianchun1/zulip,brockwhittaker/zulip,Galexrt/zulip,Galexrt/zulip,vikas-parashar/zulip,kou/zulip,JPJPJPOPOP/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,KingxBanana/zulip,mahim97/zulip,brainwane/zulip,rishig/zulip,AZtheAsian/zulip,sonali0901/zulip,isht3/zulip,sonali0901/zulip,joyhchen/zulip,peguin40/zulip,showell/zulip,andersk/zulip,reyha/zulip,niftynei/zulip,Jianchun1/zulip,amanharitsh123/zulip,paxapy/zulip,vabs22/zulip,cosmicAsymmetry/zulip,verma-varsha/zulip,amyliu345/zulip,isht3/zulip,kou/zulip,tommyip/zulip,tommyip/zulip,amanharitsh123/zulip,punchagan/zulip,souravbadami/zulip,tommyip/zulip,mahim97/zulip,hackerkid/zulip,dhcrzf/zulip,AZtheAsian/zulip,dawran6/zulip,calvinleenyc/zulip,timabbott/zulip,dawran6/zulip,amyliu345/zulip,timabbott/zulip,ryanbackman/zulip,grave-w-grave/zulip,punchagan/zulip,amanharitsh123/zulip,ryanbackman/zulip,Juanvulcano/zulip,reyha/zulip,shubhamdhama/zulip,vikas-parashar/zulip,blaze225/zulip,rishig/zulip,christi3k/zulip,dhcrzf/zulip,shubhamdhama/zulip,Juanvulcano/zulip,Galexrt/zulip,eeshangarg/zulip,rht/zulip,kou/zulip,isht3/zulip,samatdav/zulip,peguin40/zulip,Diptanshu8/zulip,SmartPeople/zulip,vikas-parashar/zulip,TigorC/zulip,zulip/zulip,jphilipsen05/zulip,Galexrt/zulip,niftynei/zulip,Jianchun1/zulip,jrowan/zulip,dawran6/zulip,KingxBanana/zulip | zerver/migrations/0031_remove_system_avatar_source.py | zerver/migrations/0031_remove_system_avatar_source.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0030_realm_org_type'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='avatar_source',
field=models.CharField(choices=[('G', 'Hosted by Gravatar'), ('U', 'Uploaded by user')], max_length=1, default='G'),
),
]
| apache-2.0 | Python |
|
ff89cda5f77bec569c7451c9ee72ef7c028f7552 | Add sample extraction script | lisongze/SForecast,lisongze/SForecast,lisongze/SForecast,lisongze/SForecast | extract_samples.py | extract_samples.py | import sys, os
import numpy as np
import pandas as pd
import datetime
if __name__ == '__main__':
infile = sys.argv[1]
csv_content = pd.read_csv(infile, [0])
| mit | Python |
|
73f47cc6a8a98b2026ee27985f8c3042352c941b | Add lc066_plus_one.py | bowen0701/algorithms_data_structures | lc066_plus_one.py | lc066_plus_one.py | """Leetcode 66. Plus One
Easy
URL: https://leetcode.com/problems/plus-one/
Given a non-empty array of digits representing a non-negative integer,
plus one to the integer.
The digits are stored such that the most significant digit is at the
head of the list, and each element in the array contain a single digit.
You may assume the integer does not contain any leading zero,
except the number 0 itself.
Example 1:
Input: [1,2,3]
Output: [1,2,4]
Explanation: The array represents the integer 123.
Example 2:
Input: [4,3,2,1]
Output: [4,3,2,2]
Explanation: The array represents the integer 4321.
"""
class Solution(object):
def plusOne(self, digits):
"""
:type digits: List[int]
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
|
a620cc46d97f80ef658c46130f0448c36844d847 | Add alembic revision | HERA-Team/hera_mc,HERA-Team/Monitor_and_Control,HERA-Team/hera_mc | alembic/versions/63b625cf7b06_add_white_rabbit_status.py | alembic/versions/63b625cf7b06_add_white_rabbit_status.py | """add white rabbit status
Revision ID: 63b625cf7b06
Revises: e83aa47e530b
Create Date: 2019-12-06 02:45:01.418693+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '63b625cf7b06'
down_revision = 'e83aa47e530b'
branch_labels = None
depends_on = None
def upgrade():
op.create_table('node_white_rabbit_status',
sa.Column('node_time', sa.BigInteger(), nullable=False),
sa.Column('node', sa.Integer(), nullable=False),
sa.Column('board_info_str', sa.String(), nullable=True),
sa.Column('aliases', sa.String(), nullable=True),
sa.Column('ip', sa.String(), nullable=True),
sa.Column('mode', sa.String(), nullable=True),
sa.Column('serial', sa.String(), nullable=True),
sa.Column('temperature', sa.Float(), nullable=True),
sa.Column('build_date', sa.BigInteger(), nullable=True),
sa.Column('gw_date', sa.BigInteger(), nullable=True),
sa.Column('gw_version', sa.String(), nullable=True),
sa.Column('gw_id', sa.String(), nullable=True),
sa.Column('build_hash', sa.String(), nullable=True),
sa.Column('manufacture_tag', sa.String(), nullable=True),
sa.Column('manufacture_device', sa.String(), nullable=True),
sa.Column('manufacture_date', sa.BigInteger(), nullable=True),
sa.Column('manufacture_partnum', sa.String(), nullable=True),
sa.Column('manufacture_serial', sa.String(), nullable=True),
sa.Column('manufacture_vendor', sa.String(), nullable=True),
sa.Column('port0_ad', sa.Integer(), nullable=True),
sa.Column('port0_link_asymmetry_ps', sa.Integer(), nullable=True),
sa.Column('port0_manual_phase_ps', sa.Integer(), nullable=True),
sa.Column('port0_clock_offset_ps', sa.Integer(), nullable=True),
sa.Column('port0_cable_rt_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_master_slave_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_master_rx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_slave_rx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_master_tx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_slave_tx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port0_hd', sa.Integer(), nullable=True),
sa.Column('port0_link', sa.Boolean(), nullable=True),
sa.Column('port0_lock', sa.Boolean(), nullable=True),
sa.Column('port0_md', sa.Integer(), nullable=True),
sa.Column('port0_rt_time_ps', sa.Integer(), nullable=True),
sa.Column('port0_nsec', sa.Integer(), nullable=True),
sa.Column('port0_packets_received', sa.Integer(), nullable=True),
sa.Column('port0_phase_setpoint_ps', sa.Integer(), nullable=True),
sa.Column('port0_servo_state', sa.String(), nullable=True),
sa.Column('port0_sv', sa.Integer(), nullable=True),
sa.Column('port0_sync_source', sa.String(), nullable=True),
sa.Column('port0_packets_sent', sa.Integer(), nullable=True),
sa.Column('port0_update_counter', sa.Integer(), nullable=True),
sa.Column('port0_time', sa.BigInteger(), nullable=True),
sa.Column('port1_ad', sa.Integer(), nullable=True),
sa.Column('port1_link_asymmetry_ps', sa.Integer(), nullable=True),
sa.Column('port1_manual_phase_ps', sa.Integer(), nullable=True),
sa.Column('port1_clock_offset_ps', sa.Integer(), nullable=True),
sa.Column('port1_cable_rt_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_master_slave_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_master_rx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_slave_rx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_master_tx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_slave_tx_phy_delay_ps', sa.Integer(), nullable=True),
sa.Column('port1_hd', sa.Integer(), nullable=True),
sa.Column('port1_link', sa.Boolean(), nullable=True),
sa.Column('port1_lock', sa.Boolean(), nullable=True),
sa.Column('port1_md', sa.Integer(), nullable=True),
sa.Column('port1_rt_time_ps', sa.Integer(), nullable=True),
sa.Column('port1_nsec', sa.Integer(), nullable=True),
sa.Column('port1_packets_received', sa.Integer(), nullable=True),
sa.Column('port1_phase_setpoint_ps', sa.Integer(), nullable=True),
sa.Column('port1_servo_state', sa.String(), nullable=True),
sa.Column('port1_sv', sa.Integer(), nullable=True),
sa.Column('port1_sync_source', sa.String(), nullable=True),
sa.Column('port1_packets_sent', sa.Integer(), nullable=True),
sa.Column('port1_update_counter', sa.Integer(), nullable=True),
sa.Column('port1_time', sa.BigInteger(), nullable=True),
sa.PrimaryKeyConstraint('node_time', 'node')
)
def downgrade():
op.drop_table('node_white_rabbit_status')
| bsd-2-clause | Python |
|
b51398d602a157ce55fd7e08eedd953051f716a1 | Add script to update uploaded files. | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org | backend/scripts/updatedf.py | backend/scripts/updatedf.py | #!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
| mit | Python |
|
ba3582d1e4521c040ef9f43c3a4760eb4fd694da | add lib/config_loader.py | izakp/hokusai | hokusai/lib/config_loader.py | hokusai/lib/config_loader.py | import os
import tempfile
import shutil
from urlparse import urlparse
import boto3
import yaml
from hokusai.lib.common import get_region_name
from hokusai.lib.exceptions import HokusaiError
class ConfigLoader
def __init__(self, uri):
self.uri = uri
def load(self):
uri = urlparse(self.uri)
if not uri.path.endswith('yaml') or not uri.path.endswith('yml'):
raise HokusaiError('Uri must be of Yaml file type')
tmpdir = tempfile.mkdtemp()
switch(uri.scheme):
case 's3':
client = boto3.client('s3', region_name=get_region_name())
tmp_configfile = os.path.join(tmpdir, 'config')
client.download_file(uri.netloc, uri.path.lstrip('/'), tmp_configfile)
default:
tmp_configfile = uri.path
with open(tmp_configfile, 'r') as f:
struct = yaml.safe_load(f.read())
if type(struct) is not obj:
raise HokusaiError('Yaml is invalid')
return struct
| mit | Python |
|
21e766688e3cc4d08339f81c35dba43d26010a6d | edit vehicle form | jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk | vehicles/forms.py | vehicles/forms.py | from django import forms
class EditVehicleForm(forms.Form):
fleet_number = forms.CharField(label='Fleet number', required=False)
reg = forms.CharField(label='Registration', required=False)
vehicle_type = forms.CharField(label='Type', required=False)
colours = forms.CharField(label='Colours', required=False)
notes = forms.CharField(label='Notes', required=False)
| mpl-2.0 | Python |
|
fbf36a2fb52b5ed1aceaec4c1d1075448584a97d | Test that modules can be imported in any order | lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor | tests/test_imports.py | tests/test_imports.py | """Test that all modules/packages in the lektor tree are importable in any order
Here we import each module by itself, one at a time, each in a new
python interpreter.
"""
import pkgutil
import sys
from subprocess import run
import pytest
import lektor
def iter_lektor_modules():
for module in pkgutil.walk_packages(lektor.__path__, f"{lektor.__name__}."):
yield module.name
@pytest.fixture(params=iter_lektor_modules())
def module(request):
return request.param
def test_import(module):
python = sys.executable
assert run([python, "-c", f"import {module}"], check=False).returncode == 0
| bsd-3-clause | Python |
|
e3bdccc8c7ef23b449a53043f4a048fe71cd642c | Use an explicit list due to the filter-object type of python3 | dulaccc/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting | accounting/apps/connect/views.py | accounting/apps/connect/views.py | from django.views import generic
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from accounting.apps.books.models import Organization
from .steps import (
CreateOrganizationStep,
ConfigureTaxRatesStep,
ConfigureBusinessSettingsStep,
ConfigureFinancialSettingsStep,
AddEmployeesStep,
ConfigurePayRunSettingsStep,
AddFirstClientStep,
AddFirstInvoiceStep)
class RootRedirectionView(generic.View):
"""
Redirect to the books if an organization is already configured
Otherwise we begin the step by step creation process to help the user
begin and configure his books
"""
def get(self, *args, **kwargs):
if Organization.objects.all().count():
return HttpResponseRedirect(reverse('books:dashboard'))
class GettingStartedView(generic.TemplateView):
template_name = "connect/getting_started.html"
def get_steps(self, request):
user = request.user
steps = steps = [
CreateOrganizationStep(user),
ConfigureTaxRatesStep(user),
ConfigureBusinessSettingsStep(user),
ConfigureFinancialSettingsStep(user),
AddEmployeesStep(user),
ConfigurePayRunSettingsStep(user),
AddFirstClientStep(user),
AddFirstInvoiceStep(user),
]
return steps
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
request = self.request
steps = self.get_steps(self.request)
uncomplete_filter = lambda s: not s.completed(request)
uncompleted_steps = list(filter(uncomplete_filter, steps))
try:
next_step = next(s for s in uncompleted_steps)
except StopIteration:
next_step = None
ctx['steps'] = steps
ctx['next_step'] = next_step
ctx['all_steps_completed'] = bool(next_step is None)
return ctx
def post(self, request, *args, **kwargs):
steps = self.get_steps(request)
uncompleted_steps = filter(lambda s: not s.completed(request), steps)
if not len(uncompleted_steps):
return super().post(request, *args, **kwargs)
# unmark the session as getting started
request.sessions['getting_started_done'] = True
return HttpResponseRedirect(reverse('books:dashboard'))
| from django.views import generic
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from accounting.apps.books.models import Organization
from .steps import (
CreateOrganizationStep,
ConfigureTaxRatesStep,
ConfigureBusinessSettingsStep,
ConfigureFinancialSettingsStep,
AddEmployeesStep,
ConfigurePayRunSettingsStep,
AddFirstClientStep,
AddFirstInvoiceStep)
class RootRedirectionView(generic.View):
"""
Redirect to the books if an organization is already configured
Otherwise we begin the step by step creation process to help the user
begin and configure his books
"""
def get(self, *args, **kwargs):
if Organization.objects.all().count():
return HttpResponseRedirect(reverse('books:dashboard'))
class GettingStartedView(generic.TemplateView):
template_name = "connect/getting_started.html"
def get_steps(self, request):
user = request.user
steps = steps = [
CreateOrganizationStep(user),
ConfigureTaxRatesStep(user),
ConfigureBusinessSettingsStep(user),
ConfigureFinancialSettingsStep(user),
AddEmployeesStep(user),
ConfigurePayRunSettingsStep(user),
AddFirstClientStep(user),
AddFirstInvoiceStep(user),
]
return steps
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
request = self.request
steps = self.get_steps(self.request)
uncompleted_steps = filter(lambda s: not s.completed(request), steps)
try:
next_step = next(uncompleted_steps)
except StopIteration:
next_step = None
ctx['steps'] = steps
ctx['next_step'] = next_step
ctx['all_steps_completed'] = bool(next_step is None)
return ctx
def post(self, request, *args, **kwargs):
steps = self.get_steps(request)
uncompleted_steps = filter(lambda s: not s.completed(request), steps)
if not len(uncompleted_steps):
return super().post(request, *args, **kwargs)
# unmark the session as getting started
request.sessions['getting_started_done'] = True
return HttpResponseRedirect(reverse('books:dashboard'))
| mit | Python |
b7fff47b228fbe8774c9f465c383ae1015c598fe | use cvmfs.py for openRootCatalog.py | DrDaveD/cvmfs,alhowaidi/cvmfsNDN,trshaffer/cvmfs,DrDaveD/cvmfs,trshaffer/cvmfs,alhowaidi/cvmfsNDN,Moliholy/cvmfs,alhowaidi/cvmfsNDN,MicBrain/cvmfs,djw8605/cvmfs,MicBrain/cvmfs,reneme/cvmfs,Gangbiao/cvmfs,cvmfs-testing/cvmfs,djw8605/cvmfs,alhowaidi/cvmfsNDN,MicBrain/cvmfs,Gangbiao/cvmfs,cvmfs-testing/cvmfs,cvmfs/cvmfs,alhowaidi/cvmfsNDN,DrDaveD/cvmfs,cvmfs-testing/cvmfs,reneme/cvmfs,MicBrain/cvmfs,trshaffer/cvmfs,Gangbiao/cvmfs,Moliholy/cvmfs,DrDaveD/cvmfs,cvmfs/cvmfs,Gangbiao/cvmfs,Gangbiao/cvmfs,cvmfs/cvmfs,Moliholy/cvmfs,djw8605/cvmfs,cvmfs/cvmfs,Moliholy/cvmfs,cvmfs-testing/cvmfs,djw8605/cvmfs,DrDaveD/cvmfs,DrDaveD/cvmfs,cvmfs/cvmfs,trshaffer/cvmfs,MicBrain/cvmfs,DrDaveD/cvmfs,cvmfs/cvmfs,djw8605/cvmfs,cvmfs-testing/cvmfs,reneme/cvmfs,cvmfs/cvmfs,trshaffer/cvmfs,reneme/cvmfs,Moliholy/cvmfs,reneme/cvmfs | add-ons/tools/openRootCatalog.py | add-ons/tools/openRootCatalog.py | #!/usr/bin/python
import cvmfs
import sys
def usage():
print sys.argv[0] + " <repository path | repository url>"
print "This script decompresses the root catalog file to a temporary storage"
print "and opens this directly with sqlite3."
print "WARNING: changes to this database will not persist, as it is only a temp"
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
repo = cvmfs.OpenRepository(sys.argv[1])
root_clg = repo.RetrieveRootCatalog()
root_clg.OpenInteractive()
main()
| #!/usr/bin/python
import sys
import zlib
import tempfile
import subprocess
def getRootCatalogName(cvmfspublished):
try:
cvmfspubdata = open(cvmfspublished, 'rb').read()
except:
print "cannot open .cvmfspublished"
sys.exit(1)
lines = cvmfspubdata.split('\n')
if len(lines) < 1:
print ".cvmfspublished is malformed"
sys.exit(1)
return lines[0][1:]
def decompressCatalog(filename, destination):
str_object1 = open(filename, 'rb').read()
str_object2 = zlib.decompress(str_object1)
f = open(destination, 'wb')
f.write(str_object2)
f.close()
def openCatalog(filename):
subprocess.call(['sqlite3', filename])
def usage():
print sys.argv[0] + " <repository name>"
print "This script decompresses the root catalog file to a temporary storage"
print "and opens this directly with sqlite3."
print "WARNING: changes to this database will not persist, as it is only a temp"
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
repoDir = "/srv/cvmfs/" + sys.argv[1] + "/";
rootCatalog = getRootCatalogName(repoDir + ".cvmfspublished")
myTmpFile = tempfile.NamedTemporaryFile('wb')
decompressCatalog(repoDir + "data/" + rootCatalog[:2] + "/" + rootCatalog[2:] + "C", myTmpFile.name)
openCatalog(myTmpFile.name)
myTmpFile.close()
main() | bsd-3-clause | Python |
a8ca46a8d964907038f6c096a316175543bc2518 | add mask_iou test | chainer/chainercv,pfnet/chainercv,yuyu2172/chainercv,chainer/chainercv,yuyu2172/chainercv | tests/utils_tests/mask_tests/test_mask_iou.py | tests/utils_tests/mask_tests/test_mask_iou.py | from __future__ import division
import unittest
import numpy as np
from chainer import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.utils import mask_iou
@testing.parameterize(
{'mask_a': np.array(
[[[False, False], [True, True]],
[[True, True], [False, False]]],
dtype=np.bool),
'mask_b': np.array(
[[[False, False], [True, True]],
[[True, True], [False, False]],
[[True, False], [True, True]],
[[True, True], [False, True]]],
dtype=np.bool),
'expected': np.array(
[[1., 0., 2 / 3, 1 / 4],
[0., 1., 1 / 4, 2 / 3]],
dtype=np.float32)
},
{'mask_a': np.array(
[[[False, False], [True, True]],
[[True, True], [False, False]],
[[True, True], [True, False]],
[[False, True], [True, True]]],
dtype=np.bool),
'mask_b': np.array(
[[[False, False], [True, True]],
[[True, True], [False, False]]],
dtype=np.bool),
'expected': np.array(
[[1., 0.], [0., 1.], [1 / 4, 2 / 3], [2 / 3, 1 / 4]],
dtype=np.float32)
},
{'mask_a': np.zeros((0, 2, 2), dtype=np.bool),
'mask_b': np.array([[[False, False], [False, False]]], dtype=np.bool),
'expected': np.zeros((0, 1), dtype=np.float32)
},
)
class TestMaskIou(unittest.TestCase):
def check(self, mask_a, mask_b, expected):
iou = mask_iou(mask_a, mask_b)
self.assertIsInstance(iou, type(expected))
np.testing.assert_equal(
cuda.to_cpu(iou),
cuda.to_cpu(expected))
def test_mask_iou_cpu(self):
self.check(self.mask_a, self.mask_b, self.expected)
@attr.gpu
def test_mask_iou_gpu(self):
self.check(
cuda.to_gpu(self.mask_a),
cuda.to_gpu(self.mask_b),
cuda.to_gpu(self.expected))
@testing.parameterize(
{'mask_a': np.array([[[False], [True, True]]], dtype=np.bool),
'mask_b': np.array([[[False, False], [True, True]]], dtype=np.bool)
},
{'mask_a': np.array([[[False, False, True], [True, True]]], dtype=np.bool),
'mask_b': np.array([[[False, False], [True, True]]], dtype=np.bool)
},
{'mask_a': np.array([[[False, False], [True, True]]], dtype=np.bool),
'mask_b': np.array([[[False], [True, True]]], dtype=np.bool)
},
{'mask_a': np.array([[[False, False], [True, True]]], dtype=np.bool),
'mask_b': np.array([[[False, False, True], [True, True]]], dtype=np.bool)
},
)
class TestMaskIouInvalidShape(unittest.TestCase):
def test_mask_iou_invalid(self):
with self.assertRaises(IndexError):
mask_iou(self.mask_a, self.mask_b)
testing.run_module(__name__, __file__)
| mit | Python |
|
a377195fa95b819924ddfbd3fb564cffbe08f9ae | Add an example for solvent model to customize solvent cavity | sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf | examples/solvent/30-custom_solvent_cavity.py | examples/solvent/30-custom_solvent_cavity.py | #!/usr/bin/env python
'''
Custom solvent cavity
'''
import numpy
from pyscf import gto, qmmm, solvent
#
# Case 1. Cavity for dummy atoms with basis on the dummy atoms
#
mol = gto.M(atom='''
C 0.000000 0.000000 -0.542500
O 0.000000 0.000000 0.677500
H 0.000000 0.9353074360871938 -1.082500
H 0.000000 -0.9353074360871938 -1.082500
X-C 0.000000 0.000000 -1.5
X-O 0.000000 0.000000 1.6
''',
verbose = 4)
sol = solvent.ddCOSMO(mol)
cavity_radii = sol.get_atomic_radii()
cavity_radii[4] = 3.0 # Bohr, for X-C
cavity_radii[5] = 2.5 # Bohr, for X-O
# Overwrite the get_atom_radii method to feed the custom cavity into the solvent model
sol.get_atomic_radii = lambda: cavity_radii
mf = mol.RHF().ddCOSMO(sol)
mf.run()
#
# Case 2. Cavity for dummy atoms (without basis)
#
mol = gto.M(atom='''
C 0.000000 0.000000 -0.542500
O 0.000000 0.000000 0.677500
H 0.000000 0.9353074360871938 -1.082500
H 0.000000 -0.9353074360871938 -1.082500
''',
verbose = 4)
# Use a MM molecule to define cavity from dummy atoms.
# See also the example 22-with_qmmm.py
coords = numpy.array([
[0, 0, -1.5],
[0, 0, 1.6],
])
charges = numpy.array([0, 0])
mm_atoms = [('X', c) for c in coords]
mm_mol = qmmm.create_mm_mol(mm_atoms, charges)
# Make a giant system include both QM and MM particles
qmmm_mol = mol + mm_mol
# The solvent model is based on the giant system
sol = solvent.ddCOSMO(qmmm_mol)
cavity_radii = sol.get_atomic_radii()
# Custom cavity
cavity_radii[4] = 3.0 # Bohr
cavity_radii[5] = 2.5 # Bohr
# Overwrite the get_atom_radii method to feed the custom cavity into the solvent model
sol.get_atomic_radii = lambda: cavity_radii
mf = mol.RHF().QMMM(coords, charges)
mf = mf.ddCOSMO(sol)
mf.run()
| apache-2.0 | Python |
|
97ecb8f7dbcb36cfa9e2d180f29d29002eea127e | add elasticsearch import | snowch/biginsight-examples,snowch/biginsight-examples | examples/ElasticsearchIntegrationWithSpark/import_from_elasticsearch.py | examples/ElasticsearchIntegrationWithSpark/import_from_elasticsearch.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
from operator import add
from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.sql import SQLContext
if __name__ == "__main__":
if len(sys.argv) != 7:
print("Usage: export_to_elasticsearch.py <host> <port> <user> <pass> <tmpdir> <tmpHdfsDir>", file=sys.stderr)
exit(-1)
host = sys.argv[1]
port = sys.argv[2]
user = sys.argv[3]
password = sys.argv[4]
tmpDir = sys.argv[5]
tmpHdfsDir = sys.argv[6]
conf = SparkConf().setAppName("Elasticsearch example")
# see https://www.elastic.co/guide/en/elasticsearch/hadoop/current/configuration.html
conf.set("es.nodes",host)
conf.set("es.port",str(port))
conf.set("es.net.http.auth.user",user)
conf.set("es.net.http.auth.pass",password)
conf.set("es.net.ssl","true")
conf.set("es.net.ssl.truststore.location","truststore.jks")
conf.set("es.net.ssl.truststore.pass","mypassword")
conf.set("es.nodes.wan.only","true")
sc = SparkContext(conf=conf)
sqlContext = SQLContext(sc)
# read the data from elasticsearch
esdata = sqlContext.read.format("es").load("spark/{0}".format(tmpDir))
# save to hdfs
esdata.rdd.saveAsTextFile(tmpHdfsDir)
sc.stop()
| apache-2.0 | Python |
|
8b419fefc93f9084b8d504b7382fd51087e4645f | add migration script that removes table 'regressions' | PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild | benchbuild/db/versions/001_Remove_RegressionTest_table.py | benchbuild/db/versions/001_Remove_RegressionTest_table.py | """
Remove unneeded Regressions table.
This table can and should be reintroduced by an experiment that requires it.
"""
from sqlalchemy import Table, Column, ForeignKey, Integer, String
from benchbuild.utils.schema import metadata
META = metadata()
REGRESSION = Table('regressions', META,
Column(
'run_id',
Integer,
ForeignKey(
'run.id', onupdate="CASCADE", ondelete="CASCADE"),
index=True,
primary_key=True), Column('name', String),
Column('module', String), Column('project_name', String))
def upgrade(migrate_engine):
META.bind = migrate_engine
REGRESSION.drop()
def downgrade(migrate_engine):
META.bind = migrate_engine
REGRESSION.create()
| mit | Python |
|
847232f2890a4700e4983cd971ef2cd1a76a4b1d | rebuild cases | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/cleanup/management/commands/rebuild_cases.py | corehq/apps/cleanup/management/commands/rebuild_cases.py | from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from corehq.form_processor.backends.sql.processor import FormProcessorSQL
from corehq.form_processor.models import RebuildWithReason
logger = logging.getLogger('rebuild_cases')
logger.setLevel('DEBUG')
class Command(BaseCommand):
help = ('Rebuild given cases')
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument('cases_csv_file')
def handle(self, domain, cases_csv_file, **options):
cases = []
with open(cases_csv_file, 'r') as f:
lines = f.readlines()
cases = [l.strip() for l in lines]
rebuild_cases(domain, cases, logger)
def rebuild_cases(domain, cases, logger):
detail = RebuildWithReason(reason='undo UUID clash')
for case_id in cases:
try:
FormProcessorSQL.hard_rebuild_case(domain, case_id, detail)
logger.info('Case %s rebuilt' % case_id)
except Exception as e:
logger.error("Exception rebuilding case %s".format(case_id))
logger.exception("message")
| bsd-3-clause | Python |
|
0919661333c8099a85e7c12c6ce9393ced8c985b | create the lib directory to hold vendored libraries | imzhulei/ceph-deploy,ddiss/ceph-deploy,jumpstarter-io/ceph-deploy,ceph/ceph-deploy,shenhequnying/ceph-deploy,Vicente-Cheng/ceph-deploy,rtulke/ceph-deploy,ghxandsky/ceph-deploy,alfredodeza/ceph-deploy,branto1/ceph-deploy,ddiss/ceph-deploy,shenhequnying/ceph-deploy,isyippee/ceph-deploy,zhouyuan/ceph-deploy,rtulke/ceph-deploy,codenrhoden/ceph-deploy,ktdreyer/ceph-deploy,zhouyuan/ceph-deploy,jumpstarter-io/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,Vicente-Cheng/ceph-deploy,ghxandsky/ceph-deploy,imzhulei/ceph-deploy,branto1/ceph-deploy,ktdreyer/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,trhoden/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,SUSE/ceph-deploy,isyippee/ceph-deploy,SUSE/ceph-deploy,ceph/ceph-deploy,codenrhoden/ceph-deploy,alfredodeza/ceph-deploy,osynge/ceph-deploy | ceph_deploy/lib/__init__.py | ceph_deploy/lib/__init__.py | """
This module is meant for vendorizing Python libraries. Most libraries will need
to have some ``sys.path`` alterations done unless they are doing relative
imports.
Do **not** add anything to this module that does not represent a vendorized
library.
"""
import remoto
| mit | Python |
|
6303ffeee0118a2fef1cb0a9abfe931a04ee6974 | Fix web app. #79 | joebowen/ChannelWorm,openworm/ChannelWorm,joebowen/ChannelWorm,cheelee/ChannelWorm,VahidGh/ChannelWorm,joebowen/ChannelWorm,cheelee/ChannelWorm,cheelee/ChannelWorm,openworm/ChannelWorm,gsarma/ChannelWorm,VahidGh/ChannelWorm,gsarma/ChannelWorm,openworm/ChannelWorm,gsarma/ChannelWorm,joebowen/ChannelWorm,VahidGh/ChannelWorm,cheelee/ChannelWorm,openworm/ChannelWorm,VahidGh/ChannelWorm,gsarma/ChannelWorm | channelworm/web_app/wsgi.py | channelworm/web_app/wsgi.py | """
WSGI config for myproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| mit | Python |
|
e5ed4497fd8aee709dd441cfcddc9a1a91c538d4 | add theilsen | rlowrance/re-local-linear,rlowrance/re-local-linear,rlowrance/re-local-linear | chart-02-theilsen-median-of-root-median-squared-errors.py | chart-02-theilsen-median-of-root-median-squared-errors.py | # create files for chart-02-theilsen-median-of-root-mdian-squared-errors
# with these choices
# metric in median-root-median-squared-errors
# model in theilsen
# ndays in 30 60 ... 360
# predictors in act actlog ct ctlog
# responses in price logprice
# usetax in yes no
# year in 2008
# invocations and files created
# python chart-02X.py makefile -> src/chart-02X.makefile
# python chart-02X.py data -> data/working/chart-02X.data
# python chart-02X.py txt -> data/working/chart-02X.txt
# python chart-02X.py txtY -> data/working/chart-02X-Y.txt
import sys
from Bunch import Bunch
from chart_02_template import chart
def main():
specs = Bunch(metric='median-of-root-median-squared-errors',
title='Median of Root Median Squared Errors',
model='theilsen',
training_periods=['30', '60', '90', '120', '150', '180',
'210', '240', '270', '300', '330', '360'],
feature_sets=['act', 'actlog', 'ct', 'ctlog'],
responses=['price', 'logprice'],
year='2008')
chart(specs=specs,
argv=sys.argv)
if __name__ == '__main__':
main()
| mit | Python |
|
2df737f2690925e2752ae7633f1db05f952209bc | Create led_record.py | H-SG/rpi-camrecord,H-SG/rpi-camrecord | led_record.py | led_record.py | #!/usr/bin/env python
import RPi.GPIO as GPIO
from time import sleep
import os
import subprocess
# Setup getting an image
def get_video(state):
folderName = "/home/pi/HumphreyData/"
if os.path.isdir(folderName)== False:
os.makedirs(folderName)
fileNumber = 1
filePath = folderName + str(fileNumber) + ".h264"
while os.path.isfile(filePath):
fileNumber += 1
filePath = folderName + str(fileNumber) + ".h264"
fileName = str(fileNumber)
cmdStr = "sudo raspivid -n -w 1024 -h 768 -t 0 -fps 2 -o %s/%s.h264" %(folderName, fileName)
if state:
capture = subprocess.Popen(cmdStr, shell=True)
else:
pid = "sudo pkill -15 -f raspivid"
os.system(pid)
# Setup LED control
def switch_LED(state):
for item in LEDpins:
GPIO.output(item, state)
# Setup GPIO config
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
# Setup GPIO pins
LEDpins = [19, 21]
switchState = 23
# If true, LEDS are off -> GPIO pins are current sinks
lOn = False
lOff = True
# Configure LED GPIO pins
for item in LEDpins:
GPIO.setup(item, GPIO.OUT)
GPIO.output(item, lOff)
# Configure switch GPIO pins
GPIO.setup(switchState, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Scipt ready flash
flashes = 1
while flashes < 4:
switch_LED(lOn)
sleep(0.5)
switch_LED(lOff)
sleep(0.5)
flashes += 1
# Pin check loop
while True:
if GPIO.input(switchState):
captureState = False
switch_LED(lOff)
else:
captureState = True
switch_LED(lOn)
get_video(captureState)
GPIO.wait_for_edge(switchState, GPIO.BOTH)
sleep(0.2)
# Script cleanup
GPIO.cleanup()
| mit | Python |
|
f68689e3b6caaad2d143d92af5395f7c12316525 | add simple test file | mattjj/pylds | test.py | test.py | from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
from pybasicbayes.distributions import Gaussian, Regression
from autoregressive.distributions import AutoRegression
from pyhsmm.util.text import progprint_xrange
from models import LDS
np.random.seed(0)
#########################
# set some parameters #
#########################
mu_init = np.array([0.,1.])
sigma_init = 0.01*np.eye(2)
A = 0.99*np.array([[np.cos(np.pi/24), -np.sin(np.pi/24)],
[np.sin(np.pi/24), np.cos(np.pi/24)]])
# A = 0.99*np.eye(2)
sigma_states = 0.01*np.eye(2)
C = np.array([[10.,0.]])
sigma_obs = 0.01*np.eye(1)
###################
# generate data #
###################
truemodel = LDS(
init_dynamics_distn=Gaussian(mu=mu_init,sigma=sigma_init),
dynamics_distn=AutoRegression(A=A,sigma=sigma_states),
emission_distn=Regression(A=C,sigma=sigma_obs)
)
data, stateseq = truemodel.generate(2000)
###############
# fit model #
###############
model = LDS(
init_dynamics_distn=Gaussian(mu_0=np.zeros(2),nu_0=3.,sigma_0=3*np.eye(2),kappa_0=1.),
dynamics_distn=AutoRegression(nu_0=3.,S_0=np.eye(2),M_0=np.zeros((2,2)),K_0=5*np.eye(2)),
emission_distn=Regression(nu_0=2.,S_0=np.eye(1),M_0=np.zeros((1,2)),K_0=5*np.eye(2)),
)
model.add_data(data,stateseq=stateseq)
# model.add_data(data)
model.resample_parameters()
for _ in progprint_xrange(100):
model.resample_model()
print np.linalg.eigvals(A)
print np.linalg.eigvals(model.dynamics_distn.A)
| mit | Python |
|
2e4bb9ca00c992dab0967b3238d8aebd8710d79d | Create controller.py | bit0001/trajectory_tracking,bit0001/trajectory_tracking | src/controller.py | src/controller.py | #!/usr/bin/env python
import rospy
if __name__ == '__main__':
pass
| mit | Python |
|
c3748579854ae06c995cb12ea45a1be4de8f827d | Add gallery migration | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | features/galleries/migrations/0003_auto_20170421_1109.py | features/galleries/migrations/0003_auto_20170421_1109.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-21 09:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('galleries', '0002_auto_20170421_0934'),
]
operations = [
migrations.AlterField(
model_name='galleryimage',
name='gallery',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gallery_images', to='content2.Content'),
),
]
| agpl-3.0 | Python |
|
a6381765ad8e15624a5dabb848283e92b0e90d8c | Create rpkm_bin.py | ShaopengLiu1/Atac-seq_Quality_Control_pipe,ShaopengLiu1/Atac-seq_Quality_Control_pipe,ShaopengLiu1/Atac-seq_Quality_Control_pipe | code_collection/rpkm_bin.py | code_collection/rpkm_bin.py | import sys
peak=[]
with open(sys.argv[1],'r') as f:
for line in f:
line=line.strip('\n').split('\t')
peak.append(line)
bed=[]
with open(sys.argv[2],'r') as f:
for line in f:
line=line.strip('\n').split('\t')
bed.append(line)
SIZE=int(sys.argv[3])
index=0
n=len(peak)
num=[0]*n
for read in bed:
mid=(int(read[1])+int(read[2]))/2
while (index<n-1 and mid>int(peak[index][2])) or (index<n-1 and read[0]!=peak[index][0]):
index+=1
num[index]+=1
if (index<n-1) and (mid==int(peak[index+1][1])):
num[index+1]+=1
output=[]
for i in range(n):
if num[i]!=0:
y=1.0*num[i]*10**9/SIZE/(int(peak[i][2])-int(peak[i][1]))
y='%.4f'%y
output.append(peak[i][0]+'\t'+peak[i][1]+'\t'+peak[i][2]+'\t'+peak[i][3]+'\t'+str(num[i])+'\t'+y+'\n')
else:
output.append(peak[i][0]+'\t'+peak[i][1]+'\t'+peak[i][2]+'\t'+peak[i][3]+'\t'+str(num[i])+'\t'+str(0)+'\n')
with open('reads.txt','w') as f:
f.writelines(output)
f.close()
| mit | Python |
|
85202173cf120caad603315cd57fa66857a88b0b | Add missing migrations for institutions | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | feder/institutions/migrations/0013_auto_20170810_2118.py | feder/institutions/migrations/0013_auto_20170810_2118.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-10 21:18
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('institutions', '0012_auto_20170808_0309'),
]
operations = [
migrations.AlterField(
model_name='institution',
name='extra',
field=jsonfield.fields.JSONField(blank=True, verbose_name=b'Unorganized additional information'),
),
]
| mit | Python |
|
4bb5653f5f7f95bf28b2ee596c441cbc4c7fbf3a | Create whitefilterstr.py | Ton-Lankveld/whitelist-filter-string,Ton-Lankveld/whitelist-filter-string,Ton-Lankveld/whitelist-filter-string | whitefilterstr.py | whitefilterstr.py | def whiteListCharFilter(inStr, whiteListStr):
""" Sanatize a string with a list of allowed (white) characters
Input: inStr {string} String to be sanatized.
Input: whiteListStr {string} String with allowed characters.
Output: outStr {string} Sanatized string
"""
outStr = ""
if (isinstance(inStr, str) or isinstance(whiteListStrinStr, str)) == False:
return outStr
for characterStr in inStr:
if characterStr in whiteListStr:
outStr = outStr + characterStr
return outStr
| mit | Python |
|
4bfb560dc9f28d850a89c98590df032849cfc035 | Create zoql.py | bolaurent/zuora_restful_python | zoql.py | zoql.py | #!/usr/local/bin/python3
import sys
import cmd
import csv
import pdb
import config
from zuora import Zuora
zuora = Zuora(config.zuoraConfig)
def zuoraObjectKeys(zouraObject):
if zouraObject:
return zouraObject.keys()
def dumpRecords(records):
if records:
firstRecord = records[0]
keys = [key for key in zuoraObjectKeys(firstRecord) if firstRecord[key]]
print(','.join(keys))
for record in records:
print(','.join(str(record[key]) for key in keys))
print(len(records), 'records')
class Interpeter(cmd.Cmd):
def do_select(self, line):
try:
if '.' in line:
csvData = zuora.queryExport('select ' + line).split('\n')
records = [record for record in csv.DictReader(csvData)]
else:
records = zuora.queryAll('select ' + line)
dumpRecords(records)
except Exception as e:
print('Error: q', repr(e))
def do_q(self, line):
return self.do_EOF(line)
def do_EOF(self, line):
return True
if __name__ == '__main__':
Interpeter().cmdloop()
| mit | Python |
|
eb250318cf6933b4a037bd9ea238ce0fc7be58c2 | add first script | leethargo/gitthemall | gitthemall.py | gitthemall.py | #! /usr/bin/env python2
import argparse
import os.path
import logging
import sys
logging.basicConfig(format='%(levelname)s: %(message)s')
def fail(msg):
'Fail program with printed message'
logging.error(msg)
sys.exit(1)
def update(repo, actions):
'Update repo according to allowed actions.'
repo = os.path.expanduser(repo)
logging.debug('going to %s' % repo)
if not os.path.isdir(repo):
fail('No directory at %s!' % repo)
if not os.path.isdir(os.path.join(repo, '.git')):
fail('No git repo at %s!' % repo)
def parse(config):
'Parse config and yield repos with actions'
with open(config) as f:
for line in f:
items = line.strip().split(',')
yield items[0], items[1:]
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Keep git repos up-to-date.')
parser.add_argument('config', type=str, help='config file that lists repos')
parser.add_argument('-v', '--verbose', default=False, action='store_true')
args = parser.parse_args()
if args.verbose:
logging.getLogger().setLevel(logging.DEBUG)
for repo, actions in parse(args.config):
update(repo, actions)
| mit | Python |
|
f9ea992353f2caa835ca2007eb07b470d1b782a3 | Fix migration colorfield | makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin | geotrek/trekking/migrations/0006_practice_mobile_color.py | geotrek/trekking/migrations/0006_practice_mobile_color.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-03-04 12:43
from __future__ import unicode_literals
import colorfield.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('trekking', '0005_auto_20181219_1524'),
]
operations = [
migrations.AddField(
model_name='practice',
name='mobile_color',
field=colorfield.fields.ColorField(db_column=b'couleur_mobile', default=b'#444444', help_text="Color's practice in mobile", max_length=18, verbose_name='Mobile color'),
),
]
| bsd-2-clause | Python |
|
3959ad4a4ddc4655c1acd8362de4284ba1e8d3e7 | Apply the hack that renames local_settings.py only when running setup.py | LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
from setuptest import test
import os, sys
if sys.argv[1] == 'install':
'''
Rename local_settings.py in order to
be excluded from setup.py install command
'''
ORIG_NAME = 'cronos/local_settings.py'
TEMP_NAME = 'cronos/local_settings.py1'
try:
os.rename(ORIG_NAME, TEMP_NAME)
except:
pass
setup(
name='cronos',
version='0.3-dev',
description='Django application that collects announcements and other \
personal data for students of TEI of Larissa',
author='cronos development team',
author_email='[email protected]',
url='http://cronos.teilar.gr',
license='AGPLv3',
packages=find_packages(),
include_package_data=True,
data_files=[
('', ['LICENSE', 'manage.py']),
('bin', [
'bin/update_cronos.sh',
'bin/logs_create_fix_perms.sh',
'bin/get_full_production_db.sh'
]),
('configs', [
'configs/apache.conf',
'configs/cron.d_cronos',
'configs/logrotate.d_cronos',
'configs/logrotate.d_cronos-dev',
'configs/syslog-ng.conf'
]),
],
cmdclass={'test': test},
)
if sys.argv[1] == 'install':
'''
Restore local_settings.py
'''
try:
os.rename(TEMP_NAME, ORIG_NAME)
except:
pass
| #!/usr/bin/env python
from setuptools import setup, find_packages
from setuptest import test
import os
'''
Rename local_settings.py in order to
be excluded from setup.py install command
'''
ORIG_NAME = 'cronos/local_settings.py'
TEMP_NAME = 'cronos/local_settings.py1'
try:
os.rename(ORIG_NAME, TEMP_NAME)
except:
pass
setup(
name='cronos',
version='0.3-dev',
description='Django application that collects announcements and other \
personal data for students of TEI of Larissa',
author='cronos development team',
author_email='[email protected]',
url='http://cronos.teilar.gr',
license='AGPLv3',
packages=find_packages(),
include_package_data=True,
data_files=[
('', ['LICENSE', 'manage.py']),
('bin', [
'bin/update_cronos.sh',
'bin/logs_create_fix_perms.sh',
'bin/get_full_production_db.sh'
]),
('configs', [
'configs/apache.conf',
'configs/cron.d_cronos',
'configs/logrotate.d_cronos',
'configs/logrotate.d_cronos-dev',
'configs/syslog-ng.conf'
]),
],
cmdclass={'test': test},
)
'''
Restore local_settings.py
'''
try:
os.rename(TEMP_NAME, ORIG_NAME)
except:
pass
| agpl-3.0 | Python |
b187e844d667b14dcc7874b351ee3f82383be348 | Fix dependency reference error | aaront/puckdb | setup.py | setup.py | import ast
import re
from setuptools import setup, find_packages
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('puckdb/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='puckdb',
author='Aaron Toth',
version=version,
url='https://github.com/aaront/puckdb',
description='An async-first hockey data extractor and API',
long_description=open('README.rst').read(),
test_suite="tests",
include_package_data=True,
packages=find_packages(),
package_data={'': ['LICENSE']},
package_dir={'puckdb': 'puckdb'},
license='Apache 2.0',
install_requires=[
'aiodns',
'cchardet',
'aiohttp',
'aiodns',
'asyncpg',
'asyncpgsa',
'click',
'click-datetime',
'python-dateutil',
'pytz',
'pg8000',
'sqlalchemy',
'ujson',
'python-dotenv',
'dataclasses',
'alembic',
'pint'
],
entry_points='''
[console_scripts]
puckdb=puckdb.console:main
''',
classifiers=(
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries'
)
)
| import ast
import re
from setuptools import setup, find_packages
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('puckdb/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='puckdb',
author='Aaron Toth',
version=version,
url='https://github.com/aaront/puckdb',
description='An async-first hockey data extractor and API',
long_description=open('README.rst').read(),
test_suite="tests",
include_package_data=True,
packages=find_packages(),
package_data={'': ['LICENSE']},
package_dir={'puckdb': 'puckdb'},
license='Apache 2.0',
install_requires=[
'aiodns',
'cchardet',
'aiohttp',
'aiodns',
'dotenv',
'asyncpg',
'asyncpgsa',
'click',
'click-datetime',
'python-dateutil',
'pytz',
'pg8000',
'sqlalchemy',
'ujson',
'python-dotenv',
'dataclasses',
'alembic',
'pint'
],
entry_points='''
[console_scripts]
puckdb=puckdb.console:main
''',
classifiers=(
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries'
)
)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.