commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang |
---|---|---|---|---|---|---|---|---|
9dbc755a17fbea3fbec52191d1e7bac60e5995e9 | test links in the docs | Aoyunyun/jpush-docs,war22moon/jpush-docs,xiepiaa/jpush-docs,xiepiaa/jpush-docs,xiongtiancheng/jpush-docs,Aoyunyun/jpush-docs,raoxudong/jpush-docs,Aoyunyun/jpush-docs,raoxudong/jpush-docs,jpush/jpush-docs,Nocturnana/jpush-docs,raoxudong/jpush-docs,war22moon/jpush-docs,xiepiaa/jpush-docs,xiongtiancheng/jpush-docs,xiepiaa/jpush-docs,xiongtiancheng/jpush-docs,war22moon/jpush-docs,Nocturnana/jpush-docs,jpush/jpush-docs,Aoyunyun/jpush-docs,xiongtiancheng/jpush-docs,raoxudong/jpush-docs,war22moon/jpush-docs,jpush/jpush-docs,Nocturnana/jpush-docs,jpush/jpush-docs,Nocturnana/jpush-docs | test_links.py | test_links.py | #!/usr/bin/env python
import os
import time
import yaml
import requests
from urllib.parse import urljoin
from bs4 import BeautifulSoup
current = os.path.split(os.path.realpath(__file__))[0]
yaml_file = "{0}/mkdocs.yml".format(current)
mkdocs = yaml.load(open(yaml_file))['pages']
host='http://127.0.0.1:8000'
page_filters = [
'index.md',
'resources.md',
'updates.md',
'javadoc.md',
'iOSdoc.md',
'server/sdk',
'server/old'
]
link_filters = [
'mailto:',
'im_android_api_docs',
'jmessage_ios_appledoc_html',
'www.mkdocs.org',
# 'www.jiguang.cn',
# 'blog.jiguang.cn',
'sdkfiledl.jiguang.cn',
# 'developer.apple.com',
'developer.android.com',
'google.com',
'wikipedia.org',
# 'github.com',
'api.jpush.cn',
'report.jpush.cn',
'device.jpush.cn',
'admin.jpush.cn',
'api.im.jpush.cn',
'report.im.jpush.cn',
'api.sms.jpush.cn',
]
def extract_value_from_list(mkdocs):
pages = []
for product in mkdocs:
for item in product.values():
if isinstance(item, str):
pages.append(item)
else:
page = extract_value_from_list(item)
pages.extend(page)
return pages
def is_valid_page(item):
return _is_valid(page_filters, item)
def is_valid_link(item):
if not item or item.startswith('#'):
return False
return _is_valid(link_filters, item)
def _is_valid(filters, item):
for filter in filters:
if filter in item:
return False
return True
def build_link(base, path):
if path.startswith('../') or path.startswith('./'):
return urljoin(base, path)
return path
def get_links(url):
links = []
html = requests.get(url)
bs = BeautifulSoup(html.content, 'html.parser')
for link in bs.find('div', { 'id': 'content' }).findAll('a'):
if 'href' in link.attrs and is_valid_link(link.attrs['href']):
links.append({'text': link.get_text(), 'href': link.attrs['href']})
return links
if __name__ == '__main__':
pages = extract_value_from_list(mkdocs)
for page in pages:
msg = "\nworking on " + page + ":\n"
if is_valid_page(page):
url = host+'/'+page.replace('.md', '/')
msg += 'url: ' + url + "\n"
links = get_links(url)
if links:
for link in links:
l = build_link(url, link['href'])
r = requests.get(l)
if r.status_code != 200:
print(msg + link['href'] + ' => ' + l)
print(link)
print(r.status_code)
# time.sleep(1)
else:
pass
# print("This page doesn't have valid links")
else:
pass
# print("skip...")
| mit | Python |
|
d56b1623a278d61ff8b113b95534ce4dd6682e25 | fix bug 1018349 - migration | AdrianGaudebert/socorro,Serg09/socorro,AdrianGaudebert/socorro,linearregression/socorro,yglazko/socorro,luser/socorro,twobraids/socorro,KaiRo-at/socorro,spthaolt/socorro,adngdb/socorro,spthaolt/socorro,mozilla/socorro,Serg09/socorro,rhelmer/socorro,mozilla/socorro,Tchanders/socorro,m8ttyB/socorro,pcabido/socorro,rhelmer/socorro,twobraids/socorro,luser/socorro,KaiRo-at/socorro,mozilla/socorro,linearregression/socorro,Tchanders/socorro,yglazko/socorro,twobraids/socorro,AdrianGaudebert/socorro,rhelmer/socorro,KaiRo-at/socorro,Tchanders/socorro,luser/socorro,bsmedberg/socorro,linearregression/socorro,twobraids/socorro,adngdb/socorro,pcabido/socorro,m8ttyB/socorro,adngdb/socorro,yglazko/socorro,Tchanders/socorro,linearregression/socorro,m8ttyB/socorro,luser/socorro,spthaolt/socorro,mozilla/socorro,cliqz/socorro,AdrianGaudebert/socorro,KaiRo-at/socorro,m8ttyB/socorro,Serg09/socorro,m8ttyB/socorro,lonnen/socorro,bsmedberg/socorro,pcabido/socorro,KaiRo-at/socorro,luser/socorro,AdrianGaudebert/socorro,rhelmer/socorro,Tchanders/socorro,cliqz/socorro,Serg09/socorro,linearregression/socorro,adngdb/socorro,Tayamarn/socorro,lonnen/socorro,spthaolt/socorro,spthaolt/socorro,AdrianGaudebert/socorro,mozilla/socorro,mozilla/socorro,Tayamarn/socorro,spthaolt/socorro,lonnen/socorro,yglazko/socorro,cliqz/socorro,Tchanders/socorro,bsmedberg/socorro,pcabido/socorro,yglazko/socorro,Tayamarn/socorro,lonnen/socorro,cliqz/socorro,rhelmer/socorro,pcabido/socorro,yglazko/socorro,twobraids/socorro,adngdb/socorro,luser/socorro,cliqz/socorro,Serg09/socorro,Tayamarn/socorro,adngdb/socorro,m8ttyB/socorro,KaiRo-at/socorro,Tayamarn/socorro,bsmedberg/socorro,Tayamarn/socorro,rhelmer/socorro,Serg09/socorro,twobraids/socorro,linearregression/socorro,bsmedberg/socorro,cliqz/socorro,pcabido/socorro | alembic/versions/1baef149e5d1_bug_1018349_add_coalesce_to_max_sort_.py | alembic/versions/1baef149e5d1_bug_1018349_add_coalesce_to_max_sort_.py | """bug 1018349 - add COALESCE to max(sort) when adding a new product
Revision ID: 1baef149e5d1
Revises: 26521f842be2
Create Date: 2014-06-25 15:04:37.934064
"""
# revision identifiers, used by Alembic.
revision = '1baef149e5d1'
down_revision = '26521f842be2'
from alembic import op
from socorro.lib import citexttype, jsontype, buildtype
from socorro.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
load_stored_proc(op, ['add_new_product.sql'])
def downgrade():
load_stored_proc(op, ['add_new_product.sql'])
| mpl-2.0 | Python |
|
d836571a8dff59371d156dffea7290228305ca17 | add tests for reading shapefiles via ogr | Uli1/mapnik,CartoDB/mapnik,yiqingj/work,cjmayo/mapnik,stefanklug/mapnik,naturalatlas/mapnik,tomhughes/mapnik,pnorman/mapnik,pnorman/mapnik,yohanboniface/python-mapnik,rouault/mapnik,manz/python-mapnik,tomhughes/python-mapnik,zerebubuth/mapnik,jwomeara/mapnik,lightmare/mapnik,CartoDB/mapnik,tomhughes/mapnik,rouault/mapnik,cjmayo/mapnik,zerebubuth/mapnik,garnertb/python-mapnik,jwomeara/mapnik,davenquinn/python-mapnik,manz/python-mapnik,Uli1/mapnik,whuaegeanse/mapnik,tomhughes/python-mapnik,Airphrame/mapnik,pramsey/mapnik,lightmare/mapnik,Airphrame/mapnik,rouault/mapnik,cjmayo/mapnik,pnorman/mapnik,mapycz/mapnik,davenquinn/python-mapnik,mapnik/mapnik,pramsey/mapnik,mapycz/python-mapnik,stefanklug/mapnik,tomhughes/mapnik,qianwenming/mapnik,mapycz/python-mapnik,mapnik/python-mapnik,qianwenming/mapnik,Mappy/mapnik,kapouer/mapnik,strk/mapnik,kapouer/mapnik,qianwenming/mapnik,naturalatlas/mapnik,garnertb/python-mapnik,mapycz/mapnik,stefanklug/mapnik,rouault/mapnik,Mappy/mapnik,mapnik/python-mapnik,lightmare/mapnik,whuaegeanse/mapnik,pramsey/mapnik,yohanboniface/python-mapnik,cjmayo/mapnik,naturalatlas/mapnik,qianwenming/mapnik,strk/mapnik,mapnik/mapnik,sebastic/python-mapnik,whuaegeanse/mapnik,mapnik/mapnik,whuaegeanse/mapnik,sebastic/python-mapnik,davenquinn/python-mapnik,naturalatlas/mapnik,mbrukman/mapnik,CartoDB/mapnik,Mappy/mapnik,Airphrame/mapnik,Airphrame/mapnik,kapouer/mapnik,mapnik/mapnik,pramsey/mapnik,mbrukman/mapnik,strk/mapnik,zerebubuth/mapnik,Uli1/mapnik,strk/mapnik,yiqingj/work,yiqingj/work,mapnik/python-mapnik,lightmare/mapnik,tomhughes/mapnik,yohanboniface/python-mapnik,jwomeara/mapnik,garnertb/python-mapnik,mbrukman/mapnik,mbrukman/mapnik,pnorman/mapnik,Mappy/mapnik,manz/python-mapnik,jwomeara/mapnik,stefanklug/mapnik,kapouer/mapnik,qianwenming/mapnik,Uli1/mapnik,sebastic/python-mapnik,mapycz/mapnik,tomhughes/python-mapnik,yiqingj/work | tests/python_tests/ogr_test.py | tests/python_tests/ogr_test.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
# Shapefile initialization
def test_shapefile_init():
s = mapnik.Ogr(file='../../demo/data/boundaries.shp',layer_by_index=0)
e = s.envelope()
assert_almost_equal(e.minx, -11121.6896651, places=7)
assert_almost_equal(e.miny, -724724.216526, places=6)
assert_almost_equal(e.maxx, 2463000.67866, places=5)
assert_almost_equal(e.maxy, 1649661.267, places=3)
# Shapefile properties
def test_shapefile_properties():
s = mapnik.Ogr(file='../../demo/data/boundaries.shp',layer_by_index=0,encoding='latin1')
f = s.features_at_point(s.envelope().center()).features[0]
eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
eq_(f['COUNTRY'], u'CAN')
eq_(f['F_CODE'], u'FA001')
eq_(f['NAME_EN'], u'Quebec')
# this seems to break if icu data linking is not working
eq_(f['NOM_FR'], u'Qu\xe9bec')
eq_(f['NOM_FR'], u'Québec')
eq_(f['Shape_Area'], 1512185733150.0)
eq_(f['Shape_Leng'], 19218883.724300001)
# Check that the deprecated interface still works,
# remove me once the deprecated code is cleaned up
eq_(f.properties['Shape_Leng'], 19218883.724300001)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| lgpl-2.1 | Python |
|
bf8b19d19ea2a5f39cba90ca815560a89e476c6c | Create Output.py | relisher/ferretextras | Output.py | Output.py | import os, time, sys
from threading import Thread
pipe_name = '/Users/stevenrelin/Documents/pipe_eye.txt'
def child( ):
pipeout = os.open(pipe_name, os.O_WRONLY)
counter = 0
while True:
time.sleep(1)
os.write(pipeout, 'Number %03d\n' % counter)
counter = (counter+1) % 5
if not os.path.exists(pipe_name):
os.mkfifo(pipe_name)
t = Thread(target=child)
t.start()
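# os.open(..., os.O_WRONLY) blocks until a reader opens the FIFO, e.g.:
#   tail -f /Users/stevenrelin/Documents/pipe_eye.txt
# or, as a minimal Python reader sketch:
#   pipein = os.open(pipe_name, os.O_RDONLY)
#   print(os.read(pipein, 64))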
| mit | Python |
|
319d2115ad1130247caa5734572b7676e5bb0a6d | add offline plot of nexrad climo | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/feature/nexrad/climo.py | scripts/feature/nexrad/climo.py | import matplotlib.pyplot as plt
from pyiem.plot import maue
import datetime
import numpy as np
avgs = np.zeros((24, 53), 'f')
cnts = np.zeros((24, 53), 'f')
def make_y(ts):
if ts.hour >= 5:
return ts.hour - 5
return ts.hour + 19
maxv = 0
for line in open('nexrad35.txt'):
tokens = line.split(",")
ts = datetime.datetime.strptime(tokens[0], '%Y%m%d%H%M')
coverage = float(tokens[1])
if coverage > maxv:
print line
maxv = coverage
    if ts.year > 1007:  # effectively always true for the 1996-2015 data used here
avgs[make_y(ts), int(ts.strftime("%j"))/7-1] += coverage
cnts[make_y(ts), int(ts.strftime("%j"))/7-1] += 1.0
pixels = 6000 * 2400
(fig, ax) = plt.subplots(1, 1)
cmap = maue()
x, y = np.meshgrid(np.arange(53), np.arange(24))
m = ax.imshow(avgs / cnts / 100. * pixels, aspect='auto', interpolation='bicubic',
cmap=plt.get_cmap("gist_ncar"), extent=[0,53,24,0])
plt.colorbar(m, label='square miles, Iowa = 56,000')
ax.set_ylim(0, 24)
ax.set_yticks((0, 4, 8, 12, 16, 20))
ax.set_xticks(range(0, 55, 7))
ax.set_xticklabels(('Jan 1', 'Feb 19', 'Apr 8', 'May 27', 'Jul 15',
'Sep 2', 'Oct 21', 'Dec 9'))
ax.set_yticks(range(0, 24, 4))
ax.set_yticklabels(("Mid", "4 AM", "8 AM", "Noon", "4 PM", "8 PM"))
ax.set_ylabel("Central Daylight Time")
box = ax.get_position()
ax.set_position([box.x0, box.y0,
box.width * 0.95, box.height])
ax2 = ax.twinx()
ax2.set_yticks(range(0, 24, 4))
ax2.set_yticklabels(("5", "9", "13", "17", "21", "1"))
ax2.set_ylabel("UTC")
ax2.set_ylim(0, 24)
box = ax2.get_position()
ax2.set_position([box.x0, box.y0,
box.width * 0.95, box.height])
#ax.bar(np.arange(1, 366), avgs[:-1] / cnts[:-1] / 100. * pixels, fc='b', ec='b')
#ax.set_xticks((1,32,60,91,121,152,182,213,244,274,305,335,365))
#ax.set_xticklabels( ('Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec') )
ax.grid(True, color='white')
#ax.set_xlim(0, 366)
#ax.set_ylabel("Areal Coverage of 35+ dbZ [sq miles], Iowa=56,000")
ax.set_title(("Climatology of 35+ dbZ Returns over CONUS\n"
"Based on 1996-2015 IEM Composites of NWS NEXRAD"))
ax.set_xlabel("Partitioned by Week of Year, Smoothed")
fig.savefig('test.png')
| mit | Python |
|
5f20962d300850200ed796f941bf98662736d4da | Add server.py to serve files in the user's specified share dir | sandwich-share/sandwich | sandwich/server.py | sandwich/server.py | from os import curdir, sep, path
import time
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import config
class StaticServeHandler(BaseHTTPRequestHandler):
def do_GET(self):
if not config.shared_directory:
self.send_error(404, 'User not sharing files')
return
try:
f = open(path.expanduser(config.shared_directory) + self.path, 'rb')
self.send_response(200)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404,'File Not Found: %s' % self.path)
class SandwichServer(object):
def __init__(self, ):
pass
def run(self, port):
try:
self.port = port
self.server = HTTPServer(('', self.port), StaticServeHandler)
print 'started httpserver...'
self.server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
self.server.socket.close()
if __name__ == '__main__':
ss = SandwichServer()
ss.run(8000)
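# Quick check from another shell (assuming config.shared_directory is set):
#   curl http://localhost:8000/some_shared_file.txt
# Note: self.path is joined onto the share directory unchecked, so '..'
# segments could escape it; a hardened version should normalize the path.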
| bsd-2-clause | Python |
|
d774bb7caa9637e4d453e19fcc43ee7b9b17702c | add script for computing WWA % times | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/sbw/wfo_time_percent.py | scripts/sbw/wfo_time_percent.py | import iemdb
import numpy
import network
nt = network.Table("WFO")
POSTGIS = iemdb.connect('postgis', bypass=True)
pcursor = POSTGIS.cursor()
import mx.DateTime
sts = mx.DateTime.DateTime(2005,10,1)
ets = mx.DateTime.DateTime(2013,1,1)
interval = mx.DateTime.RelativeDateTime(hours=3)
bins = (ets - sts).minutes
for wfo in nt.sts.keys():
wfo = wfo[-3:]
counts = numpy.zeros( (int(bins)), 'f')
pcursor.execute("""SELECT distinct issue, expire from warnings where wfo = '%s'
and issue > '2005-10-01' and expire < '2013-01-01' and gtype = 'C'
and phenomena = 'SC' """ % (wfo,))
for row in pcursor:
issue = mx.DateTime.strptime(row[0].strftime("%Y%m%d%H%M"), "%Y%m%d%H%M")
expire = mx.DateTime.strptime(row[1].strftime("%Y%m%d%H%M"), "%Y%m%d%H%M")
idx1 = int((issue - sts).minutes)
idx2 = int((expire - sts).minutes)
counts[idx1:idx2] = 1
print "%s,%.4f" % (wfo, numpy.sum( counts ) / float(bins))
pcursor.execute("""INSERT into ferree3(wfo, percentage) values (%s,%s)""",
(wfo, float(numpy.sum( counts ) / float(bins))))
POSTGIS.commit()
pcursor.close()
POSTGIS.commit()
POSTGIS.close() | mit | Python |
|
bcb65eb61c711b184114910c8d8c641278db5130 | Add frozen/equilibrium wake model helpers | ricklupton/py-bem | bem/models.py | bem/models.py | import numpy as np
class FrozenWakeAerodynamics:
"""Calculate induced flows once in given initial conditions"""
def __init__(self, bem_model, initial_wind_speed,
initial_rotor_speed, initial_pitch_angle):
self.bem_model = bem_model
# Find the frozen wake state
self.wake_state = bem_model.solve_wake(initial_wind_speed,
initial_rotor_speed,
initial_pitch_angle)
def forces(self, wind_speed, rotor_speed, pitch_angle, rho):
shape_test = (np.asarray(wind_speed) *
np.asarray(rotor_speed) *
np.asarray(pitch_angle))
if shape_test.ndim == 0:
# Single value
factors = self.wake_state / [wind_speed, rotor_speed]
factors[:, 1] /= self.bem_model.radii
forces = self.bem_model.forces(wind_speed, rotor_speed,
pitch_angle, rho, factors)
elif shape_test.ndim == 1:
# Multiple values
inputs = np.zeros((len(shape_test), 3))
inputs[:, 0] = wind_speed
inputs[:, 1] = rotor_speed
inputs[:, 2] = pitch_angle
forces = np.zeros((inputs.shape[0], self.wake_state.shape[0], 2))
for i in range(forces.shape[0]):
factors = self.wake_state / inputs[i, :2]
factors[:, 1] /= self.bem_model.radii
forces[i] = self.bem_model.forces(*inputs[i], rho=rho,
factors=factors)
else:
raise ValueError("Bad input shapes: {}".format(shape_test.shape))
return forces
class EquilibriumWakeAerodynamics:
"""Calculate induced flow for each requested set of conditions"""
def __init__(self, bem_model):
self.bem_model = bem_model
def forces(self, wind_speed, rotor_speed, pitch_angle, rho):
shape_test = (np.asarray(wind_speed) *
np.asarray(rotor_speed) *
np.asarray(pitch_angle))
if shape_test.ndim == 0:
# Single value
wake_state = self.bem_model.solve_wake(wind_speed,
rotor_speed,
pitch_angle)
factors = wake_state / [wind_speed, rotor_speed]
factors[:, 1] /= self.bem_model.radii
forces = self.bem_model.forces(wind_speed, rotor_speed,
pitch_angle, rho, factors)
elif shape_test.ndim == 1:
# Multiple values
inputs = np.zeros((len(shape_test), 3))
inputs[:, 0] = wind_speed
inputs[:, 1] = rotor_speed
inputs[:, 2] = pitch_angle
            # this class stores no wake_state; size the output from the
            # model's radii instead
            forces = np.zeros((inputs.shape[0], len(self.bem_model.radii), 2))
for i in range(forces.shape[0]):
wake_state = self.bem_model.solve_wake(*inputs[i])
factors = wake_state / inputs[i, :2]
factors[:, 1] /= self.bem_model.radii
forces[i] = self.bem_model.forces(*inputs[i], rho=rho,
factors=factors)
else:
raise ValueError("Bad input shapes: {}".format(shape_test.shape))
return forces
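# Minimal usage sketch (bem_model is assumed to expose solve_wake, forces
# and radii, as used above; the numbers are hypothetical):
#   frozen = FrozenWakeAerodynamics(bem_model, 10.0, 1.5, 0.0)
#   F = frozen.forces(10.0, 1.5, 0.0, rho=1.225)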
| mit | Python |
|
e56c3be6dc3ab8bf31b7ce9a3d3db275b18207f0 | Create sql-all.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | Django/sql-all.py | Django/sql-all.py | $ ./manage.py sqlall name-app
'''
CommandError: App 'name-app' has migrations.
Only the sqlmigrate and sqlflush commands can be used when an app has migrations.
'''
So there before migrate to see it.
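# e.g., to see the SQL for a single migration:
# $ ./manage.py sqlmigrate name-app 0001_initial
# ('0001_initial' is a hypothetical migration name)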
| agpl-3.0 | Python |
|
a8ee7f46ffd4611a153538e05749bd99b4a98cbc | add checkPID | wannaphongcom/code-python3-blog | check-pid.py | check-pid.py | # โค้ดเซ็คความถูกต้องของบัตรประชาชน
# เขียนโดย วรรณพงษ์ ภัททิยไพบูลย์
# [email protected]
# https://python3.wannaphong.com
def checkPID(pid):
if(len(pid) != 13): # ถ้า pid ไม่ใช่ 13 ให้คืนค่า False
return False
num=0 # ค่าสำหรับอ้างอิง index list ข้อมูลบัตรประชาชน
num2=13 # ค่าประจำหลัก
listdata=list(pid) # list ข้อมูลบัตรประชาชน
sum=0 # ผลลัพธ์
while num<12:
sum+=int(listdata[num])*(num2-num) # นำค่า num เป็น index list แต่ละตัว * (num2 - num) แล้วรวมเข้ากับ sum
num+=1 # เพิ่มค่า num อีก 1
digit13 = sum%11 # sum หาร 11 เอาเศษ
if digit13==0: # ถ้าเศษ = 0
digit13=1 # ค่าหลักที่ 13 คือ 1
elif digit13==1: # ถ้าเศษ = 1
digit13=0 # ค่าหลักที่ 13 คือ 0
else:
digit13=11-digit13 # ถ้าเศษไม่ใช่กับอะไร ให้เอา 11 - digit13
if digit13==int(listdata[12]): # ถ้าค่าหลักที่ 13 เท่ากับค่าหลักที่ 13 ที่ป้อนข้อมูลมา คืนค่า True
return True
else: # ถ้าค่าหลักที่ 13 ไม่เท่ากับค่าหลักที่ 13 ที่ป้อนข้อมูลมา คืนค่า False
return False
a=checkPID("เลขบัตรประชาชน")
print(a) # ถ้าถูกต้อง คือ True ถ้าไม่ถูก คือ False | mit | Python |
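# Worked example with a dummy number, "1234567890121":
#   weighted sum = 1*13 + 2*12 + 3*11 + ... + 1*3 + 2*2 = 352
#   352 % 11 == 0, so the check digit is 1, matching the last digit
#   checkPID("1234567890121") -> True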
|
43841114f4403b46e0ef077be6e0832ce690dfb2 | add ipy_workdir | ipython/ipython,ipython/ipython | IPython/Extensions/ipy_workdir.py | IPython/Extensions/ipy_workdir.py | #!/usr/bin/env python
import IPython.ipapi
ip = IPython.ipapi.get()
import os
workdir = None
def workdir_f(line):
global workdir
dummy,cmd = line.split(None,1)
if os.path.isdir(cmd):
workdir = cmd
print "Set workdir",workdir
elif workdir is None:
print "Please set workdir first by doing e.g. 'workdir q:/'"
else:
print "Execute command in",workdir
cwd = os.getcwd()
os.chdir(workdir)
try:
ip.runlines(cmd)
finally:
os.chdir(cwd)
ip.defalias("workdir",workdir_f)
| bsd-3-clause | Python |
|
b78ba3220a64e9b01b3fc8c61ada0e85dc1157fc | Implement data dumper | openego/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform | oeplatform/dumper.py | oeplatform/dumper.py | import oeplatform.securitysettings as sec
import sqlalchemy as sqla
from subprocess import call
import os
excluded_schemas = [
"information_schema",
"public",
"topology",
"reference",
]
def connect():
engine = _get_engine()
return sqla.inspect(engine)
def _get_engine():
engine = sqla.create_engine(
'postgresql://{0}:{1}@{2}:{3}/{4}'.format(
sec.dbuser,
sec.dbpasswd,
sec.dbhost,
sec.dbport,
sec.dbname))
return engine
insp = connect()
for schema in insp.get_schema_names():
if schema not in excluded_schemas:
if not os.path.exists(sec.datarepowc + schema):
os.mkdir(sec.datarepowc + schema)
for table in insp.get_table_names(schema=schema):
if not table.startswith('_'):
if not os.path.exists(sec.datarepowc + schema + '/' + table):
os.mkdir(sec.datarepowc + schema + '/' + table)
L = ['pg_dump', '-h', sec.dbhost, '-U', sec.dbuser, '-d',
sec.dbname, '-F', 'd', '-f',
sec.datarepowc + schema + '/' + table, '-t',
schema + '.' + table, '-w', ]
print(L)
call(L)
call(['tar', '-zcf',
sec.datarepowc + schema + '/' + table + '.tar.gz',
sec.datarepowc + schema + '/' + table])
call(['rm', '-r',
sec.datarepowc + schema + '/' + table])
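# Each dump uses pg_dump's directory format (-F d), so after untarring a
# table archive it can be restored with pg_restore, e.g.:
#   pg_restore -h HOST -U USER -d DBNAME <schema>/<table>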
| agpl-3.0 | Python |
|
8ce580d1f0890f72ab60efa4219de26b64ece897 | Add example skeleton script | bigfix/tools | example/example.py | example/example.py | #!/usr/bin/env python
import sys
from argparse import ArgumentParser
from getpass import getpass
class BigFixArgParser(ArgumentParser):
name = "hodor.py [options]"
def __init__(self):
description = "A tool for creating a smarter planet"
usage = """Options:
-h, --help Print this help message and exit
-s, --server SERVER[:PORT] REST API server and port
-u, --user USER[:PASSWORD] REST API user and password
-k, --insecure Don't verify the HTTPS connection to the server
-c, --cacert FILE CA certificate used to verify the server's HTTPS
certificate"""
super(BigFixArgParser, self).__init__(add_help=False,
usage=usage, description=description)
self.add_argument('-k', '--insecure', action='store_true')
self.add_argument('-c', '--cacert')
self.add_argument('-u', '--user', required=True)
self.add_argument('-s', '--server', required=True)
def parse_args(self):
self.usage = "{0}\n\n{1}\n\n{2}".format(self.name,
self.description, self.usage)
if '-h' in sys.argv or '--help' in sys.argv:
print(self.usage)
sys.exit()
args = super(BigFixArgParser, self).parse_args()
if ':' not in args.user:
prompt = "Enter password for user '{0}': ".format(args.user)
args.user = args.user + ':' + getpass(prompt)
return args
parser = BigFixArgParser()
print(parser.parse_args())
| apache-2.0 | Python |
|
30a4cb3794d52d1743dc482f2c2a83ced1dcbd90 | Make a clean report along with BLEU scores | vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material | session2/report.py | session2/report.py | import argparse, codecs, logging
import unicodecsv as csv
from nltk.align.bleu_score import bleu
import numpy as np
def setup_args():
parser = argparse.ArgumentParser()
    # argparse takes descriptions via the help= keyword
    parser.add_argument('src', help='Source file')
    parser.add_argument('target', help='Translated data')
    parser.add_argument('gold', help='Gold output file')
    parser.add_argument('model', help='Model Name')
    # these two are used below to name the CSV report
    parser.add_argument('out', help='Output CSV name prefix')
    parser.add_argument('suffix', help='Output CSV name suffix')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
f = codecs.open('%s-%s.csv'% (args.out, args.suffix), 'w')
csv_f = csv.writer(f, delimiter=',', encoding='utf-8')
src_lines = codecs.open(args.src, 'r', 'utf-8').readlines()
src_lines_nounk = codecs.open(args.src + '.nounk', 'r', 'utf-8').readlines()
target_lines = codecs.open(args.target, 'r', 'utf-8').readlines()
target_lines_nounk = codecs.open(args.target + '.nounk', 'r', 'utf-8').readlines()
gold_lines = codecs.open(args.gold, 'r', 'utf-8').readlines()
gold_lines_nounk = codecs.open(args.gold + '.nounk', 'r', 'utf-8').readlines()
    header = ['Src', 'Src_UNK', 'Target_UNK', 'Target', 'Gold_UNK', 'Gold', 'BLEU']
    csv_f.writerow(header)  # write the CSV header row
num_lines = len(gold_lines)
logging.info('Num Lines: %d'% num_lines)
bleu_scores = []
for index in range(num_lines):
data = []
data.append(src_lines_nounk[index].strip())
data.append(src_lines[index].strip())
data.append(target_lines[index].strip())
data.append(target_lines_nounk[index].strip())
data.append(gold_lines[index].strip())
data.append(gold_lines_nounk[index].strip())
bleu_score = bleu(target_lines[index].split(), [gold_lines[index].split()], [1])
bleu_scores.append(bleu_score)
data.append(str(bleu_score))
csv_f.writerow(data)
logging.info('Average BLEU Score: %f'% np.mean(bleu_scores))
if __name__ == '__main__':
main() | bsd-3-clause | Python |
|
31bb487a2f75268cb0b60ef4539935df83b68a84 | Add auto solver for "W3-Radix Sorts". | hghwng/mooc-algs2,hghwng/mooc-algs2 | quiz/3-radixsort.py | quiz/3-radixsort.py | #!/usr/bin/env python3
def make_arr(text):
return text.strip().split(' ')
def print_arr(arr):
for t in arr:
print(t, end=' ')
print()
def solve_q1(arr, time):
for t in range(len(arr[0]) - 1, time - 1, -1):
arr = sorted(arr, key=lambda x: x[t])
return arr
def msd_radix_sort(arr, start, end, depth):
if end - start <= 1:
return
global msd_radix_sort_left
if msd_radix_sort_left <= 0:
return
msd_radix_sort_left -= 1
arr[start:end] = sorted(arr[start:end], key=lambda x: x[depth])
pre_n = start
pre_v = arr[pre_n][depth]
for i in range(start, end):
if arr[i][depth] != pre_v:
pre_v = arr[i][depth]
msd_radix_sort(arr, pre_n, i, depth + 1)
pre_n = i
msd_radix_sort(arr, pre_n, end, depth + 1)
def solve_q2(arr, time):
global msd_radix_sort_left
msd_radix_sort_left = time
msd_radix_sort(arr, 0, len(arr), 0)
return arr
def solve_q3(arr):
k = arr[0][0]
l = 0
m = l
h = len(arr) - 1
while m <= h:
v = arr[m][0]
if v < k:
arr[m], arr[l] = arr[l], arr[m]
m += 1
l += 1
elif v == k:
m += 1
else: # arr[m] > k
arr[m], arr[h] = arr[h], arr[m]
h -= 1
return arr
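# solve_q3 is one pass of Dijkstra-style 3-way partitioning keyed on the
# first character, with arr[0][0] as the pivot: keys < pivot end up before
# l, keys == pivot between l and h, and keys > pivot after h.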
q1 = ' 4322 4441 1244 3122 1332 2131 4431 3113 2244 1241'
q2 = ' 1324 3314 1122 3112 4423 3321 3344 4223 1412 1344 4314 4412 1333 2323 3243 '
q3 = ' 5552 5255 3462 2614 6432 5252 6543 6152 5156 5434 '
print_arr(solve_q1(make_arr(q1), 2))
print_arr(solve_q2(make_arr(q2), 3))
print_arr(solve_q3(make_arr(q3)))
| mit | Python |
|
85e7d3b4f69919b274e597b7e8f73377e7d28698 | Add another script for testing purposes | ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod | process_datasets.py | process_datasets.py | """
For testing purposes: Process a specific page on the Solr index.
"""
import os
import sys
import datetime
import json
import uuid
import pandas
import xml.etree.ElementTree as ET
import urllib
from d1graphservice.people import processing
from d1graphservice import settings
from d1graphservice import dataone
from d1graphservice import util
from d1graphservice import validator
from d1graphservice import store
from d1graphservice import multi_store
from d1graphservice.people import processing
from d1graphservice.people.formats import eml
from d1graphservice.people.formats import dryad
from d1graphservice.people.formats import fgdc
if __name__ == "__main__":
# query = "https://cn.dataone.org/cn/v1/query/solr/?fl=author,identifier,title,authoritativeMN&q=author:*Jones*Matthew*&rows=1000&start=0"
# query = "https://cn.dataone.org/cn/v1/query/solr/?fl=author,identifier,title,authoritativeMN&q=author:*Jones*&rows=20&start=0"
query = "https://cn.dataone.org/cn/v1/query/solr/?fl=author,identifier,title,authoritativeMN&q=author:Jeremy*Jones*&rows=20&start=0"
cache_dir = "/Users/mecum/src/d1dump/documents/"
formats_map = util.loadFormatsMap()
namespaces = {
"foaf": "http://xmlns.com/foaf/0.1/",
"dcterms": "http://purl.org/dc/terms/",
"datacite": "http://purl.org/spar/datacite/",
"owl": "http://www.w3.org/2002/07/owl#",
"xsd": "http://www.w3.org/2001/XMLSchema#",
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"glview": "http://schema.geolink.org/dev/view/",
"d1people": "https://dataone.org/person/",
"d1org": "https://dataone.org/organization/",
"d1resolve": "https://cn.dataone.org/cn/v1/resolve/",
"prov": "http://www.w3.org/ns/prov#",
"d1node": "https://cn.dataone.org/cn/v1/node/",
"d1landing": "https://search.dataone.org/#view/",
"d1repo": "https://cn.dataone.org/cn/v1/node/"
}
# Load triple stores
stores = {
'people': store.Store("http://localhost:3030/", 'ds', namespaces),
'organizations': store.Store("http://localhost:3131/", 'ds', namespaces),
'datasets': store.Store("http://localhost:3232/", 'ds', namespaces)
}
for store_name in stores:
stores[store_name].delete_all()
stores = multi_store.MultiStore(stores, namespaces)
vld = validator.Validator()
page_xml = util.getXML(query)
documents = page_xml.findall(".//doc")
for doc in documents:
identifier = doc.find(".//str[@name='identifier']").text
print identifier
scimeta = dataone.getScientificMetadata(identifier, cache=True)
if scimeta is None:
continue
records = processing.extractCreators(identifier, scimeta)
# Add records and organizations
people = [p for p in records if 'type' in p and p['type'] == 'person']
organizations = [o for o in records if 'type' in o and o['type'] == 'organization']
# Always do organizations first, so peoples' organization URIs exist
for organization in organizations:
organization = vld.validate(organization)
stores.addOrganization(organization)
for person in people:
person = vld.validate(person)
stores.addPerson(person)
stores.addDataset(doc, scimeta, formats_map)
stores.save()
| apache-2.0 | Python |
|
d959587c168424ed0d8e91a4a20ea36076a646b7 | add forgotten __init__.py | juxor/dhcpcanon_debian,DHCPAP/dhcpcanon,juga0/dhcpcanon,juxor/dhcpcanon_debian,DHCPAP/dhcpcanon,juga0/dhcpcanon | dhcpcanon/__init__.py | dhcpcanon/__init__.py | __version__ = "0.1"
__author__ = "juga"
| mit | Python |
|
98fe743217ebd7868d11d8518f25430539eae5a0 | add regrresion example | fukatani/stacked_generalization | example/simple_regression_example.py | example/simple_regression_example.py | from sklearn import datasets, metrics, preprocessing
from stacked_generalization.lib.stacking import StackedRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import Ridge
from sklearn.linear_model import LinearRegression
boston = datasets.load_boston()
X = preprocessing.StandardScaler().fit_transform(boston.data)
breg = LinearRegression()
regs = [RandomForestRegressor(n_estimators=50, random_state=1),
GradientBoostingRegressor(n_estimators=25, random_state=1),
ExtraTreesRegressor(),
Ridge(random_state=1)]
sr = StackedRegressor(breg,
regs,
n_folds=3,
verbose=0)
sr.fit(X, boston.target)
score = metrics.mean_squared_error(sr.predict(X), boston.target)
print ("MSE: %f" % score) | apache-2.0 | Python |
|
896270bcd99b26e4128fd35dd3821a59807ae850 | Add the model.py file declarative generated from mysql. | mteule/StationMeteo,mteule/StationMeteo | doc/model/model_decla.py | doc/model/model_decla.py | #autogenerated by sqlautocode
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation
engine = create_engine('mysql://monty:passwd@localhost/test_dia')
DeclarativeBase = declarative_base()
metadata = DeclarativeBase.metadata
metadata.bind = engine
class Metering(DeclarativeBase):
__tablename__ = 'Metering'
__table_args__ = {}
#column definitions
date = Column(u'date', DATE())
id = Column(u'id', INTEGER(), primary_key=True, nullable=False)
sensor_id = Column(u'sensor_id', INTEGER(), ForeignKey('Sensor.id'))
value = Column(u'value', FLOAT())
#relation definitions
Sensor = relation('Sensor', primaryjoin='Metering.sensor_id==Sensor.id')
class Sensor(DeclarativeBase):
__tablename__ = 'Sensor'
__table_args__ = {}
#column definitions
bus_adress = Column(u'bus_adress', VARCHAR(length=255))
description = Column(u'description', VARCHAR(length=255))
high_threshold = Column(u'high_threshold', FLOAT())
id = Column(u'id', INTEGER(), primary_key=True, nullable=False)
low_threshold = Column(u'low_threshold', FLOAT())
max_value = Column(u'max_value', FLOAT())
min_value = Column(u'min_value', FLOAT())
name = Column(u'name', VARCHAR(length=255))
unique_key = Column(u'unique_key', VARCHAR(length=255))
unit = Column(u'unit', VARCHAR(length=255))
unit_label = Column(u'unit_label', VARCHAR(length=255))
#relation definitions
| mit | Python |
|
7aab44f006a6412d8f169c3f9a801f41a6ea0a95 | Remove start dates for the second time from draft dos2 briefs | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api | migrations/versions/880_remove_invalid_draft_dos2_brief_dates_again.py | migrations/versions/880_remove_invalid_draft_dos2_brief_dates_again.py | """Remove dates from draft dos2 briefs.
This is identical to the previous migration but will be run again to cover any draft briefs with invalid
dates that could have appeared during the previous API rollout process (after the previous migration but before
the code propogated fully to the ec2 instances).
Revision ID: 880
Revises: 870
Create Date: 2016-04-07
"""
# revision identifiers, used by Alembic.
revision = '880'
down_revision = '870'
from alembic import op
import sqlalchemy as sa
frameworks_table = sa.Table(
'frameworks',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('slug', sa.String, nullable=False, unique=True, index=True)
)
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('framework_id', sa.Integer, nullable=False),
sa.Column('published_at', sa.DateTime, nullable=True),
sa.Column('data', sa.JSON, nullable=True)
)
def upgrade():
"""Remove question and answer for startDate from briefs.data for draft dos2 briefs."""
conn = op.get_bind()
# SELECT id, data
# FROM briefs JOIN frameworks ON briefs.framework_id = frameworks.id
# WHERE frameworks.slug = 'digital-outcomes-and-specialists-2' AND briefs.published_at IS null;
query = briefs_table.join(
frameworks_table,
briefs_table.c.framework_id == frameworks_table.c.id
).select(
sa.and_(
frameworks_table.c.slug == 'digital-outcomes-and-specialists-2',
briefs_table.c.published_at == sa.null()
)
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if brief_data.pop('startDate', None) is not None:
# UPDATE briefs SET data = _brief_data WHERE id = _brief_id;
query = briefs_table.update().where(briefs_table.c.id==brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
| mit | Python |
|
77ccb8db873c31ad2bd8318118410abab3141312 | add __version__.py | marshq/europilot | europilot/__version__.py | europilot/__version__.py | __title__ = 'europilot'
__description__ = 'End to end driving simulation inside Euro Truck Simulator 2'
__version__ = '0.0.1'
| mit | Python |
|
8c82465a08f5a601e6a43a8eb675136fc3678954 | Create lc960.py | FiveEye/ProblemSet,FiveEye/ProblemSet | LeetCode/lc960.py | LeetCode/lc960.py | def createArray(dims) :
if len(dims) == 1:
return [0 for _ in range(dims[0])]
return [createArray(dims[1:]) for _ in range(dims[0])]
def f(A, x, y):
m = len(A)
for i in range(m):
if A[i][x] > A[i][y]:
return 0
return 1
class Solution(object):
def minDeletionSize(self, A):
"""
:type A: List[str]
:rtype: int
"""
n = len(A[0])
g = createArray([n, n])
for i in range(n):
for j in range(i+1, n):
g[i][j] = f(A, i, j)
dp = createArray([n])
for i in range(0, n):
dp[i] = 1
for j in range(0, i):
if g[j][i] == 1:
if dp[i] < dp[j] + 1:
dp[i] = dp[j] + 1
return n - max(dp)
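# Quick check with the LeetCode 960 sample (expected output: 3):
#   print(Solution().minDeletionSize(["babca", "bbazb"]))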
| mit | Python |
|
3ebae0f57ae3396213eb28b6fc7a23ff3e3c4980 | Create file and add pseudocode | BranSeals/uml-to-cpp | uml-to-cpp.py | uml-to-cpp.py | # Copyright (C) 2017 Bran Seals. All rights reserved.
# Created: 2017-06-05
print("== UML to CPP ==")
print("Create or modify C++ header and implementation files by plaintext UML.")
#print("Enter a UML filename: ") # file import currently disabled
# check if file isn't too bonkers
#uml = [] # pull UML into memory as string list
# check if file is properly formatted
classList = [] # list of classes that will be created, along with members
noteList = [] # if weird things happen, this list will show potential errors
# will be displayed after files are created for user info
# while uml list items exist:
# get class name
# while } not reached:
# if +, put into hppPub
# if -, put into hppPriv
# if neither, put into hppPriv and add message to noteList
# use these to create UmlClass object and append to classList
# for each in classList:
# build hpp list using hpp, hppPublic, hppPrivate
# checkForLibs()
# while hpp list item exists:
# if isFunction, append to functions list
# while functions list item exists:
# format function and append to cpp list
# create name.hpp file and write using hpp list
# create name.cpp file and write using cpp list
# remove object from classList?
class UmlClass:
def __init__(self, className, hppPub, hppPriv):
self.name = className
self.hppPublic = list(hppPub)
self.hppPrivate = list(hppPriv)
functions = [] # list of functions used to build cpp file
hpp = [] # will contain final hpp template, built from hppPub, hppPriv
cpp = [] # same as hpp, but with implementation file
#def isFunction(): # looks for function syntax
# used when creating cpp file from hpp list
#def checkForLibs(): # include libraries for data types that need them
#def formatFunc(): # formats function from hpp to cpp style
# also takes into account return type and variable names | mit | Python |
|
e2ed635fb3289a5b45f5f15cd1eb543d87fb93d7 | Add test for posting a review through the view | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | wafer/talks/tests/test_review_views.py | wafer/talks/tests/test_review_views.py | """Tests for wafer.talk review form behaviour."""
from django.test import Client, TestCase
from django.urls import reverse
from reversion import revisions
from reversion.models import Version
from wafer.talks.models import (SUBMITTED, UNDER_CONSIDERATION,
ReviewAspect, Review)
from wafer.talks.forms import ReviewForm, make_aspect_key
from wafer.tests.utils import create_user
from wafer.talks.tests.fixtures import create_talk
class ReviewFormTests(TestCase):
def setUp(self):
self.reviewer_a = create_user('reviewer_a', perms=('add_review',))
self.talk_a = create_talk('Talk A', SUBMITTED, "author_a")
with revisions.create_revision():
# Ensure we have an initial revision
self.talk_a.save()
self.aspect_1 = ReviewAspect.objects.create(name='General')
self.aspect_2 = ReviewAspect.objects.create(name='Other')
self.client = Client()
def test_review_submission(self):
"""Test that submitting a review works"""
self.client.login(username='reviewer_a', password='reviewer_a_password')
self.assertTrue(Version.objects.get_for_object(self.talk_a), 1)
response = self.client.post(reverse('wafer_talk_review', kwargs={'pk': self.talk_a.pk}),
data={'notes': 'Review notes',
make_aspect_key(self.aspect_1): '1',
make_aspect_key(self.aspect_2): '2'})
self.assertEqual(response.status_code, 302)
review = Review.objects.get(talk=self.talk_a, reviewer=self.reviewer_a)
self.assertEqual(review.avg_score, 1.5)
self.talk_a.refresh_from_db()
self.assertEqual(self.talk_a.status, UNDER_CONSIDERATION)
self.assertTrue(Version.objects.get_for_object(self.talk_a), 2)
self.assertTrue(review.is_current())
| isc | Python |
|
466410249867b3eadbe5e2b59c46c95ecd288c6c | Add script for word counts | berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud | python_scripts/solr_query_fetch_all.py | python_scripts/solr_query_fetch_all.py | #!/usr/bin/python
import requests
import ipdb
import time
import csv
import sys
import pysolr
def fetch_all( solr, query ) :
documents = []
num_matching_documents = solr.search( query ).hits
start = 0
rows = num_matching_documents
sys.stderr.write( ' starting fetch for ' + query )
while ( len( documents ) < num_matching_documents ) :
results = solr.search( query, **{
'start': start,
'rows': rows,
# 'fl' : 'media_id',
})
documents.extend( results.docs )
start += rows
assert len( documents ) <= num_matching_documents
assert len( documents ) == num_matching_documents
return documents
solr = pysolr.Solr('http://localhost:8983/solr/')
queries = [ '*:*',
]
for query in queries:
print query
results = fetch_all( solr, query )
print "got " + query
print results
| agpl-3.0 | Python |
|
3f69fae4f15efff515b82f216de36dd6d57807e9 | add ci_test.py file for ci | quxiaolong1504/cloudmusic | settings/ci_test.py | settings/ci_test.py | __author__ = 'quxl'
from base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
| mpl-2.0 | Python |
|
50f0e040f363e52a390efc6acd1bc0bc0ddcabcc | Add test funcs in report_reader for DB reading | AnselCmy/ARPS,hbtech-ai/ARPS,hbtech-ai/ARPS,AnselCmy/ARPS,HeadCow/ARPS,HeadCow/ARPS,AnselCmy/ARPS | report_reader.py | report_reader.py | import pymongo as pm
def connectDB():
conn = pm.MongoClient('localhost', 27017)
db = conn.get_database('report_db')
return db
def getColList(db):
return db.collection_names()
def getDocNum(col):
return col.find().count()
def match(col, matchDict):
return list(col.find(matchDict))
def main():
db = connectDB()
print(getColList(db))
col = db['col20170503']
print(getDocNum(col))
print(match(col, {'school':'HFUT'}))
if __name__ == '__main__':
main() | mit | Python |
|
1c9d398be7f99f15fb550adca31f3366870930e3 | Set debug to false in prod, otherwise true | Code4Nepal/nepalmap_app,Code4Nepal/nepalmap_app,cliftonmcintosh/nepalmap_app,cliftonmcintosh/nepalmap_app,Code4Nepal/nepalmap_app,Code4Nepal/nepalmap_app,cliftonmcintosh/nepalmap_app,cliftonmcintosh/nepalmap_app | wazimap_np/settings.py | wazimap_np/settings.py | # pull in the default wazimap settings
from wazimap.settings import * # noqa
DEBUG = False if (os.environ.get('APP_ENV', 'dev') == 'prod') else True
# install this app before Wazimap
INSTALLED_APPS = ['wazimap_np'] + INSTALLED_APPS
DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://wazimap_np:wazimap_np@localhost/wazimap_np')
DATABASES['default'] = dj_database_url.parse(DATABASE_URL)
DATABASES['default']['ATOMIC_REQUESTS'] = True
SCHEME = 'http' if (os.environ.get('APP_ENV', 'dev') == 'dev') else 'https'
URL = SCHEME+'://'+'nepalmap.org'
# Localise this instance of Wazimap
WAZIMAP['name'] = 'NepalMap'
# NB: this must be https if your site supports HTTPS.
WAZIMAP['url'] = URL
WAZIMAP['country_code'] = 'NP'
WAZIMAP['profile_builder'] = 'wazimap_np.profiles.get_census_profile'
WAZIMAP['levels'] = {
'country': {
'plural': 'countries',
'children': ['district']
},
'district': {
'plural': 'districts',
'children': ['vdc']
},
'vdc': {
'plural': 'vdcs',
'children': []
}
}
WAZIMAP['comparative_levels'] = ['country', 'district', 'vdc']
WAZIMAP['geometry_data'] = {
'country': 'geo/country.topojson',
'district': 'geo/district.topojson',
'vdc': 'geo/vdc.topojson'
}
WAZIMAP['ga_tracking_id'] = os.environ.get('GA_TRACKING_ID')
WAZIMAP['twitter'] = '@codefornepal'
WAZIMAP['map_centre'] = [28.229651, 83.8165328]
WAZIMAP['map_zoom'] = 7
# Custom Settings
WAZIMAP['email'] = '[email protected]'
WAZIMAP['github'] = 'https://github.com/Code4Nepal/nepalmap_app'
WAZIMAP['tagline'] = 'Explore and understand Nepal using data'
WAZIMAP['facebook'] = 'codefornepal'
WAZIMAP['twittercard'] = True
| # pull in the default wazimap settings
from wazimap.settings import * # noqa
# install this app before Wazimap
INSTALLED_APPS = ['wazimap_np'] + INSTALLED_APPS
DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://wazimap_np:wazimap_np@localhost/wazimap_np')
DATABASES['default'] = dj_database_url.parse(DATABASE_URL)
DATABASES['default']['ATOMIC_REQUESTS'] = True
SCHEME = 'http' if (os.environ.get('APP_ENV', 'dev') == 'dev') else 'https'
URL = SCHEME+'://'+'nepalmap.org'
# Localise this instance of Wazimap
WAZIMAP['name'] = 'NepalMap'
# NB: this must be https if your site supports HTTPS.
WAZIMAP['url'] = URL
WAZIMAP['country_code'] = 'NP'
WAZIMAP['profile_builder'] = 'wazimap_np.profiles.get_census_profile'
WAZIMAP['levels'] = {
'country': {
'plural': 'countries',
'children': ['district']
},
'district': {
'plural': 'districts',
'children': ['vdc']
},
'vdc': {
'plural': 'vdcs',
'children': []
}
}
WAZIMAP['comparative_levels'] = ['country', 'district', 'vdc']
WAZIMAP['geometry_data'] = {
'country': 'geo/country.topojson',
'district': 'geo/district.topojson',
'vdc': 'geo/vdc.topojson'
}
WAZIMAP['ga_tracking_id'] = os.environ.get('GA_TRACKING_ID')
WAZIMAP['twitter'] = '@codefornepal'
WAZIMAP['map_centre'] = [28.229651, 83.8165328]
WAZIMAP['map_zoom'] = 7
# Custom Settings
WAZIMAP['email'] = '[email protected]'
WAZIMAP['github'] = 'https://github.com/Code4Nepal/nepalmap_app'
WAZIMAP['tagline'] = 'Explore and understand Nepal using data'
WAZIMAP['facebook'] = 'codefornepal'
WAZIMAP['twittercard'] = True
| mit | Python |
22578771d9812a21361ec959d16e3eaacba998e3 | Add APData Info collector | juvenal/RadioController | APData/APInfo.py | APData/APInfo.py | #
#
#
#
class APInfo:
"""..."""
# Protected members
__IPAddress = ""
__MACAddress = ""
__Channel = 0
__Region = 0
__Localization = ""
__TxPowerList = []
__CurrentPowerIndex = -1
__UnderloadLimit = -1
__OverloadLimit = -1
__Reachable = False
__Enabled = False
__EMailSent = False
__SupportedOS = ""
# Public members mirrors
# Class initialization
def __init__(self):
#
# Set the AP transmission power (Tx)
def updateTxPowerIndex(self, newTxPower):
#
if newTxPower < 0:
self.CurrentPowerIndex = len(self.TxPowerList) - 1
else:
for powerTxIndex in self.TxPowerList:
if newTxPower > self.TxPowerList[powerTxIndex]:
break
self.CurrentPowerIndex = powerTxIndex
# Heroku: User: [email protected] / Senha: w4cpvX3DWw
# wget -qO- https://toolbelt.heroku.com/install-ubuntu.sh | sh
| lgpl-2.1 | Python |
|
bd1e135a6ffd9186451ec02fcbcaab7f9066e40f | Add breakpad fetch recipe. | svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools | recipes/breakpad.py | recipes/breakpad.py | # Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Breakpad(recipe_util.Recipe):
@staticmethod
def fetch_spec(props):
url = 'https://chromium.googlesource.com/breakpad/breakpad.git'
solution = {
'name': 'src',
'url': url,
'managed': False,
'custom_deps': {},
'safesync_url': '',
}
spec = {
'solutions': [solution],
}
if props.get('target_os'):
spec['target_os'] = props['target_os'].split(',')
if props.get('target_os_only'):
spec['target_os_only'] = props['target_os_only']
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Breakpad().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | Python |
|
4996ddddc14ad0d20759abbcf4d54e6132b7b028 | Add the dj_redis_url file | dstufft/dj-redis-url | dj_redis_url.py | dj_redis_url.py | # -*- coding: utf-8 -*-
import os
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
# Register database schemes in URLs.
urlparse.uses_netloc.append("redis")
DEFAULT_ENV = "REDIS_URL"
def config(env=DEFAULT_ENV, default=None, **overrides):
"""Returns configured REDIS dictionary from REDIS_URL."""
config = {}
s = os.environ.get(env, default)
if s:
config = parse(s)
overrides = dict([(k.upper(), v) for k, v in overrides.items()])
config.update(overrides)
return config
def parse(url):
"""Parses a database URL."""
config = {}
url = urlparse.urlparse(url)
# Remove query strings.
path = url.path[1:]
path = path.split('?', 2)[0]
# Update with environment configuration.
config.update({
"DB": int(path or 0),
"PASSWORD": url.password,
"HOST": url.hostname,
"PORT": url.port,
})
return config
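# Example (hypothetical URL):
#   parse("redis://:s3cr3t@localhost:6379/2")
#   -> {'DB': 2, 'PASSWORD': 's3cr3t', 'HOST': 'localhost', 'PORT': 6379}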
| bsd-2-clause | Python |
|
1487722c0431fce19d54b1b020c3af0ab411cc8a | Add sample config.py file | mattstibbs/twilio-snippets | rename_to_config.py | rename_to_config.py | account_sid = "ACXXXXXXXXXXXXXXXXX"
auth_token = "XXXXXXXXXXXXXXXX"
from_number = "+441111222333"
to_number = "+447777222333"
| mit | Python |
|
0cc3aafced65d2f128a8036aad62edb5ee19f566 | Add brume main script | flou/brume,geronimo-iia/brume | scripts/brume.py | scripts/brume.py | #!/usr/bin/env python
import os
import click
import yaml
from glob import glob
from subprocess import check_output
from brume.template import CfnTemplate
from brume.stack import Stack
def load_configuration(config_file='brume.yml'):
"""Return the YAML configuration for a project based on the `config_file` template."""
from jinja2 import Template
def env(key):
"""Return the value of the `key` environment variable."""
return os.getenv(key, None)
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
def git_branch():
"""Return the name of the current Git branch."""
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
template = Template(open(config_file, 'r').read())
return yaml.load(template.render(env=env, git_commit=git_commit(), git_branch=git_branch()))
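# A minimal brume.yml consumed here might look like (hypothetical values):
#   templates:
#     s3_bucket: my-templates-bucket
#   stack:
#     stack_name: my-stack-{{ git_branch }}
#     region: "{{ env('AWS_REGION') }}"
# Only the top-level 'templates' and 'stack' keys are read below.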
conf = load_configuration()
s3_config = conf['templates']
cf_config = conf['stack']
def collect_templates():
return [CfnTemplate(t) for t in glob('*.cform')]
@click.command()
def config():
"""Print the current stack confguration."""
print(yaml.dump(conf))
@click.command()
def create():
"""Create a new CloudFormation stack."""
stack = Stack(cf_config)
stack.create()
stack.tail()
@click.command()
def update():
"""Update an existing CloudFormation stack."""
stack = Stack(cf_config)
stack.update()
stack.tail()
@click.command()
def deploy():
"""Create or update a CloudFormation stack."""
stack = Stack(cf_config)
stack.create_or_update()
stack.tail()
@click.command()
def delete():
"""Delete a CloudFormation stack."""
stack = Stack(cf_config)
stack.delete()
stack.tail()
@click.command()
def validate():
"""Validate CloudFormation templates."""
templates = collect_templates()
return map(lambda t: t.validate(), templates)
@click.command()
def events():
"""Tail the events of the stack."""
Stack(cf_config).tail()
@click.command()
@click.option('--bucket', required=True, help='Name of the bucket')
@click.option('--prefix', required=True, help='Prefix to the file name')
def upload(templates, bucket, path_prefix):
"""Upload CloudFormation templates to S3."""
[t.upload(bucket, path_prefix) for t in templates]
return templates
@click.group()
def cli():
pass
cli.add_command(create)
cli.add_command(update)
cli.add_command(deploy)
cli.add_command(upload)
cli.add_command(delete)
cli.add_command(validate)
cli.add_command(config)
cli.add_command(events)
if __name__ == '__main__':
cli()
| mit | Python |
|
03bfd2059cfaa7043cbcd941465df6b790f84726 | add `branch.py` script for initial email burst | mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com | branch.py | branch.py | """
This is a one-off script to populate the new `emails` table using the addresses
we have in `participants` and `elsewhere`.
"""
from __future__ import division, print_function, unicode_literals
import uuid
from aspen.utils import utcnow
import gratipay.wireup
env = gratipay.wireup.env()
db = gratipay.wireup.db(env)
gratipay.wireup.mail(env)
INITIAL_EMAIL = dict(
subject="Connect to {username} on Gratipay?",
html="""\
<div style="text-align: center; font: normal 14px/21px Arial, sans-serif; color: #333;">
We're working on adding email notifications to Gratipay (formerly Gittip)
and we're sending this email to confirm that you (<b>{email}</b>)
<br>
are the owner of the <b><a href="https://gratipay.com/{username}">{username}</a></b>
account on Gratipay. Sound familiar?
<br>
<br>
<a href="{link}" style="color: #fff; text-decoration:none; display:inline-block; padding: 0 15px; background: #396; font: normal 14px/40px Arial, sans-serif; white-space: nowrap; border-radius: 3px">Yes, proceed!</a>
</div>
""",
text="""\
We're working on adding email notifications to Gratipay (formerly Gittip)
and we're sending this email to confirm that you ({email}) are the owner
of the {username} account on Gratipay. Sound familiar? Follow this link
to finish connecting your email:
{link}
""",
)
def add_email(self, email):
nonce = str(uuid.uuid4())
ctime = utcnow()
db.run("""
INSERT INTO emails
(address, nonce, ctime, participant)
VALUES (%s, %s, %s, %s)
""", (email, nonce, ctime, self.username))
username = self.username_lower
link = "https://gratipay.com/{username}/verify-email.html?email={email}&nonce={nonce}"
self.send_email(INITIAL_EMAIL,
email=email,
link=link.format(**locals()),
username=self.username,
include_unsubscribe=False)
participants = db.all("""
UPDATE participants p
SET email = (e.email, false)
FROM (
SELECT DISTINCT ON (participant)
participant, email
FROM elsewhere
WHERE email IS NOT NULL AND email <> ''
ORDER BY participant, platform = 'github' DESC
) e
WHERE e.participant = p.username
AND p.email IS NULL
AND NOT p.is_closed
AND p.is_suspicious IS NOT true
AND p.claimed_time IS NOT NULL;
SELECT p.*::participants
FROM participants p
WHERE email IS NOT NULL
AND NOT is_closed
AND is_suspicious IS NOT true
AND claimed_time IS NOT NULL;
""")
total = len(participants)
for i, p in enumerate(participants, 1):
print('sending email to %s (%i/%i)' % (p.username, i, total))
add_email(p, p.email.address)
| mit | Python |
|
bf02019c8b97d8dc35e3e186b31cb57adac6a8ec | Create a measurement | shane-kerr/ripe-atlas-shrugd | shrugd-create.py | shrugd-create.py | import ripe.atlas.cousteau
from atlaskeys import create_key
# DNS query properties
query_argument = "wide.ad.jp"
query_type = "AAAA"
dnssec_ok = True
set_nsid_bit = True
# IP addresses to start from
dns_server_ips = [
"199.7.91.13", "2001:500:2d::d", # D.ROOT-SERVERS.NET
"192.203.230.10", # E.ROOT-SERVERS.NET
]
def ip_address_family(ip_addr):
"""Return whether an IP address is IPv4 or IPv6"""
if ':' in ip_addr:
return 6
else:
return 4
dns_measurements = []
for ip_addr in dns_server_ips:
dns_query = ripe.atlas.cousteau.Dns(
target=ip_addr,
af=ip_address_family(ip_addr),
query_argument=query_argument,
query_type=query_type,
query_class="IN",
set_nsid_bit=set_nsid_bit,
udp_payload_size=4096,
description="shrugd " + query_argument + "/"
)
dns_measurements.append(dns_query)
    break  # stop after the first server, so only a single measurement is created
# XXX: possibly should at least pick good IPv6 servers when querying over IPv6
source = ripe.atlas.cousteau.AtlasSource(type="area", value="WW", requested=1)
atlas_request = ripe.atlas.cousteau.AtlasCreateRequest(
key=create_key,
measurements=dns_measurements,
sources=[source],
is_oneoff=True
)
(is_success, response) = atlas_request.create()
if is_success:
print("worked, IDs: %s" % response)
else:
print("did not work")
| agpl-3.0 | Python |
|
323176a9749d37d05e87339fe34b50b90cc6b663 | add solution for Maximum Product Subarray | zhyu/leetcode,zhyu/leetcode | src/maximumProductSubarray.py | src/maximumProductSubarray.py | class Solution:
# @param A, a list of integers
# @return an integer
def maxProduct(self, A):
if not A:
return 0
if len(A) == 1:
return A[0]
maxV, minV = A[0], A[0]
res = maxV
for val in A[1:]:
if val > 0:
maxV, minV = max(val, maxV * val), min(val, minV * val)
else:
maxV, minV = max(val, minV * val), min(val, maxV * val)
res = max(res, maxV)
return res
| mit | Python |
|
abd23cbc80149d4f2985eb8aef5d893714cca717 | add a script to reset the db | Psycojoker/dierentheater,Psycojoker/dierentheater,Psycojoker/dierentheater | scripts/reset_db.py | scripts/reset_db.py | from scraper import clean
def run():
    if raw_input("Are you sure? Then write 'yes': ") == "yes":
clean()
| agpl-3.0 | Python |
|
43e5727d4091e0b6cb11e0e13ea9f7daf69628fc | Add corpusPreProcess. | wxpftd/ngram | corpusPreProcess.py | corpusPreProcess.py | #! /usr/share/env python
# -*- coding=utf-8 -*-
resultFile = open('corpus/BigCorpusPre.txt', 'w')
with open('corpus/BigCorpus.txt', 'r') as f:
for line in f:
line = line[line.find(':')+1:]
resultFile.write(line.strip()+'\n')
resultFile.close()
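# Example transformation (hypothetical line):
#   "doc_001: some sentence here" -> "some sentence here"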
| apache-2.0 | Python |
|
82089ad5e5c0d597cfdd16575b4fa5a9a09415ff | introduce plumbery from the command line -- no python coding, yeah! | bernard357/plumbery,DimensionDataCBUSydney/plumbery,bernard357/plumbery,DimensionDataCBUSydney/plumbery | plumbery/__main__.py | plumbery/__main__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Runs plumbery from the command line
Example::
$ python -m plumbery fittings.yaml build web
To get some help, you can type::
$ python -m plumbery -h
"""
import sys
import argparse
from engine import PlumberyEngine
parser = argparse.ArgumentParser(
prog='plumbery',
description='Plumbing infrastructure with Apache Libcloud.')
parser.add_argument(
'fittings',
nargs=1,
help='File that is containing fittings plan')
parser.add_argument(
'action',
nargs=1,
help="Either 'build', 'start', 'polish', 'stop' or 'destroy'")
parser.add_argument(
'blueprint',
nargs='?',
help='Name of the selected blueprint. '
'If omitted, all blueprints will be considered',
default=None)
args = parser.parse_args()
engine = PlumberyEngine(args.fittings[0])
verb = args.action[0].lower()
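# dispatch the requested action, either across all blueprints or on the named one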
if verb == 'build':
if args.blueprint is None:
engine.build_all_blueprints()
else:
engine.build_blueprint(args.blueprint)
elif verb == 'start':
if args.blueprint is None:
engine.start_all_nodes()
else:
engine.start_nodes(args.blueprint)
elif verb == 'polish':
if args.blueprint is None:
engine.polish_all_blueprints()
else:
engine.polish_blueprint(args.blueprint)
elif verb == 'stop':
if args.blueprint is None:
engine.stop_all_nodes()
else:
engine.stop_nodes(args.blueprint)
elif verb == 'destroy':
if args.blueprint is None:
engine.destroy_all_blueprints()
else:
engine.destroy_blueprint(args.blueprint)
else:
print("{}: error: unrecognised action '{}'".format('plumbery', args.action[0]))
parser.print_help()
sys.exit(2)
| apache-2.0 | Python |
|
7182af317116db7eb3f7a278b3487ad91a3b3331 | Add example for a clunky 3D high resolution loupe for napari | jni/useful-histories | high-res-slider.py | high-res-slider.py | import functools
import numpy as np
import dask.array as da
from magicgui.widgets import Slider, Container
import napari
# stack = ... # your dask array
# stack2 = stack[::2, ::2, ::2]
# stack4 = stack2[::2, ::2, ::2]
# 👆 quick and easy multiscale pyramid, don't do this really
# see https://github.com/dask/dask-image/issues/136
# for better ways
# and, specifically, stack4 will be small but will still need
# to access full data. You should save all data sizes as
# their own arrays on disk and load those. I recommend
# using dask.array.Array.to_zarr.
# You can also read about NGFF:
# https://ngff.openmicroscopy.org/latest/
# example with some example data from Liu et al, Science, 2018
stack, stack2, stack4 = [
da.from_zarr(f'/Users/jni/data/gokul-lls/{i}.zarr')[0]
for i in range(3)
]
# a list of arrays of decreasing size is interpreted as
# a multiscale dataset by napari
multiscale_data = [stack, stack2, stack4]
viewer = napari.Viewer(ndisplay=3)
multiscale_layer = viewer.add_image(
multiscale_data,
colormap='magenta',
scale=[3, 1, 1],
)
crop_sizes = (30, 256, 256)
cropz, cropy, cropx = crop_sizes
shapez, shapey, shapex = stack.shape
ends = np.asarray(stack.shape) - np.asarray(crop_sizes) + 1
stepsizes = ends // 100
highres_crop_layer = viewer.add_image(
stack[:cropz, :cropy, :cropx],
name='cropped',
blending='additive',
colormap='green',
scale=multiscale_layer.scale,
)
def set_slice(axis, value):
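# recover the crop's current (z, y, x) offset from the layer translation,
# move the requested axis, then re-slice the full-resolution stack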
idx = int(value)
scale = np.asarray(highres_crop_layer.scale)
translate = np.asarray(highres_crop_layer.translate)
izyx = (translate // scale).astype(int)
izyx[axis] = idx
i, j, k = izyx
highres_crop_layer.data = stack[i:i + cropz, j:j + cropy, k:k + cropx]
highres_crop_layer.translate = scale * izyx
highres_crop_layer.refresh()
sliders = [
Slider(name=axis, min=0, max=end, step=step)
for axis, end, step in zip('zyx', ends, stepsizes)
]
for axis, slider in enumerate(sliders):
slider.changed.connect(
lambda event, axis=axis: set_slice(axis, event.value)
)
container_widget = Container(layout='vertical')
container_widget.extend(sliders)
viewer.window.add_dock_widget(container_widget, area='right')
napari.run()
| bsd-3-clause | Python |
|
1e104af5dc1ef5cbec4bfad62a1691bd0c784caf | Add lstm with zoneout (slow to converge). | LaurentMazare/deep-models,LaurentMazare/deep-models | rhn/lstm_zoneout.py | rhn/lstm_zoneout.py | # LSTM implementation using zoneout as described in
# Zoneout: Regularizing RNNs by Randomly Preserving Hidden Activations
# https://arxiv.org/abs/1606.01305
from keras import backend as K
from keras.layers import LSTM, time_distributed_dense
from keras import initializations, activations, regularizers
from keras.engine import InputSpec
class LSTM_zoneout(LSTM):
def __init__(self, output_dim, zoneout_h=0., zoneout_c=0., **kwargs):
self.zoneout_h = zoneout_h
self.zoneout_c = zoneout_c
if self.zoneout_h or self.zoneout_c:
self.uses_learning_phase = True
super(LSTM_zoneout, self).__init__(output_dim, **kwargs)
def zoneout(self, v, prev_v, pr=0.):
diff = v - prev_v
diff = K.in_train_phase(K.dropout(diff, pr, noise_shape=(self.output_dim,)), diff)
# In testing, return v * (1-pr) + pr * prev_v
# In training when dropout returns 0, return prev_v
# when dropout diff/(1-pr), return v
return prev_v + diff * (1-pr)
def step(self, x, states):
h_tm1 = states[0]
c_tm1 = states[1]
B_U = states[2]
B_W = states[3]
if self.consume_less == 'gpu':
z = K.dot(x * B_W[0], self.W) + K.dot(h_tm1 * B_U[0], self.U) + self.b
z0 = z[:, :self.output_dim]
z1 = z[:, self.output_dim: 2 * self.output_dim]
z2 = z[:, 2 * self.output_dim: 3 * self.output_dim]
z3 = z[:, 3 * self.output_dim:]
i = self.inner_activation(z0)
f = self.inner_activation(z1)
c = f * c_tm1 + i * self.activation(z2)
o = self.inner_activation(z3)
else:
if self.consume_less == 'cpu':
x_i = x[:, :self.output_dim]
x_f = x[:, self.output_dim: 2 * self.output_dim]
x_c = x[:, 2 * self.output_dim: 3 * self.output_dim]
x_o = x[:, 3 * self.output_dim:]
elif self.consume_less == 'mem':
x_i = K.dot(x * B_W[0], self.W_i) + self.b_i
x_f = K.dot(x * B_W[1], self.W_f) + self.b_f
x_c = K.dot(x * B_W[2], self.W_c) + self.b_c
x_o = K.dot(x * B_W[3], self.W_o) + self.b_o
else:
raise Exception('Unknown `consume_less` mode.')
i = self.inner_activation(x_i + K.dot(h_tm1 * B_U[0], self.U_i))
f = self.inner_activation(x_f + K.dot(h_tm1 * B_U[1], self.U_f))
c = f * c_tm1 + i * self.activation(x_c + K.dot(h_tm1 * B_U[2], self.U_c))
o = self.inner_activation(x_o + K.dot(h_tm1 * B_U[3], self.U_o))
if self.zoneout_c:
c = self.zoneout(c, c_tm1, pr=self.zoneout_c)
h = o * self.activation(c)
if self.zoneout_h:
h = self.zoneout(h, h_tm1, pr=self.zoneout_h)
return h, [h, c]
| apache-2.0 | Python |
|
72f32099411644a3fed6103430f7dd78fb0929a5 | Add new content parser class (based upon code in Konstruktuer) | fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur | konstrukteur/ContentParser.py | konstrukteur/ContentParser.py | #
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
import glob, os
from jasy.core import Console
import konstrukteur.Language
import konstrukteur.Util
class ContentParser:
""" Content parser class for Konstrukteur """
def __init__(self, extensions, fixJasyCommands, defaultLanguage):
self.__extensions = extensions
self.__extensionParser = {}
self.__extensionParser["html"] = konstrukteur.HtmlParser
self.__id = 1
self.__commandReplacer = []
self.__fixJasyCommands = fixJasyCommands
self.__languages = {}
self.__defaultLanguage = defaultLanguage
def parse(self, pagesPath, pages, languages):
#pagesPath = os.path.join(self.__contentPath, sourcePath)
Console.info("Parse content files at %s" % pagesPath)
Console.indent()
for extension in self.__extensions:
for filename in glob.iglob(os.path.join(pagesPath, "*.%s" % extension)):
basename = os.path.basename(filename)
Console.debug("Parsing %s" % basename)
page = self.__parseContentFile(filename, extension)
if page:
self.generateFields(page, languages)
pages.append(page)
else:
Console.error("Error parsing %s" % filename)
Console.outdent()
def generateFields(self, page, languages):
for key, value in page.items():
page[key] = self.__fixJasyCommands(value)
if "slug" in page:
page["slug"] =konstrukteur.Util.fixSlug(page["slug"])
else:
page["slug"] = konstrukteur.Util.fixSlug(page["title"])
page["content"] = konstrukteur.Util.fixCoreTemplating(page["content"])
if not "status" in page:
page["status"] = "published"
if not "pos" in page:
page["pos"] = 0
else:
page["pos"] = int(page["pos"])
if not "lang" in page:
page["lang"] = self.__defaultLanguage
if page["lang"] not in languages:
languages.append(page["lang"])
return page
def __parseContentFile(self, filename, extension):
""" Parse single content file """
if not extension in self.__extensionParser:
raise RuntimeError("No content parser for extension %s registered" % extension)
return self.__extensionParser[extension].parse(filename)
| mit | Python |
|
1d20bec9306904a6d676c4e1e34a07a842a7a600 | Add the IGMP file which got left out. | gvnn3/PCS,gvnn3/PCS | pcs/packets/igmp.py | pcs/packets/igmp.py | import pcs
from socket import AF_INET, inet_ntop
import struct
import inspect
import time
import igmpv2
import igmpv3
#import dvmrp
#import mtrace
IGMP_HOST_MEMBERSHIP_QUERY = 0x11
IGMP_v1_HOST_MEMBERSHIP_REPORT = 0x12
IGMP_DVMRP = 0x13
IGMP_v2_HOST_MEMBERSHIP_REPORT = 0x16
IGMP_HOST_LEAVE_MESSAGE = 0x17
IGMP_v3_HOST_MEMBERSHIP_REPORT = 0x22
IGMP_MTRACE_REPLY = 0x1e
IGMP_MTRACE_QUERY = 0x1f
map = {
IGMP_HOST_MEMBERSHIP_QUERY: igmpv2.igmpv2,
IGMP_v1_HOST_MEMBERSHIP_REPORT: igmpv2.igmpv2,
#IGMP_DVMRP: dvmrp.dvmrp,
IGMP_v2_HOST_MEMBERSHIP_REPORT: igmpv2.igmpv2,
IGMP_HOST_LEAVE_MESSAGE: igmpv2.igmpv2,
#IGMP_MTRACE_REPLY: mtrace.reply,
#IGMP_MTRACE_QUERY: mtrace.query,
IGMP_v3_HOST_MEMBERSHIP_REPORT: igmpv3.report
}
descr = {
IGMP_HOST_MEMBERSHIP_QUERY: "IGMPv2 Query",
IGMP_v1_HOST_MEMBERSHIP_REPORT: "IGMPv1 Report",
IGMP_DVMRP: "DVMRP",
IGMP_v2_HOST_MEMBERSHIP_REPORT: "IGMPv2 Report",
IGMP_HOST_LEAVE_MESSAGE: "IGMPv2 Leave",
IGMP_MTRACE_REPLY: "MTRACE Reply",
IGMP_MTRACE_QUERY: "MTRACE Query",
IGMP_v3_HOST_MEMBERSHIP_REPORT: "IGMPv3 Report"
}
class igmp(pcs.Packet):
"""IGMP"""
_layout = pcs.Layout()
_map = map
_descr = descr
def __init__(self, bytes = None, timestamp = None):
""" Define the common IGMP encapsulation; see RFC 2236. """
type = pcs.Field("type", 8, discriminator=True)
code = pcs.Field("code", 8)
checksum = pcs.Field("checksum", 16)
pcs.Packet.__init__(self, [type, code, checksum], bytes = bytes)
# Description MUST be set after the PCS layer init
self.description = inspect.getdoc(self)
if timestamp is None:
self.timestamp = time.time()
else:
self.timestamp = timestamp
if bytes is not None:
offset = self.sizeof()
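# v2 and v3 queries share type 0x11; the message length decides which decoder applies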
if self.type == IGMP_HOST_MEMBERSHIP_QUERY and \
len(bytes) >= igmpv3.IGMP_V3_QUERY_MINLEN:
self.data = igmpv3.query(bytes[offset:len(bytes)],
timestamp = timestamp)
else:
self.data = self.next(bytes[offset:len(bytes)],
timestamp = timestamp)
else:
self.data = None
def __str__(self):
"""Walk the entire packet and pretty print the values of the fields."""
retval = self._descr[self.type] + "\n"
for field in self._layout:
retval += "%s %s\n" % (field.name, field.value)
return retval
| bsd-3-clause | Python |
|
2cadad76c2756852b94948088e92b9191abebbb7 | make one pickle file with all metadata (for faster loading) | 317070/kaggle-heart | generate_metadata_pkl.py | generate_metadata_pkl.py | import argparse
from dicom.sequence import Sequence
import glob
import re
from log import print_to_file
import cPickle as pickle
def read_slice(path):
return pickle.load(open(path))['data']
def convert_to_number(value):
value = str(value)
try:
if "." in value:
return float(value)
else:
return int(value)
except:
pass
return value
def clean_metadata(metadatadict):
# Do cleaning
keys = sorted(list(metadatadict.keys()))
for key in keys:
value = metadatadict[key]
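# DICOM ages are strings like '056Y'; strip the trailing unit before converting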
if key == 'PatientAge':
metadatadict[key] = int(value[:-1])
elif key == 'PatientSex':
metadatadict[key] = 1 if value == 'F' else -1
else:
if isinstance(value, Sequence):
#convert to list
value = [i for i in value]
if isinstance(value, (list,)):
metadatadict[key] = [convert_to_number(i) for i in value]
else:
metadatadict[key] = convert_to_number(value)
return metadatadict
def read_metadata(path):
d = pickle.load(open(path))['metadata'][0]
metadata = clean_metadata(d)
return metadata
def get_patient_data(patient_data_path):
patient_data = []
spaths = sorted(glob.glob(patient_data_path + r'/*.pkl'),
key=lambda x: int(re.search(r'/*_(\d+)\.pkl$', x).group(1)))
pid = re.search(r'/(\d+)/study$', patient_data_path).group(1)
for s in spaths:
slice_id = re.search(r'/(.*_\d+\.pkl)$', s).group(1)
metadata = read_metadata(s)
patient_data.append({'metadata': metadata,
'slice_id': slice_id})
return patient_data, pid
def get_metadata(data_path):
patient_paths = sorted(glob.glob(data_path + '/*/study'))
metadata_dict = {}
for p in patient_paths:
patient_data, pid = get_patient_data(p)
print "patient", pid
metadata_dict[pid] = dict()
for pd in patient_data:
metadata_dict[pid][pd['slice_id']] = pd['metadata']
filename = data_path.split('/')[-1] + '_metadata.pkl'
with open(filename, 'w') as f:
pickle.dump(metadata_dict, f)
print 'saved to ', filename
return metadata_dict
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
required = parser.add_argument_group('required arguments')
#required.add_argument('-c', '--config',
# help='configuration to run',
# required=True)
args = parser.parse_args()
data_paths = ['/mnt/storage/data/dsb15_pkl/pkl_train', '/mnt/storage/data/dsb15_pkl/pkl_validate']
with print_to_file("/mnt/storage/metadata/kaggle-heart/logs/generate_metadata.log"):
for d in data_paths:
get_metadata(d)
print "log saved to '%s'" % ("/mnt/storage/metadata/kaggle-heart/logs/generate_metadata.log")
| mit | Python |
|
8939e873f4ea61169f9384eded5b8c603cfde988 | Add crypto pre-submit that will add the openssl builder to the default try-bot list. | ropik/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,gavinp/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,gavinp/chromium,ropik/chromium,yitian134/chromium,ropik/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,ropik/chromium,adobe/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,adobe/chromium | crypto/PRESUBMIT.py | crypto/PRESUBMIT.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chromium presubmit script for src/net.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def GetPreferredTrySlaves(project, change):
# Changes in crypto often need a corresponding OpenSSL edit.
return ['linux_redux']
| bsd-3-clause | Python |
|
27ed31c7a21c4468bc86aaf220e30315e366c425 | add message to SearxParameterException - fixes #1722 | jcherqui/searx,jcherqui/searx,asciimoo/searx,asciimoo/searx,asciimoo/searx,jcherqui/searx,dalf/searx,asciimoo/searx,dalf/searx,dalf/searx,jcherqui/searx,dalf/searx | searx/exceptions.py | searx/exceptions.py | '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2017- by Alexandre Flament, <[email protected]>
'''
class SearxException(Exception):
pass
class SearxParameterException(SearxException):
def __init__(self, name, value):
if value == '' or value is None:
message = 'Empty ' + name + ' parameter'
else:
message = 'Invalid value "' + value + '" for parameter ' + name
super(SearxParameterException, self).__init__(message)
self.message = message
self.parameter_name = name
self.parameter_value = value
| '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2017- by Alexandre Flament, <[email protected]>
'''
class SearxException(Exception):
pass
class SearxParameterException(SearxException):
def __init__(self, name, value):
if value == '' or value is None:
message = 'Empty ' + name + ' parameter'
else:
message = 'Invalid value "' + value + '" for parameter ' + name
super(SearxParameterException, self).__init__(message)
self.parameter_name = name
self.parameter_value = value
| agpl-3.0 | Python |
a230bb1b2f1c96c7f9764ee2bf759ea9fe39e801 | add populations tests | timothydmorton/isochrones,timothydmorton/isochrones | isochrones/tests/test_populations.py | isochrones/tests/test_populations.py | import unittest
from pandas.testing import assert_frame_equal
from scipy.stats import uniform, norm
from isochrones import get_ichrone
from isochrones.priors import ChabrierPrior, FehPrior, GaussianPrior, SalpeterPrior, DistancePrior, AVPrior
from isochrones.populations import StarFormationHistory, StarPopulation, BinaryDistribution, deredden
class PopulationTest(unittest.TestCase):
def setUp(self):
mist = get_ichrone("mist")
sfh = StarFormationHistory() # Constant SFR for 10 Gyr; or, e.g., dist=norm(3, 0.2)
imf = SalpeterPrior(bounds=(0.4, 10)) # bounds on solar masses
binaries = BinaryDistribution(fB=0.4, gamma=0.3)
feh = GaussianPrior(-0.2, 0.2)
distance = DistancePrior(max_distance=3000) # pc
AV = AVPrior(bounds=[0, 2])
pop = StarPopulation(
mist, sfh=sfh, imf=imf, feh=feh, distance=distance, binary_distribution=binaries, AV=AV
)
self.pop = pop
self.mist = mist
self.df = pop.generate(1000)
self.dereddened_df = deredden(mist, self.df)
def test_mags(self):
"""Check no total mags are null
"""
mags = [f"{b}_mag" for b in self.mist.bands]
assert self.df[mags].isnull().sum().sum() == 0
def test_dereddening(self):
"""Check mass, age, feh the same when dereddened
"""
cols = ["initial_mass", "initial_feh", "requested_age"]
assert_frame_equal(self.df[cols], self.dereddened_df[cols])
# Check de-reddening vis-a-vis A_x
for b in self.mist.bands:
diff = (self.dereddened_df[f"{b}_mag"] + self.df[f"A_{b}"]) - self.df[f"{b}_mag"]
is_binary = self.df.mass_B > 0
assert diff.loc[~is_binary].std() < 0.0001
| mit | Python |
|
06df583e3821470856852b10f0703fccce81e2d6 | Add planex-pin command | djs55/planex,djs55/planex,djs55/planex | planex/pin.py | planex/pin.py | """
planex-pin: Generate a new override spec file for a given package
"""
import argparse
import os
import sys
import re
import logging
from planex.util import run
def describe(repo, treeish="HEAD"):
dotgitdir = os.path.join(repo, ".git")
if not os.path.exists(dotgitdir):
raise Exception("Pin target is not a git repository: '%s'" % repo)
# First, get the hash of the commit
cmd = ["git", "--git-dir=%s" % dotgitdir, "rev-parse", treeish]
sha = run(cmd)['stdout'].strip()
# Now lets describe that hash
cmd = ["git", "--git-dir=%s" % dotgitdir, "describe", "--tags", sha]
description = run(cmd, check=False)['stdout'].strip()
# if there are no tags, use the number of commits
if description == "":
cmd = ["git", "--git-dir=%s" % dotgitdir, "log", "--oneline", sha]
commits = run(cmd)['stdout'].strip()
description = str(len(commits.splitlines()))
# replace '-' with '+' in description to not confuse rpm
match = re.search("[^0-9]*", description)
matchlen = len(match.group())
return description[matchlen:].replace('-', '+')
def archive(repo, commit_hash, pin_version, target_dir):
dotgitdir = os.path.join(repo, ".git")
prefix = "%s-%s" % (os.path.basename(repo), pin_version)
path = os.path.join(target_dir, "%s.tar" % prefix)
run(["git", "--git-dir=%s" % dotgitdir, "archive", commit_hash,
"--prefix=%s/" % prefix, "-o", path])
run(["gzip", "--no-name", "-f", path])
return path + ".gz"
def pinned_spec_of_spec(spec_path, pin_version, source_path):
spec_in = open(spec_path)
spec_contents = spec_in.readlines()
spec_in.close()
source_url = "file://" + os.path.abspath(source_path)
pinned_spec = []
for line in spec_contents:
# replace the source url
match = re.match(r'^([Ss]ource\d*:\s+)(.+)\n', line)
if match:
line = match.group(1) + source_url + "\n"
# replace the use of the version macro in the spec contents
line = line.replace("%{version}", pin_version)
pinned_spec.append(line)
return "".join(pinned_spec)
def parse_args_or_exit(argv=None):
"""
Parse command line options
"""
parser = argparse.ArgumentParser(
description='Pin a package to a specific version')
parser.add_argument('spec', help='RPM Spec file')
parser.add_argument('pin', help='Specific version, local path or git url')
parser.add_argument('output_dir', help='Path to write output spec file')
parser.add_argument('--remove', '-r', help='Remove pin for this package',
action='store_true')
parser.add_argument('--verbose', '-v', help='Be verbose',
action='store_true')
return parser.parse_args(argv)
def main(argv):
"""
Main function
"""
args = parse_args_or_exit(argv)
if args.verbose:
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
if os.path.exists(args.output_dir):
if not os.path.isdir(args.output_dir):
raise Exception(
"Output directory exists and is not a directory: '%s'" %
args.output_dir)
else:
os.makedirs(args.output_dir)
# we're assuming for now that the target is a git repository
repo, _, hash = args.pin.partition('#')
pin_version = describe(repo, hash) if hash else describe(repo)
source_path = archive(repo, hash, pin_version, args.output_dir)
spec_filename = os.path.basename(args.spec)
output_spec_path = os.path.join(args.output_dir, spec_filename)
with open(output_spec_path, 'w') as f:
f.write(pinned_spec_of_spec(args.spec, pin_version, source_path))
def _main():
"""
Entry point for setuptools CLI wrapper
"""
main(sys.argv[1:])
# Entry point when run directly
if __name__ == "__main__":
_main()
| lgpl-2.1 | Python |
|
f982cd78ae79f77c2ca59440de20de37002d6658 | Add a pcakge: libzip. (#3656) | iulian787/spack,skosukhin/spack,matthiasdiener/spack,skosukhin/spack,skosukhin/spack,EmreAtes/spack,krafczyk/spack,LLNL/spack,krafczyk/spack,lgarren/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,tmerrick1/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,tmerrick1/spack,TheTimmy/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,matthiasdiener/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,lgarren/spack,lgarren/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack,lgarren/spack,iulian787/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,mfherbst/spack,matthiasdiener/spack,TheTimmy/spack | var/spack/repos/builtin/packages/libzip/package.py | var/spack/repos/builtin/packages/libzip/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libzip(AutotoolsPackage):
"""libzip is a C library for reading, creating,
and modifying zip archives."""
homepage = "https://nih.at/libzip/index.html"
url = "https://nih.at/libzip/libzip-1.2.0.tar.gz"
version('1.2.0', '5c3372ab3a7897295bfefb27f745cf69')
| lgpl-2.1 | Python |
|
9877c21c502b27460f70e6687ed3fd6a2d3fd0d5 | add new package at v8.3.0 (#27446) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/racket/package.py | var/spack/repos/builtin/packages/racket/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Racket(Package):
"""The Racket programming language."""
homepage = "https://www.racket-lang.org"
url = "https://download.racket-lang.org/releases/8.3/installers/racket-src.tgz"
maintainers = ['arjunguha']
version('8.3.0', 'c4af1a10b957e5fa0daac2b5ad785cda79805f76d11482f550626fa68f07b949')
depends_on('libffi', type=('build', 'link', 'run'))
depends_on('patchutils')
phases = ['configure', 'build', 'install']
def configure(self, spec, prefix):
with working_dir('src'):
configure = Executable('./configure')
configure("--prefix", prefix)
def build(self, spec, prefix):
with working_dir('src'):
make()
def install(self, spec, prefix):
with working_dir('src'):
make('install')
| lgpl-2.1 | Python |
|
bd49a4c82e011d7c5025abc15324220b1496f8c8 | add deepspeech.py to support DeepSpeech | peitaosu/Diplomatist | deepspeech.py | deepspeech.py | import subprocess
class DeepSpeechRecognizer():
def __init__(self, model=None, alphabet=None, lm=None, trie=None):
self.model = model
self.alphabet = alphabet
self.lm = lm
self.trie = trie
def recognize(self, audio_file):
"""recognize audio file
args:
audio_file (str)
return:
result (str/None)
"""
output = subprocess.getoutput("deepspeech --model {} --alphabet {} --lm {} --trie {} --audio {}".format(self.model, self.alphabet, self.lm, self.trie, audio_file))
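# deepspeech prints timing information first; the transcript is the line
# immediately after the 'Inference took ...' line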
for index, line in enumerate(output.split("\n")):
if line.startswith("Inference took "):
return output.split("\n")[index + 1]
return None
if __name__=="__main__":
recognizer = DeepSpeechRecognizer(r"models/output_graph.pbmm", r"models/alphabet.txt", r"models/lm.binary", r"models/trie")
result = recognizer.recognize("audio/8455-210777-0068.wav")
print(result)
| mit | Python |
|
ca09dc0b9d555f10aafb17380a9a8592727d0a0f | Add dp/SPOJ-ROCK.py | ankeshanand/interview-prep,ankeshanand/interview-prep | dp/SPOJ-ROCK.py | dp/SPOJ-ROCK.py | def compute_zero_counts(rock_desc):
zero_counts = [0 for i in xrange(N+1)]
for i in xrange(1, N+1):
zero_counts[i] = zero_counts[i-1]
if rock_desc[i-1] == '0':
zero_counts[i] += 1
return zero_counts
def score(zero_counts, start, end):
length = end - start + 1
zeroes = zero_counts[end] - zero_counts[start-1]
ones = length - zeroes
if ones > zeroes:
return length
return 0
t = int(raw_input())
for case in xrange(t):
N = int(raw_input())
rock_desc = raw_input()
zero_counts = compute_zero_counts(rock_desc)
dp = [0 for i in xrange(N+1)]
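# dp[i] = best total length of eaten pieces within the first i centimetres,
# trying every position j for the last cut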
for i in xrange(1,N+1):
for j in xrange(0,i):
dp[i] = max(dp[i], dp[j] + score(zero_counts, j+1, i))
print dp[N]
| mit | Python |
|
50d05aabc2eb1d5bcb20d457dd05d2882b983afa | Add installation script for profiler. | tensorflow/profiler,tensorflow/profiler,tensorflow/profiler,tensorflow/profiler,tensorflow/profiler | install_and_run.py | install_and_run.py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Install and run the TensorBoard plugin for performance analysis.
Usage: python3 install_and_run.py --envdir ENVDIR --logdir LOGDIR
"""
# Lint as: python3
import argparse
import os
import subprocess
def run(*args):
"""Runs a shell command."""
subprocess.run(' '.join(args), shell=True, check=True)
class VirtualEnv(object):
"""Creates and runs programs in a virtual environment."""
def __init__(self, envdir):
self.envdir = envdir
run('virtualenv', '--system-site-packages', '-p', 'python3', self.envdir)
def run(self, program, *args):
run(os.path.join(self.envdir, 'bin', program), *args)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--envdir', help='Virtual environment', required=True)
parser.add_argument('--logdir', help='TensorBoard logdir', required=True)
args = parser.parse_args()
venv = VirtualEnv(args.envdir)
venv.run('pip3', 'uninstall', '-q', '-y', 'tensorboard')
venv.run('pip3', 'uninstall', '-q', '-y', 'tensorflow')
venv.run('pip3', 'install', '-q', '-U', 'tf-nightly')
venv.run('pip3', 'install', '-q', '-U', 'tb-nightly')
venv.run('pip3', 'install', '-q', '-U', 'tensorboard_plugin_profile')
venv.run('tensorboard', '--logdir=' + args.logdir, '--bind_all')
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
734967196c8f0577b218802c16d9eab31c9e9054 | Add problem 36, palindrome binaries | dimkarakostas/project-euler | problem_36.py | problem_36.py | from time import time
def is_palindrome(s):
for idx in range(len(s)/2):
if s[idx] != s[-1*idx - 1]:
return False
return True
def main():
palindrom_nums = [num for num in range(int(1e6)) if is_palindrome(str(num)) and is_palindrome(bin(num)[2:])]
print 'Palindroms:', palindrom_nums
print 'Palindrom sum:', sum(palindrom_nums)
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
| mit | Python |
|
ad9a9df8e144c41456aeded591081a3a339853f3 | Create RLU_forward_backward.py | rupertsmall/machine-learning,rupertsmall/machine-learning | Neural-Networks/RLU_forward_backward.py | Neural-Networks/RLU_forward_backward.py |
from numpy import *
from RLU_neural_forward import *
from RLU_back_propagation import *
def forwardBackward(xi, x, y, MT, time_queue, good_queue, DELTA_queue):
A = neural_forward(xi, x, MT)
check = argmax(A[-xi[-1]:])
# send back some progress statistic
if y[check]-1 == 0:
good = good_queue.get()
good += 1
good_queue.put(good)
good_queue.task_done()
time = time_queue.get()
time += 1
time_queue.put(time)
time_queue.task_done()
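# accumulate this example's back-propagated gradient into the shared DELTA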
DELTA = DELTA_queue.get()
DELTA = DELTA + back_propagation(y, A, MT, xi)
DELTA_queue.put(DELTA)
DELTA_queue.task_done()
| mit | Python |
|
1ecb4a0711304af13f41ae1aae67792057783334 | Create ScaperUtils.py | Soncrates/stock-study,Soncrates/stock-study | data/ScaperUtils.py | data/ScaperUtils.py | class ScraperUtil (object) :
class Base :
def __init__(self,data_get,data_parse, data_formatter=None) :
self.get_data = data_get
self.parse_data = data_parse
self.data_formatter = data_formatter
class Yahoo(Base) :
def __init__(self,data_get,data_format,data_parse) :
ScraperUtil.Base.__init__( self,data_get,data_parse,data_format)
def __call__(self,symbol) :
ret = self.get_data(symbol)
if self.data_formatter is not None :
ret = self.data_formatter(ret)
for token in self.parse_data(ret) :
yield token
class Nasdaq(Base) :
def __init__(self,data_get,data_parse,data_formatter,exchange_list=None,unwanted_keys_list=None) :
ScraperUtil.Base.__init__( self,data_get,data_parse,data_formatter)
self.exchanges=["nyse", "nasdaq"]
self.unwanted_keys=['Summary Quote','MarketCap','LastSale','IPOyear','Unnamed: 8']
if exchange_list is not None : self.exchanges = exchange_list
if unwanted_keys_list is not None : self.unwanted_keys = unwanted_keys_list
def __call__(self,exchange_list=None,unwanted_keys_list=None) :
exchanges = self.exchanges
unwanted_keys = self.unwanted_keys
if exchange_list is not None : exchanges = exchange_list
if unwanted_keys_list is not None : unwanted_keys = unwanted_keys_list
ret = None
for exchange in exchanges :
if ret is None : ret = self.get_data(exchange)
else : ret = b"".join([ret, self.get_data(exchange)])
ret = self.parse_data(ret)
if self.data_formatter is not None :
ret = self.data_formatter(ret,unwanted_keys,exchange)
return ret.reindex()
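# NOTE: TimeUtil.ExpireTimer, get_year_parameters and get_yahoo_historical are
# assumed to be provided by the surrounding project; they are not defined or
# imported in this module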
class NasdaqService() :
def __init__(self,service) :
self.service = service
self.fresh = None
self.cache = None
def __call__(self) :
if self.cache is None or not self.fresh():
self.cache = self.service()
self.fresh = TimeUtil.ExpireTimer(24*60)
return self.cache
class StockService() :
def __init__(self) :
self.fresh = {}
self.cache = {}
def __call__(self,stock) :
if stock not in self.cache.keys() or not self.fresh[stock]():
y1,y2,r = get_year_parameters()
self.cache[stock] = get_yahoo_historical(stock,y1)
self.fresh[stock] = TimeUtil.ExpireTimer(24*60)
return self.cache[stock]
| lgpl-2.1 | Python |
|
6d33ed73adeea4808ed4b3b9bd8642ad83910dfc | add ridgeline example (#1519) | jakevdp/altair,altair-viz/altair | altair/examples/ridgeline_plot.py | altair/examples/ridgeline_plot.py | """
Ridgeline plot (Joyplot) Example
--------------------------------
A `Ridgeline plot <https://serialmentor.com/blog/2017/9/15/goodbye-joyplots>`_
chart is a chart that lets you visualize distribution of a numeric value for
several groups.
Such a chart can be created in Altair by first transforming the data into a
suitable representation.
"""
# category: other charts
import altair as alt
from vega_datasets import data
source = data.seattle_weather.url
step = 20
overlap = 1
ridgeline = alt.Chart(source).transform_timeunit(
Month='month(date)'
).transform_joinaggregate(
mean_temp='mean(temp_max)', groupby=['Month']
).transform_bin(
['bin_max', 'bin_min'], 'temp_max'
).transform_aggregate(
value='count()', groupby=['Month', 'mean_temp', 'bin_min', 'bin_max']
).transform_impute(
impute='value', groupby=['Month', 'mean_temp'], key='bin_min', value=0
).mark_line(
interpolate='monotone',
fillOpacity=0.8,
stroke='lightgray',
strokeWidth=0.5
).encode(
alt.X('bin_min:Q', bin='binned', title='Maximum Daily Temperature (C)'),
alt.Y(
'value:Q',
scale=alt.Scale(range=[step, -step * overlap]),
axis=None
),
alt.Fill(
'mean_temp:Q',
legend=None,
scale=alt.Scale(domain=[30, 5], scheme='redyellowblue')
),
alt.Row(
'Month:T',
title=None,
header=alt.Header(labelAngle=0, labelAlign='right', format='%B')
)
).properties(
bounds='flush', title='Seattle Weather', height=step
).configure_facet(
spacing=0
).configure_view(
stroke=None
).configure_title(
anchor='end'
)
ridgeline
| bsd-3-clause | Python |
|
dfe65e6839a4347c7acfc011f052db6ec4ee1d9d | test Task | xassbit/zorn,xassbit/zorn,xassbit/zorn | tests/unit/test_task.py | tests/unit/test_task.py | import sys
from zorn import tasks
from io import StringIO
def test_task():
task = tasks.Task()
assert task.verbosity == 1
def test_parse_verbosity_standard():
silent = False
verbose = False
verbosity = tasks.Task.parse_verbosity(verbose, silent)
assert verbosity == 1
def test_parse_verbosity_silent():
silent = True
verbose = False
verbosity = tasks.Task.parse_verbosity(verbose, silent)
assert verbosity == 0
silent = True
verbose = True
verbosity = tasks.Task.parse_verbosity(verbose, silent)
assert verbosity == 0
def test_parse_verbosity_verbose():
silent = False
verbose = True
verbosity = tasks.Task.parse_verbosity(verbose, silent)
assert verbosity == 2
def test_comunicate_standard_verbosity():
task = tasks.Task(1)
stdout_ = sys.stdout
stream = StringIO()
sys.stdout = stream
task.communicate('standard')
task.communicate('verbose', False)
sys.stdout = stdout_
assert stream.getvalue() == 'standard\n'
def test_comunicate_silent():
task = tasks.Task(0)
stdout_ = sys.stdout
stream = StringIO()
sys.stdout = stream
task.communicate('standard')
task.communicate('verbose', False)
sys.stdout = stdout_
assert stream.getvalue() == ''
def test_comunicate_verbose():
task = tasks.Task(2)
stdout_ = sys.stdout
stream = StringIO()
sys.stdout = stream
task.communicate('standard')
task.communicate('verbose', False)
sys.stdout = stdout_
assert stream.getvalue() == 'standard\nverbose\n'
| mit | Python |
|
02156d3e9140b7f8f61b79816891ede2fff2cc49 | rename models to properties | noperative/whalehelpbot | properties.py | properties.py | import ConfigParser
import os
import sys
subreddit = 'taigeilove'
user_agent = 'Python:whalehelpbot:v1.0 (by /u/Noperative)'
general_words = []
first_time_words = []
expedition_words = []
quest_words = []
| mit | Python |
|
e74c3273f840afbca25936083abdfb6577b4fdd0 | Devuelve lista de etiquetas y atributos | celiagarcia/ptavi-p3 | smallsmilhandler.py | smallsmilhandler.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#CELIA GARCIA FERNANDEz
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
class SmallSMILHandler(ContentHandler):
def __init__(self):
self.lista = []
self.etiquetas = ['root-layout', 'region', 'img', 'audio', 'textstream']
self.attributosD = {
'root-layout': ['width', 'height'],
'region': ['id','top','left'],
'img': ['scr','region','begin','dur'],
'audio': ['src','begin','dur'],
'textstream': ['src', 'region']
}
def startElement(self, name, attrs):
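# collect one dict per known tag, filling each expected attribute (missing ones default to '')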
diccionario = {}
if name in self.etiquetas:
diccionario["name"] = name
for key in self.attributosD[name]:
diccionario[key] = attrs.get(key, "")
self.lista.append(diccionario)
def get_tags(self):
return self.lista
if __name__ == "__main__":
parser = make_parser()
small = SmallSMILHandler()
parser.setContentHandler(small)
parser.parse(open('karaoke.smil'))
print small.get_tags()
| mit | Python |
|
024e7fe473a19a16b7e34203aef2841af7a3aad4 | add markreads script | MischaLundberg/bamsurgeon,adamewing/bamsurgeon,adamewing/bamsurgeon,MischaLundberg/bamsurgeon | etc/markreads.py | etc/markreads.py | #!/usr/bin/env python
import pysam
import sys
def markreads(bamfn, outfn):
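# append a BS:1 tag to every read so reads written by bamsurgeon can be identified downstream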
bam = pysam.AlignmentFile(bamfn, 'rb')
out = pysam.AlignmentFile(outfn, 'wb', template=bam)
for read in bam.fetch(until_eof=True):
tags = read.tags
tags.append(('BS',1))
read.tags = tags
out.write(read)
if len(sys.argv) == 3:
markreads(*sys.argv[1:])
else:
print 'usage:', sys.argv[0], '<input BAM> <output BAM>'
| mit | Python |
|
b86ad075f690718e528364bedce891a3a4debdaf | Add a basic example API | Gentux/imap-cli,Gentux/imap-cli | examples/api.py | examples/api.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Simple REST API for Imap-CLI."""
import copy
import json
import logging
import re
from wsgiref import simple_server
from webob.dec import wsgify
from webob.exc import status_map
import imap_cli
from imap_cli import config
from imap_cli import const
from imap_cli import fetch
from imap_cli import search
ctx = config.new_context_from_file()
log = logging.getLogger('Imap-CLI API')
@wsgify
def read_controller(req):
params = req.params
inputs = {
'directory': params.get('directory') or const.DEFAULT_DIRECTORY,
'uid': req.urlvars.get('uid'),
}
if inputs['uid'] is None:
return 'You need to specify an UID'
imap_cli.change_dir(ctx, inputs['directory'] or const.DEFAULT_DIRECTORY)
fetched_mail = fetch.read(ctx, inputs['uid'])
if fetched_mail is None:
return 'Mail was not fetched, an error occured'
return_json = copy.deepcopy(fetched_mail)
for part in return_json['parts']:
if not part['content_type'].startswith('text'):
del part['data']
return json.dumps(return_json, indent=2)
@wsgify
def search_controller(req):
params = req.params
inputs = {
'directory': params.get('directory') or const.DEFAULT_DIRECTORY,
'tags': params.getall('tag') or None,
'text': params.get('text') or None,
}
search_criterion = search.prepare_search(
ctx,
directory=inputs['directory'],
tags=inputs['tags'],
text=inputs['text'],
)
mail_set = search.fetch_uids(ctx, search_criterion=search_criterion or [])
mails_info = list(
search.fetch_mails_info(ctx, directory=inputs['directory'], mail_set=mail_set)
)
return json.dumps(mails_info, indent=2)
@wsgify
def status_controller(req):
return json.dumps(list(imap_cli.status(ctx)), indent=2,)
routings = [
('GET', '^/v1/status.json$', status_controller),
('GET', '^/v1/list/?$', search_controller),
('GET', '^/v1/search/?$', search_controller),
('GET', '^/v1/read/(?P<uid>.+)?$', read_controller),
]
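# each route is (HTTP methods, path regex, controller) with optional extra urlvars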
@wsgify
def router(req):
"""Dispatch request to controllers."""
split_path_info = req.path_info.split('/')
assert not split_path_info[0], split_path_info
for methods, regex, app, vars in routes:
if methods is None or req.method in methods:
match = regex.match(req.path_info)
if match is not None:
if getattr(req, 'urlvars', None) is None:
req.urlvars = {}
req.urlvars.update(dict(
(name, value.decode('utf-8') if value is not None else None)
for name, value in match.groupdict().iteritems()
))
req.urlvars.update(vars)
req.script_name += req.path_info[:match.end()]
req.path_info = req.path_info[match.end():]
return req.get_response(app)
return status_map[404]()
if __name__ == '__main__':
routes = []
for routing in routings:
methods, regex, app = routing[:3]
if isinstance(methods, basestring):
methods = (methods,)
vars = routing[3] if len(routing) >= 4 else {}
routes.append((methods, re.compile(regex), app, vars))
imap_cli.connect(ctx)
httpd = simple_server.make_server('127.0.0.1', 8000, router)
httpd.serve_forever()
| mit | Python |
|
0bd69e17d75cf1ecaa53153fd07abf2e139f57b7 | add function0-input.py | chaonet/iDoulist,Frank-the-Obscure/iDoulist | input/function0-input.py | input/function0-input.py | # -*- coding: utf-8 -*-
# Author Frank Hu
# iDoulist Function 0 - input
import urllib2
response = urllib2.urlopen("http://www.douban.com/doulist/38390646/")
print response.read()
| mit | Python |
|
70d5b47a66d883187574c409ac08ece24277d292 | Add the test.py example that is cited in the cytomine.org documentation | cytomine/Cytomine-python-client,cytomine/Cytomine-python-client | examples/test.py | examples/test.py | # -*- coding: utf-8 -*-
# * Copyright (c) 2009-2020. Authors: see NOTICE file.
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# * This script is just a simple example used to verify if the
# * Cytomine Python Client is correctly installed.
# * Using a correct Cytomine instance URL, and keys of a user, it will just fetch his username.
import sys
from argparse import ArgumentParser
__author__ = "Renaud Hoyoux <[email protected]>"
if __name__ == '__main__':
from cytomine import Cytomine
from cytomine.models.user import *
parser = ArgumentParser(prog="Cytomine Python client example")
# Cytomine
parser.add_argument('--cytomine_host', dest='host', default='demo.cytomine.be', help="The Cytomine host")
parser.add_argument('--cytomine_public_key', dest='public_key', help="The Cytomine public key")
parser.add_argument('--cytomine_private_key', dest='private_key', help="The Cytomine private key")
params, other = parser.parse_known_args(sys.argv[1:])
with Cytomine(host=params.host, public_key=params.public_key, private_key=params.private_key) as cytomine:
# Get the connected user
user = CurrentUser().fetch()
print(user)
| apache-2.0 | Python |
|
1c2bde23ffc6188fe839b36011775663f86c8919 | Create config.py | sculove/xing-plus | config.py | config.py | # -*- coding: utf-8 -*-
import configparser
class Config:
_cp = None
def load():
Config._cp = configparser.ConfigParser()
Config._cp.read("config.ini")
for category in Config._cp.sections():
temp = {}
for op in Config._cp.options(category):
temp[op] = Config._cp[category][op]
setattr(Config, category, temp)
Config.load()
| mit | Python |
|
0712d78cf76c1d3f699317fcc64db3fe60dc6266 | Add utility functions for generating documentation | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks | docs/utils.py | docs/utils.py | def cleanup_docstring(docstring):
doc = ""
stripped = [line.strip() for line in docstring.split("\n")]
doc += '\n'.join(stripped)
return doc
| agpl-3.0 | Python |
|
a36e013e9b1d7133ed98cb2f087f3cb3dc53de69 | Add 5-2 to working dictionary. | fx2003/tensorflow-study,fx2003/tensorflow-study,fx2003/tensorflow-study,fx2003/tensorflow-study,fx2003/tensorflow-study,fx2003/tensorflow-study | models/tutorials/image/cifar10/5-2cnn_advance.py | models/tutorials/image/cifar10/5-2cnn_advance.py | import cifar10, cifar10_input
import tensorflow as tf
import numpy as np
import time
max_steps = 3000
batch_size = 128
data_dir = '/tmp/cifar10_data/cifar-10-batches-bin'
def variable_with_weight_loss(shape, stddev, wl):
var = tf.Variable(tf.truncated_normal(shape, stddev = stddev))
if wl is not None:
weight_loss = tf.multiply(tf.nn.l2_loss(var), wl, name = 'weight_loss')
tf.add_to_collection('losses', weight_loss)
return var
cifar10.maybe_download_and_extract()
images_train, labels_train = cifar10_input.distorted_inputs(data_dir = data_dir, batch_size = batch_size)
images_test, labels_test = cifar10_input.inputs(eval_data = True, data_dir = data_dir, batch_size = batch_size)
image_holder = tf.placeholder(tf.float32, [batch_size, 24, 24, 3])
label_holder = tf.placeholder(tf.int32, [batch_size])
weight1 = variable_with_weight_loss(shape = [5, 5, 3, 64], stddev = 5e-2, wl = 0.0)
kernel1 = tf.nn.conv2d(image_holder, weight1, [1, 1, 1, 1], padding = 'SAME')
bias1 = tf.Variable(tf.constant(0.0, shape = [64]))
conv1 = tf.nn.relu(tf.nn.bias_add(kernel1, bias1))
pool1 = tf.nn.max_pool(conv1, ksize = [1, 3, 3, 1], strides = [1, 2, 2, 1], padding = 'SAME')
norm1 = tf.nn.lrn(pool1, 4, bias = 1.0, alpha = 0.001 / 9.0, beta = 0.75)
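# note: the first block pools before LRN while the second applies LRN before
# pooling, mirroring the TensorFlow CIFAR-10 tutorial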
weight2 = variable_with_weight_loss(shape = [5, 5, 64, 64], stddev = 5e-2, wl = 0.0)
kernel2 = tf.nn.conv2d(norm1, weight2, [1, 1, 1, 1], padding = 'SAME')
bias2 = tf.Variable(tf.constant(0.1, shape = [64]))
conv2 = tf.nn.relu(tf.nn.bias_add(kernel2, bias2))
norm2 = tf.nn.lrn(conv2, 4, bias = 1.0, alpha = 0.001 / 9.0, beta = 0.75)
pool2 = tf.nn.max_pool(norm2, ksize = [1, 3, 3, 1], strides = [1, 2, 2, 1], padding = 'SAME')
reshape = tf.reshape(pool2, [batch_size, -1])
dim = reshape.get_shape()[1].value
weight3 = variable_with_weight_loss(shape = [dim, 384], stddev = 0.04, wl = 0.004)
bias3 = tf.Variable(tf.constant(0.1, shape = [384]))
local3 = tf.nn.relu(tf.matmul(reshape, weight3) + bias3)
weight4 = variable_with_weight_loss(shape = [384, 192], stddev = 0.04, wl = 0.004)
bias4 = tf.Variable(tf.constant(0.1, shape = [192]))
local4 = tf.nn.relu(tf.matmul(local3, weight4) + bias4)
weight5 = variable_with_weight_loss(shape = [192, 10], stddev = 1 / 192.0, wl = 0.0)
bias5 = tf.Variable(tf.constant(0.0, shape = [10]))
logits = tf.add(tf.matmul(local4, weight5), bias5)
def loss(logits, labels):
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits = logits, labels = labels, name = 'cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name = 'cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
return tf.add_n(tf.get_collection('losses'), name = 'total_loss')
loss = loss(logits, label_holder)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
top_k_op = tf.nn.in_top_k(logits, label_holder, 1)
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
tf.train.start_queue_runners()
for step in range(max_steps):
start_time = time.time()
image_batch, label_batch = sess.run([images_train, labels_train])
_, loss_value = sess.run([train_op, loss], feed_dict = {image_holder: image_batch, label_holder: label_batch})
duration = time.time() - start_time
if step % 10 == 0:
examples_per_sec = batch_size / duration
sec_per_batch = float(duration)
format_str = ('step %d, loss = %.2f (%.1f examples / sec; %.3f sec / batch)')
print(format_str % (step, loss_value, examples_per_sec, sec_per_batch))
num_examples = 10000
import math
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0
total_sample_count = num_iter * batch_size
step = 0
while step < num_iter:
image_batch, label_batch = sess.run([images_test, labels_test])
predictions = sess.run([top_k_op], feed_dict = {image_holder: image_batch, label_holder: label_batch})
true_count += np.sum(predictions)
step += 1
precision = true_count / total_sample_count
print('precision @ 1 = %.3f' % precision)
| mit | Python |
|
24bb92edc18ea65166873fa41cd8db3ed6d62b5d | Add tests for forms | TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio | td_biblio/tests/test_forms.py | td_biblio/tests/test_forms.py | from django.core.exceptions import ValidationError
from django.test import TestCase
from ..forms import text_to_list, EntryBatchImportForm
def test_text_to_list():
"""Test text_to_list utils"""
inputs = [
'foo,bar,lol',
'foo , bar, lol',
'foo\nbar\nlol',
'foo,\nbar,\nlol',
'foo, \nbar,lol',
'foo,,bar,\nlol',
]
expected = ['bar', 'foo', 'lol']
for input in inputs:
result = text_to_list(input)
result.sort()
assert result == expected
class EntryBatchImportFormTests(TestCase):
"""
Tests for the EntryBatchImportForm
"""
def test_clean_pmids(self):
"""Test PMIDs cleaning method"""
inputs = [
{'pmids': '26588162\n19569182'},
{'pmids': '19569182\n26588162'},
{'pmids': '19569182,\n26588162'},
{'pmids': '19569182,26588162'},
{'pmids': '19569182,,26588162'},
{'pmids': '19569182\n\n26588162'},
]
expected = ['19569182', '26588162']
for input in inputs:
form = EntryBatchImportForm(input)
assert form.is_valid()
pmids = form.cleaned_data['pmids']
pmids.sort()
assert pmids == expected
def test_clean_pmids_with_random_input(self):
"""Test PMIDs cleaning method with non PMIDs"""
inputs = [
{'pmids': 'lorem, ipsum'},
{'pmids': 'lorem, 19569182'},
{'pmids': 'lorem42\nipsum234'},
]
for input in inputs:
form = EntryBatchImportForm(input)
self.assertFalse(form.is_valid())
def test_clean_dois(self):
"""Test DOIs cleaning method"""
inputs = [
{'dois': '10.1093/nar/gks419\n10.1093/nar/gkp323'},
{'dois': '10.1093/nar/gkp323\n10.1093/nar/gks419'},
{'dois': '10.1093/nar/gkp323,\n10.1093/nar/gks419'},
{'dois': '10.1093/nar/gkp323,10.1093/nar/gks419'},
{'dois': '10.1093/nar/gkp323,,10.1093/nar/gks419'},
{'dois': '10.1093/nar/gkp323\n\n10.1093/nar/gks419'},
]
expected = ['10.1093/nar/gkp323', '10.1093/nar/gks419']
for input in inputs:
form = EntryBatchImportForm(input)
assert form.is_valid()
dois = form.cleaned_data['dois']
dois.sort()
assert dois == expected
def test_clean_dois_with_random_input(self):
"""Test DOIs cleaning method with non DOIs"""
inputs = [
{'dois': 'lorem, ipsum'},
{'dois': 'lorem, 19569182'},
{'dois': 'lorem42\nipsum234'},
]
for input in inputs:
form = EntryBatchImportForm(input)
self.assertFalse(form.is_valid())
| mit | Python |
|
1bf634bd24d94a7d7ff358cea3215bba5b59d014 | Create power_of_two.py in bit manipulation | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | bit_manipulation/power_of_two/python/power_of_two.py | bit_manipulation/power_of_two/python/power_of_two.py | # Check if given number is power of 2 or not
# Function to check if x is power of 2
def isPowerOfTwo (x):
# First x in the below expression is for the case when x is 0
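# x & (x - 1) clears the lowest set bit, so the AND is zero exactly when x has a single bit set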
return (x and (not(x & (x - 1))) )
# Driver code
x = int(input("Enter a no:"))
if(isPowerOfTwo(x)):
print('Yes')
else:
print('No')
| cc0-1.0 | Python |
|
8fddde260af6ea1e6de8491dd99dca671634327c | Add test for the matrix representation function. | odlgroup/odl,odlgroup/odl,kohr-h/odl,kohr-h/odl,aringh/odl,aringh/odl | test/operator/utility_test.py | test/operator/utility_test.py | # Copyright 2014, 2015 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import str, super
# External module imports
import pytest
import numpy as np
# ODL imports
import odl
from odl.operator.utility import matrix_representation
from odl.util.testutils import almost_equal
class MultiplyOp(odl.Operator):
"""Multiply with matrix.
"""
def __init__(self, matrix, domain=None, range=None):
domain = (odl.Rn(matrix.shape[1])
if domain is None else domain)
range = (odl.Rn(matrix.shape[0])
if range is None else range)
self.matrix = matrix
super().__init__(domain, range, linear=True)
def _apply(self, rhs, out):
np.dot(self.matrix, rhs.data, out=out.data)
@property
def adjoint(self):
return MultiplyOp(self.matrix.T, self.range, self.domain)
def test_matrix_representation():
# Verify that the matrix representation function returns the correct matrix
A = np.random.rand(3, 3)
Aop = MultiplyOp(A)
the_matrix = matrix_representation(Aop)
assert np.sum(np.abs(A - the_matrix)) < 1e-6
if __name__ == '__main__':
pytest.main(str(__file__.replace('\\', '/')) + ' -v')
| mpl-2.0 | Python |
|
8b92e55fa202723f7859cd1ea22e835e5c693807 | Add some time handling functions | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org | Instanssi/kompomaatti/misc/awesometime.py | Instanssi/kompomaatti/misc/awesometime.py | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta


def todayhelper():
    today = datetime.today()
    return datetime(day=today.day, year=today.year, month=today.month)


def format_single_helper(t):
    now = datetime.now()
    today = todayhelper()
    tomorrow = today + timedelta(days=1)
    the_day_after_tomorrow = today + timedelta(days=2)  # Must honor the movie!

    if t < now:
        return "päättynyt"
    elif t >= now and t < tomorrow:
        return "tänään klo. " + t.strftime("%H:%M")
    elif t >= tomorrow and t < the_day_after_tomorrow:
        return "huomenna klo. " + t.strftime("%H:%M")
    elif t >= the_day_after_tomorrow and t < today + timedelta(days=3):
        return "ylihuomenna klo. " + t.strftime("%H:%M")
    else:
        return t.strftime("%d.%m.%Y klo. %H:%M")


def format_single(t):
    return format_single_helper(t).capitalize()


def format_between(t1, t2):
    now = datetime.now()
    today = todayhelper()
    tomorrow = today + timedelta(days=1)
    the_day_after_tomorrow = today + timedelta(days=2)  # Must honor the movie!

    if t1 < now and t2 > now:
        left = t2 - now
        l_hours = int(left.total_seconds() / timedelta(hours=1).total_seconds())
        l_minutes = int((left.total_seconds() - timedelta(hours=l_hours).total_seconds()) / 60)
        if l_hours == 0:
            return "Menossa, aikaa jäljellä " + str(l_minutes) + " minuuttia"
        else:
            return "Menossa, aikaa jäljellä " + str(l_hours) + " tuntia ja " + str(l_minutes) + " minuuttia"
    elif t1 > now and t1 < today + timedelta(days=3):
        return "Alkaa " + format_single_helper(t1) + " ja päättyy " + format_single_helper(t2)
    else:
        return "Alkaa " + t1.strftime("%d.%m.%Y %H:%M") + " ja päättyy " + t2.strftime("%d.%m.%Y %H:%M") + "."
| mit | Python |
|
bca4a0a0dda95306fe126191166e733c7ccea3ee | Add staff permissions for backup models | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/backup/perms.py | nodeconductor/backup/perms.py | from nodeconductor.core.permissions import StaffPermissionLogic
PERMISSION_LOGICS = (
    ('backup.BackupSchedule', StaffPermissionLogic(any_permission=True)),
    ('backup.Backup', StaffPermissionLogic(any_permission=True)),
)
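
# Editorial note (assumed from django-permission semantics): with
# any_permission=True, staff users receive every add/change/delete permission
# on the two backup models registered above.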
| mit | Python |
|
54b66e132137eb6abea0a5ae6571dbc52e309b59 | change all libraries to have module_main of 'index', and add an index.js if it doesn't have one | mozilla/FlightDeck,mozilla/FlightDeck,mozilla/FlightDeck | migrations/011-ensure_library_main_module.py | migrations/011-ensure_library_main_module.py | from jetpack.models import PackageRevision
from jetpack.models import Module  # Module lives alongside PackageRevision

LIB_MODULE_MAIN = 'index'

libs = (PackageRevision.objects
        .filter(package__type='l', module_main='main')
        .select_related('package', 'modules'))
libs.update(module_main=LIB_MODULE_MAIN)

main_per_package = {}
for revision in libs:
    if revision.modules.filter(filename=LIB_MODULE_MAIN).count() == 0:
        mod = main_per_package.get(revision.package_id)
        if not mod:
            mod = Module(filename=LIB_MODULE_MAIN, author=revision.author)
            mod.save()
            main_per_package[revision.package_id] = mod
        revision.modules.add(mod)
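
# Editorial note: the update() above flips module_main to 'index' for every
# library revision in one SQL statement, while the loop creates at most one
# shared 'index' Module per package and attaches it to revisions lacking one.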
| bsd-3-clause | Python |
|
2fda10a83aa5a4d3080a0ce8751e28a18fc9a3e0 | Add two-point example to serve as a regression test for gridline/plot distinguishing | joferkington/mplstereonet | examples/two_point.py | examples/two_point.py | """
Demonstrates plotting multiple linear features with a single ``ax.pole`` call.
The real purpose of this example is to serve as an implicit regression test for
some oddities in the way axes grid lines are handled in matplotlib and
mplstereonet. A 2-vertex line can sometimes be confused for an axes grid line,
and they need different handling on a stereonet.
"""
import matplotlib.pyplot as plt
import mplstereonet

fig, ax = mplstereonet.subplots(figsize=(7,7))

strike = [200, 250]
dip = [50, 60]
ax.pole(strike, dip, 'go', markersize=10)
ax.grid()

plt.show()
| mit | Python |
|
ee85acb7f9f3af91db3bfb4bf766636883f07685 | Add an extra test for the OpalSerializer | khchine5/opal,khchine5/opal,khchine5/opal | opal/tests/test_core_views.py | opal/tests/test_core_views.py | """
Unittests for opal.core.views
"""
from opal.core import test
from opal.core import views

class SerializerTestCase(test.OpalTestCase):

    def test_serializer_default_will_super(self):
        s = views.OpalSerializer()
        with self.assertRaises(TypeError):
            s.default(None)
| agpl-3.0 | Python |
|
1fdffc42c7ff7ea4339a58e8a19ffa07253e4149 | Add script to resolve conflicts | itkach/mwscrape | resolveconflicts.py | resolveconflicts.py | # Copyright (C) 2014 Igor Tkach
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import couchdb
from urlparse import urlparse

def parse_args():
    argparser = argparse.ArgumentParser()
    argparser.add_argument('couch_url')
    argparser.add_argument('-s', '--start')
    return argparser.parse_args()


def mkclient(couch_url):
    parsed_url = urlparse(couch_url)
    couch_db = parsed_url.path.lstrip('/')
    server_url = parsed_url.scheme + '://' + parsed_url.netloc
    server = couchdb.Server(server_url)
    username = parsed_url.username
    password = parsed_url.password
    print "User %s%s at %s, database %s" % (
        username,
        '' if password else ' (no password)',
        server.resource.url,
        couch_db)
    if password:
        server.resource.credentials = (username, password)
    return server[couch_db]

def main():
    args = parse_args()
    db = mkclient(args.couch_url)
    viewoptions = {}
    if args.start:
        viewoptions['startkey'] = args.start
        viewoptions['startkey_docid'] = args.start
    for row in db.iterview('_all_docs', 100, **viewoptions):
        doc = db.get(row.id, conflicts=True)
        conflicts = doc.get('_conflicts')
        if conflicts:
            best_mw_revid = doc['parse']['revid']
            docs = [doc]
            best_doc = doc
            print row.id, '\n', doc.rev, best_mw_revid, conflicts
            all_aliases = set(doc.get('aliases', ()))
            aliase_count = len(all_aliases)
            for conflict_rev in conflicts:
                conflict_doc = db.get(row.id, rev=conflict_rev)
                docs.append(conflict_doc)
                conflict_mw_revid = conflict_doc['parse']['revid']
                #print 'conflict mw revid:', conflict_mw_revid
                if conflict_mw_revid > best_mw_revid:
                    best_mw_revid = conflict_mw_revid
                    best_doc = conflict_doc
                # collect aliases from the conflicting revision
                aliases = set(conflict_doc.get('aliases', ()))
                all_aliases.update(aliases)
            #print all_aliases
            new_aliases_count = len(all_aliases) - aliase_count
            #print 'New aliases found in conflict:', new_aliases_count
            #print 'Best doc: ', best_doc.rev
            if new_aliases_count > 0:
                print '+A', doc.id
            if best_doc.rev != doc.rev:
                print '+R', doc.id
            for doc in docs:
                if doc.rev == best_doc.rev:
                    print 'Keeping ', doc.rev
                    doc['aliases'] = list(all_aliases)
                    db.save(doc)
                else:
                    print 'Discarding ', doc.rev
                    db.delete(doc)


if __name__ == '__main__':
    main()
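
# Hypothetical invocation (URL and title are placeholders):
#   python resolveconflicts.py http://admin:secret@localhost:5984/enwiki -s 'Some Title'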
| mpl-2.0 | Python |
|
a0a2017e05af986cd0a7207c429e7dc5e8b3fcd2 | Add missing tests for Variable | amolenaar/gaphas | tests/test_solver_variable.py | tests/test_solver_variable.py | from gaphas.solver import Variable


def test_equality():
    v = Variable(3)
    w = Variable(3)
    o = Variable(2)

    assert v == 3
    assert 3 == v
    assert v == w
    assert not v == o

    assert v != 2
    assert 2 != v
    assert not 3 != v
    assert v != o


def test_add_to_variable():
    v = Variable(3)

    assert v + 1 == 4
    assert v - 1 == 2
    assert 1 + v == 4
    assert 4 - v == 1


def test_add_to_variable_with_variable():
    v = Variable(3)
    o = Variable(1)

    assert v + o == 4
    assert v - o == 2


def test_multiplication():
    v = Variable(3)

    assert v * 2 == 6
    assert v / 2 == 1.5
    assert v // 2 == 1
    assert 2 * v == 6
    assert 4.5 / v == 1.5
    assert 4 // v == 1


def test_multiplication_with_variable():
    v = Variable(3)
    o = Variable(2)

    assert v * o == 6
    assert v / o == 1.5
    assert v // o == 1


def test_comparison():
    v = Variable(3)

    assert v > 2
    assert v < 4
    assert v >= 2
    assert v >= 3
    assert v <= 4
    assert v <= 3
    assert not v > 3
    assert not v < 3
    assert not v <= 2
    assert not v >= 4


def test_inverse_comparison():
    v = Variable(3)

    assert 4 > v
    assert 2 < v
    assert 4 >= v
    assert 3 >= v
    assert 2 <= v
    assert 3 <= v
    assert not 3 > v
    assert not 3 < v
    assert not 4 <= v
    assert not 2 >= v


def test_power():
    v = Variable(3)
    o = Variable(2)

    assert v ** 2 == 9
    assert 2 ** v == 8
    assert v ** o == 9


def test_modulo():
    v = Variable(3)
    o = Variable(2)

    assert v % 2 == 1
    assert 4 % v == 1
    assert v % o == 1
    assert divmod(v, 2) == (1, 1)
    assert divmod(4, v) == (1, 1)
    assert divmod(v, o) == (1, 1)
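
# Editorial note: these tests assume gaphas.solver.Variable forwards the
# numeric dunder protocol (__eq__, __add__, __mul__, __pow__, __divmod__, ...)
# to its underlying float value, so a Variable mixes freely with plain numbers.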
| lgpl-2.1 | Python |
|
865356c5b7bbec2b9412ffd3d2a39fea19e4b01a | Create getcounts.py | sunjerry019/photonLauncher,sunjerry019/photonLauncher,sunjerry019/photonLauncher,sunjerry019/photonLauncher,sunjerry019/photonLauncher | usbcounter/getcounts.py | usbcounter/getcounts.py | import serial
import json
import os, sys
import time
| apache-2.0 | Python |
|
993b1af160e6ed7886c2c95770683fae72332aed | remove __debug__ | jjkoletar/panda3d,Wilee999/panda3d,tobspr/panda3d,chandler14362/panda3d,Wilee999/panda3d,hj3938/panda3d,ee08b397/panda3d,jjkoletar/panda3d,grimfang/panda3d,grimfang/panda3d,mgracer48/panda3d,brakhane/panda3d,chandler14362/panda3d,tobspr/panda3d,cc272309126/panda3d,brakhane/panda3d,brakhane/panda3d,jjkoletar/panda3d,hj3938/panda3d,cc272309126/panda3d,matthiascy/panda3d,grimfang/panda3d,hj3938/panda3d,tobspr/panda3d,grimfang/panda3d,tobspr/panda3d,cc272309126/panda3d,cc272309126/panda3d,jjkoletar/panda3d,hj3938/panda3d,ee08b397/panda3d,tobspr/panda3d,mgracer48/panda3d,grimfang/panda3d,tobspr/panda3d,chandler14362/panda3d,cc272309126/panda3d,mgracer48/panda3d,jjkoletar/panda3d,chandler14362/panda3d,matthiascy/panda3d,Wilee999/panda3d,brakhane/panda3d,grimfang/panda3d,ee08b397/panda3d,ee08b397/panda3d,chandler14362/panda3d,ee08b397/panda3d,mgracer48/panda3d,tobspr/panda3d,Wilee999/panda3d,ee08b397/panda3d,tobspr/panda3d,brakhane/panda3d,matthiascy/panda3d,jjkoletar/panda3d,mgracer48/panda3d,matthiascy/panda3d,jjkoletar/panda3d,ee08b397/panda3d,chandler14362/panda3d,grimfang/panda3d,mgracer48/panda3d,cc272309126/panda3d,cc272309126/panda3d,matthiascy/panda3d,jjkoletar/panda3d,mgracer48/panda3d,tobspr/panda3d,grimfang/panda3d,Wilee999/panda3d,brakhane/panda3d,chandler14362/panda3d,chandler14362/panda3d,matthiascy/panda3d,brakhane/panda3d,brakhane/panda3d,hj3938/panda3d,hj3938/panda3d,Wilee999/panda3d,brakhane/panda3d,ee08b397/panda3d,cc272309126/panda3d,grimfang/panda3d,hj3938/panda3d,matthiascy/panda3d,Wilee999/panda3d,jjkoletar/panda3d,mgracer48/panda3d,matthiascy/panda3d,grimfang/panda3d,Wilee999/panda3d,hj3938/panda3d,ee08b397/panda3d,cc272309126/panda3d,matthiascy/panda3d,chandler14362/panda3d,mgracer48/panda3d,tobspr/panda3d,Wilee999/panda3d,chandler14362/panda3d,hj3938/panda3d | direct/src/task/Task.py | direct/src/task/Task.py | """ This module exists temporarily as a gatekeeper between
TaskOrig.py, the original Python implementation of the task system,
and TaskNew.py, the new C++ implementation. """
from pandac.libpandaexpressModules import ConfigVariableBool
wantNewTasks = ConfigVariableBool('want-new-tasks', False).getValue()

if wantNewTasks:
    from TaskNew import *
else:
    from TaskOrig import *
| """ This module exists temporarily as a gatekeeper between
TaskOrig.py, the original Python implementation of the task system,
and TaskNew.py, the new C++ implementation. """
wantNewTasks = False
if __debug__:
    from pandac.PandaModules import ConfigVariableBool
    wantNewTasks = ConfigVariableBool('want-new-tasks', False).getValue()

if wantNewTasks:
    from TaskNew import *
else:
    from TaskOrig import *
| bsd-3-clause | Python |
85cbec4f398c49a4903c7370f74deeae3d5adabf | Create ShowData.py | Larz60p/Python-Record-Structure | ShowData.py | ShowData.py | """
The MIT License (MIT)
Copyright (c) <2016> <Larry McCaig (aka: Larz60+ aka: Larz60p)>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import ReadRecord as RdRec


class ShowData:
    def __init__(self):
        self.rr = RdRec.ReadRecord('StockData.json')
        self.stock_market_record = self.rr.read_data_file()

    def show_data(self):
        stkmktrec = self.stock_market_record

        # get a list of field names:
        print('Record fields: {}'.format(stkmktrec._fields))

        # List entire record
        print('\nEntire record: {}'.format(stkmktrec))

        # Get individual field
        print('\ndbtabledesc: {}'.format(stkmktrec.dbtabledesc))

        # Show database column entries
        print('\ndatabase column 0: {}'.format(stkmktrec.columns[0]))
        print('database column 1: {}'.format(stkmktrec.columns[1]))
        print('database column 2: {}'.format(stkmktrec.columns[2]))

        # Column data by key:
        for n in range(len(stkmktrec.columns)):
            column = stkmktrec.columns[n]
            print('\nColumn {} all: {}'.format(n, column))
            print('Column data {} field_name: {}'.format(n, column.field_name))
            print('Column data {} db_column_name: {}'.format(n, column.db_column_name))
            print('Column data {} db_column_desc: {}'.format(n, column.db_column_desc))
            print('Column data {} db_column_type: {}'.format(n, column.db_column_type))

        # Using get_field_item
        print('\nUsing get_field_item - Column 1, db_column_desc: {}'
              .format(self.rr.get_field_item(1, itemname='db_column_desc')))

        # same with bad data
        print('With bad data you get: {}'
              .format(self.rr.get_field_item(1, itemname='donkykong')))


if __name__ == '__main__':
    sd = ShowData()
    sd.show_data()
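
# Editorial note: ReadRecord.read_data_file() evidently returns a namedtuple;
# the demo leans on its ._fields attribute and on nested column namedtuples
# exposing field_name/db_column_* attributes.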
| mit | Python |
|
64130f988f2154870db540244a399a8297a103e9 | move hardcoded URL from email script to model definition. | CarlFK/veyepar,CarlFK/veyepar,xfxf/veyepar,xfxf/veyepar,xfxf/veyepar,CarlFK/veyepar,CarlFK/veyepar,xfxf/veyepar,xfxf/veyepar,CarlFK/veyepar | dj/scripts/email_url.py | dj/scripts/email_url.py | #!/usr/bin/python
# email_url.py
# emails the video URL to the presenters
import itertools
from pprint import pprint
from email_ab import email_ab

class email_url(email_ab):
    ready_state = 7
    subject_template = "[{{ep.show.name}}] Video up: {{ep.name}}"
    body_body = """
The video is posted:
{% for url in urls %} {{url}}
{% endfor %}
Look at it, make sure the title is spelled right and the audio sounds reasonable.
If you are satisfied, tweet it, blog it, whatever it. No point in making videos if no one watches them.
To approve it click the Approve button at
{{ep.approve_url}}
As soon as you or someone approves your video, it will be tweeted on @NextDayVideo{% if ep.show.client.tweet_prefix %} tagged {{ep.show.client.tweet_prefix}}{% endif %}. It will also be sent to the event organizers in hopes that they add it to the event website.
{% if ep.twitter_url %}
It has been tweeted: {{ ep.twitter_url }}
Re-tweet it, blog it, whatever it. No point in making videos if no one watches them.
{% endif %}
"""
def context(self, ep):
ctx = super(email_url, self).context(ep)
# dig around for URLs that might be relevant
urls = filter( None,
[ep.public_url,
ep.host_url,
ep.archive_ogv_url,
ep.archive_mp4_url] )
ctx['urls'] = urls
ctx['py_name'] = "email_url.py"
return ctx
if __name__ == '__main__':
p=email_url()
p.main()
| #!/usr/bin/python
# email_url.py
# emails the video URL to the presenters
import itertools
from pprint import pprint
from email_ab import email_ab

class email_url(email_ab):
    ready_state = 7
    subject_template = "[{{ep.show.name}}] Video up: {{ep.name}}"
    body_body = """
The video is posted:
{% for url in urls %} {{url}}
{% endfor %}
Look at it, make sure the title is spelled right and the audio sounds reasonable.
If you are satisfied, tweet it, blog it, whatever it. No point in making videos if no one watches them.
To approve it click the Approve button at
https://veyepar.nextdayvideo.com/main/approve/{{ep.id}}/{{ep.slug}}/{{ep.edit_key}}/
As soon as you or someone approves your video, it will be tweeted on @NextDayVideo{% if ep.show.client.tweet_prefix %} tagged {{ep.show.client.tweet_prefix}}{% endif %}. It will also be sent to the event organizers in hopes that they add it to the event website.
{% if ep.twitter_url %}
It has been tweeted: {{ ep.twitter_url }}
Re-tweet it, blog it, whatever it. No point in making videos if no one watches them.
{% endif %}
"""
def context(self, ep):
ctx = super(email_url, self).context(ep)
# dig around for URLs that might be relevant
urls = filter( None,
[ep.public_url,
ep.host_url,
ep.archive_ogv_url,
ep.archive_mp4_url] )
ctx['urls'] = urls
ctx['py_name'] = "email_url.py"
return ctx
if __name__ == '__main__':
p=email_url()
p.main()
| mit | Python |
ce47fec10ccda45550625221c64322d89622c707 | Add libjpeg.gyp that wraps third_party/externals/libjpeg/libjpeg.gyp Review URL: https://codereview.appspot.com/5848046 | Hikari-no-Tenshi/android_external_skia,DiamondLovesYou/skia-sys,Khaon/android_external_skia,OptiPop/external_skia,TeamBliss-LP/android_external_skia,fire855/android_external_skia,mydongistiny/android_external_skia,nfxosp/platform_external_skia,AOSP-YU/platform_external_skia,aospo/platform_external_skia,todotodoo/skia,sudosurootdev/external_skia,AOSP-YU/platform_external_skia,Pure-Aosp/android_external_skia,todotodoo/skia,Tesla-Redux/android_external_skia,houst0nn/external_skia,Fusion-Rom/android_external_skia,Infinitive-OS/platform_external_skia,NamelessRom/android_external_skia,xzzz9097/android_external_skia,MinimalOS/external_skia,DARKPOP/external_chromium_org_third_party_skia,wildermason/external_skia,akiss77/skia,aospo/platform_external_skia,MarshedOut/android_external_skia,zhaochengw/platform_external_skia,TeamBliss-LP/android_external_skia,AsteroidOS/android_external_skia,TeamEOS/external_skia,qrealka/skia-hc,zhaochengw/platform_external_skia,AOSPA-L/android_external_skia,android-ia/platform_external_chromium_org_third_party_skia,jtg-gg/skia,Pure-Aosp/android_external_skia,timduru/platform-external-skia,timduru/platform-external-skia,TeslaOS/android_external_skia,android-ia/platform_external_skia,Infinitive-OS/platform_external_skia,pcwalton/skia,Khaon/android_external_skia,MyAOSP/external_chromium_org_third_party_skia,xin3liang/platform_external_chromium_org_third_party_skia,DesolationStaging/android_external_skia,TeamEOS/external_skia,NamelessRom/android_external_skia,nox/skia,amyvmiwei/skia,mmatyas/skia,Fusion-Rom/external_chromium_org_third_party_skia,xin3liang/platform_external_chromium_org_third_party_skia,Jichao/skia,samuelig/skia,nox/skia,larsbergstrom/skia,OneRom/external_skia,geekboxzone/mmallow_external_skia,jtg-gg/skia,TeamExodus/external_skia,GladeRom/android_external_skia,vanish87/skia,sigysmund/platform_external_skia,nvoron23/skia,invisiblek/android_external_skia,spezi77/android_external_skia,TeamEOS/external_skia,BrokenROM/external_skia,geekboxzone/lollipop_external_skia,sombree/android_external_skia,Omegaphora/external_chromium_org_third_party_skia,sigysmund/platform_external_skia,timduru/platform-external-skia,OneRom/external_skia,TeamEOS/external_skia,MarshedOut/android_external_skia,android-ia/platform_external_skia,qrealka/skia-hc,Infusion-OS/android_external_skia,HalCanary/skia-hc,TeamBliss-LP/android_external_skia,Pure-Aosp/android_external_skia,amyvmiwei/skia,AOSPA-L/android_external_skia,mydongistiny/android_external_skia,scroggo/skia,Android-AOSP/external_skia,VRToxin-AOSP/android_external_skia,samuelig/skia,DARKPOP/external_chromium_org_third_party_skia,Asteroid-Project/android_external_skia,AOSPU/external_chromium_org_third_party_skia,spezi77/android_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,GladeRom/android_external_skia,BrokenROM/external_skia,Infusion-OS/android_external_skia,F-AOSP/platform_external_skia,HalCanary/skia-hc,Asteroid-Project/android_external_skia,vvuk/skia,tmpvar/skia.cc,w3nd1go/android_external_skia,F-AOSP/platform_external_skia,geekboxzone/mmallow_external_skia,Igalia/skia,FusionSP/android_external_skia,TeamEOS/external_chromium_org_third_party_skia,Infinitive-OS/platform_external_skia,FusionSP/android_external_skia,sudosurootdev/external_skia,chenlian2015/skia_from_google,YUPlayGodDev/platform_external_skia,google/skia,amyvmiwei/skia,Tesla-Redux
/android_external_skia,MinimalOS-AOSP/platform_external_skia,AOSP-YU/platform_external_skia,geekboxzone/lollipop_external_skia,mmatyas/skia,AOSPB/external_skia,houst0nn/external_skia,aospo/platform_external_skia,Plain-Andy/android_platform_external_skia,UBERMALLOW/external_skia,F-AOSP/platform_external_skia,MIPS/external-chromium_org-third_party-skia,Hybrid-Rom/external_skia,MinimalOS/android_external_skia,Purity-Lollipop/platform_external_skia,noselhq/skia,DiamondLovesYou/skia-sys,rubenvb/skia,android-ia/platform_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,byterom/android_external_skia,sigysmund/platform_external_skia,mmatyas/skia,BrokenROM/external_skia,NamelessRom/android_external_skia,google/skia,MarshedOut/android_external_skia,boulzordev/android_external_skia,GladeRom/android_external_skia,MinimalOS/android_external_skia,aosp-mirror/platform_external_skia,geekboxzone/lollipop_external_skia,FusionSP/android_external_skia,vvuk/skia,MinimalOS/external_skia,Euphoria-OS-Legacy/android_external_skia,akiss77/skia,InfinitiveOS/external_skia,AsteroidOS/android_external_skia,mydongistiny/android_external_skia,TeamEOS/external_chromium_org_third_party_skia,RadonX-ROM/external_skia,ominux/skia,TeamExodus/external_skia,DiamondLovesYou/skia-sys,VRToxin-AOSP/android_external_skia,MyAOSP/external_chromium_org_third_party_skia,DARKPOP/external_chromium_org_third_party_skia,AndroidOpenDevelopment/android_external_skia,Asteroid-Project/android_external_skia,qrealka/skia-hc,pacerom/external_skia,boulzordev/android_external_skia,mydongistiny/android_external_skia,DARKPOP/external_chromium_org_third_party_skia,AOSP-YU/platform_external_skia,qrealka/skia-hc,YUPlayGodDev/platform_external_skia,MinimalOS/external_chromium_org_third_party_skia,MinimalOS/android_external_chromium_org_third_party_skia,shahrzadmn/skia,mozilla-b2g/external_skia,suyouxin/android_external_skia,pcwalton/skia,codeaurora-unoffical/platform-external-skia,TeamExodus/external_skia,aosp-mirror/platform_external_skia,nfxosp/platform_external_skia,PAC-ROM/android_external_skia,vvuk/skia,VentureROM-L/android_external_skia,invisiblek/android_external_skia,AOSPB/external_skia,sigysmund/platform_external_skia,HealthyHoney/temasek_SKIA,MyAOSP/external_chromium_org_third_party_skia,todotodoo/skia,Hikari-no-Tenshi/android_external_skia,aospo/platform_external_skia,SlimSaber/android_external_skia,AOSP-YU/platform_external_skia,MIPS/external-chromium_org-third_party-skia,geekboxzone/mmallow_external_skia,ctiao/platform-external-skia,YUPlayGodDev/platform_external_skia,Euphoria-OS-Legacy/android_external_skia,Infusion-OS/android_external_skia,spezi77/android_external_skia,MonkeyZZZZ/platform_external_skia,Infinitive-OS/platform_external_skia,xzzz9097/android_external_skia,TeslaProject/external_skia,sigysmund/platform_external_skia,temasek/android_external_skia,ench0/external_chromium_org_third_party_skia,FusionSP/external_chromium_org_third_party_skia,VentureROM-L/android_external_skia,Hikari-no-Tenshi/android_external_skia,houst0nn/external_skia,sigysmund/platform_external_skia,vanish87/skia,amyvmiwei/skia,ctiao/platform-external-skia,MyAOSP/external_chromium_org_third_party_skia,MinimalOS-AOSP/platform_external_skia,TeamEOS/external_skia,MinimalOS/external_chromium_org_third_party_skia,suyouxin/android_external_skia,ench0/external_skia,Fusion-Rom/android_external_skia,MinimalOS/android_external_chromium_org_third_party_skia,nox/skia,OptiPop/external_skia,android-ia/platform_external_chromium_org_third_party_skia,mmatyas
/skia,geekboxzone/mmallow_external_skia,InfinitiveOS/external_skia,AOSP-YU/platform_external_skia,PAC-ROM/android_external_skia,w3nd1go/android_external_skia,codeaurora-unoffical/platform-external-skia,AsteroidOS/android_external_skia,w3nd1go/android_external_skia,PAC-ROM/android_external_skia,Plain-Andy/android_platform_external_skia,VRToxin-AOSP/android_external_skia,spezi77/android_external_skia,nox/skia,Hikari-no-Tenshi/android_external_skia,vanish87/skia,xzzz9097/android_external_skia,suyouxin/android_external_skia,MinimalOS/android_external_skia,w3nd1go/android_external_skia,OptiPop/external_skia,Fusion-Rom/external_chromium_org_third_party_skia,sudosurootdev/external_skia,GladeRom/android_external_skia,OptiPop/external_skia,ench0/external_chromium_org_third_party_skia,ench0/external_chromium_org_third_party_skia,TeamTwisted/external_skia,MinimalOS/external_skia,MinimalOS/external_chromium_org_third_party_skia,ench0/external_chromium_org_third_party_skia,MinimalOS/android_external_skia,byterom/android_external_skia,Asteroid-Project/android_external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,TeamEOS/external_skia,pacerom/external_skia,HealthyHoney/temasek_SKIA,Igalia/skia,qrealka/skia-hc,OptiPop/external_chromium_org_third_party_skia,android-ia/platform_external_chromium_org_third_party_skia,aosp-mirror/platform_external_skia,Samsung/skia,TeamExodus/external_skia,Fusion-Rom/android_external_skia,Jichao/skia,RadonX-ROM/external_skia,chenlian2015/skia_from_google,timduru/platform-external-skia,Samsung/skia,Khaon/android_external_skia,nvoron23/skia,scroggo/skia,Infusion-OS/android_external_skia,larsbergstrom/skia,samuelig/skia,zhaochengw/platform_external_skia,Jichao/skia,Fusion-Rom/external_chromium_org_third_party_skia,MinimalOS/android_external_chromium_org_third_party_skia,TeslaProject/external_skia,fire855/android_external_skia,suyouxin/android_external_skia,AsteroidOS/android_external_skia,VentureROM-L/android_external_skia,ench0/external_chromium_org_third_party_skia,Omegaphora/external_chromium_org_third_party_skia,tmpvar/skia.cc,AndroidOpenDevelopment/android_external_skia,codeaurora-unoffical/platform-external-skia,Hikari-no-Tenshi/android_external_skia,nvoron23/skia,noselhq/skia,TeamEOS/external_chromium_org_third_party_skia,nox/skia,aospo/platform_external_skia,RadonX-ROM/external_skia,geekboxzone/lollipop_external_skia,shahrzadmn/skia,UBERMALLOW/external_skia,OptiPop/external_chromium_org_third_party_skia,google/skia,timduru/platform-external-skia,amyvmiwei/skia,AOSP-YU/platform_external_skia,boulzordev/android_external_skia,OptiPop/external_chromium_org_third_party_skia,MarshedOut/android_external_skia,Jichao/skia,Tesla-Redux/android_external_skia,nvoron23/skia,DesolationStaging/android_external_skia,MarshedOut/android_external_skia,InfinitiveOS/external_skia,VentureROM-L/android_external_skia,OptiPop/external_chromium_org_third_party_skia,android-ia/platform_external_skia,todotodoo/skia,sombree/android_external_skia,AOSPA-L/android_external_skia,DiamondLovesYou/skia-sys,Omegaphora/external_chromium_org_third_party_skia,wildermason/external_skia,TeamEOS/external_chromium_org_third_party_skia,CyanogenMod/android_external_chromium_org_third_party_skia,w3nd1go/android_external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,Samsung/skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,MinimalOS-AOSP/platform_external_skia,TeslaProject/external_skia,TeslaOS/android_external_skia,android-ia/platform_external_skia,AOSPU/external_chro
mium_org_third_party_skia,Igalia/skia,VentureROM-L/android_external_skia,rubenvb/skia,sombree/android_external_skia,ench0/external_skia,shahrzadmn/skia,SlimSaber/android_external_skia,tmpvar/skia.cc,Khaon/android_external_skia,MonkeyZZZZ/platform_external_skia,DiamondLovesYou/skia-sys,nox/skia,MIPS/external-chromium_org-third_party-skia,Hybrid-Rom/external_skia,mmatyas/skia,wildermason/external_skia,VRToxin-AOSP/android_external_skia,AOSP-YU/platform_external_skia,Euphoria-OS-Legacy/android_external_skia,Android-AOSP/external_skia,MinimalOS-AOSP/platform_external_skia,Omegaphora/external_chromium_org_third_party_skia,scroggo/skia,Omegaphora/external_chromium_org_third_party_skia,AOSPB/external_skia,spezi77/android_external_skia,Pure-Aosp/android_external_skia,wildermason/external_skia,Infusion-OS/android_external_skia,zhaochengw/platform_external_skia,DesolationStaging/android_external_skia,vvuk/skia,OptiPop/external_chromium_org_third_party_skia,NamelessRom/android_external_skia,scroggo/skia,AOSPA-L/android_external_skia,Khaon/android_external_skia,Omegaphora/external_skia,mydongistiny/android_external_skia,zhaochengw/platform_external_skia,larsbergstrom/skia,pcwalton/skia,DesolationStaging/android_external_skia,RadonX-ROM/external_skia,MyAOSP/external_chromium_org_third_party_skia,TeamTwisted/external_skia,akiss77/skia,rubenvb/skia,vanish87/skia,FusionSP/android_external_skia,boulzordev/android_external_skia,temasek/android_external_skia,Omegaphora/external_skia,Infinitive-OS/platform_external_skia,Hikari-no-Tenshi/android_external_skia,sombree/android_external_skia,noselhq/skia,jtg-gg/skia,TeamTwisted/external_skia,sombree/android_external_skia,houst0nn/external_skia,HealthyHoney/temasek_SKIA,MyAOSP/external_chromium_org_third_party_skia,MinimalOS/external_chromium_org_third_party_skia,temasek/android_external_skia,Euphoria-OS-Legacy/android_external_skia,google/skia,xzzz9097/android_external_skia,AndroidOpenDevelopment/android_external_skia,Hybrid-Rom/external_skia,ench0/external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,wildermason/external_skia,chenlian2015/skia_from_google,boulzordev/android_external_skia,UBERMALLOW/external_skia,nvoron23/skia,TeamBliss-LP/android_external_skia,DARKPOP/external_chromium_org_third_party_skia,aosp-mirror/platform_external_skia,Infusion-OS/android_external_skia,MinimalOS/android_external_chromium_org_third_party_skia,Samsung/skia,fire855/android_external_skia,OneRom/external_skia,jtg-gg/skia,AsteroidOS/android_external_skia,boulzordev/android_external_skia,Omegaphora/external_chromium_org_third_party_skia,shahrzadmn/skia,FusionSP/external_chromium_org_third_party_skia,MinimalOS/android_external_chromium_org_third_party_skia,rubenvb/skia,AOSPU/external_chromium_org_third_party_skia,DARKPOP/external_chromium_org_third_party_skia,fire855/android_external_skia,Hybrid-Rom/external_skia,mydongistiny/external_chromium_org_third_party_skia,larsbergstrom/skia,google/skia,ench0/external_skia,OneRom/external_skia,TeamExodus/external_skia,nfxosp/platform_external_skia,MinimalOS/android_external_chromium_org_third_party_skia,Asteroid-Project/android_external_skia,AOSPB/external_skia,ench0/external_skia,google/skia,Omegaphora/external_chromium_org_third_party_skia,boulzordev/android_external_skia,TeamExodus/external_skia,shahrzadmn/skia,google/skia,Asteroid-Project/android_external_skia,F-AOSP/platform_external_skia,HalCanary/skia-hc,MIPS/external-chromium_org-third_party-skia,aospo/platform_external_skia,pacerom/external_skia,MinimalOS-AOSP/pla
tform_external_skia,BrokenROM/external_skia,Tesla-Redux/android_external_skia,ench0/external_skia,tmpvar/skia.cc,sigysmund/platform_external_skia,suyouxin/android_external_skia,Hybrid-Rom/external_skia,InfinitiveOS/external_skia,samuelig/skia,AOSPA-L/android_external_skia,temasek/android_external_skia,Pure-Aosp/android_external_skia,Fusion-Rom/android_external_skia,amyvmiwei/skia,vanish87/skia,timduru/platform-external-skia,TeslaProject/external_skia,MinimalOS-AOSP/platform_external_skia,geekboxzone/mmallow_external_skia,invisiblek/android_external_skia,Omegaphora/external_skia,todotodoo/skia,BrokenROM/external_skia,nfxosp/platform_external_skia,PAC-ROM/android_external_skia,Tesla-Redux/android_external_skia,nox/skia,Purity-Lollipop/platform_external_skia,amyvmiwei/skia,AsteroidOS/android_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,YUPlayGodDev/platform_external_skia,ench0/external_skia,chenlian2015/skia_from_google,todotodoo/skia,VRToxin-AOSP/android_external_skia,pacerom/external_skia,geekboxzone/lollipop_external_skia,ctiao/platform-external-skia,OneRom/external_skia,Infinitive-OS/platform_external_skia,UBERMALLOW/external_skia,MarshedOut/android_external_skia,vanish87/skia,HalCanary/skia-hc,noselhq/skia,noselhq/skia,MinimalOS/android_external_skia,pacerom/external_skia,larsbergstrom/skia,chenlian2015/skia_from_google,DARKPOP/external_chromium_org_third_party_skia,Tesla-Redux/android_external_skia,MinimalOS/android_external_skia,Euphoria-OS-Legacy/android_external_skia,MarshedOut/android_external_skia,MonkeyZZZZ/platform_external_skia,AndroidOpenDevelopment/android_external_skia,pcwalton/skia,UBERMALLOW/external_skia,codeaurora-unoffical/platform-external-skia,sudosurootdev/external_skia,MonkeyZZZZ/platform_external_skia,AOSPU/external_chromium_org_third_party_skia,YUPlayGodDev/platform_external_skia,ctiao/platform-external-skia,Omegaphora/external_skia,SlimSaber/android_external_skia,NamelessRom/android_external_skia,PAC-ROM/android_external_skia,codeaurora-unoffical/platform-external-skia,Igalia/skia,CyanogenMod/android_external_chromium_org_third_party_skia,mozilla-b2g/external_skia,w3nd1go/android_external_skia,invisiblek/android_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,vanish87/skia,TeamTwisted/external_skia,ctiao/platform-external-skia,AOSPA-L/android_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,vvuk/skia,qrealka/skia-hc,MinimalOS/external_skia,F-AOSP/platform_external_skia,SlimSaber/android_external_skia,FusionSP/external_chromium_org_third_party_skia,Asteroid-Project/android_external_skia,xzzz9097/android_external_skia,RadonX-ROM/external_skia,AndroidOpenDevelopment/android_external_skia,MIPS/external-chromium_org-third_party-skia,mozilla-b2g/external_skia,invisiblek/android_external_skia,TeamExodus/external_skia,HalCanary/skia-hc,mydongistiny/android_external_skia,noselhq/skia,fire855/android_external_skia,todotodoo/skia,CyanogenMod/android_external_chromium_org_third_party_skia,mydongistiny/external_chromium_org_third_party_skia,geekboxzone/lollipop_external_skia,tmpvar/skia.cc,invisiblek/android_external_skia,sombree/android_external_skia,android-ia/platform_external_skia,akiss77/skia,AOSPB/external_skia,Fusion-Rom/android_external_skia,PAC-ROM/android_external_skia,AOSPU/external_chromium_org_third_party_skia,mozilla-b2g/external_skia,samuelig/skia,Jichao/skia,shahrzadmn/skia,nfxosp/platform_external_skia,Hikari-no-Tenshi/android_external_skia,Fusion-Rom/android_external_skia,Omegaphora/external_s
kia,AOSPB/external_skia,Asteroid-Project/android_external_skia,noselhq/skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,fire855/android_external_skia,MinimalOS/external_skia,android-ia/platform_external_skia,HalCanary/skia-hc,shahrzadmn/skia,mmatyas/skia,vanish87/skia,pcwalton/skia,android-ia/platform_external_skia,vanish87/skia,todotodoo/skia,TeamBliss-LP/android_external_skia,Plain-Andy/android_platform_external_skia,larsbergstrom/skia,Purity-Lollipop/platform_external_skia,google/skia,DesolationStaging/android_external_skia,YUPlayGodDev/platform_external_skia,nfxosp/platform_external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,NamelessRom/android_external_skia,VentureROM-L/android_external_skia,TeslaProject/external_skia,ominux/skia,byterom/android_external_skia,akiss77/skia,Samsung/skia,aospo/platform_external_skia,Tesla-Redux/android_external_skia,rubenvb/skia,sudosurootdev/external_skia,Infusion-OS/android_external_skia,Purity-Lollipop/platform_external_skia,HealthyHoney/temasek_SKIA,Khaon/android_external_skia,Pure-Aosp/android_external_skia,pcwalton/skia,Hybrid-Rom/external_skia,mozilla-b2g/external_skia,rubenvb/skia,MIPS/external-chromium_org-third_party-skia,zhaochengw/platform_external_skia,RadonX-ROM/external_skia,temasek/android_external_skia,Hybrid-Rom/external_skia,UBERMALLOW/external_skia,ominux/skia,android-ia/platform_external_chromium_org_third_party_skia,noselhq/skia,VRToxin-AOSP/android_external_skia,Android-AOSP/external_skia,Jichao/skia,jtg-gg/skia,TeslaOS/android_external_skia,sombree/android_external_skia,OptiPop/external_skia,MinimalOS/android_external_chromium_org_third_party_skia,TeamTwisted/external_skia,Pure-Aosp/android_external_skia,F-AOSP/platform_external_skia,byterom/android_external_skia,Igalia/skia,geekboxzone/lollipop_external_skia,vvuk/skia,GladeRom/android_external_skia,rubenvb/skia,w3nd1go/android_external_skia,larsbergstrom/skia,Omegaphora/external_chromium_org_third_party_skia,fire855/android_external_skia,BrokenROM/external_skia,OneRom/external_skia,android-ia/platform_external_chromium_org_third_party_skia,ench0/external_chromium_org_third_party_skia,noselhq/skia,SlimSaber/android_external_skia,pcwalton/skia,rubenvb/skia,rubenvb/skia,fire855/android_external_skia,mydongistiny/external_chromium_org_third_party_skia,Jichao/skia,TeamBliss-LP/android_external_skia,MonkeyZZZZ/platform_external_skia,DARKPOP/external_chromium_org_third_party_skia,Igalia/skia,geekboxzone/lollipop_external_skia,qrealka/skia-hc,OneRom/external_skia,ominux/skia,Plain-Andy/android_platform_external_skia,UBERMALLOW/external_skia,aosp-mirror/platform_external_skia,AOSP-YU/platform_external_skia,mmatyas/skia,w3nd1go/android_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,Euphoria-OS-Legacy/android_external_skia,ominux/skia,TeamEOS/external_skia,sudosurootdev/external_skia,Hybrid-Rom/external_skia,Android-AOSP/external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,InfinitiveOS/external_skia,wildermason/external_skia,tmpvar/skia.cc,Purity-Lollipop/platform_external_skia,NamelessRom/android_external_skia,pacerom/external_skia,codeaurora-unoffical/platform-external-skia,mydongistiny/android_external_skia,mydongistiny/external_chromium_org_third_party_skia,qrealka/skia-hc,houst0nn/external_skia,ctiao/platform-external-skia,boulzordev/android_external_skia,ominux/skia,Samsung/skia,Fusion-Rom/android_external_skia,akiss77/skia,sombree/android_external_skia,PAC-ROM/android_external_skia,YUPlayGodDev/platform
_external_skia,MinimalOS/external_skia,xin3liang/platform_external_chromium_org_third_party_skia,Android-AOSP/external_skia,pcwalton/skia,Pure-Aosp/android_external_skia,Igalia/skia,suyouxin/android_external_skia,OptiPop/external_chromium_org_third_party_skia,wildermason/external_skia,TeamEOS/external_chromium_org_third_party_skia,larsbergstrom/skia,pacerom/external_skia,Hikari-no-Tenshi/android_external_skia,scroggo/skia,Tesla-Redux/android_external_skia,xzzz9097/android_external_skia,mydongistiny/android_external_skia,MonkeyZZZZ/platform_external_skia,ominux/skia,geekboxzone/mmallow_external_skia,Plain-Andy/android_platform_external_skia,aosp-mirror/platform_external_skia,AndroidOpenDevelopment/android_external_skia,MinimalOS-AOSP/platform_external_skia,mmatyas/skia,HalCanary/skia-hc,TeamBliss-LP/android_external_skia,todotodoo/skia,byterom/android_external_skia,MinimalOS/external_chromium_org_third_party_skia,android-ia/platform_external_chromium_org_third_party_skia,OptiPop/external_skia,wildermason/external_skia,TeslaProject/external_skia,MIPS/external-chromium_org-third_party-skia,TeamExodus/external_skia,aosp-mirror/platform_external_skia,nfxosp/platform_external_skia,shahrzadmn/skia,DesolationStaging/android_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,NamelessRom/android_external_skia,vvuk/skia,houst0nn/external_skia,vvuk/skia,RadonX-ROM/external_skia,sigysmund/platform_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,MinimalOS/external_skia,F-AOSP/platform_external_skia,byterom/android_external_skia,OptiPop/external_skia,VRToxin-AOSP/android_external_skia,zhaochengw/platform_external_skia,Omegaphora/external_skia,zhaochengw/platform_external_skia,mmatyas/skia,mydongistiny/external_chromium_org_third_party_skia,Infinitive-OS/platform_external_skia,ench0/external_skia,MyAOSP/external_chromium_org_third_party_skia,OptiPop/external_skia,scroggo/skia,Samsung/skia,tmpvar/skia.cc,DesolationStaging/android_external_skia,Khaon/android_external_skia,SlimSaber/android_external_skia,Samsung/skia,DiamondLovesYou/skia-sys,chenlian2015/skia_from_google,nvoron23/skia,UBERMALLOW/external_skia,temasek/android_external_skia,amyvmiwei/skia,MarshedOut/android_external_skia,AOSPB/external_skia,ench0/external_chromium_org_third_party_skia,geekboxzone/mmallow_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,MyAOSP/external_chromium_org_third_party_skia,MinimalOS/external_chromium_org_third_party_skia,MonkeyZZZZ/platform_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,aospo/platform_external_skia,MinimalOS-AOSP/platform_external_skia,sudosurootdev/external_skia,akiss77/skia,ominux/skia,android-ia/platform_external_chromium_org_third_party_skia,Fusion-Rom/external_chromium_org_third_party_skia,HealthyHoney/temasek_SKIA,pcwalton/skia,Infusion-OS/android_external_skia,xzzz9097/android_external_skia,FusionSP/external_chromium_org_third_party_skia,MinimalOS/android_external_skia,OneRom/external_skia,rubenvb/skia,jtg-gg/skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,TeamEOS/external_chromium_org_third_party_skia,MinimalOS/external_chromium_org_third_party_skia,MIPS/external-chromium_org-third_party-skia,mydongistiny/external_chromium_org_third_party_skia,AOSPB/external_skia,nvoron23/skia,BrokenROM/external_skia,TeamEOS/external_chromium_org_third_party_skia,codeaurora-unoffical/platform-external-skia,samuelig/skia,scroggo/skia,chenlian2015/ski
a_from_google,OneRom/external_skia,Infinitive-OS/platform_external_skia,BrokenROM/external_skia,TeslaOS/android_external_skia,AOSPU/external_chromium_org_third_party_skia,Khaon/android_external_skia,DesolationStaging/android_external_skia,FusionSP/external_chromium_org_third_party_skia,HalCanary/skia-hc,HealthyHoney/temasek_SKIA,aosp-mirror/platform_external_skia,AOSPA-L/android_external_skia,Euphoria-OS-Legacy/android_external_skia,mydongistiny/external_chromium_org_third_party_skia,FusionSP/external_chromium_org_third_party_skia,mydongistiny/external_chromium_org_third_party_skia,suyouxin/android_external_skia,Purity-Lollipop/platform_external_skia,nvoron23/skia,akiss77/skia,Euphoria-OS-Legacy/android_external_skia,TeslaOS/android_external_skia,AOSPA-L/android_external_skia,TeslaOS/android_external_skia,FusionSP/external_chromium_org_third_party_skia,Fusion-Rom/android_external_skia,AsteroidOS/android_external_skia,boulzordev/android_external_skia,mozilla-b2g/external_skia,w3nd1go/android_external_skia,PAC-ROM/android_external_skia,Jichao/skia,TeslaProject/external_skia,AsteroidOS/android_external_skia,TeamTwisted/external_skia,VRToxin-AOSP/android_external_skia,HealthyHoney/temasek_SKIA,FusionSP/android_external_skia,nfxosp/platform_external_skia,Plain-Andy/android_platform_external_skia,Omegaphora/external_skia,HealthyHoney/temasek_SKIA,TeslaOS/android_external_skia,tmpvar/skia.cc,InfinitiveOS/external_skia,SlimSaber/android_external_skia,invisiblek/android_external_skia,shahrzadmn/skia,OptiPop/external_chromium_org_third_party_skia,temasek/android_external_skia,jtg-gg/skia,SlimSaber/android_external_skia,FusionSP/external_chromium_org_third_party_skia,byterom/android_external_skia,TeamTwisted/external_skia,nox/skia,ctiao/platform-external-skia,samuelig/skia,MinimalOS/android_external_skia,Plain-Andy/android_platform_external_skia,YUPlayGodDev/platform_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,F-AOSP/platform_external_skia,TeamTwisted/external_skia,GladeRom/android_external_skia,UBERMALLOW/external_skia,VRToxin-AOSP/android_external_skia,AOSPU/external_chromium_org_third_party_skia,TeamTwisted/external_skia,MonkeyZZZZ/platform_external_skia,android-ia/platform_external_chromium_org_third_party_skia,VentureROM-L/android_external_skia,nfxosp/platform_external_skia,HalCanary/skia-hc,TeslaProject/external_skia,MarshedOut/android_external_skia,timduru/platform-external-skia,Purity-Lollipop/platform_external_skia,vvuk/skia,nox/skia,geekboxzone/mmallow_external_skia,nvoron23/skia,Infinitive-OS/platform_external_skia,DiamondLovesYou/skia-sys,tmpvar/skia.cc,TeslaOS/android_external_skia,codeaurora-unoffical/platform-external-skia,GladeRom/android_external_skia,HalCanary/skia-hc,RadonX-ROM/external_skia,samuelig/skia,GladeRom/android_external_skia,MonkeyZZZZ/platform_external_skia,Igalia/skia,MinimalOS-AOSP/platform_external_skia,InfinitiveOS/external_skia,PAC-ROM/android_external_skia,YUPlayGodDev/platform_external_skia,akiss77/skia,TeamEOS/external_chromium_org_third_party_skia,OptiPop/external_chromium_org_third_party_skia,MinimalOS/external_skia,invisiblek/android_external_skia,google/skia,Purity-Lollipop/platform_external_skia,InfinitiveOS/external_skia,xzzz9097/android_external_skia,AOSPB/external_skia,TeamExodus/external_skia,Android-AOSP/external_skia,houst0nn/external_skia,ench0/external_chromium_org_third_party_skia,Jichao/skia,mozilla-b2g/external_skia,MinimalOS/external_chromium_org_third_party_skia,geekboxzone/mmallow_external_skia,Android-AOSP/external_skia,moz
illa-b2g/external_skia,Omegaphora/external_skia,sudosurootdev/external_skia,ominux/skia,byterom/android_external_skia,FusionSP/android_external_skia,scroggo/skia,temasek/android_external_skia,google/skia,spezi77/android_external_skia,MinimalOS/android_external_chromium_org_third_party_skia,AndroidOpenDevelopment/android_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,FusionSP/android_external_skia,VentureROM-L/android_external_skia,larsbergstrom/skia | gyp/libjpeg.gyp | gyp/libjpeg.gyp | # Copyright 2012 The Android Open Source Project
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Depend on this wrapper to pick up libjpeg from third_party
{
  'targets': [
    {
      'target_name': 'libjpeg',
      'type': 'none',
      'dependencies': [
        '../third_party/externals/libjpeg/libjpeg.gyp:libjpeg',
      ],
    },
  ],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| bsd-3-clause | Python |
|
d4ff515df7e12d26c759adfafcacf82e47da71a1 | Add util | hausdorff/snapchat-fs | snapchat_fs/util.py | snapchat_fs/util.py | #!/usr/bin/env python
"""
util.py provides a set of nice utility functions that support the snapchat_fs pkg
"""
__author__ = "Alex Clemmer, Chad Brubaker"
__copyright__ = "Copyright 2013, Alex Clemmer and Chad Brubaker"
__credits__ = ["Alex Clemmer", "Chad Brubaker"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Alex Clemmer"
__email__ = "[email protected]"
__status__ = "Prototype"

def bold(text):
    return '\033[1m%s\033[0m' % text


def green(text):
    return '\033[1;32m%s\033[0m' % text


def red(text):
    return '\033[1;31m%s\033[0m' % text
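
# Illustrative use (assumes an ANSI-capable terminal):
#   print bold("uploading") + " " + green("ok")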
| mit | Python |
|
7d9fd2eed72a2a65744259af1bd8580253f282d3 | Create a.py | y-sira/atcoder,y-sira/atcoder | abc067/a.py | abc067/a.py | a, b = map(int, input().split())
if a % 3 == 0 or b % 3 == 0 or (a + b) % 3 == 0:
    print('Possible')
else:
    print('Impossible')
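
# Worked example (editorial): for a=4, b=5 neither value is divisible by 3,
# but a + b = 9 is, so the answer is 'Possible'.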
| mit | Python |
|
c4d5d04a957fed09228995aa7f84ed19c64e3831 | Add previously forgotten afterflight utilities module | foobarbecue/afterflight,foobarbecue/afterflight,foobarbecue/afterflight,foobarbecue/afterflight | af_utils.py | af_utils.py | #Copyright 2013 Aaron Curtis
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import calendar, datetime, re, numpy

def dt2jsts(datetime):
    """
    Given a python datetime, convert to javascript timestamp format
    (milliseconds since Jan 1 1970). Do so with microsecond precision,
    and without adding any timezone offset.
    """
    return calendar.timegm(datetime.timetuple())*1e3 + datetime.microsecond/1e3


def logpath2dt(filepath):
    """
    given a dataflashlog in the format produced by Mission Planner,
    return a datetime which says when the file was downloaded from the APM
    """
    return datetime.datetime.strptime(re.match(r'.*/(.*) .*$', filepath).groups()[0], '%Y-%m-%d %H-%M')


class UTC(datetime.tzinfo):
    """
    No timezones are provided in python stdlib (gaargh) so we have to make one here
    """
    def utcoffset(self, dt):
        return datetime.timedelta(0)

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return datetime.timedelta(0)


utc = UTC()


def cross(series, cross=0, direction='cross'):
    """
    From http://stackoverflow.com/questions/10475488/calculating-crossing-intercept-points-of-a-series-or-dataframe
    Given a Series returns all the index values where the data values equal
    the 'cross' value.

    Direction can be 'rising' (for rising edge), 'falling' (for only falling
    edge), or 'cross' for both edges
    """
    # Find if values are above or below yvalue crossing:
    above = series.values > cross
    below = numpy.logical_not(above)
    left_shifted_above = above[1:]
    left_shifted_below = below[1:]
    x_crossings = []

    # Find indexes on left side of crossing point
    if direction == 'rising':
        idxs = (left_shifted_above & below[0:-1]).nonzero()[0]
    elif direction == 'falling':
        idxs = (left_shifted_below & above[0:-1]).nonzero()[0]
    else:
        rising = left_shifted_above & below[0:-1]
        falling = left_shifted_below & above[0:-1]
        idxs = (rising | falling).nonzero()[0]

    # Calculate x crossings with interpolation using formula for a line:
    x1 = series.index.values[idxs]
    x2 = series.index.values[idxs + 1]
    y1 = series.values[idxs]
    y2 = series.values[idxs + 1]
    x_crossings = (cross - y1) * (x2 - x1) / (y2 - y1) + x1

    return x_crossings
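
# Sketch of intended use (assumes a pandas Series, which supplies the .values
# and .index attributes used above):
#   s = pandas.Series([-1.0, 1.0, 0.5, -0.5])
#   cross(s)  # -> interpolated index positions where s crosses 0 (0.5, 2.5)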
| apache-2.0 | Python |
|
229d5b93d6e5474dfcd125536c7744f6a7ec86d0 | Create blender tool | hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR,hlange/LogSoCR | waflib/Tools/blender.py | waflib/Tools/blender.py | #!/usr/bin/env python
# encoding: utf-8
# Michal Proszek, 2014 (poxip)
"""
Detect the version of Blender, path
and install the extension:
    def options(opt):
        opt.load('blender')

    def configure(cnf):
        cnf.load('blender')

    def build(bld):
        bld(name='io_mesh_raw',
            feature='blender',
            files=['file1.py', 'file2.py']
            )
If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
Use ./waf configure --system to set the installation directory to system path
"""
import os
import re
from sys import platform as _platform
from getpass import getuser
from waflib import Utils
from waflib.TaskGen import feature
from waflib.Configure import conf


def options(opt):
    opt.add_option(
        '-s', '--system',
        dest='directory_system',
        default=False,
        action='store_true',
        help='determines installation directory (default: user)'
    )


@conf
def find_blender(ctx):
    '''Return version number of blender, if not exist return None'''
    blender = ctx.find_program('blender')
    try:
        output = ctx.cmd_and_log(blender + ['--version'])
        m = re.search(r'Blender\s*((\d+(\.|))*)', output)
        blender_version = m.group(1)
    except Exception:
        ctx.fatal('Could not retrieve blender version')
    ctx.env['BLENDER_VERSION'] = blender_version
    return blender


@conf
def configure_paths(ctx):
    """Setup blender paths"""
    # Get the username
    user = getuser()
    # Default: Linux
    config_path = {
        'user': '/home/%s/.config/blender/' % user,
        'system': '/usr/share/blender/'
    }
    if _platform == 'darwin':
        # MAC OS X
        config_path['user'] = \
            '/Users/%s/Library/Application Support/Blender/' % user
        config_path['system'] = '/Library/Application Support/Blender/'
    elif _platform == 'win32':
        # Windows
        appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
        homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')

        config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
        config_path['system'] = \
            '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive

    blender_version = ctx.env['BLENDER_VERSION']

    config_path['user'] += blender_version + '/'
    config_path['system'] += blender_version + '/'

    ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
    if ctx.options.directory_system:
        ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']

    ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
        ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
    )
    Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])


def configure(ctx):
    ctx.find_blender()
    ctx.configure_paths()


@feature('blender_list')
def blender(self):
    # Two ways to install a blender extension: as a module or just .py files
    dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
    Utils.check_dir(dest_dir)
    self.bld.install_files(
        dest_dir,
        getattr(self, 'files', '.')
    )
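
# Hypothetical wscript usage (parameter names mirror the module docstring;
# note the task generator above is registered for 'blender_list', not
# 'blender'):
#   def build(bld):
#       bld(name='io_mesh_raw', feature='blender_list',
#           files=['file1.py', 'file2.py'])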
| agpl-3.0 | Python |
|
f68b0bb1e1f10b10e58057f60e17377f027690f8 | add a util function for ungzip. | kilfu0701/Wedding-QRcode-Web,kilfu0701/Wedding-QRcode-Web,kilfu0701/Wedding-QRcode-Web,kilfu0701/Wedding-QRcode-Web | web/my_util/compress.py | web/my_util/compress.py | import gzip
from StringIO import StringIO


def ungzip(resp):
    if resp.info().get('Content-Encoding') == 'gzip':
        buf = StringIO(resp.read())
        f = gzip.GzipFile(fileobj=buf)
        data = f.read()
        return data
    else:
        return resp.read()
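
# Illustrative use (resp.info() suggests a urllib2-style response object):
#   req = urllib2.Request(url, headers={'Accept-Encoding': 'gzip'})
#   html = ungzip(urllib2.urlopen(req))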
| mit | Python |
|
eab925debf62d4ba180b1a114841b7a4f0fe8e76 | add basic command line interface | BenDoan/domjudge-utils | basicInterface.py | basicInterface.py | import sys
import requests
import random
passchars = map(chr,range(ord('a'),ord('z')+1) + range(ord('A'),ord('Z')+1) + range(ord('0'),ord('9')+1) )


class User():
    def __init__(self, name, username, bracket):
        self.name = name
        self.username = username
        self.bracket = bracket
        self.password = self.gen_password(8)

    def gen_password(self, length):
        return "".join([random.choice(passchars) for i in range(length)])

    def to_csv(self):
        return "{},{},{},{}\n".format(self.name, self.username, self.bracket, self.password)

    def to_printable(self):
        return self.to_readable(32)

    def to_readable(self, width):
        return (
"""{: ^{width}}
{: ^{width}}
{: ^{width}}""").format(
            "{} <{}>".format(self.name, self.bracket),
            "username: {}".format(self.username),
            "password: {}".format(self.password), width=width)

    def to_comm(self, token):
        return {"name": self.name, "username": self.username, "password": self.password, "token": token, "bracket": self.bracket}

    def to_verify(self):
        return (
"""NAME : {}
USERNAME : {}
BRACKET : {}""").format(self.name, self.username, self.bracket)


def read_name(userin, userout):
    userout.write("WHAT... is your name? ")
    name = userin.readline().strip()
    if name == "":
        userout.write("HEY!, you must have some sort of name.\n")
        return read_name(userin, userout)
    return name


def read_username(userin, userout):
    userout.write("WHAT... is your preferred username? ")
    username = userin.readline().strip()
    if username == "":
        userout.write("Nothing is an unacceptable username\n")
        return read_username(userin, userout)
    return username


def read_bracket(userin, userout):
    userout.write("WHAT... is your bracket? [1400/1620/open] ")
    bracket = userin.readline().strip().lower()
    if bracket not in ["1400", "1620", "open"]:
        userout.write("Your bracket must be one of 1400, 1620, or open\n")
        return read_bracket(userin, userout)
    return bracket


def verify(user, userin, userout, first=True):
    if first:
        userout.write("\n{}\ndoes this look correct? [y/N] ".format(user.to_verify()))
    else:
        userout.write("\n{}\nis everything correct now? [y/N] ".format(user.to_verify()))
    if userin.readline().strip().lower().startswith("y"):
        return user

    thingmap = {"The name": ("name", read_name),
                "The username": ("username", read_username),
                "The bracket": ("bracket", read_bracket)}
    thinglist = [x for x in thingmap]
    for x in range(len(thinglist)):
        userout.write("{}) {}\n".format(x + 1, thinglist[x]))
    val = len(thinglist) + 1
    userout.write("{}) Nevermind, nothing was wrong.\n".format(val))

    num = numchoose(val, userin, userout)
    if num == val:
        userout.write("Okay.\n")
        return user
    tup = thingmap[thinglist[num - 1]]
    user.__dict__[tup[0]] = tup[1](userin, userout)
    return verify(user, userin, userout, False)


def numchoose(maxnum, userin, userout):
    userout.write("choose the number of what was incorrect: ")
    inval = 0
    try:
        inval = int(userin.readline().strip())
    except ValueError:
        userout.write("hey, that was not an integer!\n")
        return numchoose(maxnum, userin, userout)
    if inval > maxnum or inval < 1:
        userout.write("that was not a valid choice\n")
        return numchoose(maxnum, userin, userout)
    return inval


def finalize(user, userin, userout, url, token):
    user = verify(user, userin, userout)
    result = requests.post(url, data=user.to_comm(token))
    if result.text != "Success":
        if "Duplicate" in result.text:
            userout.write("someone already has that username, please choose a different one.\n")
            user.username = read_username(userin, userout)
        else:
            userout.write("the server did not like your data, here is what it said:\n{}".format(result.text))
        return finalize(user, userin, userout, url, token)
    return user
def interface(userin,userout,printout,logout,url,token):
while True :
userout.write("STOP! who would enter the contest must answer me these questions three, ere contest site he see.\n")
name = read_name(userin,userout)
username = read_username(userin,userout)
bracket = read_bracket(userin,userout)
user = User(name,username,bracket)
user = finalize(user,userin,userout,url,token)
printout.write(user.to_printable())
printout.write("\n\n\n\n")
        logout.write(user.to_csv())
        logout.flush()  # keep the log current between registrations
interface(sys.stdin,sys.stdout,sys.stdout,open("interface.log","a"),"http://bdo.pw:5000/user/add","acmsecret")
| unlicense | Python |
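A quick usage sketch for the registration code above; the User(name, username, bracket) signature matches the call in verify(), and the password field is random per instance:
u = User("Arthur", "king_arthur", "open")
print(u.to_printable())    # 32-column card: "Arthur <open>", username, password
print(u.to_csv().strip())  # e.g. Arthur,king_arthur,open,<8 random characters>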
|
f66b799a22f2c74b88f867266c2e51eda1377b1c | Create find_the_mine.py | Kunalpod/codewars,Kunalpod/codewars | find_the_mine.py | find_the_mine.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Find the Mine!
#Problem level: 6 kyu
def mineLocation(field):
for i in range(len(field)):
        for j in range(len(field[i])):
if field[i][j]==1: return [i,j]
| mit | Python |
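For instance, with a 3x3 field and the mine at row 1, column 1:
field = [[0, 0, 0],
         [0, 1, 0],
         [0, 0, 0]]
print(mineLocation(field))  # -> [1, 1]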
|
febc735e79f3cc1b5f2e5fe2882bf28c458f638a | Initialize init file | tranlyvu/find-link,tranlyvu/findLink | wikilink/db/__init__.py | wikilink/db/__init__.py | """
wikilink
~~~~~~~~
	wiki-link is a web-scraping application that finds the minimum number
	of links between two given wiki pages.
:copyright: (c) 2016 - 2018 by Tran Ly VU. All Rights Reserved.
:license: Apache License 2.0.
"""
__all__ = ["db", "base", "page", "link"]
__author__ = "Tran Ly Vu ([email protected])"
__version__ = "1.2.0"
__copyright__ = "Copyright (c) 2016 - 2019 Tran Ly Vu. All Rights Reserved."
__license__ = "Apache License 2.0"
| apache-2.0 | Python |
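The metadata above is importable as usual; a minimal sanity check:
import wikilink.db
print(wikilink.db.__version__)  # '1.2.0'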
|
3fdb673977de57e5555eafb18e36544f3ea8c056 | Solve the absurd problem with an absurd file | stefanv/selective-inference,selective-inference/selective-inference,selective-inference/selective-inference,stefanv/selective-inference,selective-inference/selective-inference,stefanv/selective-inference,stefanv/selective-inference,selective-inference/selective-inference | selection/absurd.py | selection/absurd.py | import kmeans
import numpy as np
kmeans = reload(kmeans)
n_sample = 100
p_array = []
for i in range(n_sample):
if i%10 == 0:
print i, " / ", n_sample
kmeans = reload(kmeans)
p = kmeans.f(10)
p_array.append(p)
import matplotlib.pyplot as plt
p_array = sorted(p_array)
x = np.arange(0, 1, 1./len(p_array))
plt.plot(x, p_array, 'ro')
plt.show()
| bsd-3-clause | Python |
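If kmeans.f returns valid p-values, the sorted sample should hug the diagonal of that plot; a numeric companion check (assumes scipy is available):
from scipy import stats
D, pval = stats.kstest(p_array, 'uniform')
print D, pval  # small D / large pval means the p-values look uniform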
|
c2bce27530f9997bffcb04f80a8d78db65ff98b2 | Create GPS.py | mecax/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,mecax/pyrobotlab | home/kmcgerald/GPS.py | home/kmcgerald/GPS.py | from time import sleep
# The geofence and measure distance methods should be available in MRL > 1.0.86
gps1 = Runtime.start("gps1", "GPS")
gps1.connect("/dev/tty.palmOneGPS-GPSSerialOut")
sleep(1)
# define some points ...
# Lets use Nova Labs 1.0
lat1 = 38.950829
lon1 = -77.339502
# and Nova Labs 2.0
lat2 = 38.954471
lon2 = -77.338271
# and the nearest Metro station
lat3 = 38.947254
lon3 = -77.337844
# and the Sand Trap out back
lat4 = 38.954844
lon4 = -77.338797
def input():
startingAngle = 0
Latitude = msg_gps1_publishGGAData.data[0][2]
Longitude = msg_gps1_publishGGAData.data[0][4]
altitude = msg_gps1_publishGGAData.data[0][9]
print "Lat: " + Latitude
print "Long: " + Longitude
print "Alt: " + altitude + "\n"
# have python listening to the GPS
gps1.addListener("publishGGAData", python.name, "input")
print "Ready to receive Data from GPS..."
print "Let's put a 100 meter GeoFence around around Nova Labs 2.0"
# create a point based geofence with a 100m radius
geofence = gps1.setPointGeoFence(lat2, lon2, 100)
distance = gps1.calculateDistance(lon1, lat1, lon2, lat2)
# check if a GPS point is inside the fence
if (gps1.checkInside(geofence, lat1, lon1)):
print "Inside the Fence"
else:
print "Outside the Fence"
print "Distance (meters): ",distance," between Nova Labs 1.0 and Nova Labs 2.0\n"
distance = gps1.calculateDistance(lon2, lat2, lon3, lat3)
# check if a GPS point is inside the fence
if (gps1.checkInside(geofence, lat3, lon3)):
print "Inside the Fence"
else:
print "Outside the Fence"
print "Distance (meters): ",distance, " between NL 2 and the nearest Metro Station\n"
distance = gps1.calculateDistance(lon2, lat2, lon4, lat4)
# check if a GPS point is inside the fence
if (gps1.checkInside(geofence, lat4, lon4)):
print "Inside the Fence"
else:
print "Outside the Fence"
print "Distance (meters): ",distance, "between NL 2 and the nearest sand trap\n"
| apache-2.0 | Python |
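A standalone haversine sketch to cross-check calculateDistance, assuming a spherical Earth of radius 6371 km (independent of the MRL service API used above):
import math
def haversine_m(lat_a, lon_a, lat_b, lon_b):
    # great-circle distance in meters
    phi_a, phi_b = math.radians(lat_a), math.radians(lat_b)
    dphi = math.radians(lat_b - lat_a)
    dlam = math.radians(lon_b - lon_a)
    h = math.sin(dphi/2)**2 + math.cos(phi_a)*math.cos(phi_b)*math.sin(dlam/2)**2
    return 2 * 6371000 * math.asin(math.sqrt(h))
print haversine_m(lat1, lon1, lat2, lon2)  # should land near the first distance printed above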
|
ddc80392b17a3fadcbea09f82ea5f6936f0fd459 | add fbcode_builder_config for mvfst build in oss | ReactiveSocket/reactivesocket-cpp,phoad/rsocket-cpp,phoad/rsocket-cpp,phoad/rsocket-cpp,rsocket/rsocket-cpp,phoad/rsocket-cpp,ReactiveSocket/reactivesocket-cpp,ReactiveSocket/reactivesocket-cpp,phoad/rsocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp,rsocket/rsocket-cpp | build/fbcode_builder/specs/mvfst.py | build/fbcode_builder/specs/mvfst.py | #!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
def fbcode_builder_spec(builder):
return {
'depends_on': [folly, fizz],
'steps': [
builder.fb_github_cmake_install(
'mvfst/build',
github_org='facebookincubator',
),
],
}
| unknown | Python |
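A downstream project would depend on mvfst the same way this spec depends on folly and fizz; a hypothetical sketch (the project name is a placeholder):
import specs.mvfst as mvfst

def fbcode_builder_spec(builder):
    return {
        'depends_on': [mvfst],
        'steps': [
            # e.g. builder.fb_github_cmake_install('myproject/build')
        ],
    }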
|
84098985420d56d9db375531afb5083e7c7f0d08 | Add an example using pygame (pygameLiveView.py). Connects to camera and shows LiveView images via X or console (fbcon/svgalib/etc). | Bloodevil/sony_camera_api,mungewell/sony_camera_api | src/example/pygameLiveView.py | src/example/pygameLiveView.py | 
from pysony import SonyAPI, common_header, payload_header
import argparse
import binascii
import io
import pygame
import os
# Global Variables
options = None
incoming_image = None
frame_sequence = None
frame_info = None
frame_data = None
done = False
parser = argparse.ArgumentParser(prog="pygameLiveView")
# General Options
parser.set_defaults(debug=None, file=None, width=None, height=None)
parser.add_argument("-l", "--large", action="store_true", dest="large", help="Use HighRes liveview (if available)" )
parser.add_argument("-i", "--info", action="store_true", dest="info", help="Enable LiveFrameInfo (if available)" )
parser.add_argument("-z", "--zoom", action="store_true", dest="zoom", help="Zoom image to fill screen" )
options = parser.parse_args()
# Connect and set-up camera
camera = SonyAPI()
#camera = SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
# Check if we need to do 'startRecMode'
mode = camera.getAvailableApiList()
# Need a better method to check for the presence of a camera
if type(mode) != dict:
print "No camera found, aborting"
quit()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
if 'setLiveviewFrameInfo' in (mode['result'])[0]:
if options.info:
camera.setLiveviewFrameInfo([{"frameInfo": True}])
else:
camera.setLiveviewFrameInfo([{"frameInfo": False}])
if 'getAvailableLiveviewSize' in (mode['result'])[0]:
if options.large and len((camera.getAvailableLiveviewSize()['result'])[0]) > 1:
incoming = camera.liveview(["L"])
else:
incoming = camera.liveview()
else:
incoming = camera.liveview()
# Use PyGame to display images full screen
disp_no = os.getenv("DISPLAY")
found = False
if disp_no:
pygame.display.init()
found = True
else:
drivers = ['directfb', 'fbcon', 'svgalib', 'dga', 'ggi', 'vgl', 'aalib']
for driver in drivers:
if not os.getenv('SDL_VIDEODRIVER'):
os.putenv('SDL_VIDEODRIVER', driver)
try:
pygame.display.init()
except pygame.error:
print 'Driver: {0} failed.'.format(driver)
continue
found = True
break
if not found:
raise Exception('No suitable video driver found!')
infoObject = pygame.display.Info()
screen = pygame.display.set_mode((infoObject.current_w, infoObject.current_h))
# Loop forever, or until user quits or presses a key
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.KEYDOWN:
done = True
# read next image
data = incoming.read(8)
common = common_header(data)
data = incoming.read(128)
if common['payload_type']==1:
payload = payload_header(data)
image_file = io.BytesIO(incoming.read(payload['jpeg_data_size']))
incoming_image = pygame.image.load(image_file)
if options.zoom:
incoming_image = pygame.transform.scale(incoming_image, \
(infoObject.current_w, infoObject.current_h))
incoming.read(payload['padding_size'])
elif common['payload_type']==2:
frame_info = payload_header(data, 2)
if frame_info['jpeg_data_size']:
frame_sequence = common['sequence_number']
frame_data = incoming.read(frame_info['jpeg_data_size'])
incoming.read(frame_info['padding_size'])
# copy image to the display
if incoming_image:
screen.fill((0,0,0))
screen.blit(incoming_image,(0,0))
        if frame_info and frame_sequence >= common['sequence_number']-1 \
            and frame_info['jpeg_data_size']:
(left, top, width, height) = incoming_image.get_rect()
left = int(binascii.hexlify(frame_data[0:2]), 16) * width / 10000
top = int(binascii.hexlify(frame_data[2:4]), 16) * height / 10000
right = int(binascii.hexlify(frame_data[4:6]), 16) * width / 10000
bottom = int(binascii.hexlify(frame_data[6:8]), 16) * height / 10000
pygame.draw.lines(screen, 0xffffff, True, \
[(left, top), (right, top), (right, bottom), (left, bottom)], 2)
pygame.display.flip()
| mit | Python |
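For reference, a minimal stand-in for pysony's common_header, following Sony's liveview framing (1-byte start marker, 1-byte payload type, 2-byte sequence number, 4-byte timestamp, big-endian); the dict keys mirror the fields used above:
import struct
def common_header_sketch(data):
    start_byte, payload_type, sequence, timestamp = struct.unpack('!BBHI', data)
    return {'payload_type': payload_type,
            'sequence_number': sequence,
            'time_stamp': timestamp}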
|
20da50a3c6cee33caf2205562d0d05be6c6721fb | Create enforce_posting_limits.py | kprdev/reddit-mod-posting-limits | enforce_posting_limits.py | enforce_posting_limits.py | #!/usr/bin/python
import sys
import time
import logging
import praw
def main():
# SET THESE - reddit application configuration
user_agent = ''
client_id = ''
client_secret = ''
username = ''
password = ''
# SET THESE - Customize these for your subreddit.
subreddit_name = ''
post_limit_count = 2
post_limit_hours = 4
# Adjustable, but you shouldn't have to touch these.
max_new_submissions = 25
loop_delay = 119 # seconds
logging.basicConfig(
format='%(asctime)s %(levelname)s %(message)s',
level=logging.INFO
)
logging.info('Watching subreddit: %s', subreddit_name)
logging.info('Current limit set to %d posts in %d hours',
post_limit_count, post_limit_hours)
reddit = praw.Reddit(user_agent=user_agent,
client_id=client_id,
client_secret=client_secret,
username=username,
password=password)
subreddit = reddit.subreddit(subreddit_name)
# Initial search range will start 10m ago.
last_new_post_time = time.time() - (60*10)
# The loop
running = True
while running:
submissions = subreddit.new(limit=max_new_submissions)
new_submissions = []
for submission in submissions:
# New submissions will come in newest first.
# Save the ones newer than last_new_post_time.
if submission.created_utc > last_new_post_time:
new_submissions.append(submission)
logging.debug("New submission count is %d", len(new_submissions))
if len(new_submissions) > 0:
new_submissions.reverse()
# Now they should be oldest first.
for submission in new_submissions:
stamp = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
time.gmtime(submission.created_utc))
logging.info('New post "%s" by "%s" at %s',
submission.title, submission.author.name, stamp)
check_user_submissions(subreddit, submission, post_limit_hours,
post_limit_count)
last_new_post_time = submission.created_utc
time.sleep(loop_delay)
def check_user_submissions(subreddit, submission, limit_hours, limit_posts):
start_time = submission.created_utc - (limit_hours * 60 * 60)
# Exclude the current post from the range check since reddit sometimes
# doesn't include it (cache?). We will add it in manually later.
stop_time = submission.created_utc - 1
username = submission.author.name
params = "author:'" + username + "'"
user_submissions = list(subreddit.submissions(start_time, stop_time, params))
# Count includes the post excluded earlier
count = len(user_submissions) + 1
logging.info('User "%s" post count is %d in the last %d hours.',
username, count, limit_hours)
if count > limit_posts:
logging.info('Removing the post')
try:
subreddit.mod.remove(submission)
except Exception as e:
# If the login user isn't permitted to remove posts, don't stop
print (e)
else:
msg_link = "/message/compose/?to=/"+subreddit._path
reply_text = (
"Your submission was automatically removed because you have "
"exceeded **{}** submissions within the last **{}** hours.\n\n"
"*I am a bot, and this action was performed automatically. "
"Please [contact the moderators of this subreddit]"
"("+msg_link+") if you have questions or "
"concerns.*").format(limit_posts, limit_hours)
submission.reply(reply_text)
if __name__ == '__main__':
main()
| mit | Python |
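The rate-limit rule from check_user_submissions, isolated as a pure function (timestamps in epoch seconds; the +1 re-adds the triggering post, as in the code above):
def violates_limit(prior_post_times, new_post_time, limit_posts=2, limit_hours=4):
    window_start = new_post_time - limit_hours * 60 * 60
    recent = [t for t in prior_post_times if window_start <= t <= new_post_time - 1]
    return len(recent) + 1 > limit_posts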
|
d8ff61b72c07a9f0b22e5cbaefe6277bf2697afc | Create project.py | gwsilva/project-surgery | project_surgery/project.py | project_surgery/project.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Gideoni Silva (Omnes)
# Copyright 2013-2014 Omnes Tecnologia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, osv, fields
class project(orm.Model):
_inherit = "project.project"
_columns = {
'doctor_id': fields.many2one(
'res.partner', 'Doctor',
domain = "[('is_company','=',False)]",
required=True,change_default=True, select=True, track_visibility='always'
),
'patient_id': fields.many2one(
'res.partner', 'Patient',
domain = "[('is_company','=',False)]",
required=True,change_default=True, select=True, track_visibility='always'
),
'hospital_id': fields.many2one(
'res.partner', 'Hospital',
domain = "[('is_company','=',True)]",
required=True,change_default=True, select=True, track_visibility='always'),
'box_ids': fields.many2many(
'stock.tracking','project_stock_track_rel','project_id','stock_tracking_id',
            string='Used Surgical Boxes',
            help="Select the Surgical Boxes for the Surgery"
)
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
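A hedged sketch of creating a surgery project through the OpenERP 7 ORM (cr, uid, context and the *_id/box_ids variables are assumed to come from the caller; (6, 0, ids) is the standard many2many replace command):
project_obj = self.pool.get('project.project')
project_id = project_obj.create(cr, uid, {
    'name': 'Knee arthroscopy',
    'doctor_id': doctor_id,
    'patient_id': patient_id,
    'hospital_id': hospital_id,
    'box_ids': [(6, 0, box_ids)],
}, context=context)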
|
afb62cebced6bcbbbde2576be2d9b9d4b9ad3964 | add chisquare test comparing random sample with cdf (first try of commit) | scipy/scipy-svn,jasonmccampbell/scipy-refactor,scipy/scipy-svn,scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt | scipy/stats/tests/test_discrete_chisquare.py | scipy/stats/tests/test_discrete_chisquare.py |
import numpy as np
from scipy import stats
debug = False
def check_discrete_chisquare(distname, arg, alpha = 0.01):
'''perform chisquare test for random sample of a discrete distribution
Parameters
----------
distname : string
name of distribution function
arg : sequence
parameters of distribution
alpha : float
significance level, threshold for p-value
Returns
-------
result : bool
0 if test passes, 1 if test fails
uses global variable debug for printing results
'''
# define parameters for test
n=50000
nsupp = 20
wsupp = 1.0/nsupp
distfn = getattr(stats, distname)
rvs = distfn.rvs(size=n,*arg)
# construct intervals with minimum mass 1/nsupp
# intervalls are left-half-open as in a cdf difference
distsupport = xrange(max(distfn.a, -1000), min(distfn.b, 1000) + 1)
last = 0
distsupp = [max(distfn.a, -1000)]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii,*arg)
if current - last >= wsupp-1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1-wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1-last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp+1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
freq,hsupp = np.histogram(rvs,histsupp,new=True)
cdfs = distfn.cdf(distsupp,*arg)
(chis,pval) = stats.chisquare(np.array(freq),n*distmass)
# print and return results
if debug:
print 'chis,pval:', chis, pval
print 'len(distsupp), len(distmass), len(hsupp), len(freq)'
print len(distsupp), len(distmass), len(hsupp), len(freq)
print 'distsupp', distsupp
print 'distmass', n*np.array(distmass)
print 'freq', freq
print 'itemfreq', stats.itemfreq(rvs)
print 'n*pmf', n*distfn.pmf(list(distsupport)[:10],*arg)
assert (pval > alpha), 'chisquare - test for %s' \
        ' at arg = %s' % (distname,str(arg))
def test_discrete_rvs_cdf():
distdiscrete = [
['bernoulli',(0.3,)],
['binom', (5, 0.4)],
['boltzmann',(1.4, 19)],
['dlaplace', (0.8,)],
['geom', (0.5,)],
['hypergeom',(30, 12, 6)],
['logser', (0.6,)],
['nbinom', (5, 0.5)],
['planck', (4.1,)],
['poisson', (0.6,)],
['randint', (7, 31)],
['zipf', (2,)] ]
for distname, arg in distdiscrete:
if debug:
print distname
yield check_discrete_chisquare, distname, arg
if __name__ == '__main__':
import nose
nose.run(argv=['', __file__])
| bsd-3-clause | Python |
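The checker can also be exercised on a single distribution; flip the module-level debug flag for verbose output:
check_discrete_chisquare('poisson', (0.6,), alpha=0.01)  # raises AssertionError on failure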
|
e87982d03edeb7c16d3c183309adfff4be50d168 | Add Qt4 file to start on creating a Qt-based GUI | CodingAnarchy/Amon | gui/qt.py | gui/qt.py | from lib.version import AMON_VERSION
from lib.keybase import KeybaseUser
from lib.gmail import GmailUser
from lib.addresses import AddressBook
import lib.gpg as gpg
import sys
import logging
import json
from PyQt4 import QtGui
class Amon(QtGui.QMainWindow):
def __init__(self):
super(Amon, self).__init__()
self.keybase_user = KeybaseUser()
self.gmail = GmailUser()
self.address_book = AddressBook()
| unlicense | Python |
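The stub above still needs an application entry point; a typical PyQt4 sketch (the window title is an assumption):
if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    window = Amon()
    window.setWindowTitle('Amon {}'.format(AMON_VERSION))
    window.show()
    sys.exit(app.exec_())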
|
3972c4a16894732db418a2d04f36b5104e0fac86 | add rms code in own namespace | bartscheers/tkp,transientskp/tkp,mkuiack/tkp,bartscheers/tkp,transientskp/tkp,mkuiack/tkp | tkp/quality/rms.py | tkp/quality/rms.py | from tkp.utility import nice_format
def rms_invalid(rms, noise, low_bound=1, high_bound=50):
"""
    Is the RMS value of an image outside the plausible range?
:param rms: RMS value of an image, can be computed with
tkp.quality.statistics.rms
:param noise: Theoretical noise level of instrument, can be calculated with
tkp.lofar.noise.noise_level
:param low_bound: multiplied with noise to define lower threshold
:param high_bound: multiplied with noise to define upper threshold
    :returns: False if the rms is acceptable, otherwise a string describing the problem
"""
if (rms < noise * low_bound) or (rms > noise * high_bound):
ratio = rms / noise
return "rms value (%s) is %s times theoretical noise (%s)" % \
(nice_format(rms), nice_format(ratio), nice_format(noise))
else:
return False | bsd-2-clause | Python |
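Worked example with the defaults (low_bound=1, high_bound=50) and a theoretical noise of 0.01, so anything outside [0.01, 0.5] is flagged; the exact wording depends on nice_format:
print rms_invalid(0.7, 0.01)   # flagged: rms is 70 times the theoretical noise
print rms_invalid(0.05, 0.01)  # False -- within bounds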
|
5009b158f0c47ea885ba5fdcbd76dd1fc2bb6986 | Use a script to post metrics to an ingest endpoint. | shintasmith/blueflood,rackerlabs/blueflood,rackerlabs/blueflood,rackerlabs/blueflood,shintasmith/blueflood,shintasmith/blueflood,shintasmith/blueflood,rackerlabs/blueflood,rackerlabs/blueflood,shintasmith/blueflood,shintasmith/blueflood,rackerlabs/blueflood | bfclient.py | bfclient.py | #!/usr/bin/env python
import argparse
from os import environ
import datetime
import requests
import time
def get_unix_time(dt):
return int(time.mktime(dt.timetuple()))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
BF_URL = environ.get('BF_URL', None)
BF_TOKEN = environ.get('BF_TOKEN', None)
parser.add_argument('--debug', action='store_true',
help='Display additional info.')
parser.add_argument('--url', type=str, action='store', default=BF_URL,
help='The endpoint to send HTTP requests to.')
parser.add_argument('--token', type=str, action='store', default=BF_TOKEN,
help='The authentication token of the account making '
'the request')
    subs = parser.add_subparsers(help='Available commands.', dest='command')
ingest_sub = subs.add_parser('ingest', help='Send metrics to blueflood.')
ingest_sub.add_argument('tenant')
ingest_sub.add_argument(metavar='metric-name', dest='metric_name')
ingest_sub.add_argument('unit',
choices=('minutes', 'hours', 'days', 'months',
'years', 'decades'))
ingest_sub.add_argument('value', type=int)
ingest_sub.add_argument('--ttl-seconds', type=int, default=172800)
    ingest_sub.add_argument('--collection-time',
                            help='Collection time as YYYY-MM-DDTHH:MM:SS; '
                                 'defaults to now.')
args = parser.parse_args()
print('args: {}'.format(args))
if args.command == 'ingest':
base_url = args.url
if not base_url:
print('Error: No url specified.')
exit(1)
tenant = args.tenant
metric_name = args.metric_name
unit = args.unit
value = args.value
ttl_seconds = args.ttl_seconds
collection_time = args.collection_time
        if collection_time is None:
            collection_time = datetime.datetime.now()
        else:
            # parse the CLI string into a datetime (format assumed ISO-like)
            collection_time = datetime.datetime.strptime(
                collection_time, '%Y-%m-%dT%H:%M:%S')
url = '{}/v2.0/{}/ingest/multi'.format(base_url, tenant)
payload = [{
'tenantId': str(tenant),
'metricName': metric_name,
'unit': unit,
'metricValue': value,
'ttlInSeconds': ttl_seconds,
'collectionTime': get_unix_time(collection_time) * 1000
}]
request = requests.Request('POST', url, json=payload)
if args.token:
request.headers['X-Auth-Token'] = args.token
preq = request.prepare()
if args.debug:
print('Sending:')
print(' {} {}'.format(preq.method, preq.path_url))
for name, value in preq.headers.iteritems():
print(' {}: {}'.format(name, value))
if preq.body:
print('')
print(' {}'.format(preq.body))
print('')
session = requests.session()
response = session.send(preq)
if args.debug:
print('')
print('Received:')
print(' {} {}'.format(response.status_code, response.reason))
for name, value in response.headers.iteritems():
print(' {}: {}'.format(name, value))
print('')
if response.text:
print(' {}'.format(response.text))
success = 200 <= response.status_code < 300
print(response.text)
exit(0 if success else 1)
# print(payload_dict)
# exit(0)
else:
print('Unknown command "{}"'.format(args.command))
| apache-2.0 | Python |
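An example invocation with placeholder values (the v2.0/<tenant>/ingest/multi path comes from the code above):
# $ export BF_URL=http://localhost:19000
# $ export BF_TOKEN=example-token
# $ ./bfclient.py ingest tenant-1 cpu.load minutes 42 --ttl-seconds 3600
# POSTs one metric to $BF_URL/v2.0/tenant-1/ingest/multi and exits 0 on HTTP 2xx.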