commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
a93c281e126f41d9ac388ec2dafd829eed2ea6b1
|
add coverage flags
|
miroi/fortran_input_reader
|
test.py
|
test.py
|
import os
import sys
import subprocess
import shlex
import shutil
import sys
import time
import datetime
HERE = os.path.abspath(os.path.dirname(__file__))
# ------------------------------------------------------------------------------
def exe(command):
"""
Executes command and returns string representations of stdout and stderr captured from the console.
When universal_newlines=True stdout and stderr are opened in text mode.
Otherwise, they are opened in binary mode. In that case captured stdout and stderr
are not strings and Python 3 throws type error when compared against strings later in tests.
Note:
This feature is only available if Python is built with universal newline support (the default).
Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the
communicate() method.
See https://docs.python.org/2/library/subprocess.html
"""
stdout, stderr = subprocess.Popen(shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True).communicate()
if stderr:
sys.stderr.write(stderr)
return stdout, stderr
# ------------------------------------------------------------------------------
def configure_build_and_exe(name, setup_command):
stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
os.chdir(os.path.join(HERE, name))
if sys.platform == 'win32':
setup_command += ' --generator="MinGW Makefiles"'
setup_command += ' build-%s' % stamp
stdout, stderr = exe(setup_command)
os.chdir(os.path.join(HERE, name, 'build-%s' % stamp))
if sys.platform == 'win32':
stdout, stderr = exe('mingw32-make')
shutil.copy('..\src\example\EXAMPLE.INP', 'bin\EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('example.exe')
else:
stdout, stderr = exe('make')
shutil.copy('../src/example/EXAMPLE.INP', 'bin/EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('./example')
return stdout, stderr
# ------------------------------------------------------------------------------
def test_examples():
stdout, stderr = configure_build_and_exe('', 'python setup.py --fc=gfortran --coverage')
assert 'Fortran Input Reader OK!' in stdout
|
import os
import sys
import subprocess
import shlex
import shutil
import sys
import time
import datetime
HERE = os.path.abspath(os.path.dirname(__file__))
# ------------------------------------------------------------------------------
def exe(command):
"""
Executes command and returns string representations of stdout and stderr captured from the console.
When universal_newlines=True stdout and stderr are opened in text mode.
Otherwise, they are opened in binary mode. In that case captured stdout and stderr
are not strings and Python 3 throws type error when compared against strings later in tests.
Note:
This feature is only available if Python is built with universal newline support (the default).
Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the
communicate() method.
See https://docs.python.org/2/library/subprocess.html
"""
stdout, stderr = subprocess.Popen(shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True).communicate()
if stderr:
sys.stderr.write(stderr)
return stdout, stderr
# ------------------------------------------------------------------------------
def configure_build_and_exe(name, setup_command):
stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
os.chdir(os.path.join(HERE, name))
if sys.platform == 'win32':
setup_command += ' --generator="MinGW Makefiles"'
setup_command += ' build-%s' % stamp
stdout, stderr = exe(setup_command)
os.chdir(os.path.join(HERE, name, 'build-%s' % stamp))
if sys.platform == 'win32':
stdout, stderr = exe('mingw32-make')
shutil.copy('..\src\example\EXAMPLE.INP', 'bin\EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('example.exe')
else:
stdout, stderr = exe('make')
shutil.copy('../src/example/EXAMPLE.INP', 'bin/EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('./example')
return stdout, stderr
# ------------------------------------------------------------------------------
def test_examples():
stdout, stderr = configure_build_and_exe('', 'python setup.py')
assert 'Fortran Input Reader OK!' in stdout
|
mit
|
Python
|
c04d010366009eb49f94960ddbdaedbb5850dd98
|
Fix typo in test.py
|
miedzinski/steamodd,Lagg/steamodd
|
test.py
|
test.py
|
import steam, sys
valid_modes = ["bp", "schema", "assets-catalog"]
try:
testmode = sys.argv[2]
testkey = sys.argv[1]
if testmode not in valid_modes: raise Exception
except:
sys.stderr.write("Run " + sys.argv[0] + " <apikey> " + "<" + ", ".join(valid_modes) + ">\n")
raise SystemExit
steam.set_api_key(testkey)
test_schema = steam.tf2.item_schema(lang = "en")
def print_item_list(items):
for item in items:
print("\n\x1b[1m" + str(item) + "\x1b[0m\n")
for attr in item:
print attr
if testmode == "bp":
test_pack = steam.tf2.backpack("stragglerastic", schema = test_schema)
print_item_list(test_pack)
elif testmode == "schema":
print_item_list(test_schema)
elif testmode == "assets-catalog":
assets = steam.tf2.assets(currency = "usd")
for item in test_schema:
try:
print("\x1b[1m" + str(item) + "\x1b[0m:\t $" + str(assets[item]))
except KeyError:
pass
|
import steam, sys
valid_modes = ["bp", "schema", "assets-catalog"]
try:
testmode = sys.argv[2]
testkey = sys.argv[1]
if testmode not in valid_modes: raise Exception
except:
sys.stderr.write("Run " + sys.argv[0] + " <apikey> " + "<" + ", ".join(valid_modes) + ">\n")
raise SystemExit
steam.set_api_key(testkey)
test_schema = steam.tf2.item_schema(lang = "en")
def print_item_list(items):
for item in items:
print("\n\x1b[1m" + str(item) + "\x1b[0m\n")
for attr in item:
print attr
if testmode == "bp":
test_pack = steam.tf2.backpack("stragglerastic", schema = test_schema)
print_item_list(test_pack)
elif testmode == "schema":
print_item_list(test_chema)
elif testmode == "assets-catalog":
assets = steam.tf2.assets(currency = "usd")
for item in test_schema:
try:
print("\x1b[1m" + str(item) + "\x1b[0m:\t $" + str(assets[item]))
except KeyError:
pass
|
isc
|
Python
|
4893105835a8acf4ee19a96c6fefce45f08ec08f
|
fix some
|
zhyq0826/test-lab,zhyq0826/test-lab
|
test.py
|
test.py
|
from __future__ import print_function
import logging
from logging import StreamHandler
from memory_profiler import profile
logger = logging.getLogger()
logger.addHandler(StreamHandler())
logger.setLevel(logging.DEBUG)
def glow_pyconf_ppt():
import requests
for i in range(1, 24):
s = requests.get('http://boolan.com/Courses/a4e31043-f830-49ff-8488-0801a84dcc0c/slide%s.jpg'%i)
f = open("/home/zhyq/slide%s.jpg"%i,"w")
f.write(s.content)
#@profile
def test_reference_cycle():
import gc
import time
MAX = 10
LIFE = 10
class A(object):
def __init__(self, b=None):
self.b = b
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('abcdefgaaaaaaa')
class B(object):
def __init__(self, a=None):
self.a = a
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('gfedcbaaaaaaaa')
logger.debug('start generate object')
a = A()
b = B()
logger.debug('object exists last %ss'%LIFE)
time.sleep(LIFE)
#a.b = b
#b.a = a
logger.debug('delete a, b')
a.l = None
b.l = None
a = None
b = None
del a
del b
logger.debug('a and b is deleted')
logger.debug('wait %s to collect'%LIFE)
time.sleep(LIFE)
gc.collect()
logger.debug('collect is finished')
time.sleep(LIFE)
def test_inspect():
import inspect
def hello(route, objid, val=10, *args, **kwargs):
import time
time.sleep(100)
print('hello world')
print(inspect.getargspec(hello))
#print(inspect.formatargspec())
print(inspect.getcallargs(hello, 'paper', 'id'))
if __name__ == '__main__':
test_reference_cycle()
|
from __future__ import print_function
import logging
from logging import StreamHandler
from memory_profiler import profile
logger = logging.getLogger()
logger.addHandler(StreamHandler())
logger.setLevel(logging.DEBUG)
def glow_pyconf_ppt():
import requests
for i in range(1, 24):
s = requests.get('http://boolan.com/Courses/a4e31043-f830-49ff-8488-0801a84dcc0c/slide%s.jpg'%i)
f = open("/home/zhyq/slide%s.jpg"%i,"w")
f.write(s.content)
@profile
def test_reference_cycle():
import gc
import time
MAX = 10000
LIFE = 15
class A(object):
def __init__(self, b=None):
self.b = b
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('abcdefgaaaaaaa')
class B(object):
def __init__(self, a=None):
self.a = a
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('gfedcbaaaaaaaa')
logger.debug('start generate object')
a = A()
b = B()
logger.debug('object exists last %ss'%LIFE)
time.sleep(LIFE)
#a.b = b
#b.a = a
logger.debug('delete a, b')
a = None
b = None
logger.debug('a and b is deleted')
time.sleep(LIFE)
if __name__ == '__main__':
test_reference_cycle()
|
mit
|
Python
|
e057b586e2dc43ff367cb1ed6fc5bbb7dbfe514c
|
print flask
|
flenter/django-exploration
|
test.py
|
test.py
|
import flask
print flask
|
import flask
|
mit
|
Python
|
848751ca2906a5e1e8e5ccf3828bf13994b074fe
|
Update test script
|
raviqqe/shakyo
|
test.py
|
test.py
|
import xorcise
try:
console = xorcise.turn_on_console()
console.erase()
line = xorcise.Line()
console.print_line(0, line)
line = xorcise.Line(
xorcise.Character("h", xorcise.ColorAttribute.get_best_match((0, 0, 0))),
xorcise.Character("e", xorcise.ColorAttribute.get_best_match((0, 0, 255))),
xorcise.Character("l", xorcise.ColorAttribute.get_best_match((255, 0, 0))),
xorcise.Character("l", xorcise.ColorAttribute.get_best_match((0, 255, 0))),
xorcise.Character("o", xorcise.ColorAttribute.get_best_match((255, 255, 0))),
xorcise.Character(","),
xorcise.Character("\t", xorcise.ColorAttribute.get_best_match((0, 255, 255))
| xorcise.RenditionAttribute.reverse),
xorcise.Character("w", xorcise.ColorAttribute.get_best_match((255, 0, 255))),
xorcise.Character("o", xorcise.ColorAttribute.get_best_match((255, 255, 255))),
xorcise.Character("r"),
xorcise.Character("l"),
xorcise.Character("d"),
xorcise.Character("!", xorcise.ColorAttribute.get_best_match((255, 255, 255))
| xorcise.RenditionAttribute.underline),
)
console.print_line(1, line)
console.print_line(2, xorcise.Line(
xorcise.Character("\t"),
xorcise.Character(" "),
xorcise.Character("A", xorcise.ColorAttribute.get_best_match((0, 255, 255)))))
console.refresh()
console.get_char()
with open("debug.log", "w") as f:
for char in line:
f.write(char.value)
f.write("\n")
for char in line.normalized:
f.write(char.value)
finally:
xorcise.turn_off_console()
|
import xorcise
try:
console = xorcise.turn_on_console()
console.erase()
line = xorcise.Line()
console.print_line(0, line)
line = xorcise.Line(
xorcise.Character("h", xorcise.ColorAttribute.black),
xorcise.Character("e", xorcise.ColorAttribute.blue),
xorcise.Character("l", xorcise.ColorAttribute.red),
xorcise.Character("l", xorcise.ColorAttribute.green),
xorcise.Character("o", xorcise.ColorAttribute.yellow),
xorcise.Character(","),
xorcise.Character("\t", xorcise.ColorAttribute.cyan
| xorcise.RenditionAttribute.reverse),
xorcise.Character("w", xorcise.ColorAttribute.magenta),
xorcise.Character("o", xorcise.ColorAttribute.white),
xorcise.Character("r"),
xorcise.Character("l", xorcise.ColorAttribute.white),
xorcise.Character("d"),
xorcise.Character("!", xorcise.ColorAttribute.white
| xorcise.RenditionAttribute.underline),
)
console.print_line(1, line)
console.print_line(2, xorcise.Line(
xorcise.Character("\t"),
xorcise.Character(" "),
xorcise.Character("A", xorcise.ColorAttribute.cyan)))
console.refresh()
console.get_char()
with open("debug.log", "w") as f:
for char in line:
f.write(char.value)
f.write("\n")
for char in line.normalized:
f.write(char.value)
finally:
xorcise.turn_off_console()
|
unlicense
|
Python
|
35b2028ed09f64442092bdcb617c80acd1741948
|
Fix for ticket #18
|
RavenB/modoboa,modoboa/modoboa,mehulsbhatt/modoboa,carragom/modoboa,carragom/modoboa,bearstech/modoboa,tonioo/modoboa,mehulsbhatt/modoboa,mehulsbhatt/modoboa,modoboa/modoboa,tonioo/modoboa,bearstech/modoboa,carragom/modoboa,RavenB/modoboa,modoboa/modoboa,bearstech/modoboa,tonioo/modoboa,modoboa/modoboa,bearstech/modoboa,RavenB/modoboa
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
from mailng.extensions import loadextensions, loadmenus
loadextensions()
urlpatterns = patterns('',
# Example:
# (r'^mailng/', include('mailng.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/(.*)', admin.site.root),
(r'^mailng/admin/', include('mailng.admin.urls')),
(r'^mailng/main/', include('mailng.main.urls')),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
(r'^jsi18n/$', 'django.views.i18n.javascript_catalog',
{'packages': ('mailng',),})
)
menus = loadmenus()
if menus != ():
urlpatterns += patterns('', menus)
if settings.DEBUG:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/home/tonio/projets/mailng/static'})
)
|
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
from mailng.extensions import loadextensions, loadmenus
loadextensions()
urlpatterns = patterns('',
# Example:
# (r'^mailng/', include('mailng.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/(.*)', admin.site.root),
(r'^mailng/admin/', include('mailng.admin.urls')),
(r'^mailng/main/', include('mailng.main.urls')),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
(r'^jsi18n/$', 'django.views.i18n.javascript_catalog',
{'packages': ('mailng',),})
)
urlpatterns += patterns('', loadmenus())
if settings.DEBUG:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/home/tonio/projets/mailng/static'})
)
|
isc
|
Python
|
a5357056bda5daf741a5096f88c50dc93bfff1b7
|
fix typo
|
sigurdga/nidarholm,sigurdga/nidarholm,sigurdga/nidarholm
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^forum/', include('nidarholm.forum.urls.debate')),
(r'^news/', include('nidarholm.news.urls.story')),
(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^forum/', include('nidarholm.forum.urls.debate')),
(r'^news/', include('nidarholm.news.urls.story'))
(r'^admin/', include(admin.site.urls)),
)
|
agpl-3.0
|
Python
|
fbba73e772e5055dce81dd2a3f8814011733f882
|
Add ajax_select lookup url.
|
umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator
|
urls.py
|
urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
## Author: Adriano Monteiro Marques <[email protected]>
## Author: Diogo Pinheiro <[email protected]>
##
## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from django.conf.urls.defaults import *
from django.contrib import admin
from ajax_select import urls as ajax_select_urls
admin.autodiscover()
handler500 = 'djangotoolbox.errorviews.server_error'
urlpatterns = patterns('',
('^_ah/warmup$', 'djangoappengine.views.warmup'),
(r'', include('gui.urls')),
(r'', include('geoip.urls')),
(r'^map/$', 'gui.views.map'),
(r'^realtimebox/$', 'gui.views.realtimebox'),
(r'^events/(?P<event_id>\d+)/$', 'gui.views.event'),
(r'^twitter/', include('twitter.urls')),
(r'^accounts/', include('registration.urls')),
(r'^notification/', include('notificationsystem.urls')),
(r'^api/', include('api.urls')),
(r'^decision/', include('decision.urls')),
(r'^agents/', include('agents.urls')),
(r'^ajax/lookups/', include(ajax_select_urls)),
(r'^admin/', include(admin.site.urls)),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
## Author: Adriano Monteiro Marques <[email protected]>
## Author: Diogo Pinheiro <[email protected]>
##
## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
handler500 = 'djangotoolbox.errorviews.server_error'
urlpatterns = patterns('',
('^_ah/warmup$', 'djangoappengine.views.warmup'),
(r'', include('gui.urls')),
(r'', include('geoip.urls')),
(r'^map/$', 'gui.views.map'),
(r'^realtimebox/$', 'gui.views.realtimebox'),
(r'^events/(?P<event_id>\d+)/$', 'gui.views.event'),
(r'^twitter/', include('twitter.urls')),
(r'^accounts/', include('registration.urls')),
(r'^notification/', include('notificationsystem.urls')),
(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls')),
(r'^decision/', include('decision.urls')),
(r'^agents/', include('agents.urls')),
)
|
agpl-3.0
|
Python
|
3ac72f0a9f83988584cee89896eaeb5c6f06d6b0
|
Fix `previous_float` in util.py
|
rstebbing/bspline-regression
|
util.py
|
util.py
|
# util.py
# Imports
import re
# raise_if_not_shape
def raise_if_not_shape(name, A, shape):
"""Raise a `ValueError` if the np.ndarray `A` does not have dimensions
`shape`."""
if A.shape != shape:
raise ValueError('{}.shape != {}'.format(name, shape))
# previous_float
PARSE_FLOAT_RE = re.compile(r'([+-]*)0x1\.([\da-f]{13})p(.*)')
def previous_float(x):
"""Return the next closest float (towards zero)."""
s, f, e = PARSE_FLOAT_RE.match(float(x).hex().lower()).groups()
f, e = int(f, 16), int(e)
if f > 0:
f -= 1
else:
f = int('f' * 13, 16)
e -= 1
return float.fromhex('{}0x1.{:013x}p{:d}'.format(s, f, e))
|
# util.py
# Imports
import re
# raise_if_not_shape
def raise_if_not_shape(name, A, shape):
"""Raise a `ValueError` if the np.ndarray `A` does not have dimensions
`shape`."""
if A.shape != shape:
raise ValueError('{}.shape != {}'.format(name, shape))
# previous_float
PARSE_FLOAT_RE = re.compile(r'([+-]*)0x1\.([\d]{13})p(.*)')
def previous_float(x):
"""Return the next closest float (towards zero)."""
s, f, e = PARSE_FLOAT_RE.match(float(x).hex()).groups()
f, e = int(f, 16), int(e)
if f > 0:
f -= 1
else:
f = int('f' * 13, 16)
e -= 1
return float.fromhex('{}0x1.{:013x}p{:d}'.format(s, f, e))
|
mit
|
Python
|
7e3f28329d887229345fa0e8085ca7e09fe7686e
|
Improve wsgi.py
|
Cinemair/cinemair-server,Cinemair/cinemair-server
|
wsgi.py
|
wsgi.py
|
# -*- coding: utf-8 -*-
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
application = get_wsgi_application()
|
mit
|
Python
|
8947f6f7733593ec2b701aaa0b6fb98d973b7850
|
Add pull to wsgi startup process, and /uptime route to the app
|
chbrown/divvy-history
|
wsgi.py
|
wsgi.py
|
import os
import sys
import time
import datetime
from apscheduler.scheduler import Scheduler
from bottle import Bottle, mako_view
# sys.path is a global for this python thread, so this enables local imports throughout the app
sys.path.insert(0, '.')
from fetch import fetch
from settings import datadir
from sync import sync, get_dir
from logger import logger
logger.debug('Initializing with datadir from github.')
get_dir(datadir)
schedule = Scheduler()
schedule.start()
schedule.add_interval_job(fetch, minutes=1)
schedule.add_interval_job(sync, hours=6)
utc_now = datetime.datetime.utcnow()
logger.debug('Scheduler initialized. UTC=%s', utc_now.isoformat())
application = Bottle()
application_started = time.time()
def linecount(filepath):
lines = -1
with open(filepath, 'rb') as fp:
for lines, _ in enumerate(fp):
pass
return lines
@application.route('/')
@mako_view('index.mako')
def index():
filenames = sorted(os.listdir(datadir))
filepaths = [os.path.join(datadir, filename) for filename in filenames]
files = [dict(name=filepath, lines=linecount(filepath)) for filepath in sorted(filepaths)]
return dict(files=files)
@application.route('/fetch')
def get_fetch():
started = time.time()
fetch()
ended = time.time()
return 'Fetch done. Took %0.3f seconds.' % (ended - started)
@application.route('/sync')
def get_sync():
started = time.time()
sync()
ended = time.time()
return 'Sync done. Took %0.3f seconds.' % (ended - started)
@application.route('/uptime')
def get_uptime():
return 'Uptime: %0.3f seconds.' % (time.time() - application_started)
|
import os
import sys
import time
import datetime
from apscheduler.scheduler import Scheduler
from bottle import Bottle, mako_view
# sys.path is a global for this python thread, so this enables local imports throughout the app
sys.path.insert(0, '.')
from fetch import fetch
from settings import datadir
from sync import sync
from logger import logger
schedule = Scheduler()
schedule.start()
schedule.add_interval_job(fetch, minutes=1)
schedule.add_interval_job(sync, hours=6)
# schedule.add_interval_job(sync, minutes=5)
now = datetime.datetime.utcnow()
logger.debug('Scheduler initialized. UTC=%s', now.isoformat())
application = Bottle()
def linecount(filepath):
lines = -1
with open(filepath, 'rb') as fp:
for lines, _ in enumerate(fp):
pass
return lines
@application.route('/')
@mako_view('index.mako')
def index():
filenames = sorted(os.listdir(datadir))
filepaths = [os.path.join(datadir, filename) for filename in filenames]
files = [dict(name=filepath, lines=linecount(filepath)) for filepath in sorted(filepaths)]
return dict(files=files)
@application.route('/fetch')
def get_fetch():
started = time.time()
fetch()
ended = time.time()
return 'Fetch done. Took %0.2f seconds.' % (ended - started)
@application.route('/sync')
def get_sync():
started = time.time()
sync()
ended = time.time()
return 'Sync done. Took %0.2f seconds.' % (ended - started)
|
mit
|
Python
|
0f570e5a0f33583dbc419be5d6d71ce9c804e131
|
Upgrade comments
|
ryu22e/django_template,ryu22e/django_template
|
wsgi.py
|
wsgi.py
|
"""
WSGI config for {{ project_name }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.production")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
application = Cling(MediaCling(get_wsgi_application()))
|
"""
WSGI config for {{ project_name }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.production")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
application = Cling(MediaCling(get_wsgi_application()))
|
mit
|
Python
|
1e362ab8704c76e3606fae9317dd85eeb06259ea
|
remove superflous block size
|
marshki/pyWipe,marshki/pyWipe
|
zero.py
|
zero.py
|
#!/bin/py
import os
count = 1
def zeroToDrive():
''' write zeros to drive '''
wipes = 1
for int in range(count):
os.system(("dd if=/dev/zero |pv --progress --timer --rate --bytes| dd of=/dev/null bs=4096"))
# os.system(os.system(("dd if=/dev/zero| pv -ptrb | dd of=/dev/null bs=4096"))
wipes+=1
zeroToDrive()
|
#!/bin/py
import os
count = 1
def zeroToDrive():
''' write zeros to drive '''
wipes = 1
for int in range(count):
os.system(("dd if=/dev/zero bs=4096 | pv --progress --timer --rate --bytes| dd of=/dev/null bs=4096"))
# os.system(os.system(("dd if=/dev/zero bs=4096 | pv -ptrb | dd of=/dev/null bs=4096"))
wipes+=1
zeroToDrive()
|
mit
|
Python
|
425de26b5a7041905862519c727491dd8ec17173
|
Add root tag.
|
hanjoes/anivis,hanjoes/anivis,hanjoes/anivis
|
api/gen_detail_file.py
|
api/gen_detail_file.py
|
import timeit
import gevent
import math
from gevent.queue import Queue
from detail import AnimeDetail
from gen_id_file import IDS_FILENAME
from gevent import monkey; monkey.patch_socket()
DETAILS_FILENAME = 'animes.xml'
BATCH_SIZE = 10
WORKER_NUM = 8
MAXIMUM_WORKER_NUM = 8
NAMES_FOR_WORKER = ['Joe', 'Adam', 'Matt', 'Bob', 'Sam', 'Mary', 'Jack', 'Peter']
FILE_SUFFIX = '_batch.xml'
# stores tuple like (start, end)
tasks = Queue()
def worker(name, work):
with open(name + FILE_SUFFIX, 'w') as f:
f.write('<root>')
gevent.sleep(0)
ad = AnimeDetail()
while not tasks.empty():
task = tasks.get()
request = '/'.join([id.strip() for id in work[task[0]:task[1]]])
print name + ' woke up doing work.. ' + request
batch_data = ad.fetch_details(request)
f.write(batch_data)
f.write('</root>')
def boss(name, work):
print name + ' woke up...'
count = 0
for i in range(int(math.ceil(float(len(work)) / BATCH_SIZE))):
start = i * BATCH_SIZE
end = min((i + 1) * BATCH_SIZE, len(work))
tasks.put((start, end))
count += 1
print 'Work has been divided into ' + str(count) + ' batches.'
def process(list, num_workers):
# make sure worker num doesn't exceeds limitation
num_workers = min(num_workers, MAXIMUM_WORKER_NUM)
# boss starts
gevent.spawn(boss, 'Terence', work).join()
# workers start
gevent.joinall([gevent.spawn(worker, NAMES_FOR_WORKER[i], work) for i in range(num_workers)])
if __name__ == '__main__':
# put all details into string
ad = AnimeDetail()
work = []
try:
with open(IDS_FILENAME, 'r') as idsf:
work = [id for id in idsf]
except IOError as e:
print 'Please run gen_id_file.py first.'
start = timeit.default_timer()
process(work, WORKER_NUM)
stop = timeit.default_timer()
print 'It took ' + str(stop - start) + 's to run ' + str(len(work)) + ' queries.'
|
import timeit
import gevent
import math
from gevent.queue import Queue
from detail import AnimeDetail
from gen_id_file import IDS_FILENAME
from gevent import monkey; monkey.patch_socket()
DETAILS_FILENAME = 'animes.xml'
BATCH_SIZE = 10
WORKER_NUM = 8
MAXIMUM_WORKER_NUM = 8
NAMES_FOR_WORKER = ['Joe', 'Adam', 'Matt', 'Bob', 'Sam', 'Mary', 'Jack', 'Peter']
FILE_SUFFIX = '_batch.xml'
# stores tuple like (start, end)
tasks = Queue()
def worker(name, work):
with open(name + FILE_SUFFIX, 'w') as f:
gevent.sleep(0)
ad = AnimeDetail()
while not tasks.empty():
task = tasks.get()
request = '/'.join([id.strip() for id in work[task[0]:task[1]]])
print name + ' woke up doing work.. ' + request
batch_data = ad.fetch_details(request)
f.write(batch_data)
def boss(name, work):
print name + ' woke up...'
count = 0
for i in range(int(math.ceil(float(len(work)) / BATCH_SIZE))):
start = i * BATCH_SIZE
end = min((i + 1) * BATCH_SIZE, len(work))
tasks.put((start, end))
count += 1
print 'Work has been divided into ' + str(count) + ' batches.'
def process(list, num_workers):
# make sure worker num doesn't exceeds limitation
num_workers = min(num_workers, MAXIMUM_WORKER_NUM)
# boss starts
gevent.spawn(boss, 'Terence', work).join()
# workers start
gevent.joinall([gevent.spawn(worker, NAMES_FOR_WORKER[i], work) for i in range(num_workers)])
if __name__ == '__main__':
# put all details into string
ad = AnimeDetail()
detail = ''
work = []
try:
with open(IDS_FILENAME, 'r') as idsf:
work = [id for id in idsf]
except IOError as e:
print 'Please run gen_id_file.py first.'
start = timeit.default_timer()
process(work, WORKER_NUM)
stop = timeit.default_timer()
print 'It took ' + str(stop - start) + 's to run ' + str(len(work)) + ' queries.'
|
mit
|
Python
|
5a1cb6408397e3dd8eed227f0adcea924de7b0e1
|
enable new checkin report
|
jlutz777/FreeStore,jlutz777/FreeStore,jlutz777/FreeStore
|
reporting/utils.py
|
reporting/utils.py
|
"""
Utility functions for reports
"""
from .reports import FamilyTotalOverTimeReport, DependentsTotalOverTimeReport
from .reports import FamilyCheckoutsPerWeekReport, DependentCheckoutsPerWeekReport
from .reports import EmptyFamilyCheckoutsPerWeekReport, FamilyCheckInsPerWeekReport
from .reports import ItemsPerCategoryPerMonthReport, IndividualsByAgeReport
from .reports import FamiliesPerZipReport, CheckoutFrequencyPerMonthReport
from .reports import VolunteersHoursWorkedReport
availableReports = {}
availableReports[1] = FamilyTotalOverTimeReport
availableReports[2] = DependentsTotalOverTimeReport
availableReports[3] = FamilyCheckoutsPerWeekReport
availableReports[4] = EmptyFamilyCheckoutsPerWeekReport
availableReports[5] = FamilyCheckInsPerWeekReport
availableReports[6] = DependentCheckoutsPerWeekReport
availableReports[7] = ItemsPerCategoryPerMonthReport
availableReports[8] = IndividualsByAgeReport
availableReports[9] = FamiliesPerZipReport
availableReports[10] = CheckoutFrequencyPerMonthReport
availableReports[11] = VolunteersHoursWorkedReport
def determineAndCreateReport(report_num, startDate='', endDate=''):
"""Determine the report"""
return availableReports[report_num](startDate, endDate)
|
"""
Utility functions for reports
"""
from .reports import FamilyTotalOverTimeReport, DependentsTotalOverTimeReport
from .reports import FamilyCheckoutsPerWeekReport, DependentCheckoutsPerWeekReport
from .reports import EmptyFamilyCheckoutsPerWeekReport
from .reports import ItemsPerCategoryPerMonthReport, IndividualsByAgeReport
from .reports import FamiliesPerZipReport, CheckoutFrequencyPerMonthReport
from .reports import VolunteersHoursWorkedReport
availableReports = {}
availableReports[1] = FamilyTotalOverTimeReport
availableReports[2] = DependentsTotalOverTimeReport
availableReports[3] = FamilyCheckoutsPerWeekReport
availableReports[4] = EmptyFamilyCheckoutsPerWeekReport
availableReports[5] = DependentCheckoutsPerWeekReport
availableReports[6] = ItemsPerCategoryPerMonthReport
availableReports[7] = IndividualsByAgeReport
availableReports[8] = FamiliesPerZipReport
availableReports[9] = CheckoutFrequencyPerMonthReport
availableReports[10] = VolunteersHoursWorkedReport
def determineAndCreateReport(report_num, startDate='', endDate=''):
"""Determine the report"""
return availableReports[report_num](startDate, endDate)
|
mit
|
Python
|
0c1b0a7787bd6824815ae208edab8f208b53af09
|
Add comment to override of status code
|
Ghalko/osf.io,billyhunt/osf.io,wearpants/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,GageGaskins/osf.io,njantrania/osf.io,mluke93/osf.io,baylee-d/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,wearpants/osf.io,kwierman/osf.io,leb2dg/osf.io,leb2dg/osf.io,chennan47/osf.io,arpitar/osf.io,leb2dg/osf.io,erinspace/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,petermalcolm/osf.io,adlius/osf.io,abought/osf.io,icereval/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,Nesiehr/osf.io,felliott/osf.io,SSJohns/osf.io,baylee-d/osf.io,KAsante95/osf.io,billyhunt/osf.io,sbt9uc/osf.io,mluke93/osf.io,mluo613/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,samanehsan/osf.io,mluke93/osf.io,crcresearch/osf.io,ticklemepierce/osf.io,petermalcolm/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,amyshi188/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,billyhunt/osf.io,arpitar/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,samchrisinger/osf.io,emetsger/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,alexschiller/osf.io,doublebits/osf.io,abought/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,laurenrevere/osf.io,SSJohns/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,sbt9uc/osf.io,TomHeatwole/osf.io,pattisdr/osf.io,Nesiehr/osf.io,binoculars/osf.io,kwierman/osf.io,emetsger/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,zamattiac/osf.io,crcresearch/osf.io,felliott/osf.io,doublebits/osf.io,rdhyee/osf.io,leb2dg/osf.io,alexschiller/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,acshi/osf.io,njantrania/osf.io,billyhunt/osf.io,SSJohns/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,cslzchen/osf.io,zamattiac/osf.io,emetsger/osf.io,abought/osf.io,mluke93/osf.io,RomanZWang/osf.io,dani
elneis/osf.io,acshi/osf.io,danielneis/osf.io,erinspace/osf.io,samchrisinger/osf.io,wearpants/osf.io,aaxelb/osf.io,rdhyee/osf.io,doublebits/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,KAsante95/osf.io,GageGaskins/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,hmoco/osf.io,icereval/osf.io,danielneis/osf.io,TomHeatwole/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,SSJohns/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,njantrania/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,jnayak1/osf.io,billyhunt/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,mluo613/osf.io,chennan47/osf.io,binoculars/osf.io,hmoco/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,samanehsan/osf.io,caneruguz/osf.io,kch8qx/osf.io,doublebits/osf.io,samanehsan/osf.io,acshi/osf.io,TomHeatwole/osf.io,aaxelb/osf.io,sloria/osf.io,KAsante95/osf.io,sbt9uc/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,petermalcolm/osf.io,wearpants/osf.io,emetsger/osf.io,mluo613/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,chrisseto/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,brandonPurvis/osf.io,adlius/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,sloria/osf.io,monikagrabowska/osf.io,cosenal/osf.io,doublebits/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,KAsante95/osf.io,saradbowman/osf.io,haoyuchen1992/osf.io,mfraezz/osf.io,kch8qx/osf.io,crcresearch/osf.io,jnayak1/osf.io,abought/osf.io,cosenal/osf.io,cslzchen/osf.io,icereval/osf.io,monikagrabowska/osf.io,mluo613/osf.io,mfraezz/osf.io,danielneis/osf.io,sloria/osf.io,jnayak1/osf.io,acshi/osf.io,caseyrygt/osf.io,mluo613/osf.io,cwisecarver/osf.io,cosenal/osf.io,hmoco/osf.io,amyshi188/osf.io,acshi/osf.io,TomHeatwole/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,aaxelb/osf.io,adlius/osf.io,KAsante95/osf.io,TomBaxter/osf.io,mattclark/
osf.io,pattisdr/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,arpitar/osf.io,binoculars/osf.io,arpitar/osf.io,brianjgeiger/osf.io,cosenal/osf.io,mfraezz/osf.io,alexschiller/osf.io,baylee-d/osf.io,chrisseto/osf.io,GageGaskins/osf.io,mattclark/osf.io,zachjanicki/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,samchrisinger/osf.io,brianjgeiger/osf.io,adlius/osf.io,cslzchen/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,erinspace/osf.io,amyshi188/osf.io,cwisecarver/osf.io,felliott/osf.io,mattclark/osf.io,cslzchen/osf.io
|
api/base/exceptions.py
|
api/base/exceptions.py
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
# Returns 401 instead of 403 during unauthorized requests without having user to log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
apache-2.0
|
Python
|
0bd4224bfa737942930bfa1e1a67ecfdf25e70e7
|
Apply WidgetAdapter to CheckboxSelectMultiple.
|
gasman/wagtail,zerolab/wagtail,jnns/wagtail,rsalmaso/wagtail,mixxorz/wagtail,thenewguy/wagtail,mixxorz/wagtail,rsalmaso/wagtail,torchbox/wagtail,zerolab/wagtail,wagtail/wagtail,thenewguy/wagtail,mixxorz/wagtail,torchbox/wagtail,gasman/wagtail,wagtail/wagtail,rsalmaso/wagtail,wagtail/wagtail,jnns/wagtail,wagtail/wagtail,mixxorz/wagtail,torchbox/wagtail,thenewguy/wagtail,zerolab/wagtail,rsalmaso/wagtail,jnns/wagtail,thenewguy/wagtail,mixxorz/wagtail,wagtail/wagtail,torchbox/wagtail,jnns/wagtail,gasman/wagtail,zerolab/wagtail,zerolab/wagtail,thenewguy/wagtail,gasman/wagtail,gasman/wagtail,rsalmaso/wagtail
|
wagtail/core/widget_adapters.py
|
wagtail/core/widget_adapters.py
|
"""
Register Telepath adapters for core Django form widgets, so that they can
have corresponding Javascript objects with the ability to render new instances
and extract field values.
"""
from django import forms
from django.utils.functional import cached_property
from wagtail.admin.staticfiles import versioned_static
from wagtail.core.telepath import Adapter, register
class WidgetAdapter(Adapter):
js_constructor = 'wagtail.widgets.Widget'
def js_args(self, widget):
return [
widget.render('__NAME__', None, attrs={'id': '__ID__'}),
widget.id_for_label('__ID__'),
]
def get_media(self, widget):
media = super().get_media(widget)
return media + widget.media
@cached_property
def media(self):
return forms.Media(js=[
versioned_static('wagtailadmin/js/telepath/widgets.js'),
])
register(WidgetAdapter(), forms.widgets.Input)
register(WidgetAdapter(), forms.Textarea)
register(WidgetAdapter(), forms.Select)
register(WidgetAdapter(), forms.CheckboxSelectMultiple)
class RadioSelectAdapter(WidgetAdapter):
js_constructor = 'wagtail.widgets.RadioSelect'
register(RadioSelectAdapter(), forms.RadioSelect)
|
"""
Register Telepath adapters for core Django form widgets, so that they can
have corresponding Javascript objects with the ability to render new instances
and extract field values.
"""
from django import forms
from django.utils.functional import cached_property
from wagtail.admin.staticfiles import versioned_static
from wagtail.core.telepath import Adapter, register
class WidgetAdapter(Adapter):
js_constructor = 'wagtail.widgets.Widget'
def js_args(self, widget):
return [
widget.render('__NAME__', None, attrs={'id': '__ID__'}),
widget.id_for_label('__ID__'),
]
def get_media(self, widget):
media = super().get_media(widget)
return media + widget.media
@cached_property
def media(self):
return forms.Media(js=[
versioned_static('wagtailadmin/js/telepath/widgets.js'),
])
register(WidgetAdapter(), forms.widgets.Input)
register(WidgetAdapter(), forms.Textarea)
register(WidgetAdapter(), forms.Select)
class RadioSelectAdapter(WidgetAdapter):
js_constructor = 'wagtail.widgets.RadioSelect'
register(RadioSelectAdapter(), forms.RadioSelect)
|
bsd-3-clause
|
Python
|
8b38f24fbdee425fde52d33b2154812d10c9d3db
|
Bump version
|
marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator,marteinn/wagtail-alt-generator
|
wagtailaltgenerator/__init__.py
|
wagtailaltgenerator/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
wagtailaltgenerator
----------
Insert image description and tags with the help of computer vision
"""
__title__ = "wagtailaltgenerator"
__version__ = "4.1.1"
__build__ = 411
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2016-2019 Martin Sandström"
default_app_config = "wagtailaltgenerator.apps.AltGeneratorAppConfig"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
wagtailaltgenerator
----------
Insert image description and tags with the help of computer vision
"""
__title__ = "wagtailaltgenerator"
__version__ = "4.1.0"
__build__ = 410
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2016-2018 Martin Sandström"
default_app_config = "wagtailaltgenerator.apps.AltGeneratorAppConfig"
|
mit
|
Python
|
a155e8654a95969abc2290d4198622991d6cb00e
|
Remove duplicate entry for vikidia and gutenberg in burundi boxes
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
ideascube/conf/idb_bdi.py
|
ideascube/conf/idb_bdi.py
|
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
|
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
|
agpl-3.0
|
Python
|
923bcc555c6a039df649d1c2725cc67ad8c79660
|
Make CMS errors JSON-able if requested via AJAX
|
nagyistoce/edx-platform,fly19890211/edx-platform,xinjiguaike/edx-platform,nagyistoce/edx-platform,sameetb-cuelogic/edx-platform-test,zubair-arbi/edx-platform,romain-li/edx-platform,DNFcode/edx-platform,martynovp/edx-platform,torchingloom/edx-platform,don-github/edx-platform,RPI-OPENEDX/edx-platform,chauhanhardik/populo,praveen-pal/edx-platform,jswope00/griffinx,xinjiguaike/edx-platform,sameetb-cuelogic/edx-platform-test,EduPepperPDTesting/pepper2013-testing,antoviaque/edx-platform,teltek/edx-platform,zadgroup/edx-platform,rationalAgent/edx-platform-custom,wwj718/ANALYSE,amir-qayyum-khan/edx-platform,tanmaykm/edx-platform,hkawasaki/kawasaki-aio8-1,jjmiranda/edx-platform,eduNEXT/edunext-platform,playm2mboy/edx-platform,mcgachey/edx-platform,alu042/edx-platform,pepeportela/edx-platform,IndonesiaX/edx-platform,doismellburning/edx-platform,xinjiguaike/edx-platform,TeachAtTUM/edx-platform,JioEducation/edx-platform,morenopc/edx-platform,don-github/edx-platform,jswope00/griffinx,mcgachey/edx-platform,nikolas/edx-platform,xuxiao19910803/edx,olexiim/edx-platform,nanolearning/edx-platform,BehavioralInsightsTeam/edx-platform,10clouds/edx-platform,zofuthan/edx-platform,simbs/edx-platform,mahendra-r/edx-platform,EDUlib/edx-platform,leansoft/edx-platform,don-github/edx-platform,yokose-ks/edx-platform,dsajkl/123,praveen-pal/edx-platform,cselis86/edx-platform,motion2015/edx-platform,auferack08/edx-platform,ahmadio/edx-platform,halvertoluke/edx-platform,cselis86/edx-platform,knehez/edx-platform,morpheby/levelup-by,devs1991/test_edx_docmode,edx-solutions/edx-platform,LearnEra/LearnEraPlaftform,knehez/edx-platform,arbrandes/edx-platform,kamalx/edx-platform,mcgachey/edx-platform,arifsetiawan/edx-platform,nttks/edx-platform,bigdatauniversity/edx-platform,bitifirefly/edx-platform,utecuy/edx-platform,waheedahmed/edx-platform,ESOedX/edx-platform,ak2703/edx-platform,auferack08/edx-platform,CredoReference/edx-platform,bdero/edx-platform,ahmadiga/min_edx,CourseTalk/edx-platform,TsinghuaX/edx-p
latform,SivilTaram/edx-platform,franosincic/edx-platform,zubair-arbi/edx-platform,shashank971/edx-platform,pku9104038/edx-platform,xingyepei/edx-platform,louyihua/edx-platform,nanolearningllc/edx-platform-cypress-2,valtech-mooc/edx-platform,kamalx/edx-platform,angelapper/edx-platform,4eek/edx-platform,cpennington/edx-platform,vasyarv/edx-platform,ahmadiga/min_edx,mitocw/edx-platform,ovnicraft/edx-platform,chauhanhardik/populo,nanolearningllc/edx-platform-cypress,adoosii/edx-platform,yokose-ks/edx-platform,AkA84/edx-platform,arbrandes/edx-platform,eestay/edx-platform,kamalx/edx-platform,abdoosh00/edx-rtl-final,jjmiranda/edx-platform,ubc/edx-platform,wwj718/edx-platform,WatanabeYasumasa/edx-platform,jbzdak/edx-platform,Semi-global/edx-platform,arifsetiawan/edx-platform,vismartltd/edx-platform,10clouds/edx-platform,deepsrijit1105/edx-platform,olexiim/edx-platform,Edraak/edx-platform,AkA84/edx-platform,zofuthan/edx-platform,kursitet/edx-platform,ahmadiga/min_edx,SivilTaram/edx-platform,eemirtekin/edx-platform,inares/edx-platform,shubhdev/edx-platform,lduarte1991/edx-platform,benpatterson/edx-platform,cselis86/edx-platform,J861449197/edx-platform,edx/edx-platform,Livit/Livit.Learn.EdX,Unow/edx-platform,hkawasaki/kawasaki-aio8-2,kmoocdev2/edx-platform,ahmadiga/min_edx,Endika/edx-platform,miptliot/edx-platform,romain-li/edx-platform,UXE/local-edx,fly19890211/edx-platform,ferabra/edx-platform,vasyarv/edx-platform,Softmotions/edx-platform,nanolearning/edx-platform,adoosii/edx-platform,LICEF/edx-platform,vismartltd/edx-platform,chrisndodge/edx-platform,mjg2203/edx-platform-seas,playm2mboy/edx-platform,eestay/edx-platform,chauhanhardik/populo_2,antonve/s4-project-mooc,jazztpt/edx-platform,franosincic/edx-platform,beacloudgenius/edx-platform,zerobatu/edx-platform,ubc/edx-platform,Shrhawk/edx-platform,chauhanhardik/populo_2,benpatterson/edx-platform,sudheerchintala/LearnEraPlatForm,edry/edx-platform,ferabra/edx-platform,polimediaupv/edx-platform,hamzehd/edx-platform,kursitet/edx
-platform,eestay/edx-platform,msegado/edx-platform,tiagochiavericosta/edx-platform,solashirai/edx-platform,CourseTalk/edx-platform,morenopc/edx-platform,jonathan-beard/edx-platform,ferabra/edx-platform,itsjeyd/edx-platform,RPI-OPENEDX/edx-platform,Livit/Livit.Learn.EdX,procangroup/edx-platform,fintech-circle/edx-platform,rue89-tech/edx-platform,hastexo/edx-platform,pomegranited/edx-platform,Livit/Livit.Learn.EdX,msegado/edx-platform,Shrhawk/edx-platform,teltek/edx-platform,nttks/jenkins-test,CredoReference/edx-platform,sudheerchintala/LearnEraPlatForm,kalebhartje/schoolboost,Kalyzee/edx-platform,chand3040/cloud_that,atsolakid/edx-platform,UXE/local-edx,playm2mboy/edx-platform,hkawasaki/kawasaki-aio8-1,beni55/edx-platform,nanolearning/edx-platform,JCBarahona/edX,nagyistoce/edx-platform,LICEF/edx-platform,pabloborrego93/edx-platform,philanthropy-u/edx-platform,vasyarv/edx-platform,vikas1885/test1,ovnicraft/edx-platform,OmarIthawi/edx-platform,defance/edx-platform,jruiperezv/ANALYSE,Shrhawk/edx-platform,MakeHer/edx-platform,pelikanchik/edx-platform,zubair-arbi/edx-platform,ZLLab-Mooc/edx-platform,cecep-edu/edx-platform,doganov/edx-platform,jazkarta/edx-platform-for-isc,marcore/edx-platform,chand3040/cloud_that,Edraak/edraak-platform,raccoongang/edx-platform,J861449197/edx-platform,torchingloom/edx-platform,B-MOOC/edx-platform,LearnEra/LearnEraPlaftform,doismellburning/edx-platform,eduNEXT/edx-platform,ZLLab-Mooc/edx-platform,jbzdak/edx-platform,tiagochiavericosta/edx-platform,playm2mboy/edx-platform,ak2703/edx-platform,utecuy/edx-platform,atsolakid/edx-platform,morpheby/levelup-by,teltek/edx-platform,nanolearningllc/edx-platform-cypress-2,praveen-pal/edx-platform,pomegranited/edx-platform,franosincic/edx-platform,xuxiao19910803/edx-platform,jazkarta/edx-platform-for-isc,ahmadio/edx-platform,nagyistoce/edx-platform,pku9104038/edx-platform,stvstnfrd/edx-platform,PepperPD/edx-pepper-platform,pabloborrego93/edx-platform,stvstnfrd/edx-platform,valtech-mooc/edx-platform,EduP
epperPDTesting/pepper2013-testing,nanolearningllc/edx-platform-cypress,fintech-circle/edx-platform,apigee/edx-platform,jamesblunt/edx-platform,apigee/edx-platform,pdehaye/theming-edx-platform,mitocw/edx-platform,ESOedX/edx-platform,Edraak/edraak-platform,deepsrijit1105/edx-platform,iivic/BoiseStateX,Kalyzee/edx-platform,beni55/edx-platform,unicri/edx-platform,rationalAgent/edx-platform-custom,dcosentino/edx-platform,alexthered/kienhoc-platform,doismellburning/edx-platform,praveen-pal/edx-platform,stvstnfrd/edx-platform,jolyonb/edx-platform,beacloudgenius/edx-platform,MSOpenTech/edx-platform,kamalx/edx-platform,eduNEXT/edx-platform,jzoldak/edx-platform,iivic/BoiseStateX,ubc/edx-platform,Ayub-Khan/edx-platform,Stanford-Online/edx-platform,alu042/edx-platform,olexiim/edx-platform,synergeticsedx/deployment-wipro,kalebhartje/schoolboost,rationalAgent/edx-platform-custom,Unow/edx-platform,ahmadiga/min_edx,shubhdev/edxOnBaadal,B-MOOC/edx-platform,waheedahmed/edx-platform,shubhdev/openedx,edx-solutions/edx-platform,cselis86/edx-platform,itsjeyd/edx-platform,appliedx/edx-platform,cecep-edu/edx-platform,Endika/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,ESOedX/edx-platform,gsehub/edx-platform,cselis86/edx-platform,rationalAgent/edx-platform-custom,jzoldak/edx-platform,analyseuc3m/ANALYSE-v1,jelugbo/tundex,openfun/edx-platform,MSOpenTech/edx-platform,Semi-global/edx-platform,TsinghuaX/edx-platform,jonathan-beard/edx-platform,miptliot/edx-platform,beacloudgenius/edx-platform,Endika/edx-platform,hmcmooc/muddx-platform,Unow/edx-platform,zofuthan/edx-platform,lduarte1991/edx-platform,adoosii/edx-platform,leansoft/edx-platform,kursitet/edx-platform,zerobatu/edx-platform,apigee/edx-platform,cognitiveclass/edx-platform,RPI-OPENEDX/edx-platform,kamalx/edx-platform,naresh21/synergetics-edx-platform,pepeportela/edx-platform,rhndg/openedx,chauhanhardik/populo_2,synergeticsedx/deployment-wipro,chand3040/cloud_that,adoosii/edx-platform,mjg2203/edx-platform-seas,nttks/ed
x-platform,jelugbo/tundex,Lektorium-LLC/edx-platform,motion2015/a3,cognitiveclass/edx-platform,halvertoluke/edx-platform,carsongee/edx-platform,gymnasium/edx-platform,doganov/edx-platform,kalebhartje/schoolboost,ampax/edx-platform-backup,xingyepei/edx-platform,abdoosh00/edraak,doganov/edx-platform,jbzdak/edx-platform,kxliugang/edx-platform,ampax/edx-platform,eemirtekin/edx-platform,cecep-edu/edx-platform,jamesblunt/edx-platform,Stanford-Online/edx-platform,xingyepei/edx-platform,pepeportela/edx-platform,LICEF/edx-platform,hastexo/edx-platform,devs1991/test_edx_docmode,appliedx/edx-platform,peterm-itr/edx-platform,chrisndodge/edx-platform,EduPepperPD/pepper2013,edry/edx-platform,angelapper/edx-platform,alexthered/kienhoc-platform,hamzehd/edx-platform,unicri/edx-platform,ampax/edx-platform-backup,hmcmooc/muddx-platform,martynovp/edx-platform,nagyistoce/edx-platform,Stanford-Online/edx-platform,proversity-org/edx-platform,Lektorium-LLC/edx-platform,motion2015/edx-platform,EduPepperPDTesting/pepper2013-testing,jbassen/edx-platform,alu042/edx-platform,antoviaque/edx-platform,prarthitm/edxplatform,jswope00/griffinx,nttks/edx-platform,jswope00/griffinx,MakeHer/edx-platform,kalebhartje/schoolboost,nanolearningllc/edx-platform-cypress-2,chrisndodge/edx-platform,shubhdev/edxOnBaadal,wwj718/ANALYSE,cognitiveclass/edx-platform,synergeticsedx/deployment-wipro,louyihua/edx-platform,Edraak/circleci-edx-platform,peterm-itr/edx-platform,shurihell/testasia,antonve/s4-project-mooc,ZLLab-Mooc/edx-platform,dsajkl/reqiop,stvstnfrd/edx-platform,kxliugang/edx-platform,zadgroup/edx-platform,ampax/edx-platform,hamzehd/edx-platform,jjmiranda/edx-platform,mjirayu/sit_academy,xinjiguaike/edx-platform,MakeHer/edx-platform,IONISx/edx-platform,shubhdev/edxOnBaadal,openfun/edx-platform,nikolas/edx-platform,edry/edx-platform,doganov/edx-platform,hkawasaki/kawasaki-aio8-2,louyihua/edx-platform,IITBinterns13/edx-platform-dev,hastexo/edx-platform,devs1991/test_edx_docmode,carsongee/edx-platform,jruiper
ezv/ANALYSE,IONISx/edx-platform,antoviaque/edx-platform,jazztpt/edx-platform,MakeHer/edx-platform,alu042/edx-platform,doismellburning/edx-platform,UOMx/edx-platform,chauhanhardik/populo,zadgroup/edx-platform,yokose-ks/edx-platform,4eek/edx-platform,jazztpt/edx-platform,beni55/edx-platform,sudheerchintala/LearnEraPlatForm,atsolakid/edx-platform,kursitet/edx-platform,andyzsf/edx,jonathan-beard/edx-platform,WatanabeYasumasa/edx-platform,atsolakid/edx-platform,alexthered/kienhoc-platform,jruiperezv/ANALYSE,jazkarta/edx-platform,zubair-arbi/edx-platform,antonve/s4-project-mooc,vikas1885/test1,Edraak/circleci-edx-platform,devs1991/test_edx_docmode,pdehaye/theming-edx-platform,marcore/edx-platform,mushtaqak/edx-platform,zhenzhai/edx-platform,motion2015/a3,romain-li/edx-platform,shashank971/edx-platform,cpennington/edx-platform,zadgroup/edx-platform,chauhanhardik/populo,CourseTalk/edx-platform,JCBarahona/edX,edx/edx-platform,ak2703/edx-platform,ampax/edx-platform,beacloudgenius/edx-platform,hkawasaki/kawasaki-aio8-0,devs1991/test_edx_docmode,jswope00/GAI,kxliugang/edx-platform,shubhdev/openedx,etzhou/edx-platform,4eek/edx-platform,martynovp/edx-platform,naresh21/synergetics-edx-platform,mbareta/edx-platform-ft,mtlchun/edx,proversity-org/edx-platform,abdoosh00/edraak,kmoocdev/edx-platform,IONISx/edx-platform,hamzehd/edx-platform,gsehub/edx-platform,abdoosh00/edraak,y12uc231/edx-platform,nttks/jenkins-test,rhndg/openedx,JioEducation/edx-platform,PepperPD/edx-pepper-platform,TeachAtTUM/edx-platform,eestay/edx-platform,hmcmooc/muddx-platform,knehez/edx-platform,y12uc231/edx-platform,pdehaye/theming-edx-platform,fintech-circle/edx-platform,pelikanchik/edx-platform,caesar2164/edx-platform,PepperPD/edx-pepper-platform,shurihell/testasia,vismartltd/edx-platform,mcgachey/edx-platform,xuxiao19910803/edx,atsolakid/edx-platform,eemirtekin/edx-platform,jzoldak/edx-platform,mushtaqak/edx-platform,mcgachey/edx-platform,BehavioralInsightsTeam/edx-platform,wwj718/ANALYSE,sameetb-cuelogic/ed
x-platform-test,longmen21/edx-platform,Edraak/circleci-edx-platform,Edraak/edx-platform,kmoocdev/edx-platform,itsjeyd/edx-platform,chudaol/edx-platform,BehavioralInsightsTeam/edx-platform,waheedahmed/edx-platform,eduNEXT/edx-platform,iivic/BoiseStateX,kursitet/edx-platform,morenopc/edx-platform,raccoongang/edx-platform,appliedx/edx-platform,B-MOOC/edx-platform,rue89-tech/edx-platform,edry/edx-platform,vismartltd/edx-platform,rue89-tech/edx-platform,syjeon/new_edx,xuxiao19910803/edx-platform,openfun/edx-platform,rue89-tech/edx-platform,xinjiguaike/edx-platform,Edraak/edx-platform,xuxiao19910803/edx,zhenzhai/edx-platform,franosincic/edx-platform,mahendra-r/edx-platform,nanolearningllc/edx-platform-cypress,yokose-ks/edx-platform,cognitiveclass/edx-platform,10clouds/edx-platform,dcosentino/edx-platform,knehez/edx-platform,Ayub-Khan/edx-platform,romain-li/edx-platform,morpheby/levelup-by,hkawasaki/kawasaki-aio8-0,kalebhartje/schoolboost,tanmaykm/edx-platform,nttks/jenkins-test,jruiperezv/ANALYSE,iivic/BoiseStateX,nttks/edx-platform,hkawasaki/kawasaki-aio8-2,B-MOOC/edx-platform,jamiefolsom/edx-platform,yokose-ks/edx-platform,a-parhom/edx-platform,raccoongang/edx-platform,ubc/edx-platform,EduPepperPD/pepper2013,kmoocdev2/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,gsehub/edx-platform,shubhdev/openedx,solashirai/edx-platform,jbzdak/edx-platform,cecep-edu/edx-platform,andyzsf/edx,appliedx/edx-platform,leansoft/edx-platform,vasyarv/edx-platform,SivilTaram/edx-platform,Edraak/edx-platform,Semi-global/edx-platform,chauhanhardik/populo_2,halvertoluke/edx-platform,SravanthiSinha/edx-platform,ak2703/edx-platform,shurihell/testasia,franosincic/edx-platform,bdero/edx-platform,chudaol/edx-platform,bitifirefly/edx-platform,jamesblunt/edx-platform,eduNEXT/edunext-platform,jjmiranda/edx-platform,teltek/edx-platform,EduPepperPDTesting/pepper2013-testing,abdoosh00/edx-rtl-final,kmoocdev/edx-platform,edx-solutions/edx-platform,solashirai/edx-platform,DNFcode/edx-platform
,cpennington/edx-platform,dsajkl/reqiop,polimediaupv/edx-platform,waheedahmed/edx-platform,gymnasium/edx-platform,amir-qayyum-khan/edx-platform,shashank971/edx-platform,fly19890211/edx-platform,raccoongang/edx-platform,pomegranited/edx-platform,jbassen/edx-platform,UOMx/edx-platform,kmoocdev2/edx-platform,syjeon/new_edx,caesar2164/edx-platform,simbs/edx-platform,Ayub-Khan/edx-platform,jamiefolsom/edx-platform,motion2015/edx-platform,valtech-mooc/edx-platform,B-MOOC/edx-platform,shashank971/edx-platform,abdoosh00/edx-rtl-final,motion2015/edx-platform,wwj718/edx-platform,eduNEXT/edunext-platform,carsongee/edx-platform,LearnEra/LearnEraPlaftform,rhndg/openedx,bigdatauniversity/edx-platform,DefyVentures/edx-platform,zerobatu/edx-platform,shashank971/edx-platform,chudaol/edx-platform,cyanna/edx-platform,zhenzhai/edx-platform,ampax/edx-platform-backup,4eek/edx-platform,a-parhom/edx-platform,Ayub-Khan/edx-platform,jolyonb/edx-platform,msegado/edx-platform,unicri/edx-platform,UOMx/edx-platform,rismalrv/edx-platform,SravanthiSinha/edx-platform,DNFcode/edx-platform,Ayub-Khan/edx-platform,etzhou/edx-platform,cecep-edu/edx-platform,chauhanhardik/populo_2,jazkarta/edx-platform-for-isc,bigdatauniversity/edx-platform,EduPepperPD/pepper2013,mitocw/edx-platform,IndonesiaX/edx-platform,msegado/edx-platform,jonathan-beard/edx-platform,antoviaque/edx-platform,halvertoluke/edx-platform,dsajkl/123,nanolearningllc/edx-platform-cypress,vasyarv/edx-platform,motion2015/a3,beni55/edx-platform,shabab12/edx-platform,ahmedaljazzar/edx-platform,jswope00/GAI,alexthered/kienhoc-platform,nanolearningllc/edx-platform-cypress,appliedx/edx-platform,ZLLab-Mooc/edx-platform,shurihell/testasia,IndonesiaX/edx-platform,nanolearningllc/edx-platform-cypress-2,mtlchun/edx,cyanna/edx-platform,nttks/edx-platform,arifsetiawan/edx-platform,ferabra/edx-platform,AkA84/edx-platform,cyanna/edx-platform,philanthropy-u/edx-platform,ahmedaljazzar/edx-platform,MSOpenTech/edx-platform,dsajkl/reqiop,analyseuc3m/ANALYSE-v1,t
iagochiavericosta/edx-platform,OmarIthawi/edx-platform,Kalyzee/edx-platform,inares/edx-platform,halvertoluke/edx-platform,olexiim/edx-platform,edry/edx-platform,SravanthiSinha/edx-platform,MakeHer/edx-platform,IONISx/edx-platform,longmen21/edx-platform,shubhdev/edxOnBaadal,shubhdev/openedx,eemirtekin/edx-platform,prarthitm/edxplatform,torchingloom/edx-platform,Stanford-Online/edx-platform,lduarte1991/edx-platform,Semi-global/edx-platform,pelikanchik/edx-platform,tanmaykm/edx-platform,jazkarta/edx-platform,zhenzhai/edx-platform,JioEducation/edx-platform,beacloudgenius/edx-platform,kxliugang/edx-platform,mjg2203/edx-platform-seas,solashirai/edx-platform,vismartltd/edx-platform,auferack08/edx-platform,Semi-global/edx-platform,angelapper/edx-platform,hkawasaki/kawasaki-aio8-1,vikas1885/test1,angelapper/edx-platform,ubc/edx-platform,marcore/edx-platform,shubhdev/edx-platform,polimediaupv/edx-platform,CourseTalk/edx-platform,Edraak/edraak-platform,jelugbo/tundex,solashirai/edx-platform,ampax/edx-platform-backup,hkawasaki/kawasaki-aio8-2,jswope00/GAI,mitocw/edx-platform,ahmedaljazzar/edx-platform,Edraak/edx-platform,jelugbo/tundex,mjirayu/sit_academy,shabab12/edx-platform,jolyonb/edx-platform,jazkarta/edx-platform-for-isc,doismellburning/edx-platform,unicri/edx-platform,dsajkl/reqiop,PepperPD/edx-pepper-platform,torchingloom/edx-platform,miptliot/edx-platform,kmoocdev/edx-platform,shurihell/testasia,Kalyzee/edx-platform,xuxiao19910803/edx-platform,OmarIthawi/edx-platform,shabab12/edx-platform,carsongee/edx-platform,sameetb-cuelogic/edx-platform-test,nikolas/edx-platform,utecuy/edx-platform,zhenzhai/edx-platform,appsembler/edx-platform,rhndg/openedx,JioEducation/edx-platform,shubhdev/edx-platform,DNFcode/edx-platform,JCBarahona/edX,deepsrijit1105/edx-platform,WatanabeYasumasa/edx-platform,JCBarahona/edX,simbs/edx-platform,waheedahmed/edx-platform,olexiim/edx-platform,valtech-mooc/edx-platform,IITBinterns13/edx-platform-dev,deepsrijit1105/edx-platform,philanthropy-u/edx-plat
form,J861449197/edx-platform,tiagochiavericosta/edx-platform,analyseuc3m/ANALYSE-v1,mahendra-r/edx-platform,mahendra-r/edx-platform,utecuy/edx-platform,lduarte1991/edx-platform,Edraak/circleci-edx-platform,10clouds/edx-platform,Shrhawk/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,appsembler/edx-platform,jazkarta/edx-platform-for-isc,leansoft/edx-platform,rismalrv/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,amir-qayyum-khan/edx-platform,rationalAgent/edx-platform-custom,proversity-org/edx-platform,polimediaupv/edx-platform,edx/edx-platform,DefyVentures/edx-platform,leansoft/edx-platform,ovnicraft/edx-platform,EDUlib/edx-platform,EDUlib/edx-platform,mushtaqak/edx-platform,a-parhom/edx-platform,y12uc231/edx-platform,eduNEXT/edunext-platform,arbrandes/edx-platform,xuxiao19910803/edx,SivilTaram/edx-platform,auferack08/edx-platform,UOMx/edx-platform,xingyepei/edx-platform,OmarIthawi/edx-platform,RPI-OPENEDX/edx-platform,PepperPD/edx-pepper-platform,abdoosh00/edx-rtl-final,jswope00/griffinx,EDUlib/edx-platform,mbareta/edx-platform-ft,kmoocdev2/edx-platform,openfun/edx-platform,dkarakats/edx-platform,longmen21/edx-platform,inares/edx-platform,arifsetiawan/edx-platform,syjeon/new_edx,a-parhom/edx-platform,jazkarta/edx-platform,syjeon/new_edx,jbassen/edx-platform,benpatterson/edx-platform,wwj718/ANALYSE,mushtaqak/edx-platform,mbareta/edx-platform-ft,hkawasaki/kawasaki-aio8-1,BehavioralInsightsTeam/edx-platform,ferabra/edx-platform,synergeticsedx/deployment-wipro,motion2015/a3,procangroup/edx-platform,adoosii/edx-platform,prarthitm/edxplatform,Endika/edx-platform,bigdatauniversity/edx-platform,cyanna/edx-platform,martynovp/edx-platform,Lektorium-LLC/edx-platform,knehez/edx-platform,inares/edx-platform,zadgroup/edx-platform,Unow/edx-platform,AkA84/edx-platform,sameetb-cuelogic/edx-platform-test,philanthropy-u/edx-platform,jazztpt/edx-platform,morenopc/edx-platform,simbs/edx-platform,EduPepperPD/pepper2013,SravanthiSinha/edx-platform,iivic/Boise
StateX,zubair-arbi/edx-platform,naresh21/synergetics-edx-platform,xuxiao19910803/edx,chand3040/cloud_that,dkarakats/edx-platform,shubhdev/edx-platform,peterm-itr/edx-platform,procangroup/edx-platform,jamiefolsom/edx-platform,longmen21/edx-platform,MSOpenTech/edx-platform,pku9104038/edx-platform,alexthered/kienhoc-platform,ak2703/edx-platform,nttks/jenkins-test,y12uc231/edx-platform,shabab12/edx-platform,mahendra-r/edx-platform,pomegranited/edx-platform,inares/edx-platform,dcosentino/edx-platform,mjirayu/sit_academy,cpennington/edx-platform,pelikanchik/edx-platform,pabloborrego93/edx-platform,zofuthan/edx-platform,rismalrv/edx-platform,SravanthiSinha/edx-platform,louyihua/edx-platform,pku9104038/edx-platform,polimediaupv/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform-backup,xuxiao19910803/edx-platform,vikas1885/test1,sudheerchintala/LearnEraPlatForm,AkA84/edx-platform,CredoReference/edx-platform,utecuy/edx-platform,etzhou/edx-platform,xingyepei/edx-platform,Livit/Livit.Learn.EdX,Softmotions/edx-platform,pepeportela/edx-platform,bigdatauniversity/edx-platform,hkawasaki/kawasaki-aio8-0,jbassen/edx-platform,romain-li/edx-platform,jswope00/GAI,etzhou/edx-platform,nikolas/edx-platform,defance/edx-platform,cyanna/edx-platform,morenopc/edx-platform,gsehub/edx-platform,zerobatu/edx-platform,peterm-itr/edx-platform,J861449197/edx-platform,ahmadio/edx-platform,defance/edx-platform,DefyVentures/edx-platform,simbs/edx-platform,doganov/edx-platform,shubhdev/edx-platform,wwj718/edx-platform,pomegranited/edx-platform,dsajkl/123,pdehaye/theming-edx-platform,dcosentino/edx-platform,wwj718/ANALYSE,MSOpenTech/edx-platform,Softmotions/edx-platform,jzoldak/edx-platform,hastexo/edx-platform,4eek/edx-platform,nanolearningllc/edx-platform-cypress-2,etzhou/edx-platform,zerobatu/edx-platform,shubhdev/openedx,ahmadio/edx-platform,antonve/s4-project-mooc,edx-solutions/edx-platform,Shrhawk/edx-platform,Softmotions/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,dk
arakats/edx-platform,torchingloom/edx-platform,andyzsf/edx,procangroup/edx-platform,JCBarahona/edX,xuxiao19910803/edx-platform,gymnasium/edx-platform,dkarakats/edx-platform,motion2015/edx-platform,dsajkl/123,caesar2164/edx-platform,jamiefolsom/edx-platform,bitifirefly/edx-platform,dcosentino/edx-platform,rismalrv/edx-platform,longmen21/edx-platform,ovnicraft/edx-platform,proversity-org/edx-platform,Lektorium-LLC/edx-platform,unicri/edx-platform,ovnicraft/edx-platform,J861449197/edx-platform,LearnEra/LearnEraPlaftform,jbzdak/edx-platform,andyzsf/edx,msegado/edx-platform,nttks/jenkins-test,arbrandes/edx-platform,shubhdev/edxOnBaadal,analyseuc3m/ANALYSE-v1,eestay/edx-platform,ampax/edx-platform,IITBinterns13/edx-platform-dev,dsajkl/123,IONISx/edx-platform,LICEF/edx-platform,fly19890211/edx-platform,rhndg/openedx,SivilTaram/edx-platform,chand3040/cloud_that,ahmadio/edx-platform,EduPepperPDTesting/pepper2013-testing,Edraak/edraak-platform,valtech-mooc/edx-platform,nanolearning/edx-platform,UXE/local-edx,kxliugang/edx-platform,EduPepperPDTesting/pepper2013-testing,mtlchun/edx,jruiperezv/ANALYSE,dkarakats/edx-platform,marcore/edx-platform,mbareta/edx-platform-ft,IITBinterns13/edx-platform-dev,hkawasaki/kawasaki-aio8-0,mtlchun/edx,gymnasium/edx-platform,martynovp/edx-platform,edx/edx-platform,mjirayu/sit_academy,mjirayu/sit_academy,fintech-circle/edx-platform,chauhanhardik/populo,defance/edx-platform,wwj718/edx-platform,pabloborrego93/edx-platform,IndonesiaX/edx-platform,devs1991/test_edx_docmode,tanmaykm/edx-platform,bdero/edx-platform,arifsetiawan/edx-platform,rue89-tech/edx-platform,don-github/edx-platform,jamesblunt/edx-platform,DefyVentures/edx-platform,fly19890211/edx-platform,y12uc231/edx-platform,wwj718/edx-platform,WatanabeYasumasa/edx-platform,ahmedaljazzar/edx-platform,don-github/edx-platform,EduPepperPD/pepper2013,UXE/local-edx,jbassen/edx-platform,prarthitm/edxplatform,Softmotions/edx-platform,eemirtekin/edx-platform,TsinghuaX/edx-platform,itsjeyd/edx-platform,
jazkarta/edx-platform,TsinghuaX/edx-platform,kmoocdev/edx-platform,jelugbo/tundex,jonathan-beard/edx-platform,RPI-OPENEDX/edx-platform,bitifirefly/edx-platform,benpatterson/edx-platform,rismalrv/edx-platform,caesar2164/edx-platform,apigee/edx-platform,zofuthan/edx-platform,chudaol/edx-platform,openfun/edx-platform,TeachAtTUM/edx-platform,mjg2203/edx-platform-seas,bdero/edx-platform,jamesblunt/edx-platform,nanolearning/edx-platform,jamiefolsom/edx-platform,jazkarta/edx-platform,DefyVentures/edx-platform,hmcmooc/muddx-platform,bitifirefly/edx-platform,mushtaqak/edx-platform,vikas1885/test1,beni55/edx-platform,jolyonb/edx-platform,Edraak/circleci-edx-platform,playm2mboy/edx-platform,appsembler/edx-platform,TeachAtTUM/edx-platform,motion2015/a3,ZLLab-Mooc/edx-platform,nikolas/edx-platform,Kalyzee/edx-platform,antonve/s4-project-mooc,kmoocdev2/edx-platform,hamzehd/edx-platform,morpheby/levelup-by,CredoReference/edx-platform,cognitiveclass/edx-platform,abdoosh00/edraak,mtlchun/edx,LICEF/edx-platform,chudaol/edx-platform,amir-qayyum-khan/edx-platform,jazztpt/edx-platform,DNFcode/edx-platform
|
cms/djangoapps/contentstore/views/error.py
|
cms/djangoapps/contentstore/views/error.py
|
from django.http import (HttpResponse, HttpResponseServerError,
HttpResponseNotFound)
from mitxmako.shortcuts import render_to_string, render_to_response
import functools
import json
__all__ = ['not_found', 'server_error', 'render_404', 'render_500']
def jsonable_error(status=500, message="The Studio servers encountered an error"):
    """
    Decorator factory for error views.

    When the request arrived via AJAX, short-circuit with a JSON payload
    ``{"error": message}`` and the given HTTP status; otherwise delegate to
    the wrapped view unchanged.
    """
    def outer(func):
        @functools.wraps(func)
        def inner(request, *args, **kwargs):
            # Non-AJAX requests get the normal HTML error page.
            if not request.is_ajax():
                return func(request, *args, **kwargs)
            payload = json.dumps({"error": message})
            return HttpResponse(payload, content_type="application/json",
                                status=status)
        return inner
    return outer
@jsonable_error(404, "Resource not found")
def not_found(request):
    """Serve the shared error template flagged with a 404 code."""
    context = {'error': '404'}
    return render_to_response('error.html', context)
@jsonable_error(500, "The Studio servers encountered an error")
def server_error(request):
    """Serve the shared error template flagged with a 500 code."""
    context = {'error': '500'}
    return render_to_response('error.html', context)
@jsonable_error(404, "Resource not found")
def render_404(request):
    """Render the standalone 404 page as an HTTP 404 response."""
    body = render_to_string('404.html', {})
    return HttpResponseNotFound(body)
@jsonable_error(500, "The Studio servers encountered an error")
def render_500(request):
    """Render the standalone 500 page as an HTTP 500 response."""
    body = render_to_string('500.html', {})
    return HttpResponseServerError(body)
|
from django.http import HttpResponseServerError, HttpResponseNotFound
from mitxmako.shortcuts import render_to_string, render_to_response
__all__ = ['not_found', 'server_error', 'render_404', 'render_500']
def not_found(request):
    """Render the shared error template flagged as a 404."""
    return render_to_response('error.html', {'error': '404'})
def server_error(request):
    """Render the shared error template flagged as a 500."""
    return render_to_response('error.html', {'error': '500'})
def render_404(request):
    """Render the standalone 404 page as an HTTP 404 response."""
    return HttpResponseNotFound(render_to_string('404.html', {}))
def render_500(request):
    """Render the standalone 500 page as an HTTP 500 response."""
    return HttpResponseServerError(render_to_string('500.html', {}))
|
agpl-3.0
|
Python
|
f164dd8141133cb78ceeab3ecccc5d756c36da3c
|
Add optional structured properties for og:image and og:video
|
michaelhelmick/lassie,jpadilla/lassie,michaelhelmick/lassie,jpadilla/lassie
|
lassie/filters/social.py
|
lassie/filters/social.py
|
# -*- coding: utf-8 -*-
"""
lassie.filters.social
~~~~~~~~~~~~~~~~~~~~~
This module contains data social related content to help Lassie filter for content.
"""
from ..compat import str
import re
# Mapping of social meta-tag vocabularies to Lassie's canonical field names.
# Structure: HTML tag name -> provider -> settings dict, where
#   'pattern'   matches candidate attribute values for this provider,
#   'map'       renames provider properties to canonical keys,
#   'image_key'/'video_key' mark the properties that start a new
#               image/video entry,
#   'key'       is the meta-tag attribute the provider uses
#               ('property' for Open Graph, 'name' for Twitter cards).
# NOTE(review): str() here is the ..compat import — presumably it
# normalizes the text type across Python 2/3; confirm in compat module.
SOCIAL_MAPS = {
    'meta': {
        'open_graph': {  # http://ogp.me/
            'pattern': re.compile(r"^og:", re.I),
            'map': {
                'og:url': 'url',
                'og:title': 'title',
                'og:description': 'description',
                'og:locale': 'locale',
                # Both the bare property and its structured url/secure_url
                # variants map onto the entry's source fields.
                'og:image': 'src',
                'og:image:url': 'src',
                'og:image:secure_url': 'secure_src',
                'og:image:width': 'width',
                'og:image:height': 'height',
                'og:image:type': 'type',
                'og:video': 'src',
                'og:video:url': 'src',
                'og:video:secure_url': 'secure_src',
                'og:video:width': 'width',
                'og:video:height': 'height',
                'og:video:type': 'type',
            },
            'image_key': str('og:image'),
            'video_key': str('og:video'),
            'key': 'property',
        },
        'twitter_card': {  # https://dev.twitter.com/docs/cards
            'pattern': re.compile(r"^twitter:", re.I),
            'map': {
                'twitter:url': 'url',
                'twitter:title': 'title',
                'twitter:description': 'description',
                'twitter:image': 'src',
                'twitter:image:width': 'width',
                'twitter:image:height': 'height',
                'twitter:player': 'src',
                'twitter:player:width': 'width',
                'twitter:player:height': 'height',
                'twitter:player:content_type': 'type',
            },
            'image_key': str('twitter:image'),
            'video_key': str('twitter:player'),
            'key': 'name',
        },
    }
}
|
# -*- coding: utf-8 -*-
"""
lassie.filters.social
~~~~~~~~~~~~~~~~~~~~~
This module contains data social related content to help Lassie filter for content.
"""
from ..compat import str
import re
# Mapping of social meta-tag vocabularies to Lassie's canonical field names.
# Structure: HTML tag name -> provider -> settings dict; 'pattern' matches
# candidate attribute values, 'map' renames provider properties,
# 'image_key'/'video_key' mark the properties that start a new image/video
# entry, and 'key' is the meta-tag attribute the provider uses.
SOCIAL_MAPS = {
    'meta': {
        'open_graph': {  # http://ogp.me/
            'pattern': re.compile(r"^og:", re.I),
            'map': {
                'og:url': 'url',
                'og:title': 'title',
                'og:description': 'description',
                'og:locale': 'locale',
                'og:image': 'src',
                'og:image:width': 'width',
                'og:image:height': 'height',
                'og:video': 'src',
                'og:video:width': 'width',
                'og:video:height': 'height',
                'og:video:type': 'type',
            },
            'image_key': str('og:image'),
            'video_key': str('og:video'),
            'key': 'property',
        },
        'twitter_card': {  # https://dev.twitter.com/docs/cards
            'pattern': re.compile(r"^twitter:", re.I),
            'map': {
                'twitter:url': 'url',
                'twitter:title': 'title',
                'twitter:description': 'description',
                'twitter:image': 'src',
                'twitter:image:width': 'width',
                'twitter:image:height': 'height',
                'twitter:player': 'src',
                'twitter:player:width': 'width',
                'twitter:player:height': 'height',
                'twitter:player:content_type': 'type',
            },
            'image_key': str('twitter:image'),
            'video_key': str('twitter:player'),
            'key': 'name',
        },
    }
}
|
mit
|
Python
|
bc199a9eaa2416b35d1d691f580e6c9ca0b1a2ae
|
Remove node counts and update docstrings on new view for activity
|
monikagrabowska/osf.io,binoculars/osf.io,monikagrabowska/osf.io,acshi/osf.io,laurenrevere/osf.io,cslzchen/osf.io,laurenrevere/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,mfraezz/osf.io,aaxelb/osf.io,alexschiller/osf.io,Nesiehr/osf.io,mluo613/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,acshi/osf.io,erinspace/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,caneruguz/osf.io,adlius/osf.io,binoculars/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,acshi/osf.io,adlius/osf.io,icereval/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,icereval/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,mfraezz/osf.io,mattclark/osf.io,crcresearch/osf.io,rdhyee/osf.io,aaxelb/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,cslzchen/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,icereval/osf.io,mattclark/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,pattisdr/osf.io,sloria/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,sloria/osf.io,mluo613/osf.io,felliott/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,pattisdr/osf.io,leb2dg/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,aaxelb/osf.io,caneruguz/osf.io,adlius/osf.io,cwisecarver/osf.io,hmoco/osf.io,TomBaxter/osf.io,erinspace/osf.io,acshi/osf.io,leb2dg/osf.io,mattclark/osf.io,alexschiller/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,binoculars/osf.io,acshi/osf.io,chennan47/osf.io,cwisecarver/osf.io,caneruguz/osf.io,chrisseto/osf.io,baylee-d/osf.io,felliott/osf.io,felliott/osf.io,caseyrollins/osf.io,chrisseto/osf.io,saradbowman/osf.io,cwisecarver/osf.io,hmoco/osf.io,erinspace/osf.io,crcresearch/osf.io,rdhyee/osf.io,chrisseto/osf.io,rdhyee/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,cslzchen/osf.io,mfraezz/osf.io,Johnet
ordoff/osf.io,chrisseto/osf.io,felliott/osf.io,alexschiller/osf.io,Nesiehr/osf.io,hmoco/osf.io,mluo613/osf.io
|
website/discovery/views.py
|
website/discovery/views.py
|
from website import settings
from website.project import Node
from website.project import utils
from modularodm.query.querydialect import DefaultQueryDialect as Q
def activity():
    """Read node activity from pre-generated popular projects and registrations.

    New and noteworthy projects are set manually or through
    `scripts/populate_new_and_noteworthy_projects.py`; popular projects and
    registrations are generated by
    `scripts/populate_popular_projects_and_registrations.py`.
    """
    def _linked_pointers(node_id):
        # Each curated list lives on a dedicated node; its pointers are
        # the list members.
        return Node.find_one(Q('_id', 'eq', node_id)).nodes_pointer

    noteworthy = [p.node for p in _linked_pointers(settings.NEW_AND_NOTEWORTHY_LINKS_NODE)]
    popular_projects = _linked_pointers(settings.POPULAR_LINKS_NODE)
    popular_registrations = _linked_pointers(settings.POPULAR_LINKS_NODE_REGISTRATIONS)
    return {
        'new_and_noteworthy_projects': noteworthy,
        'recent_public_registrations': utils.recent_public_registrations(),
        'popular_public_projects': popular_projects,
        'popular_public_registrations': popular_registrations,
    }
|
from website import settings
from website.project import Node
from website.project import utils
from modularodm.query.querydialect import DefaultQueryDialect as Q
def activity():
    """Collect node activity for the discovery page.

    Combines search hit counts (when node data is available) with the
    curated new-and-noteworthy list and the pre-generated popular
    projects/registrations link nodes.
    """
    node_data = utils.get_node_data()
    if node_data:
        hits = utils.hits(node_data)
    else:
        # No cached node data; render the page without hit counts.
        hits = {}
    # New and noteworthy projects, curated via a dedicated links node.
    new_and_noteworthy_pointers = Node.find_one(Q('_id', 'eq', settings.NEW_AND_NOTEWORTHY_LINKS_NODE)).nodes_pointer
    new_and_noteworthy_projects = [pointer.node for pointer in new_and_noteworthy_pointers]
    # Popular Projects
    popular_public_projects = Node.find_one(Q('_id', 'eq', settings.POPULAR_LINKS_NODE)).nodes_pointer
    # Popular Registrations
    popular_public_registrations = Node.find_one(Q('_id', 'eq', settings.POPULAR_LINKS_NODE_REGISTRATIONS)).nodes_pointer
    return {
        'new_and_noteworthy_projects': new_and_noteworthy_projects,
        'recent_public_registrations': utils.recent_public_registrations(),
        'popular_public_projects': popular_public_projects,
        'popular_public_registrations': popular_public_registrations,
        'hits': hits,
    }
|
apache-2.0
|
Python
|
a9803a8a523b8603db93ae18eb7e75e63bb8c44b
|
Add `dpi` option into `plot_setup`; Fix an importing bug with `matplotlib.pyplot` (#4)
|
gaomy3832/easypyplot,gaomy3832/easypyplot
|
easypyplot/pdf.py
|
easypyplot/pdf.py
|
""" $lic$
Copyright (c) 2016-2021, Mingyu Gao
This program is free software: you can redistribute it and/or modify it under
the terms of the Modified BSD-3 License as published by the Open Source
Initiative.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the BSD-3 License for more details.
You should have received a copy of the Modified BSD-3 License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
"""
from contextlib import contextmanager
import matplotlib.backends.backend_pdf
import matplotlib.pyplot
from .format import paper_plot
def plot_setup(name, figsize=None, fontsize=9, font='paper', dpi=None):
    """ Setup a PDF page for plot.

    name: PDF file name. If not ending with .pdf, will automatically append.
    figsize: dimension of the plot in inches, should be an array of length two.
    fontsize: fontsize for legends and labels.
    font: font for legends and labels, 'paper' uses Times New Roman, 'default'
        uses default, a tuple of (family, font, ...) customizes font.
    dpi: resolution of the figure.
    """
    paper_plot(fontsize=fontsize, font=font)
    # Normalize the file name so callers may omit the extension.
    filename = name if name.endswith('.pdf') else name + '.pdf'
    pdfpage = matplotlib.backends.backend_pdf.PdfPages(filename)
    fig = matplotlib.pyplot.figure(figsize=figsize, dpi=dpi)
    return pdfpage, fig
def plot_teardown(pdfpage, fig=None):
    """Save *fig* (or the current figure when None) to *pdfpage*, then close
    the PDF page.

    pdfpage: PDF page.
    fig: the figure to save.
    """
    page = pdfpage
    page.savefig(fig)
    page.close()
@contextmanager
def plot_open(name, figsize=None, fontsize=9, font='paper', dpi=None):
    """Context manager wrapping plot_setup/plot_teardown for `with` blocks.

    name: PDF file name. If not ending with .pdf, will automatically append.
    figsize: dimension of the plot in inches, should be an array of length two.
    fontsize: fontsize for legends and labels.
    font: font for legends and labels, 'paper' uses Times New Roman, 'default'
        uses default, a tuple of (family, font, ...) customizes font.
    dpi: resolution of the figure.

    Yields the figure; the page is saved and closed when the block exits.
    """
    page, figure = plot_setup(name, figsize=figsize, fontsize=fontsize,
                              font=font, dpi=dpi)
    yield figure
    plot_teardown(page, figure)
|
""" $lic$
Copyright (c) 2016-2021, Mingyu Gao
This program is free software: you can redistribute it and/or modify it under
the terms of the Modified BSD-3 License as published by the Open Source
Initiative.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the BSD-3 License for more details.
You should have received a copy of the Modified BSD-3 License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
"""
from contextlib import contextmanager
import matplotlib.backends.backend_pdf
from .format import paper_plot
def plot_setup(name, figsize=None, fontsize=9, font='paper'):
    """ Setup a PDF page for plot.

    name: PDF file name. If not ending with .pdf, will automatically append.
    figsize: dimension of the plot in inches, should be an array of length two.
    fontsize: fontsize for legends and labels.
    font: font for legends and labels, 'paper' uses Times New Roman, 'default'
        uses default, a tuple of (family, font, ...) customizes font.
    """
    # BUG FIX: the module only imports matplotlib.backends.backend_pdf, which
    # does NOT load matplotlib.pyplot; referencing matplotlib.pyplot.figure
    # below then raises AttributeError. Import it explicitly here.
    import matplotlib.pyplot
    paper_plot(fontsize=fontsize, font=font)
    if not name.endswith('.pdf'):
        name += '.pdf'
    pdfpage = matplotlib.backends.backend_pdf.PdfPages(name)
    fig = matplotlib.pyplot.figure(figsize=figsize)
    return pdfpage, fig
def plot_teardown(pdfpage, fig=None):
    """Flush *fig* into the PDF page and release it.

    pdfpage: PDF page.
    fig: the figure to save (None saves the current figure).
    """
    target = pdfpage
    target.savefig(fig)
    target.close()
@contextmanager
def plot_open(name, figsize=None, fontsize=9, font='paper'):
    """ Open a context of PDF page for plot, used for the `with` statement.

    name: PDF file name. If not ending with .pdf, will automatically append.
    figsize: dimension of the plot in inches, should be an array of length two.
    fontsize: fontsize for legends and labels.
    font: font for legends and labels, 'paper' uses Times New Roman, 'default'
        uses default, a tuple of (family, font, ...) customizes font.

    Yields the figure; the page is saved and closed when the block exits.
    """
    pdfpage, fig = plot_setup(name, figsize=figsize, fontsize=fontsize,
                              font=font)
    yield fig
    # NOTE(review): if the with-body raises, teardown is skipped and the PDF
    # handle leaks — consider try/finally; left as-is to preserve behavior.
    plot_teardown(pdfpage, fig)
|
bsd-3-clause
|
Python
|
1147781292b32177e0733be134f8c83101f0abbf
|
add comment
|
austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp,austinlostinboston/mitsWebApp
|
weiss/flows/flowManager.py
|
weiss/flows/flowManager.py
|
"""
Flow Manager
NOTE: this class should be created by singleton factory in factory.py
this class is responsible for
1. register a user's flow
2. loop up flow by user id
Author: Ming Fang <[email protected]>
"""
import logging
logger = logging.getLogger(__name__)
class FlowManager:
    """Registry mapping a user id to that user's flow object.

    NOTE: instances should be obtained through the singleton factory in
    factory.py.
    """

    def __init__(self):
        # uid -> flow object
        self._flowTable = {}

    def register(self, uid, flow):
        """Associate *flow* with user *uid*, replacing any previous flow.

        :param uid: the user id
        :param flow: the flow obj associated with the user
        :return: None
        """
        self._flowTable[uid] = flow

    def lookUp(self, uid):
        """Return the flow registered for *uid*, or None when absent.

        :param uid: the user id to be looked up, get it by request.user
        :return: the flow obj for the user, or None if not found
        """
        return self._flowTable.get(uid)
|
"""
Flow Manager
NOTE: this class should be created by singleton factory in factory.py
this class is responsible for
1. user state factory.
2. loop up state by sid
3. make transit from one state based on a given action
Author: Ming Fang <[email protected]>
"""
import logging
logger = logging.getLogger(__name__)
class FlowManager:
    """Registry mapping a user id to that user's flow object.

    Instances should be obtained through the singleton factory in factory.py.
    """

    def __init__(self):
        # uid -> flow object
        self._flowTable = {}

    def register(self, uid, flow):
        """Associate *flow* with user *uid*, replacing any previous flow."""
        self._flowTable[uid] = flow
        return

    def lookUp(self, uid):
        """Return the flow registered for *uid*, or None when absent."""
        # dict.get replaces dict.has_key, which was removed in Python 3.
        return self._flowTable.get(uid, None)
|
apache-2.0
|
Python
|
c36b90db0f3e633f459afa3328e50b6f5e8b4e1b
|
Remove debug print.
|
pubnative/redash,M32Media/redash,easytaxibr/redash,getredash/redash,chriszs/redash,hudl/redash,jmvasquez/redashtest,useabode/redash,pubnative/redash,getredash/redash,moritz9/redash,guaguadev/redash,ninneko/redash,amino-data/redash,akariv/redash,denisov-vlad/redash,denisov-vlad/redash,jmvasquez/redashtest,rockwotj/redash,stefanseifert/redash,imsally/redash,vishesh92/redash,chriszs/redash,EverlyWell/redash,vishesh92/redash,hudl/redash,easytaxibr/redash,useabode/redash,M32Media/redash,getredash/redash,amino-data/redash,ninneko/redash,44px/redash,M32Media/redash,alexanderlz/redash,crowdworks/redash,pubnative/redash,amino-data/redash,vishesh92/redash,denisov-vlad/redash,ninneko/redash,ninneko/redash,alexanderlz/redash,stefanseifert/redash,44px/redash,chriszs/redash,guaguadev/redash,guaguadev/redash,crowdworks/redash,hudl/redash,jmvasquez/redashtest,chriszs/redash,moritz9/redash,guaguadev/redash,alexanderlz/redash,imsally/redash,hudl/redash,rockwotj/redash,vishesh92/redash,M32Media/redash,44px/redash,denisov-vlad/redash,pubnative/redash,amino-data/redash,akariv/redash,alexanderlz/redash,rockwotj/redash,crowdworks/redash,EverlyWell/redash,crowdworks/redash,getredash/redash,rockwotj/redash,EverlyWell/redash,denisov-vlad/redash,easytaxibr/redash,EverlyWell/redash,jmvasquez/redashtest,imsally/redash,easytaxibr/redash,akariv/redash,stefanseifert/redash,imsally/redash,easytaxibr/redash,pubnative/redash,useabode/redash,akariv/redash,guaguadev/redash,akariv/redash,jmvasquez/redashtest,stefanseifert/redash,useabode/redash,ninneko/redash,stefanseifert/redash,moritz9/redash,moritz9/redash,44px/redash,getredash/redash
|
redash/authentication.py
|
redash/authentication.py
|
import functools
import hashlib
import hmac
from flask import request, make_response
from flask.ext.googleauth import GoogleFederated
import time
from werkzeug.contrib.fixers import ProxyFix
import werkzeug.wrappers
from redash import data, settings
class HMACAuthentication(object):
    """Authentication wrapper that accepts either a valid HMAC-signed URL
    (query args ``signature`` and ``expires``) or the wrapped authentication
    backend (Google federated OpenID here)."""

    def __init__(self, auth):
        # The fallback authentication object; must expose .required(fn).
        self.auth = auth

    def required(self, fn):
        """Decorate view *fn*: allow through on a valid, unexpired HMAC
        signature for the query's api_key, otherwise defer to the wrapped
        auth's own decorator."""
        wrapped_fn = self.auth.required(fn)
        @functools.wraps(fn)
        def decorated(*args, **kwargs):
            signature = request.args.get('signature')
            expires = int(request.args.get('expires') or 0)
            query_id = request.view_args.get('query_id', None)
            # Only attempt signature auth when all three pieces are present
            # and the link has not expired.
            if signature and query_id and time.time() < expires:
                query = data.models.Query.objects.get(pk=query_id)
                # Signature covers the request path plus the expiry time,
                # keyed by the query's api_key.
                h = hmac.new(str(query.api_key), msg=request.path, digestmod=hashlib.sha1)
                h.update(str(expires))
                # NOTE(review): '==' digest comparison is timing-sensitive;
                # hmac.compare_digest would be safer — confirm before changing.
                if query.api_key and signature == h.hexdigest():
                    return fn(*args, **kwargs)
            # Work around for flask-restful testing only for flask.wrappers.Resource instead of
            # werkzeug.wrappers.Response
            resp = wrapped_fn(*args, **kwargs)
            if isinstance(resp, werkzeug.wrappers.Response):
                resp = make_response(resp)
            return resp
        return decorated
def setup_authentication(app):
    """Wire Google-federated OpenID auth into the Flask app and return the
    HMAC wrapper used to protect views."""
    openid_auth = GoogleFederated(settings.GOOGLE_APPS_DOMAIN, app)
    # ProxyFix trusts X-Forwarded-* headers so auth redirects work behind a proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = settings.COOKIE_SECRET
    # Re-validate authentication on every request, not only at login.
    openid_auth.force_auth_on_every_request = True
    return HMACAuthentication(openid_auth)
|
import functools
import hashlib
import hmac
from flask import request, make_response
from flask.ext.googleauth import GoogleFederated
import time
from werkzeug.contrib.fixers import ProxyFix
import werkzeug.wrappers
from redash import data, settings
class HMACAuthentication(object):
    """Authentication wrapper that accepts either a valid HMAC-signed URL
    (query args ``signature`` and ``expires``) or the wrapped authentication
    backend (Google federated OpenID here)."""

    def __init__(self, auth):
        # The fallback authentication object; must expose .required(fn).
        self.auth = auth

    def required(self, fn):
        """Decorate view *fn*: allow through on a valid, unexpired HMAC
        signature for the query's api_key, otherwise defer to the wrapped
        auth's own decorator."""
        wrapped_fn = self.auth.required(fn)

        @functools.wraps(fn)
        def decorated(*args, **kwargs):
            signature = request.args.get('signature')
            expires = int(request.args.get('expires') or 0)
            query_id = request.view_args.get('query_id', None)
            if signature and query_id and time.time() < expires:
                query = data.models.Query.objects.get(pk=query_id)
                # Signature covers the request path plus the expiry time,
                # keyed by the query's api_key.
                h = hmac.new(str(query.api_key), msg=request.path, digestmod=hashlib.sha1)
                h.update(str(expires))
                # FIX: removed leftover debug `print h.hexdigest()` — it wrote
                # the expected signature to stdout (leaking the secret digest)
                # and used Python-2-only print-statement syntax.
                if query.api_key and signature == h.hexdigest():
                    return fn(*args, **kwargs)
            # Work around for flask-restful testing only for flask.wrappers.Resource instead of
            # werkzeug.wrappers.Response
            resp = wrapped_fn(*args, **kwargs)
            if isinstance(resp, werkzeug.wrappers.Response):
                resp = make_response(resp)
            return resp
        return decorated
def setup_authentication(app):
    """Wire Google-federated OpenID auth into the Flask app and return the
    HMAC wrapper used to protect views."""
    openid_auth = GoogleFederated(settings.GOOGLE_APPS_DOMAIN, app)
    # ProxyFix trusts X-Forwarded-* headers so auth redirects work behind a proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = settings.COOKIE_SECRET
    # Re-validate authentication on every request, not only at login.
    openid_auth.force_auth_on_every_request = True
    return HMACAuthentication(openid_auth)
|
bsd-2-clause
|
Python
|
0e2d058efd310c2060df2a86b1de989eca1e2ea5
|
Modify cafetaria scraper
|
ZeusWPI/hydra,ZeusWPI/hydra,ZeusWPI/hydra
|
server/scraper/resto/cafetaria.py
|
server/scraper/resto/cafetaria.py
|
import argparse
import os
from backoff import retry_session
from bs4 import BeautifulSoup
from util import parse_money, write_json_to_file
import sys
from requests.exceptions import ConnectionError, Timeout
HTML_PARSER = 'lxml'
BASE_URL = 'https://www.ugent.be/student/nl/meer-dan-studeren/resto/ophetmenu/'
def _scrape_menu_table(page, table_index=0):
    """Fetch *page* (relative to BASE_URL) and parse the table at
    *table_index* into a list of {'name', 'price'} dicts.

    Shared helper: the three menu scrapers below differed only in the page
    fetched and which table on the page they read.
    """
    r = retry_session.get(BASE_URL + page)
    soup = BeautifulSoup(r.text, HTML_PARSER)
    table = soup.find_all('table')[table_index]
    data = []
    for row in table.find_all('tr'):
        columns = row.find_all('td')
        data.append({'name': columns[0].string,
                     'price': parse_money(columns[1].string)})
    return data


def get_breakfast():
    """Scrape the breakfast menu table."""
    return _scrape_menu_table('ontbijt.htm')


def get_drinks():
    """Scrape the drinks table (first table on the desserts/drinks page)."""
    return _scrape_menu_table('desserten-drank.htm', 0)


def get_desserts():
    """Scrape the desserts table (second table on the desserts/drinks page)."""
    return _scrape_menu_table('desserten-drank.htm', 1)
def main(output):
    """Scrape all extra-food menus and write them to extrafood.json inside
    the *output* folder."""
    menus = {
        'breakfast': get_breakfast(),
        'drinks': get_drinks(),
        'desserts': get_desserts(),
    }
    write_json_to_file(menus, os.path.join(output, "extrafood.json"))
# Command-line entry point: scrape the cafetaria pages and write
# extrafood.json into the requested output folder.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Run cafetaria scraper')
    parser.add_argument('output',
                        help='Path of the folder in which the output must be written. Will be created if needed.')
    args = parser.parse_args()
    output_path = os.path.abspath(args.output)  # Like realpath
    os.makedirs(output_path, exist_ok=True)  # Like mkdir -p
    try:
        main(output_path)
    except (ConnectionError, Timeout) as e:
        # Network failures are an expected failure mode; report and exit non-zero.
        print("Failed to connect: ", e, file=sys.stderr)
        sys.exit(1)
|
from backoff import retry_session
from bs4 import BeautifulSoup
from util import parse_money, stderr_print
import json
import sys
from requests.exceptions import ConnectionError, Timeout
HTML_PARSER = 'lxml'
OUTFILE = "resto/2.0/extrafood.json"
BASE_URL = 'https://www.ugent.be/student/nl/meer-dan-studeren/resto/ophetmenu/'
def _scrape_menu_table(page, table_index=0):
    """Fetch *page* (relative to BASE_URL) and parse the table at
    *table_index* into a list of {'name', 'price'} dicts.

    Shared helper: the three menu scrapers below differed only in the page
    fetched and which table on the page they read.
    """
    r = retry_session.get(BASE_URL + page)
    soup = BeautifulSoup(r.text, HTML_PARSER)
    table = soup.find_all('table')[table_index]
    data = []
    for row in table.find_all('tr'):
        columns = row.find_all('td')
        data.append({'name': columns[0].string,
                     'price': parse_money(columns[1].string)})
    return data


def get_breakfast():
    """Scrape the breakfast menu table."""
    return _scrape_menu_table('ontbijt.htm')


def get_drinks():
    """Scrape the drinks table (first table on the desserts/drinks page)."""
    return _scrape_menu_table('desserten-drank.htm', 0)


def get_desserts():
    """Scrape the desserts table (second table on the desserts/drinks page)."""
    return _scrape_menu_table('desserten-drank.htm', 1)
# Script entry point: scrape everything and dump it as JSON to OUTFILE.
if __name__ == '__main__':
    try:
        data = {'breakfast': get_breakfast(), 'drinks': get_drinks(), 'desserts': get_desserts()}
    except (ConnectionError, Timeout) as e:
        # Network failures are an expected failure mode; report and exit non-zero.
        stderr_print("Failed to connect: ", e)
        sys.exit(1)
    with open(OUTFILE, 'w') as outfile:
        json.dump(data, outfile, sort_keys=True, indent=4, separators=(',', ': '))
|
mit
|
Python
|
bb35e83b7977e15cd7df9d1f2e810a730dbafa45
|
update happy number
|
lujin123/algorithms,lujin123/algorithms
|
leetcode/happy_number.py
|
leetcode/happy_number.py
|
# Created by lujin at 6/3/2017
#
# 202. Happy Number
#
# Description:
#
# Write an algorithm to determine if a number is "happy".
#
# A happy number is a number defined by the following process: Starting with any positive integer,
# replace the number by the sum of the squares of its digits, and repeat the process until the number
# equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1.
# Those numbers for which this process ends in 1 are happy numbers.
#
# Example: 19 is a happy number
#
# 12 + 92 = 82
# 82 + 22 = 68
# 62 + 82 = 100
# 12 + 02 + 02 = 1
#
import math
class Solution(object):
    """LeetCode 202: decide whether a positive integer is a "happy" number
    (repeated digit-square sums reach 1 instead of cycling)."""

    def isHappy(self, n):
        """Track every intermediate sum; seeing a repeat means we entered a
        cycle that never reaches 1.

        :type n: int
        :rtype: bool
        """
        if n <= 0:
            return False
        seen = []
        while n != 1:
            total = sum(int(digit) ** 2 for digit in str(n))
            if total in seen:
                n = total
                break
            seen.append(total)
            n = total
        return n == 1

    def isHappy2(self, n):
        """Shortcut variant: every unhappy cycle contains 4, so iterate until
        we hit either 1 (happy) or 4 (unhappy).

        :param n: the candidate integer
        :return: True when *n* is a happy number
        """
        if n <= 0:
            return False
        while n != 1 and n != 4:
            n = sum(int(digit) ** 2 for digit in str(n))
        return n == 1

    def test(self):
        """Ad-hoc smoke checks printed to stdout."""
        print(self.isHappy(1))
        print(self.isHappy(7))
        print(self.isHappy(11))
        print(self.isHappy(100))
# Manual smoke-test entry point.
if __name__ == '__main__':
    Solution().test()
|
# Created by lujin at 6/3/2017
#
# 202. Happy Number
#
# Description:
#
# Write an algorithm to determine if a number is "happy".
#
# A happy number is a number defined by the following process: Starting with any positive integer,
# replace the number by the sum of the squares of its digits, and repeat the process until the number
# equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1.
# Those numbers for which this process ends in 1 are happy numbers.
#
# Example: 19 is a happy number
#
# 12 + 92 = 82
# 82 + 22 = 68
# 62 + 82 = 100
# 12 + 02 + 02 = 1
#
import math
class Solution(object):
    """LeetCode 202: decide whether a positive integer is a "happy" number
    (repeated digit-square sums reach 1 instead of cycling)."""

    def isHappy(self, n):
        """Track every intermediate sum; seeing a repeat means we entered a
        cycle that never reaches 1.

        :type n: int
        :rtype: bool
        """
        if n <= 0:
            return False
        loop = []
        while n != 1:
            num = 0
            while n:
                num += int(math.pow(n % 10, 2))
                n //= 10
            n = num
            if n in loop:
                break
            else:
                loop.append(num)
        return n == 1

    # FIX: this method was also named `isHappy`, silently shadowing the one
    # above (the first definition was dead code). Renamed to isHappy2.
    def isHappy2(self, n):
        """Shortcut variant: every unhappy cycle contains 4, so iterate until
        we hit either 1 (happy) or 4 (unhappy).

        :param n: the candidate integer
        :return: True when *n* is a happy number
        """
        if n <= 0:
            return False
        while n != 1 and n != 4:
            num = 0
            while n:
                num += int(math.pow(n % 10, 2))
                n //= 10
            n = num
        return n == 1

    def test(self):
        """Ad-hoc smoke checks printed to stdout."""
        print(self.isHappy(1))
        print(self.isHappy(7))
        print(self.isHappy(11))
        print(self.isHappy(100))
# Manual smoke-test entry point.
if __name__ == '__main__':
    Solution().test()
|
mit
|
Python
|
06e0499c5a83a15dafc8563579295b9a8b51f79f
|
Remove typing annotations for backwards compatibility
|
tbenthompson/cppimport,tbenthompson/cppimport,tbenthompson/cppimport
|
cppimport/find.py
|
cppimport/find.py
|
import os
import sys
import cppimport.config
def find_file_in_folders(filename, paths):
    """Return the full path of the first directory in *paths* containing an
    entry named *filename*, or None when no directory has it.

    Entries in *paths* that do not exist, or that are plain files, are
    skipped.
    """
    for folder in paths:
        if not os.path.exists(folder) or os.path.isfile(folder):
            continue
        if filename in os.listdir(folder):
            return os.path.join(folder, filename)
    return None
def find_matching_path_dirs(moduledir):
    """Return every sys.path directory that contains the relative directory
    *moduledir*; for an empty *moduledir*, return sys.path itself.
    """
    # FIX: was `moduledir is ''` — identity comparison of strings relies on
    # CPython interning (a SyntaxWarning since 3.8); use equality instead.
    if moduledir == '':
        return sys.path
    ds = []
    for base in sys.path:
        candidate = os.path.join(base, moduledir)
        if os.path.exists(candidate) and os.path.isdir(candidate):
            ds.append(candidate)
    return ds
def find_module_cpppath(modulename):
    """Map a dotted module name to its matching C++ source file.

    Tries every configured C++ extension in every sys.path directory that
    contains the module's package folder; returns the first hit or None.
    """
    base_path = modulename.replace('.', os.sep)
    moduledir = os.path.dirname(base_path + '.throwaway')
    search_dirs = [os.getcwd() if d == '' else d
                   for d in find_matching_path_dirs(moduledir)]
    for ext in cppimport.config.file_exts:
        candidate = find_file_in_folders(
            os.path.basename(base_path + ext), search_dirs)
        if candidate is not None:
            return candidate
    return None
def find_module_path(module_name, search_path=None):
    """
    Find the module path (pyd / so), while accounting for platform/arch naming
    :param module_name: The name of the module
    :param search_path: The path to search in. If None, searches system path.
    :return: The full path to the library or None if not found.
    """
    # Use importlib if python 3.4+, else imp
    if sys.version_info[0] > 3 or (sys.version_info[0] == 3 and sys.version_info[1] >= 4):
        from importlib.machinery import FileFinder, ExtensionFileLoader, EXTENSION_SUFFIXES
        # Restrict the finder to compiled-extension suffixes so a plain
        # .py file of the same name is never returned.
        file_finder = FileFinder(search_path, (ExtensionFileLoader, EXTENSION_SUFFIXES))
        # The search caches must be cleared to guaranteed find dynamically created modules
        file_finder.invalidate_caches()
        result = file_finder.find_spec(module_name)
        # find_spec() returns None on a miss; .origin is the file's full path.
        return None if not result else result.origin
    else:
        from imp import find_module  # Deprecated in 3.4
        # NOTE(review): imp.find_module expects a list of directories;
        # passing [None] when search_path is None looks unintended -- confirm.
        try:
            result = find_module(module_name, [search_path])
        except ImportError:
            result = None
        # imp.find_module returns a (file, pathname, description) tuple.
        return None if not result else result[1]
|
import os
import sys
import cppimport.config
def find_file_in_folders(filename, paths):
for d in paths:
if not os.path.exists(d):
continue
if os.path.isfile(d):
continue
for f in os.listdir(d):
if f == filename:
return os.path.join(d, f)
return None
def find_matching_path_dirs(moduledir):
    """Return the sys.path entries that contain the directory *moduledir*.

    An empty *moduledir* means the module lives at the top level, so the
    whole of sys.path applies.
    """
    # Bug fix: ``moduledir is ''`` tested object identity and only worked by
    # accident of CPython string interning (SyntaxWarning on modern Python).
    if moduledir == '':
        return sys.path
    matching = []
    for base in sys.path:
        candidate = os.path.join(base, moduledir)
        # os.path.isdir() already implies existence.
        if os.path.isdir(candidate):
            matching.append(candidate)
    return matching
def find_module_cpppath(modulename):
modulepath_without_ext = modulename.replace('.', os.sep)
moduledir = os.path.dirname(modulepath_without_ext + '.throwaway')
matching_dirs = find_matching_path_dirs(moduledir)
matching_dirs = [os.getcwd() if d == '' else d for d in matching_dirs]
for ext in cppimport.config.file_exts:
modulefilename = os.path.basename(modulepath_without_ext + ext)
outfilename = find_file_in_folders(modulefilename, matching_dirs)
if outfilename is not None:
return outfilename
return None
def find_module_path(module_name: str, search_path: str = None) -> str:
"""
Find the module path (pyd / so), while accounting for platform/arch naming
:param module_name: The name of the module
:param search_path: The path to search in. If None, searches system path.
:return: The full path to the library or None if not found.
"""
# Use importlib if python 3.4+, else imp
if sys.version_info[0] > 3 or (sys.version_info[0] == 3 and sys.version_info[1] >= 4):
from importlib.machinery import FileFinder, ExtensionFileLoader, EXTENSION_SUFFIXES
file_finder = FileFinder(search_path, (ExtensionFileLoader, EXTENSION_SUFFIXES))
# The search caches must be cleared to guaranteed find dynamically created modules
file_finder.invalidate_caches()
result = file_finder.find_spec(module_name)
return None if not result else result.origin
else:
from imp import find_module # Deprecated in 3.4
try:
result = find_module(module_name, [search_path])
except ImportError:
result = None
return None if not result else result[1]
|
mit
|
Python
|
aa059d6f2fd45ec0d7de7bb2cd0477ff3552eb14
|
Handle TLS errors in receivequeuethread
|
hb9kns/PyBitmessage,hb9kns/PyBitmessage,hb9kns/PyBitmessage,hb9kns/PyBitmessage
|
src/network/receivequeuethread.py
|
src/network/receivequeuethread.py
|
import errno
import Queue
import socket
import sys
import threading
import time
import addresses
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.bmproto import BMProto
from queues import receiveDataQueue
import protocol
import state
class ReceiveQueueThread(threading.Thread, StoppableThread):
    """Worker thread that drains receiveDataQueue and lets the matching
    connection object process its buffered incoming data."""
    def __init__(self, num=0):
        # Name the thread after its worker index so log output and thread
        # listings can distinguish multiple receive-queue workers.
        threading.Thread.__init__(self, name="ReceiveQueue_%i" %(num))
        self.initStop()
        self.name = "ReceiveQueue_%i" % (num)
        logger.info("init receive queue thread %i", num)
    def run(self):
        """Consume destination addresses from receiveDataQueue until the
        thread is stopped or a global shutdown is signalled."""
        while not self._stopped and state.shutdown == 0:
            try:
                # Block for at most one second so the stop flag is
                # re-checked regularly.
                dest = receiveDataQueue.get(block=True, timeout=1)
            except Queue.Empty:
                continue
            if self._stopped:
                break
            # cycle as long as there is data
            # methods should return False if there isn't enough data, or the connection is to be aborted
            # state_* methods should return False if there isn't enough data,
            # or the connection is to be aborted
            try:
                BMConnectionPool().getConnectionByAddr(dest).process()
            # KeyError = connection object not found
            # AttributeError = state isn't implemented
            except (KeyError, AttributeError):
                pass
            except socket.error as err:
                if err.errno == errno.EBADF:
                    # Bad file descriptor: the socket is gone, so ask the
                    # connection to close rather than keep processing.
                    BMConnectionPool().getConnectionByAddr(dest).set_state("close", 0)
                else:
                    logger.error("Socket error: %s", str(err))
            receiveDataQueue.task_done()
|
import Queue
import sys
import threading
import time
import addresses
from bmconfigparser import BMConfigParser
from debug import logger
from helper_threading import StoppableThread
from inventory import Inventory
from network.connectionpool import BMConnectionPool
from network.bmproto import BMProto
from queues import receiveDataQueue
import protocol
import state
class ReceiveQueueThread(threading.Thread, StoppableThread):
def __init__(self, num=0):
threading.Thread.__init__(self, name="ReceiveQueue_%i" %(num))
self.initStop()
self.name = "ReceiveQueue_%i" % (num)
logger.info("init receive queue thread %i", num)
def run(self):
while not self._stopped and state.shutdown == 0:
try:
dest = receiveDataQueue.get(block=True, timeout=1)
except Queue.Empty:
continue
if self._stopped:
break
# cycle as long as there is data
# methods should return False if there isn't enough data, or the connection is to be aborted
# state_* methods should return False if there isn't enough data,
# or the connection is to be aborted
try:
BMConnectionPool().getConnectionByAddr(dest).process()
# KeyError = connection object not found
# AttributeError = state isn't implemented
except (KeyError, AttributeError):
pass
receiveDataQueue.task_done()
|
mit
|
Python
|
e968983001cced5391a163ab282ef2f2ded492f6
|
Remove link to private document.
|
ScatterHQ/eliot,ClusterHQ/eliot,ScatterHQ/eliot,iffy/eliot,ScatterHQ/eliot
|
eliot/__init__.py
|
eliot/__init__.py
|
"""
Eliot: Logging as Storytelling
Suppose we turn from outside estimates of a man, to wonder, with keener
interest, what is the report of his own consciousness about his doings or
capacity: with what hindrances he is carrying on his daily labors; what
fading of hopes, or what deeper fixity of self-delusion the years are
marking off within him; and with what spirit he wrestles against universal
pressure, which will one day be too heavy for him, and bring his heart to
its final pause.
-- George Eliot, "Middlemarch"
"""
# Expose the public API:
from ._message import Message
from ._action import startAction, startTask, Action
from ._output import ILogger, Logger, MemoryLogger
from ._validation import Field, MessageType, ActionType
from ._traceback import writeTraceback, writeFailure
addDestination = Logger._destinations.add
removeDestination = Logger._destinations.remove
__all__ = ["Message", "writeTraceback", "writeFailure",
"startAction", "startTask", "Action",
"Field", "MessageType", "ActionType",
"ILogger", "Logger", "MemoryLogger", "addDestination",
"removeDestination",
]
|
"""
Eliot: An Opinionated Logging Library
Suppose we turn from outside estimates of a man, to wonder, with keener
interest, what is the report of his own consciousness about his doings or
capacity: with what hindrances he is carrying on his daily labors; what
fading of hopes, or what deeper fixity of self-delusion the years are
marking off within him; and with what spirit he wrestles against universal
pressure, which will one day be too heavy for him, and bring his heart to
its final pause.
-- George Eliot, "Middlemarch"
See http://wiki.hybrid-cluster.com/index.php?title=Logging_Design_Document for
motivation.
"""
# Expose the public API:
from ._message import Message
from ._action import startAction, startTask, Action
from ._output import ILogger, Logger, MemoryLogger
from ._validation import Field, MessageType, ActionType
from ._traceback import writeTraceback, writeFailure
addDestination = Logger._destinations.add
removeDestination = Logger._destinations.remove
__all__ = ["Message", "writeTraceback", "writeFailure",
"startAction", "startTask", "Action",
"Field", "MessageType", "ActionType",
"ILogger", "Logger", "MemoryLogger", "addDestination",
"removeDestination",
]
|
apache-2.0
|
Python
|
f1a84b0dc2d14b25b7d6e2e3f2f2d68002600d1a
|
Update routes
|
helix90/proptrunk,helix90/proptrunk
|
proptrunk/proptrunk.py
|
proptrunk/proptrunk.py
|
from flask import Flask, request, render_template
app = Flask(__name__)
@app.route('/', methods=['GET'])
def hello_world():
    """Render the landing page."""
    # Bug fix: a '.' typo between the template name and the keyword argument
    # ("...'. title=...") made this line a SyntaxError.
    # NOTE(review): Flask already resolves template names relative to the
    # templates/ folder, so 'templates/index.html' may need to be just
    # 'index.html' -- confirm against the project layout.
    return render_template('templates/index.html', title='Prop Trunk')
@app.route('/inventory', methods=['GET'])
def get_inventory():
    """List inventory items (placeholder)."""
    return 'Inventory'
@app.route('/inventory', methods=['POST'])
def post_inventory():
    """Create an inventory item (placeholder)."""
    # Process post body (JSON)
    # Insert
    # return object id
    return 'Inventory'
@app.route('/checkout')
def checkout():
    """Check an item out (placeholder)."""
    return 'Checkout'
@app.route('/checkin')
def checkin():
    """Check an item back in (placeholder)."""
    return 'Checkin'
@app.route('/users')
def user():
    """List users (placeholder)."""
    return 'Users'
@app.route('/reports')
def reports():
    """Reporting endpoint (placeholder)."""
    # Bug fix: the placeholder body was misspelled 'Repoert'.
    return 'Reports'
@app.errorhandler(404)
def not_found(error):
    """Render the custom 404 page for unknown URLs."""
    return render_template('error.html'), 404
if __name__ == '__main__':
    # Start Flask's development server when run as a script.
    app.run()
|
from flask import Flask, request, render_template
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Render the landing page."""
    # Bug fix: a '.' typo between the template name and the keyword argument
    # ("...'. title=...") made this line a SyntaxError.
    return render_template('templates/index.html', title='Prop Trunk')
@app.route('/inventory')
def inventory():
return 'Inventory'
@app.route('/checkout')
def checkout():
return 'Checkout'
@app.route('/checkin')
def checkin():
return 'Checkin'
@app.route('/users')
def user():
return 'Users'
@app.route('/reports')
def reports():
return 'Repoert'
@app.errorhandler(404)
def not_found(error):
return render_template('error.html'), 404
if __name__ == '__main__':
app.run()
|
apache-2.0
|
Python
|
cbe84c3220a3da4db36f10d84c63dc08231b3b9f
|
fix indentation
|
evansde77/cirrus,evansde77/cirrus,evansde77/cirrus
|
src/cirrus/plugins/uploaders/fabric_put.py
|
src/cirrus/plugins/uploaders/fabric_put.py
|
#!/usr/bin/env python
"""
_fabric_put_
Uploader plugin that uses fabric to do a remote put
"""
from fabric.operations import put
from cirrus.fabric_helpers import FabricHelper
from cirrus.logger import get_logger
from cirrus.upload_plugins import Uploader
from cirrus.configuration import get_pypi_auth
LOGGER = get_logger()
class Pypi(Uploader):
    """Uploader plugin that pushes a build artifact to a pypi server over
    ssh using fabric."""
    PLUGGAGE_OBJECT_NAME = 'fabric'
    def upload(self, opts, build_artifact):
        """
        upload to pypi via fabric over ssh

        :param opts: CLI options; opts.pypi_url overrides the configured
            server and opts.pypi_sudo controls sudo for the remote put.
        :param build_artifact: path of the local file to upload.
        """
        pypi_conf = self.package_conf.pypi_config()
        pypi_auth = get_pypi_auth()
        # The command-line URL wins over the package configuration.
        if opts.pypi_url:
            pypi_url = opts.pypi_url
        else:
            pypi_url = pypi_conf['pypi_url']
        # Prefer a dedicated ssh username when one is configured.
        if pypi_auth['ssh_username'] is not None:
            pypi_user = pypi_auth['ssh_username']
        else:
            pypi_user = pypi_auth['username']
        package_dir = pypi_conf['pypi_upload_path']
        LOGGER.info("Uploading {0} to {1}".format(build_artifact, pypi_url))
        with FabricHelper(pypi_url, pypi_user, pypi_auth['ssh_key']):
            # fabric put the file onto the pypi server
            put(build_artifact, package_dir, use_sudo=opts.pypi_sudo)
|
#!/usr/bin/env python
"""
_fabric_put_
Uploader plugin that uses fabric to do a remote put
"""
from fabric.operations import put
from cirrus.fabric_helpers import FabricHelper
from cirrus.logger import get_logger
from cirrus.upload_plugins import Uploader
from cirrus.configuration import get_pypi_auth
LOGGER = get_logger()
class Pypi(Uploader):
PLUGGAGE_OBJECT_NAME = 'fabric'
def upload(self, opts, build_artifact):
"""
upload to pypi via fabric over ssh
"""
pypi_conf = self.package_conf.pypi_config()
pypi_auth = get_pypi_auth()
if opts.pypi_url:
pypi_url = opts.pypi_url
else:
pypi_url = pypi_conf['pypi_url']
if pypi_auth['ssh_username'] is not None:
pypi_user = pypi_auth['ssh_username']
else:
pypi_user = pypi_auth['username']
package_dir = pypi_conf['pypi_upload_path']
LOGGER.info("Uploading {0} to {1}".format(build_artifact, pypi_url))
with FabricHelper(
pypi_url,
pypi_user,
pypi_auth['ssh_key']):
# fabric put the file onto the pypi server
put(build_artifact, package_dir, use_sudo=opts.pypi_sudo)
|
apache-2.0
|
Python
|
3b053c1f1c490ee9426bf65c5032e24e9f184568
|
Increase delay
|
bootstraponline/scrapy-broken-links
|
broken_links/broken_links/settings.py
|
broken_links/broken_links/settings.py
|
# -*- coding: utf-8 -*-
# Scrapy settings for broken_links project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'broken_links'
SPIDER_MODULES = ['broken_links.spiders']
NEWSPIDER_MODULE = 'broken_links.spiders'
# Custom useragent to enable easy server side monitoring
USER_AGENT = "scrapy_link_spider"
# Allow saving non-200 status codes.
HTTPERROR_ALLOW_ALL = True
# Make sure the crawler doesn't cause problems for the servers.
# http://doc.scrapy.org/en/latest/topics/autothrottle.html
AUTOTHROTTLE_ENABLED = True
AUTOTHROTTLE_START_DELAY = 20 # default is 5
AUTOTHROTTLE_MAX_DELAY = 10 * 60 # default is 60
|
# -*- coding: utf-8 -*-
# Scrapy settings for broken_links project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'broken_links'
SPIDER_MODULES = ['broken_links.spiders']
NEWSPIDER_MODULE = 'broken_links.spiders'
# Custom useragent to enable easy server side monitoring
USER_AGENT = "scrapy_link_spider"
# Allow saving non-200 status codes.
HTTPERROR_ALLOW_ALL = True
# Make sure the crawler doesn't cause problems for the servers.
# http://doc.scrapy.org/en/latest/topics/autothrottle.html
AUTOTHROTTLE_ENABLED = True
AUTOTHROTTLE_START_DELAY = 10.0 # default is 5
AUTOTHROTTLE_MAX_DELAY = 120.0 # default is 60
|
apache-2.0
|
Python
|
5bda2697254744dc42420751c1b49219f65be380
|
add head 'animation'
|
duarte-pompeu/snake,duarte-pompeu/snake
|
src/snake.py
|
src/snake.py
|
#!/usr/bin/python
from lib_common import *
class Snake:
    """The player-controlled snake: a head position, a velocity and a list
    of body segments (index 0 is the head)."""

    def __init__(self, x=0, y=0, size=0):
        self.position = Vec2(x,y)
        self.speed = Vec2(0,0)
        self.body_parts = list()
        self.body_parts.append(self.position)
        self.turning_points = list()
        self.turning_points.append(self.position)
        # Extra segments trail off to the left of the head.
        for i in xrange(1, size+1):
            self.body_parts.append(Vec2(x-i, y))

    def turn(self, x,y):
        """Set the velocity components directly."""
        self.speed.x = x
        self.speed.y = y

    def update(self):
        """Advance every segment by the current velocity."""
        for part in self.body_parts:
            part.add(self.speed)

    def size(self):
        """Number of segments, head included."""
        return len(self.body_parts)

    def draw(self):
        """Draw the head with a direction symbol and the body with '='."""
        head_symbol = self.get_head_symbol()
        draw_cur(self.getX(), self.getY(), head_symbol)
        for piece in self.body_parts[1:]:
            draw_cur(piece.x, piece.y, "=")

    def get_head_symbol(self):
        """Return the character for the head based on travel direction."""
        speed_x = self.getSpeedX()
        speed_y = self.getSpeedY()
        if speed_x > 0:
            return ">"
        elif speed_x < 0:
            return "<"
        elif speed_y > 0:
            return "^"
        elif speed_y < 0:
            return "V"
        # Bug fix: the original fell off the end and returned None when the
        # snake is stationary, so draw() would pass None to draw_cur.
        # "$" was the head symbol used before directional heads were added.
        return "$"

    def toString(self):
        msg = "SNAKE size: %d\n" % self.size()
        msg += "pos: (%s) $ speed (%s)" %(self.position.toString(), self.speed.toString())
        return msg

    def getX(self):
        return self.position.x

    def getY(self):
        return self.position.y

    def getSpeedX(self):
        return self.speed.x

    def getSpeedY(self):
        return self.speed.y
|
#!/usr/bin/python
from lib_common import *
class Snake:
def __init__(self, x=0, y=0, size=0):
self.position = Vec2(x,y)
self.speed = Vec2(0,0)
self.body_parts = list()
self.body_parts.append(self.position)
self.turning_points = list()
self.turning_points.append(self.position)
for i in xrange(1, size+1):
self.body_parts.append(Vec2(x-i, y))
def turn(self, x,y):
self.speed.x = x
self.speed.y = y
def update(self):
for part in self.body_parts:
part.add(self.speed)
def size(self):
return len(self.body_parts)
def toString(self):
msg = "SNAKE size: %d\n" % self.size()
msg += "pos: (%s) $ speed (%s)" %(self.position.toString(), self.speed.toString())
def draw(self):
draw_cur(self.getX(), self.getY(), "$")
for piece in self.body_parts[1:]:
draw_cur(piece.x, piece.y, "=")
def getX(self):
return self.position.x
def getY(self):
return self.position.y
return msg
|
mit
|
Python
|
075877c728e2db439b53a894fc66dff7d35922ba
|
Add search in landing
|
juanpflores94/Hodor,juanpflores94/Hodor,juanpflores94/Hodor
|
cultures/views.py
|
cultures/views.py
|
from django.shortcuts import get_object_or_404
from django.db.models import Q
from django.views.generic import(
ListView,
DetailView,
TemplateView
)
from .models import (
Culture,
God,
Temple,
Museum,
CultureHasPeriod,
)
class LandingView(ListView):
    """Landing page listing cultures, with optional free-text search."""
    template_name = 'prueba.html'
    model = Culture

    def get_queryset(self):
        """Return self.queryset, narrowed by the ?q= search term when given.

        Matches culture name/summary, religion name/description, and
        region name/description/country name, all case-insensitively.
        """
        base = self.queryset
        term = self.request.GET.get('q', None)
        if term is None:
            return base
        return Culture.objects.filter(
            Q(name__icontains=term) | Q(summary__icontains=term) |
            Q(religion__name__icontains=term) | Q(religion__description__icontains=term) |  # NOQA
            Q(region__name__icontains=term) | Q(region__country__name__icontains=term) |  # NOQA
            Q(region__description__icontains=term)
        )
class AdventureView(DetailView):
    """Detail page for a single culture, looked up by (partial) name."""
    context_object_name = 'culture'
    template_name = 'landing-maya.html'
    model = Culture
    def get_object(self):
        # Case-insensitive name match from the URL kwarg; 404s when absent.
        culture = get_object_or_404(Culture, name__icontains=self.kwargs['name']) # NOQA
        return culture
    def get_context_data(self, **kwargs):
        """Add the culture's gods, temples, museums and periods to the context."""
        context = super(AdventureView, self).get_context_data(**kwargs)
        context['gods'] = God.objects.filter(culture=self.get_object().pk)
        context['temples'] = Temple.objects.filter(culture=self.get_object().pk) # NOQA
        context['museums'] = Museum.objects.filter(cultures=self.get_object().pk) # NOQA
        context['periods'] = CultureHasPeriod.objects.filter(culture=self.get_object().pk).order_by('pk') # NOQA
        return context
|
from django.shortcuts import get_object_or_404
from django.views.generic import(
ListView,
DetailView,
TemplateView
)
from .models import (
Culture,
God,
Temple,
Museum,
CultureHasPeriod,
)
class LandingView(ListView):
template_name = 'prueba.html'
model = Culture
class AdventureView(DetailView):
context_object_name = 'culture'
template_name = 'landing-maya.html'
model = Culture
def get_object(self):
culture = get_object_or_404(Culture, name__icontains=self.kwargs['name']) # NOQA
return culture
def get_context_data(self, **kwargs):
context = super(AdventureView, self).get_context_data(**kwargs)
context['gods'] = God.objects.filter(culture=self.get_object().pk)
context['temples'] = Temple.objects.filter(culture=self.get_object().pk) # NOQA
context['museums'] = Museum.objects.filter(cultures=self.get_object().pk) # NOQA
context['periods'] = CultureHasPeriod.objects.filter(culture=self.get_object().pk).order_by('pk') # NOQA
return context
|
apache-2.0
|
Python
|
446fe02ce407e3d14c1dddd5e58c7b3f54b2ccf9
|
delete functions
|
piraaa/VideoDigitalWatermarking
|
src/video.py
|
src/video.py
|
#
# video.py
# Created by pira on 2017/07/31.
#
#coding: utf-8
u"""For video processing."""
import numpy as np
import cv2
def video2image(filename, n=0):
    u"""Read mpeg video and divide into jpeg images.
    @param filename:video filename
    @param n       :number of export images (if n=0, this function exports all images in video.)
    @return count  :number of exported images
    """
    count = 1
    fnin = filename[:filename.rfind('.')]  # file name without its extension
    cap = cv2.VideoCapture(filename)
    try:
        if n == 0:
            n = int(cap.get(7))  # CV_CAP_PROP_FRAME_COUNT
        fps = round(cap.get(5))  # CV_CAP_PROP_FPS
        height = int(cap.get(4))  # CV_CAP_PROP_FRAME_HEIGHT
        width = int(cap.get(3))  # CV_CAP_PROP_FRAME_WIDTH
        print('frame num =', n)
        print('fps =', fps)
        print('height =', height)  # typo fix: was 'hright'
        print('width =', width, '\n')
        for i in np.arange(n):
            count = i+1
            fnout = fnin + ('%06d' % count) + '.jpg'
            ret, frame = cap.read()
            cv2.imwrite(fnout, frame)
    finally:
        # Resource fix: always release the capture handle, even on error;
        # the original left the video file open.
        cap.release()
    print('Export', count, 'jpeg Images.')
    return count
#filename = 'test.mov'
#n = video2image(filename, )
#print(n)
|
#
# video.py
# Created by pira on 2017/07/31.
#
#coding: utf-8
u"""For video processing."""
import numpy as np
import cv2
def showVideo(filename):
pass
def video2image(filename, n=0):
u"""Read mpeg video and divide into jpeg images.
@param filename:video filename
@param n :number of export images (if n=0, this function exports all images in video.)
@return count :number of exported images
"""
count = 1
fnin = filename[:filename.rfind('.')] #拡張子をとったファイル名を取得
cap = cv2.VideoCapture(filename)
if n == 0:
n = int(cap.get(7)) #CV_CAP_PROP_FRAME_COUNT
fps = round(cap.get(5)) #CV_CAP_PROP_FPS
height = int(cap.get(4)) #CV_CAP_PROP_FRAME_HEIGHT
width = int(cap.get(3)) #CV_CAP_PROP_FRAME_WIDTH
print('frame num =', n)
print('fps =', fps)
print('hright =', height)
print('width =', width, '\n')
for i in np.arange(n):
count = i+1
fnout = '%06d' % count
fnout = fnin + fnout + '.jpg'
ret, frame = cap.read()
cv2.imwrite(fnout, frame)
print('Export', count, 'jpeg Images.')
return count
def image2video():
pass
#filename = 'test.mov'
#n = video2image(filename, )
#print(n)
|
mit
|
Python
|
f43450ac6ecff870693bf3fcb0d638fcf334d492
|
Update template file.
|
vikash-india/ProgrammingProblems,vikash-india/ProgrammingProblems
|
src/project_euler/P000_Template.py
|
src/project_euler/P000_Template.py
|
# Description: Write Here
import logging
"""
Technique
- Write Here
Note
- Write Here
Instrumentation
- System Details: 8x Intel Core i7-3630QM CPU @ 2.40GHz, 16GB RAM, Ubuntu 14.04
- Input Details: Write Here
- Time for 100 runs: Minimum - NA sec, Average - NA sec, Maximum NA sec
- Write Here
"""
def function_name():
pass
# Main
def main():
"""Main function to test the above implementation. """
# Set logging level from DEBUG, INFO, WARNING. ERROR, CRITICAL
logging.basicConfig(level=logging.DEBUG)
result = function_name()
print 'Write Here {0} '.format(result)
# Call Main
main()
|
# Description: Write Here
"""
Technique
- Write Here
Note
- Write Here
Instrumentation
- System Details: 8x Intel Core i7-3630QM CPU @ 2.40GHz, 16GB RAM, Ubuntu 14.04
- Input Details: Write Here
- Time for 100 runs: Minimum - NA sec, Average - NA sec, Maximum NA sec
- Write Here
"""
def function_name():
pass
# Main
def main():
"""Main function to test the above implementation. """
result = function_name()
print 'Write Here {0} '.format(result)
# Call Main
main()
|
mit
|
Python
|
1ae42227d9df745420c1a3db11893589d91ba83e
|
Add search and search_auto_paging_iter abstract methods (#873)
|
stripe/stripe-python
|
stripe/api_resources/abstract/searchable_api_resource.py
|
stripe/api_resources/abstract/searchable_api_resource.py
|
from __future__ import absolute_import, division, print_function
from stripe.api_resources.abstract.api_resource import APIResource
class SearchableAPIResource(APIResource):
    """Base class for API resources that expose a /search endpoint."""
    @classmethod
    def _search(
        cls,
        search_url,
        api_key=None,
        stripe_version=None,
        stripe_account=None,
        **params
    ):
        """Issue a GET against *search_url* with the given credentials and
        search parameters, returning the raw API response."""
        return cls._static_request(
            "get",
            search_url,
            api_key=api_key,
            stripe_version=stripe_version,
            stripe_account=stripe_account,
            params=params,
        )
    @classmethod
    def search(cls, *args, **kwargs):
        # Abstract: concrete resources must provide their own search().
        raise NotImplementedError
    @classmethod
    def search_auto_paging_iter(cls, *args, **kwargs):
        # Abstract: concrete resources must provide their own paging iterator.
        raise NotImplementedError
|
from __future__ import absolute_import, division, print_function
from stripe.api_resources.abstract.api_resource import APIResource
class SearchableAPIResource(APIResource):
@classmethod
def _search(
cls,
search_url,
api_key=None,
stripe_version=None,
stripe_account=None,
**params
):
return cls._static_request(
"get",
search_url,
api_key=api_key,
stripe_version=stripe_version,
stripe_account=stripe_account,
params=params,
)
|
mit
|
Python
|
5f7bf4f2e885c2343b89bc180b7aaad634b3a011
|
Remove TestConnectRemote decorator for FreeBSD
|
apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb
|
test/functionalities/connect_remote/TestConnectRemote.py
|
test/functionalities/connect_remote/TestConnectRemote.py
|
"""
Test lldb 'process connect' command.
"""
import os
import unittest2
import lldb
import pexpect
from lldbtest import *
class ConnectRemoteTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_connect_remote(self):
"""Test "process connect connect:://localhost:12345"."""
# First, we'll start a fake debugserver (a simple echo server).
fakeserver = pexpect.spawn('./EchoServer.py')
# Turn on logging for what the child sends back.
if self.TraceOn():
fakeserver.logfile_read = sys.stdout
# Schedule the fake debugserver to be shutting down during teardown.
def shutdown_fakeserver():
fakeserver.close()
self.addTearDownHook(shutdown_fakeserver)
# Wait until we receive the server ready message before continuing.
fakeserver.expect_exact('Listening on localhost:12345')
# Connect to the fake server....
self.runCmd("process connect -p gdb-remote connect://localhost:12345")
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
Test lldb 'process connect' command.
"""
import os
import unittest2
import lldb
import pexpect
from lldbtest import *
class ConnectRemoteTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@expectedFailureFreeBSD('llvm.org/pr18313')
def test_connect_remote(self):
"""Test "process connect connect:://localhost:12345"."""
# First, we'll start a fake debugserver (a simple echo server).
fakeserver = pexpect.spawn('./EchoServer.py')
# Turn on logging for what the child sends back.
if self.TraceOn():
fakeserver.logfile_read = sys.stdout
# Schedule the fake debugserver to be shutting down during teardown.
def shutdown_fakeserver():
fakeserver.close()
self.addTearDownHook(shutdown_fakeserver)
# Wait until we receive the server ready message before continuing.
fakeserver.expect_exact('Listening on localhost:12345')
# Connect to the fake server....
self.runCmd("process connect -p gdb-remote connect://localhost:12345")
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
apache-2.0
|
Python
|
2c967d42f141f744e0d84c552dd66ea8e3d75db1
|
fix end timestep for flows
|
jdherman/stockflow,wolfd/stockflow
|
stockflow.py
|
stockflow.py
|
import numpy as np
import collections
class simulation:
    """Stock-and-flow simulation over a fixed number of timesteps.

    NOTE: written for Python 2 (uses xrange / dict.itervalues).
    """
    def __init__(self, tstep):
        # Stock name -> numpy time series. Flows keep insertion order
        # (OrderedDict) so they are applied deterministically each step.
        self.stocks = {}
        self.flows = collections.OrderedDict()
        self.tstep = tstep
    def __getattr__(self,key):
        # Allow sim.<name> access for both stocks and flow value series.
        return self.stocks[key] if key in self.stocks else self.flows[key]['vals']
    def validate_key(self,key):
        # Reject duplicate names across both stocks and flows.
        if key in self.stocks or key in self.flows:
            raise NameError("Variable " + key + " already defined.")
    def init_stocks(self,icdict):
        # Bulk-create stocks from a {name: initial_condition} mapping.
        for k,v in icdict.items():
            self.stock(k,v)
    def stock(self, key, IC):
        self.validate_key(key)
        self.stocks[key] = np.full((self.tstep,), IC) # init time series of stock
    def flow(self, key, f, start=None, end=None):
        # f(t) computes the flow at timestep t; start/end name the stocks it
        # drains/fills (either may be None for a pure source or sink).
        self.validate_key(key)
        self.flows[key] = {'start': start, 'end': end, 'f': f, 'vals': np.zeros((self.tstep,))}
    def run(self):
        for t in xrange(1,self.tstep):
            for stock in self.stocks.itervalues(): # initialize stocks at prior values
                stock[t] = stock[t-1]
            for flow in self.flows.itervalues(): # calculate flows only once. distribute to stocks.
                flow['vals'][t-1] = flow['f'](t-1)
                if flow['start'] is not None:
                    self.stocks[flow['start']][t] -= flow['vals'][t-1]
                if flow['end'] is not None:
                    self.stocks[flow['end']][t] += flow['vals'][t-1]
        for flow in self.flows.itervalues(): # calculate flows at final timestep
            flow['vals'][self.tstep-1] = flow['f'](self.tstep-1)
|
import numpy as np
class simulation:
def __init__(self, tstep):
self.stocks = {}
self.flows = {}
self.tstep = tstep
def __getattr__(self,key):
return self.stocks[key] if key in self.stocks else self.flows[key]['vals']
def validate_key(self,key):
if key in self.stocks or key in self.flows:
raise NameError("Variable " + key + " already defined.")
def init_stocks(self,icdict):
for k,v in icdict.items():
self.stock(k,v)
def stock(self, key, IC):
self.validate_key(key)
self.stocks[key] = np.full((self.tstep,), IC) # init time series of stock
def flow(self, key, f, start=None, end=None):
self.validate_key(key)
self.flows[key] = {'start': start, 'end': end, 'f': f, 'vals': np.zeros((self.tstep,))}
def run(self):
for t in xrange(1,self.tstep):
for stock in self.stocks.itervalues(): # initialize stocks at prior values
stock[t] = stock[t-1]
for flow in self.flows.itervalues(): # calculate flows only once. distribute to stocks.
flow['vals'][t-1] = flow['f'](t-1)
if flow['start'] is not None:
self.stocks[flow['start']][t] -= flow['vals'][t-1]
if flow['end'] is not None:
self.stocks[flow['end']][t] += flow['vals'][t-1]
|
mit
|
Python
|
4791a4a97200a62c195931475c1ee3d7bb3731f8
|
Add Table to reverb/__init__.py.
|
deepmind/reverb,deepmind/reverb,deepmind/reverb,deepmind/reverb
|
reverb/__init__.py
|
reverb/__init__.py
|
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reverb."""
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
from reverb.platform.default import ensure_tf_install
ensure_tf_install.ensure_tf_version()
# Cleanup symbols to avoid polluting namespace.
del ensure_tf_install
# pylint: enable=g-bad-import-order
from reverb import distributions
from reverb import item_selectors as selectors
from reverb import rate_limiters
from reverb.client import Client
from reverb.client import Writer
from reverb.errors import ReverbError
# TODO(b/157210924): Rename TimeoutError.
from reverb.errors import TimeoutError # pylint: disable=redefined-builtin
from reverb.replay_sample import ReplaySample
from reverb.replay_sample import SampleInfo
from reverb.server import PriorityTable
from reverb.server import Server
from reverb.server import Table
from reverb.tf_client import ReplayDataset
from reverb.tf_client import TFClient
|
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reverb."""
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
from reverb.platform.default import ensure_tf_install
ensure_tf_install.ensure_tf_version()
# Cleanup symbols to avoid polluting namespace.
del ensure_tf_install
# pylint: enable=g-bad-import-order
from reverb import distributions
from reverb import item_selectors as selectors
from reverb import rate_limiters
from reverb.client import Client
from reverb.client import Writer
from reverb.errors import ReverbError
from reverb.errors import TimeoutError
from reverb.replay_sample import ReplaySample
from reverb.replay_sample import SampleInfo
from reverb.server import PriorityTable
from reverb.server import Server
from reverb.tf_client import ReplayDataset
from reverb.tf_client import TFClient
|
apache-2.0
|
Python
|
f82f41e8c14d665eef6dedc94dfef84054970e9d
|
modify how we define trusted root basedir
|
onelab-eu/sfa,yippeecw/sfa,onelab-eu/sfa,yippeecw/sfa,yippeecw/sfa,onelab-eu/sfa
|
geni/util/trustedroot.py
|
geni/util/trustedroot.py
|
import os
from gid import *
from geni.util.config import Config
class TrustedRootList():
    """Maintains an on-disk collection of trusted root GIDs, one ``<hrn>.gid`` file per root."""
    def __init__(self, dir=None):
        # Default to <config.path>/trusted_roots when no explicit directory is given.
        if not dir:
            config = Config()
            self.basedir = config.path + os.sep + 'trusted_roots'
        else:
            self.basedir = dir
        # create the directory to hold the files
        try:
            os.makedirs(self.basedir)
        # if the path already exists then pass
        except OSError, (errno, strerr):
            # errno 17 is EEXIST. Any other OSError is also swallowed here
            # (caught but never re-raised) - NOTE(review): likely unintended, confirm.
            if errno == 17:
                pass
    def add_gid(self, gid):
        """Persist *gid* under ``<basedir>/<hrn>.gid``."""
        fn = os.path.join(self.basedir, gid.get_hrn() + ".gid")
        gid.save_to_file(fn)
    def get_list(self):
        """Load every regular file in basedir as a GID and return them as a list."""
        gid_list = []
        file_list = os.listdir(self.basedir)
        for gid_file in file_list:
            fn = os.path.join(self.basedir, gid_file)
            if os.path.isfile(fn):
                gid = GID(filename = fn)
                gid_list.append(gid)
        return gid_list
|
import os
from gid import *
class TrustedRootList():
    """Maintains an on-disk collection of trusted root GIDs, one ``<hrn>.gid`` file per root."""
    def __init__(self, dir="./trusted_roots"):
        self.basedir = dir
        # create the directory to hold the files
        try:
            os.makedirs(self.basedir)
        # if the path already exists then pass
        except OSError, (errno, strerr):
            # errno 17 is EEXIST. Any other OSError is also swallowed here
            # (caught but never re-raised) - NOTE(review): likely unintended, confirm.
            if errno == 17:
                pass
    def add_gid(self, gid):
        """Persist *gid* under ``<basedir>/<hrn>.gid``."""
        fn = os.path.join(self.basedir, gid.get_hrn() + ".gid")
        gid.save_to_file(fn)
    def get_list(self):
        """Load every regular file in basedir as a GID and return them as a list."""
        gid_list = []
        file_list = os.listdir(self.basedir)
        for gid_file in file_list:
            fn = os.path.join(self.basedir, gid_file)
            if os.path.isfile(fn):
                gid = GID(filename = fn)
                gid_list.append(gid)
        return gid_list
|
mit
|
Python
|
6168b224249f68170f4c41eaff83e9a9dd379bfb
|
remove add_bindings method
|
jkpl/kaarmebot
|
kaarmebot/dispatcher.py
|
kaarmebot/dispatcher.py
|
import collections
Message = collections.namedtuple('Message', ['source', 'target', 'contents'])
class MessageDispatcher:
    """Routes messages to handlers registered per message class and predicate."""
    def __init__(self):
        # Maps a message class to its list of (predicate, handler) pairs.
        self.routing_classes = {}
    def add_binding(self, routing_class, predicate, handler):
        """Register *handler* for *routing_class* messages accepted by *predicate*."""
        self.routing_classes.setdefault(routing_class, []).append((predicate, handler))
    def remove_binding(self, routing_class, predicate, handler):
        """Remove a previously registered (predicate, handler) pair, if any."""
        bindings = self.routing_classes.get(routing_class)
        if bindings:
            bindings.remove((predicate, handler))
    def get_handlers_for_message(self, message):
        """Yield every handler whose predicate accepts *message*."""
        for predicate, handler in self.routing_classes.get(message.__class__, ()):
            if predicate(message):
                yield handler
    def dispatch(self, message):
        """Invoke all matching handlers and return their results as a list."""
        return [handler(message) for handler in self.get_handlers_for_message(message)]
|
import collections
Message = collections.namedtuple('Message', ['source', 'target', 'contents'])
class MessageDispatcher:
    """Routes messages to handlers registered per message class and predicate."""
    def __init__(self):
        # Maps a message class to its list of (predicate, handler) pairs.
        self.routing_classes = {}
    def add_binding(self, routing_class, predicate, handler):
        """Register *handler* for *routing_class* messages accepted by *predicate*."""
        self.routing_classes.setdefault(routing_class, []).append((predicate, handler))
    def add_bindings(self, *bindings):
        """Register several (routing_class, predicate, handler) triples at once."""
        for b in bindings:
            self.add_binding(*b)
    def remove_binding(self, routing_class, predicate, handler):
        """Remove a previously registered (predicate, handler) pair, if any."""
        registered = self.routing_classes.get(routing_class)
        if registered:
            registered.remove((predicate, handler))
    def get_handlers_for_message(self, message):
        """Yield every handler whose predicate accepts *message*."""
        for predicate, handler in self.routing_classes.get(message.__class__, ()):
            if predicate(message):
                yield handler
    def dispatch(self, message):
        """Invoke all matching handlers and return their results as a list."""
        return [handler(message) for handler in self.get_handlers_for_message(message)]
|
bsd-2-clause
|
Python
|
98f33028451122b39c49a89d367a406dfd641dc0
|
use PID class
|
smaria/autonomous-sailing-robot,Southampton-Maritime-Robotics/autonomous-sailing-robot
|
src/boat_pid_control/src/boat_pid_control/rudderPID.py
|
src/boat_pid_control/src/boat_pid_control/rudderPID.py
|
"""
PID control for the sailing robot
controling sail position
based on goal sail direction
Inputs:
- current heading
- goal heading
Output:
- Change in motor position/motor position
TODO:
consider tack and jibe
"""
import rospy
from pid_controller_class import PID
# Controller gains - only the proportional term is non-zero for now.
PROPORTIONAL_GAIN = 0.1
INTEGRAL_GAIN = 0
DERIVATIVE_GAIN = 0
# Symmetric clamp passed to the PID helper for its integral accumulator.
INTEGRAL_LIMIT = 1
# Module-level controller instance: stateful across get_pid() calls.
controller = PID(PROPORTIONAL_GAIN, INTEGRAL_GAIN, DERIVATIVE_GAIN, INTEGRAL_LIMIT, -INTEGRAL_LIMIT)
# Placeholder headings - presumably replaced by ROS topic input; TODO confirm.
currentHeading = 23
goalHeading = 35
def get_pid(currentHeading, goalHeading):
    """Compute a PID correction for the heading error and return a rudder position.

    NOTE(review): the computed ``correction`` is currently unused and a constant
    rudder position is returned - this is stub behaviour (see trailing TODO).
    """
    # with new ROS input for goal or current heading
    # TODO Error calculation for angular error!
    error = currentHeading - goalHeading
    correction = controller.update_PID(error)
    rudder_position = 2
    #translate correction to servo change ...
    return rudder_position
|
"""
PID control for the sailing robot
controling sail position
based on goal sail direction
Inputs:
- current heading
- goal heading
Output:
- Change in motor position/motor position
TODO:
consider tack and jibe
"""
import rospy
# PID gains - integral and derivative terms are disabled for now.
PROPORTIONAL_GAIN = 0.1
INTEGRAL_GAIN = 0
DERIVATIVE_GAIN = 0
# Placeholder headings used during development.
currentHeading = 23
goalHeading = 35
def get_pid(currentHeading, goalHeading):
    """Return a rudder position for the given heading error (stub implementation)."""
    # with new ROS input for goal or current heading
    # Error calculation for angular error!
    heading_error = currentHeading - goalHeading
    proportional_term = PROPORTIONAL_GAIN * heading_error
    integral_term = 0
    derivative_term = 0
    correction = proportional_term + integral_term + derivative_term  # currently unused
    rudder_position = 2
    #translate correction to servo change ...
    return rudder_position
|
bsd-2-clause
|
Python
|
ce51f1c3829ac44e67c51a8fc97577f5f897d9e7
|
bump version
|
collab-project/django-require-i18n,collab-project/django-require-i18n
|
require_i18n/__init__.py
|
require_i18n/__init__.py
|
# Copyright Collab 2015
# shortcuts
from .util import extract_tower_json
# version information: (major, minor, micro, pre-release tag)
__version__ = (1, 0, 0, 'b2')
#: For example: `2.0.0`
short_version = '.'.join(map(str, __version__[:3]))
#: For example: `2.0.0a1`
version = ''.join(['.'.join(map(str, __version__[:-1])), str(__version__[-1])])
|
# Copyright Collab 2015
# shortcuts
from .util import extract_tower_json
# version information: (major, minor, micro, pre-release tag)
__version__ = (1, 0, 0, 'b1')
#: For example: `2.0.0`
short_version = '.'.join(map(str, __version__[:3]))
#: For example: `2.0.0a1`
version = ''.join(['.'.join(map(str, __version__[:-1])), str(__version__[-1])])
|
mit
|
Python
|
4d46001296ad083df6827a9c97333f0f093f31bd
|
Document new evil magic, and add required var.
|
decklin/ennepe
|
example/config.py
|
example/config.py
|
# Mnemosyne configuration
# =======================
#
# This file is a Python script. When run, the following variables will be
# defined for you; you may change or add to them as you see fit.
#
# * ``entries_dir``: a Maildir containing all the blog entries.
# * ``layout_dir``: the blog's layout, as a skeleton directory tree.
# * ``style_dir``: empy styles used for filling layout templates.
# * ``output_dir``: location where we will write the generated pages.
#
# These will be $HOME/Mnemosyne/{entries,layout,style,htdocs} respectively.
#
# * ``locals``: a dict of default local variables passed to all templates.
#
# This will contain the keys __version__, __url__, __author__, and __email__.
#
# * ``MnemosyneEntry``: a class used to represent each entry passed to the
# templates.
#
# If you wish to extend this class, you may define a new class ``Entry`` here,
# using ``MnemosyneEntry`` as its base class. Any methods with a name of the
# form ``get_ATTRIBUTE`` will be used to provide e.ATTRIBUTE at runtime.
# ``locals`` is the dict of default template variables injected by the
# Mnemosyne runner (see the header comment above), not the builtin function.
locals['blogname'] = 'Example Blog'
locals['base'] = 'http://example.invalid'
class Entry:
    """Extends MnemosyneEntry: exposes e.organization from the message headers."""
    def get_organization(self):
        # self.m is the underlying message object; missing header yields None.
        message = self.m
        return message.get('Organization')
|
# Mnemosyne configuration
# =======================
#
# This file is a Python script. When run, the following variables will be
# defined for you; you may change or add to them as you see fit.
#
# ``entries_dir``: a Maildir containing all the blog entries.
# ``layout_dir``: the blog's layout, as a skeleton directory tree.
# ``style_dir``: empy styles used for filling layout templates.
# ``output_dir``: location where we will write the generated pages.
#
# These will be $HOME/Mnemosyne/{entries,layout,style,htdocs} respectively.
#
# ``vars``: a dict of default local variables passed to all templates.
#
# This will contain the keys __version__, __url__, __author__, and __email__.
#
# You may also define functions here to add 'magic' attributes to each entry.
# A function with a name of the form ``make_MAGIC`` (which takes a single
# argument, the entry) will be used to create an attribute ``e._MAGIC`` for
# each entry ``e``. Either a single value or a list of values may be returned.
#
# In your layout, a file or directory name containing ``__MAGIC__`` will then
# be evaluated once for each value ``make_MAGIC`` returns, with the entries
# for which ``make_MAGIC`` returns that value or a list containing it.
# ``vars`` is the dict of default template variables injected by the
# Mnemosyne runner (see the header comment above), not the builtin function.
vars['blogname'] = 'Example Blog'
class Entry:
    """Adds an 'organization' magic attribute sourced from the message headers."""
    def get_organization(self):
        # self.m is the underlying message object; missing header yields None.
        message = self.m
        return message.get('Organization')
|
isc
|
Python
|
f818b7b8e23bae7d9ceb359ff9a0a3264c8ba69f
|
Remove unused DATABASE_URL env variable
|
paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms
|
example_config.py
|
example_config.py
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
# Every name listed here must be set as a Heroku config var
# (``heroku config:set``) for the app to boot in production.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
                           'GITHUB_SECRET', 'REPO_OWNER', 'REPO_NAME',
                           'REPO_OWNER_ACCESS_TOKEN', 'REDISCLOUD_URL',
                           'REDIS_URL', 'MAILCHIMP_API_KEY',
                           'MAILCHIMP_LIST_ID', 'MAILCHIMP_STACKS_GROUP_NAME',
                           'SECONDARY_REPO_OWNER', 'SECONDARY_REPO_NAME',
                           'BASE_URL', 'CELERY_BROKER_URL',
                           'CELERY_TASK_SERIALIZER')
class Config(object):
    """Base configuration: safe defaults, overridden per environment below."""
    DEBUG = False
    CSRF_ENABLED = True
    HEROKU = False
    # Placeholder - must be overridden with a real secret in deployment.
    SECRET_KEY = 'not-a-good-value'
    # Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
    # and GITHUB_SECRET should allow full-access to this database.
    GITHUB_CLIENT_ID = 'replace-me'
    GITHUB_SECRET = 'replace-me'
    REPO_OWNER = None
    REPO_NAME = None
    REPO_OWNER_ACCESS_TOKEN = None
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_BROKER_URL = None
    # Secondary (optional) repo for articles that are not editable
    SECONDARY_REPO_OWNER = None
    SECONDARY_REPO_NAME = None
    # For caching
    REDISCLOUD_URL = None
    # For celery
    REDIS_URL = None
    MAILCHIMP_API_KEY = None
    MAILCHIMP_LIST_ID = None
    MAILCHIMP_STACKS_GROUP_NAME = None
class DevelopmentConfig(Config):
    """Development overrides: same as Config but with debugging enabled."""
    DEBUG = True
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
# Every name listed here must be set as a Heroku config var
# (``heroku config:set``) for the app to boot in production.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
                           'GITHUB_SECRET', 'DATABASE_URL',
                           'REPO_OWNER', 'REPO_NAME',
                           'REPO_OWNER_ACCESS_TOKEN', 'REDISCLOUD_URL',
                           'REDIS_URL', 'MAILCHIMP_API_KEY',
                           'MAILCHIMP_LIST_ID', 'MAILCHIMP_STACKS_GROUP_NAME',
                           'SECONDARY_REPO_OWNER', 'SECONDARY_REPO_NAME',
                           'BASE_URL', 'CELERY_BROKER_URL',
                           'CELERY_TASK_SERIALIZER')
class Config(object):
    """Base configuration: safe defaults, overridden per environment below."""
    DEBUG = False
    CSRF_ENABLED = True
    HEROKU = False
    # Placeholder - must be overridden with a real secret in deployment.
    SECRET_KEY = 'not-a-good-value'
    # Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
    # and GITHUB_SECRET should allow full-access to this database.
    GITHUB_CLIENT_ID = 'replace-me'
    GITHUB_SECRET = 'replace-me'
    REPO_OWNER = None
    REPO_NAME = None
    REPO_OWNER_ACCESS_TOKEN = None
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_BROKER_URL = None
    # Secondary (optional) repo for articles that are not editable
    SECONDARY_REPO_OWNER = None
    SECONDARY_REPO_NAME = None
    # For caching
    REDISCLOUD_URL = None
    # For celery
    REDIS_URL = None
    MAILCHIMP_API_KEY = None
    MAILCHIMP_LIST_ID = None
    MAILCHIMP_STACKS_GROUP_NAME = None
class DevelopmentConfig(Config):
    """Development overrides: same as Config but with debugging enabled."""
    DEBUG = True
|
agpl-3.0
|
Python
|
549190dc81bd4e666a8ca52eed8726a5717a8dde
|
fix syntax error
|
BrianHicks/trek
|
migrate/__init__.py
|
migrate/__init__.py
|
# -*- coding: utf-8 -*-
from .migration import Migration
from importlib import import_module
import os
class Migrator(object):
    """Discovers migration files in a directory and applies them via a runner backend."""

    def __init__(self, count, runner_path, migrations_dir, direction, *args, **kwargs):
        self.count = count
        self.migrations_dir = migrations_dir
        self.direction = direction
        runner_cls = self.runner(runner_path)
        # NOTE: this deliberately shadows the runner() method on the instance.
        self.runner = runner_cls(*args, **kwargs)
        self.current = self.runner.version()

    def runner(self, path):
        """Resolve a dotted ``package.module.ClassName`` path to the runner class.

        BUG FIX: ``import_module(name, package)`` tried to import the bare class
        name as an absolute module; import the module, then fetch the attribute.
        """
        module_path, class_name = path.rsplit('.', 1)
        return getattr(import_module(module_path), class_name)

    def migrations_to_run(self):
        """Return the migration file names to apply, honouring direction and count."""
        try:
            # BUG FIX: was ``names = names(os.listdir(...))`` - a call on the
            # still-unbound name. Sort so migrations apply in lexicographic order.
            names = sorted(os.listdir(self.migrations_dir))
        except OSError:  # explicitly raising this. Deal with it!
            raise
        if not names:
            raise ValueError('No migrations to run in %s' % self.migrations_dir)
        if self.direction == 'up':
            # BUG FIX: both branches referenced an undefined ``migrations``.
            return [name for name in names if self.current < name][:self.count]
        elif self.direction == 'down':
            return [name for name in reversed(names) if self.current >= name][:self.count]
        else:
            raise ValueError('Unknown migration direction "%s"' % self.direction)

    def run(self):
        "put all the parts together"
        names = self.migrations_to_run()
        if not names:
            return {'message': 'No migrations necessary!'}
        for name in names:
            # BUG FIX: the file object was never bound (``with open(...):``)
            # but the body read from an undefined ``mig``.
            with open(os.path.join(self.migrations_dir, name), 'r') as mig:
                migration = Migration(mig.read())
            if self.direction == 'up':
                self.runner.up(name, migration)
            elif self.direction == 'down':
                self.runner.down(name, migration)
            else:
                raise ValueError('Unknown migration direction "%s"' % self.direction)
        return {'message': 'Ran %d migrations' % len(names)}
|
# -*- coding: utf-8 -*-
from .migration import Migration
from importlib import import_module
import os
class Migrator(object):
    """Discovers migration files in a directory and applies them via a runner backend."""

    def __init__(self, count, runner_path, migrations_dir, direction, *args, **kwargs):
        self.count = count
        self.migrations_dir = migrations_dir
        self.direction = direction
        runner_cls = self.runner(runner_path)
        # NOTE: this deliberately shadows the runner() method on the instance.
        self.runner = runner_cls(*args, **kwargs)
        self.current = self.runner.version()

    def runner(self, path):
        """Resolve a dotted ``package.module.ClassName`` path to the runner class.

        BUG FIX: ``import_module(name, package)`` tried to import the bare class
        name as an absolute module; import the module, then fetch the attribute.
        """
        module_path, class_name = path.rsplit('.', 1)
        return getattr(import_module(module_path), class_name)

    def migrations_to_run(self):
        """Return the migration file names to apply, honouring direction and count."""
        try:
            # BUG FIX: was ``names = names(os.listdir(...))`` - a call on the
            # still-unbound name. Sort so migrations apply in lexicographic order.
            names = sorted(os.listdir(self.migrations_dir))
        except OSError:  # explicitly raising this. Deal with it!
            raise
        if not names:
            raise ValueError('No migrations to run in %s' % self.migrations_dir)
        if self.direction == 'up':
            # BUG FIX: both branches referenced an undefined ``migrations``.
            return [name for name in names if self.current < name][:self.count]
        elif self.direction == 'down':
            return [name for name in reversed(names) if self.current >= name][:self.count]
        else:
            raise ValueError('Unknown migration direction "%s"' % self.direction)

    def run(self):
        "put all the parts together"
        names = self.migrations_to_run()
        if not names:
            return {'message': 'No migrations necessary!'}
        for name in names:
            # BUG FIX: the file object was never bound (``with open(...):``)
            # but the body read from an undefined ``mig``. Also fixed the
            # SyntaxError: a second ``else`` followed the first - the down
            # direction must be an ``elif`` with ``else`` raising ValueError.
            with open(os.path.join(self.migrations_dir, name), 'r') as mig:
                migration = Migration(mig.read())
            if self.direction == 'up':
                self.runner.up(name, migration)
            elif self.direction == 'down':
                self.runner.down(name, migration)
            else:
                raise ValueError('Unknown migration direction "%s"' % self.direction)
        return {'message': 'Ran %d migrations' % len(names)}
|
mit
|
Python
|
68b6444eed6f3a748aa3a0126c20a52a5d56e6f9
|
Comment added for missing code.
|
alphagov/notifications-api,alphagov/notifications-api
|
app/notifications/rest.py
|
app/notifications/rest.py
|
import uuid
from flask import (
Blueprint,
jsonify,
request
)
from app import (notify_alpha_client, api_user)
from app.aws_sqs import add_notification_to_queue
from app.dao import (templates_dao)
from app.schemas import (
email_notification_schema, sms_template_notification_schema)
notifications = Blueprint('notifications', __name__)
@notifications.route('/<notification_id>', methods=['GET'])
def get_notifications(notification_id):
    """Fetch a single notification (stub: currently just echoes the id)."""
    # TODO return notification id details
    payload = {'id': notification_id}
    return jsonify(payload), 200
@notifications.route('/sms', methods=['POST'])
def create_sms_notification():
    """Validate an SMS notification payload and enqueue it for delivery."""
    notification, errors = sms_template_notification_schema.load(request.get_json())
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    add_notification_to_queue(api_user['client'], notification['template'], 'sms', notification)
    # TODO data to be returned
    return jsonify({}), 204
@notifications.route('/email', methods=['POST'])
def create_email_notification():
    """Validate an email notification payload and enqueue it for delivery."""
    notification, errors = email_notification_schema.load(request.get_json())
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    add_notification_to_queue(api_user['client'], "admin", 'email', notification)
    # TODO data to be returned
    return jsonify({}), 204
@notifications.route('/sms/service/<service_id>', methods=['POST'])
def create_sms_for_service(service_id):
    """Validate an SMS payload, verify the template belongs to the service, enqueue it."""
    notification, errors = sms_template_notification_schema.load(request.get_json())
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    template_id = notification['template']
    job_id = notification['job']
    # TODO: job/job_id is in notification and can used to update job status
    # TODO: remove once beta is reading notifications from the queue
    template = templates_dao.get_model_templates(template_id)
    if template.service.id != uuid.UUID(service_id):
        # Reject templates owned by a different service.
        message = "Invalid template: id {} for service id: {}".format(template.id, service_id)
        return jsonify(result="error", message=message), 400
    add_notification_to_queue(service_id, template_id, 'sms', notification)
    # TODO data to be returned
    return jsonify({}), 204
|
import uuid
from flask import (
Blueprint,
jsonify,
request
)
from app import (notify_alpha_client, api_user)
from app.aws_sqs import add_notification_to_queue
from app.dao import (templates_dao)
from app.schemas import (
email_notification_schema, sms_template_notification_schema)
notifications = Blueprint('notifications', __name__)
@notifications.route('/<notification_id>', methods=['GET'])
def get_notifications(notification_id):
    """Fetch a single notification (stub: currently just echoes the id)."""
    # TODO return notification id details
    return jsonify({'id': notification_id}), 200
@notifications.route('/sms', methods=['POST'])
def create_sms_notification():
    """Validate an SMS notification payload and enqueue it for delivery."""
    resp_json = request.get_json()
    notification, errors = sms_template_notification_schema.load(resp_json)
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    add_notification_to_queue(api_user['client'], notification['template'], 'sms', notification)
    # TODO data to be returned
    return jsonify({}), 200
@notifications.route('/email', methods=['POST'])
def create_email_notification():
    """Validate an email notification payload and enqueue it for delivery."""
    resp_json = request.get_json()
    notification, errors = email_notification_schema.load(resp_json)
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    add_notification_to_queue(api_user['client'], "admin", 'email', notification)
    # TODO data to be returned
    return jsonify({}), 200
@notifications.route('/sms/service/<service_id>', methods=['POST'])
def create_sms_for_service(service_id):
    """Validate an SMS payload, verify the template belongs to the service, enqueue it."""
    resp_json = request.get_json()
    notification, errors = sms_template_notification_schema.load(resp_json)
    if errors:
        # Schema validation failed: report the marshalling errors to the caller.
        return jsonify(result="error", message=errors), 400
    template_id = notification['template']
    job_id = notification['job']
    # TODO: job/job_id is in notification and can used to update job status
    # TODO: remove once beta is reading notifications from the queue
    template = templates_dao.get_model_templates(template_id)
    if template.service.id != uuid.UUID(service_id):
        # Reject templates owned by a different service.
        message = "Invalid template: id {} for service id: {}".format(template.id, service_id)
        return jsonify(result="error", message=message), 400
    add_notification_to_queue(service_id, template_id, 'sms', notification)
    # TODO data to be returned
    return jsonify({}), 200
|
mit
|
Python
|
b480426bdc7f5a59523cc9e6250efedd311d0854
|
fix other minor remark
|
rjeschmi/vsc-base,rjeschmi/vsc-base
|
lib/vsc/utils/testing.py
|
lib/vsc/utils/testing.py
|
#!/usr/bin/env python
##
#
# Copyright 2014-2014 Ghent University
#
# This file is part of vsc-base,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/vsc-base
#
# vsc-base is free software: you can redistribute it and/or modify
# it under the terms of the GNU Library General Public License as
# published by the Free Software Foundation, either version 2 of
# the License, or (at your option) any later version.
#
# vsc-base is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with vsc-base. If not, see <http://www.gnu.org/licenses/>.
##
"""
Test utilities.
@author: Kenneth Hoste (Ghent University)
"""
import re
import sys
from unittest import TestCase
class EnhancedTestCase(TestCase):
    """Enhanced test case, provides extra functionality (e.g. an assertErrorRegex method)."""
    def convert_exception_to_str(self, err):
        """Convert an Exception instance to a string."""
        msg = err
        # Prefer the exception's explicit .msg attribute when present.
        if hasattr(err, 'msg'):
            msg = err.msg
        try:
            res = str(msg)
        except UnicodeEncodeError:
            # Python 2: fall back to a lossy UTF-8 encoding for non-ASCII messages.
            res = msg.encode('utf8', 'replace')
        return res
    def assertErrorRegex(self, error, regex, call, *args, **kwargs):
        """
        Convenience method to match regex with the expected error message.
        Example: self.assertErrorRegex(OSError, "No such file or directory", os.remove, '/no/such/file')
        """
        try:
            call(*args, **kwargs)
            # Reaching here means no exception was raised: fail with the call repr.
            str_kwargs = ['='.join([k, str(v)]) for (k, v) in kwargs.items()]
            str_args = ', '.join(map(str, args) + str_kwargs)
            self.assertTrue(False, "Expected errors with %s(%s) call should occur" % (call.__name__, str_args))
        except error, err:
            msg = self.convert_exception_to_str(err)
            # Compile so the failure message can show the exact pattern used.
            regex = re.compile(regex)
            self.assertTrue(regex.search(msg), "Pattern '%s' is found in '%s'" % (regex.pattern, msg))
|
#!/usr/bin/env python
##
#
# Copyright 2014-2014 Ghent University
#
# This file is part of vsc-base,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/vsc-base
#
# vsc-base is free software: you can redistribute it and/or modify
# it under the terms of the GNU Library General Public License as
# published by the Free Software Foundation, either version 2 of
# the License, or (at your option) any later version.
#
# vsc-base is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with vsc-base. If not, see <http://www.gnu.org/licenses/>.
##
"""
Test utilities.
@author: Kenneth Hoste (Ghent University)
"""
import re
import sys
from unittest import TestCase
class EnhancedTestCase(TestCase):
    """Enhanced test case, provides extra functionality (e.g. an assertErrorRegex method)."""
    def convert_exception_to_str(self, err):
        """Convert an Exception instance to a string."""
        msg = err
        # Prefer the exception's explicit .msg attribute when present.
        if hasattr(err, 'msg'):
            msg = err.msg
        try:
            res = str(msg)
        except UnicodeEncodeError:
            # Python 2: fall back to a lossy UTF-8 encoding for non-ASCII messages.
            res = msg.encode('utf8', 'replace')
        return res
    def assertErrorRegex(self, error, regex, call, *args, **kwargs):
        """
        Convenience method to match regex with the expected error message.
        Example: self.assertErrorRegex(OSError, "No such file or directory", os.remove, '/no/such/file')
        """
        try:
            call(*args, **kwargs)
            # Reaching here means no exception was raised: fail with the call repr.
            str_kwargs = ['='.join([k, str(v)]) for (k, v) in kwargs.items()]
            str_args = ', '.join(map(str, args) + str_kwargs)
            self.assertTrue(False, "Expected errors with %s(%s) call should occur" % (call.__name__, str_args))
        except error, err:
            msg = self.convert_exception_to_str(err)
            self.assertTrue(re.search(regex, msg), "Pattern '%s' is found in '%s'" % (regex, msg))
|
lgpl-2.1
|
Python
|
5c86a27f3ff324a8a7f74e161eeff4bda62fe0ab
|
add asynchronous dump
|
squeaky-pl/japronto,squeaky-pl/japronto,squeaky-pl/japronto,squeaky-pl/japronto,squeaky-pl/japronto
|
integration_tests/dump.py
|
integration_tests/dump.py
|
import os.path
import sys
import base64
import asyncio
from app import Application
def dump(request):
    """Echo the request's salient fields back as JSON (body base64-encoded)."""
    raw_body = request.body
    encoded_body = None if raw_body is None else base64.b64encode(raw_body).decode('ascii')
    result = {
        "method": request.method,
        "path": request.path,
        "query_string": request.query_string,
        "headers": request.headers,
        "match_dict": request.match_dict,
        "body": encoded_body,
    }
    return request.Response(json=result)
async def adump(request):
    """Like dump(), but first sleeps for ?sleep=N seconds (async test helper)."""
    delay = int(request.query.get('sleep', 0))
    await asyncio.sleep(delay)
    return dump(request)
# Wire up the test application: both routes capture two path parameters.
app = Application()
r = app.get_router()
r.add_route('/dump/{p1}/{p2}', dump)
r.add_route('/adump/{p1}/{p2}', adump)
if __name__ == '__main__':
    app.serve()
|
import os.path
import sys
import base64
from app import Application
def dump(request):
    """Echo the request's salient fields back as JSON (body base64-encoded)."""
    body = request.body
    if body is not None:
        # JSON cannot carry raw bytes; base64-encode the request body.
        body = base64.b64encode(body).decode('ascii')
    result = {
        "method": request.method,
        "path": request.path,
        "query_string": request.query_string,
        "headers": request.headers,
        "match_dict": request.match_dict,
        "body": body
    }
    return request.Response(json=result)
# Wire up the test application: the route captures two path parameters.
app = Application()
r = app.get_router()
r.add_route('/dump/{p1}/{p2}', dump)
if __name__ == '__main__':
    app.serve()
|
mit
|
Python
|
de0c0edafb3e3a128c8ad54e9841c7a188cba328
|
Format output (#85)
|
ZuZuD/JARVIS-on-Messenger,jaskaransarkaria/JARVIS-on-Messenger,swapagarwal/JARVIS-on-Messenger,edadesd/JARVIS-on-Messenger
|
modules/src/time.py
|
modules/src/time.py
|
import requests
import config
import os
from templates.text import TextTemplate
from datetime import datetime
# API keys: environment variables take precedence over the config module.
MAPQUEST_CONSUMER_KEY = os.environ.get('MAPQUEST_CONSUMER_KEY', config.MAPQUEST_CONSUMER_KEY)
TIME_ZONE_DB_API_KEY = os.environ.get('TIME_ZONE_DB_API_KEY', config.TIME_ZONE_DB_API_KEY)
def process(input, entities):
    """Resolve the requested location via MapQuest, fetch its local time, and
    return a message dict with 'input', 'output'/'error_msg' and 'success'."""
    output = {}
    try:
        # Geocode the user-provided location (first match only).
        r = requests.get('http://open.mapquestapi.com/nominatim/v1/search.php?key=' + MAPQUEST_CONSUMER_KEY + '&format=json&q='+ entities['time_location'][0]['value'] + '&limit=1')
        location_data = r.json()
        # Look up the timezone/time for the geocoded coordinates.
        r = requests.get('http://api.timezonedb.com/?lat='+ location_data[0]['lat'] + '&lng='+ location_data[0]['lon'] + '&format=json&key=' + TIME_ZONE_DB_API_KEY)
        time_data = r.json()
        time = datetime.utcfromtimestamp(time_data['timestamp']).strftime('%a %b %d %Y %H:%M:%S')
        output['input'] = input
        output['output'] = TextTemplate('Location: ' + location_data[0]['display_name'] + '\nTime: ' + time + ' ' + time_data['abbreviation']).get_message()
        output['success'] = True
    except:
        # NOTE(review): bare except treats any failure (network, bad JSON,
        # missing entity) as "location not found" - consider narrowing.
        error_message = 'I couldn\'t get the time at the location you specified.'
        error_message += '\nPlease ask me something else, like:'
        error_message += '\n - time in new york'
        error_message += '\n - india time'
        error_message += '\n - time at paris'
        output['error_msg'] = TextTemplate(error_message).get_message()
        output['success'] = False
    return output
|
import requests
import config
import os
from templates.text import TextTemplate
from datetime import datetime
# API keys: environment variables take precedence over the config module.
MAPQUEST_CONSUMER_KEY = os.environ.get('MAPQUEST_CONSUMER_KEY', config.MAPQUEST_CONSUMER_KEY)
TIME_ZONE_DB_API_KEY = os.environ.get('TIME_ZONE_DB_API_KEY', config.TIME_ZONE_DB_API_KEY)
def process(input, entities):
    """Resolve the requested location via MapQuest, fetch its local time, and
    return a message dict with 'input', 'output'/'error_msg' and 'success'."""
    output = {}
    try:
        # Geocode the user-provided location (first match only).
        r = requests.get('http://open.mapquestapi.com/nominatim/v1/search.php?key=' + MAPQUEST_CONSUMER_KEY + '&format=json&q='+ entities['time_location'][0]['value'] + '&limit=1')
        location_data = r.json()
        # Look up the timezone/time for the geocoded coordinates.
        r = requests.get('http://api.timezonedb.com/?lat='+ location_data[0]['lat'] + '&lng='+ location_data[0]['lon'] + '&format=json&key=' + TIME_ZONE_DB_API_KEY)
        time_data = r.json()
        time = datetime.utcfromtimestamp(time_data['timestamp']).strftime('%Y-%m-%d %H:%M:%S')
        output['input'] = input
        output['output'] = TextTemplate('Location: ' + location_data[0]['display_name'] + '\nTime: ' + time + ' ' + time_data['abbreviation']).get_message()
        output['success'] = True
    except:
        # NOTE(review): bare except treats any failure (network, bad JSON,
        # missing entity) as "location not found" - consider narrowing.
        error_message = 'I couldn\'t get the time at the location you specified.'
        error_message += '\nPlease ask me something else, like:'
        error_message += '\n - time in new york'
        error_message += '\n - india time'
        error_message += '\n - time at paris'
        output['error_msg'] = TextTemplate(error_message).get_message()
        output['success'] = False
    return output
mit
|
Python
|
38ba5ddbadd02df7d3781526350eb5b060f702fb
|
remove one of the two softmask items in the doc
|
carlthome/librosa,carlthome/librosa,carlthome/librosa
|
librosa/util/__init__.py
|
librosa/util/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utilities
=========
Array operations
----------------
.. autosummary::
:toctree: generated/
frame
pad_center
fix_length
fix_frames
index_to_slice
softmask
sync
axis_sort
normalize
roll_sparse
sparsify_rows
buf_to_float
tiny
Matching
--------
.. autosummary::
:toctree: generated/
match_intervals
match_events
Miscellaneous
-------------
.. autosummary::
:toctree: generated/
localmax
peak_pick
Input validation
----------------
.. autosummary::
:toctree: generated/
valid_audio
valid_int
valid_intervals
File operations
---------------
.. autosummary::
:toctree: generated/
example_audio_file
find_files
"""
from .utils import * # pylint: disable=wildcard-import
from .files import * # pylint: disable=wildcard-import
from .matching import * # pylint: disable=wildcard-import
from .deprecation import * # pylint: disable=wildcard-import
from . import decorators
from . import exceptions
# Public API: every module-level name that does not start with an underscore.
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utilities
=========
Array operations
----------------
.. autosummary::
:toctree: generated/
frame
pad_center
fix_length
fix_frames
index_to_slice
softmask
    sync
    axis_sort
normalize
roll_sparse
sparsify_rows
buf_to_float
tiny
Matching
--------
.. autosummary::
:toctree: generated/
match_intervals
match_events
Miscellaneous
-------------
.. autosummary::
:toctree: generated/
localmax
peak_pick
Input validation
----------------
.. autosummary::
:toctree: generated/
valid_audio
valid_int
valid_intervals
File operations
---------------
.. autosummary::
:toctree: generated/
example_audio_file
find_files
"""
from .utils import * # pylint: disable=wildcard-import
from .files import * # pylint: disable=wildcard-import
from .matching import * # pylint: disable=wildcard-import
from .deprecation import * # pylint: disable=wildcard-import
from . import decorators
from . import exceptions
# Public API: every module-level name that does not start with an underscore.
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
isc
|
Python
|
0f79b2ab82f3d628983baca02c5bc54a872d5513
|
Cover contact_link_for
|
hartwork/wnpp.debian.net,hartwork/wnpp.debian.net,hartwork/wnpp.debian.net
|
wnpp_debian_net/templatetags/tests/test_contact_links.py
|
wnpp_debian_net/templatetags/tests/test_contact_links.py
|
# Copyright (C) 2021 Sebastian Pipping <[email protected]>
# Licensed under GNU Affero GPL v3 or later
from unittest import TestCase
from parameterized import parameterized
from ...templatetags.contact_links import _parse_contact
from ..contact_links import contact_link_for
class ParseContactTest(TestCase):
    """Tests _parse_contact: splitting a contact string into (mailto, display)."""
    @parameterized.expand([
        ('[email protected]', '[email protected]'),
        ('"First Middle Last" <[email protected]>', 'First Middle Last'),
        ('First Middle Last <[email protected]>', 'First Middle Last'),
    ])
    def test_parse_contact(self, contact, expected_display):
        # The mailto part is the same in every fixture; only the display varies.
        actual_mailto, actual_display = _parse_contact(contact)
        self.assertEqual(actual_mailto, '[email protected]')
        self.assertEqual(actual_display, expected_display)
class ContactLinkForTest(TestCase):
    """Tests contact_link_for: rendering a contact string as an HTML link/span."""
    @parameterized.expand([
        (None, '<i>nobody</i>'),
        ('', '<i>nobody</i>'),
        ('no at sign in sight', '<i>no at sign in sight</i>'),
        ('Some Thing <[email protected]>',
         '<a href="mailto:[email protected]">Some Thing</a>'),
        ('[email protected]', '<a href="mailto:[email protected]">[email protected]</a>'),
    ])
    def test_contact_link_for(self, contact, expected_html):
        actual_html = contact_link_for(contact)
        self.assertEqual(actual_html, expected_html)
|
# Copyright (C) 2021 Sebastian Pipping <[email protected]>
# Licensed under GNU Affero GPL v3 or later
from unittest import TestCase
from parameterized import parameterized
from ...templatetags.contact_links import _parse_contact
class ParseContactTest(TestCase):
    """Checks that _parse_contact() yields the mailto address and the
    display name for each supported contact format."""

    @parameterized.expand([
        ('[email protected]', '[email protected]'),
        ('"First Middle Last" <[email protected]>', 'First Middle Last'),
        ('First Middle Last <[email protected]>', 'First Middle Last'),
    ])
    def test_parse_contact(self, raw_contact, wanted_display):
        mailto, display = _parse_contact(raw_contact)
        self.assertEqual(mailto, '[email protected]')
        self.assertEqual(display, wanted_display)
|
agpl-3.0
|
Python
|
3192634b5468c3aff64367366f7419ad0aabf49b
|
Fix Objective-C executor
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
executors/OBJC.py
|
executors/OBJC.py
|
from subprocess import check_output, CalledProcessError
from GCCExecutor import GCCExecutor
from judgeenv import env
class Executor(GCCExecutor):
    """Objective-C executor: compiles submissions with GNU gobjc, using the
    compile/link flags reported by ``gnustep-config``."""
    ext = '.m'                 # source-file extension handled by this executor
    objc_flags = []            # filled in by initialize() from gnustep-config
    objc_ldflags = []          # filled in by initialize() from gnustep-config
    command = env['runtime'].get('gobjc')
    name = 'OBJC'
    # Extra address-space grace for the runtime; consumed by the base
    # executor -- TODO confirm units against GCCExecutor.
    address_grace = 131072
    # Self-test program: echoes stdin to stdout via an NSAutoreleasePool.
    test_program = r'''
#import <Foundation/Foundation.h>
int main (int argc, const char * argv[]) {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int ch;
    while ((ch = getchar()) != EOF)
        putchar(ch);
    [pool drain];
    return 0;
}
'''

    def get_flags(self):
        """Prepend the Objective-C compile flags to the base GCC flags."""
        return self.objc_flags + super(Executor, self).get_flags()

    def get_ldflags(self):
        """Prepend the GNUstep base libraries to the base link flags."""
        return self.objc_ldflags + super(Executor, self).get_ldflags()

    def get_fs(self):
        """Extend the sandbox filesystem whitelist with paths GNUstep needs."""
        return super(Executor, self).get_fs() + ['/proc/\d+/cmdline', '/usr/lib', '/dev/urandom$']

    @classmethod
    def initialize(cls, sandbox=True):
        """Probe gnustep-config for flags; report the executor unavailable
        when gnustep-config is missing or fails."""
        if 'gnustep-config' not in env['runtime']:
            return False
        try:
            cls.objc_flags = check_output([env['runtime']['gnustep-config'], '--objc-flags']).split()
            cls.objc_ldflags = check_output([env['runtime']['gnustep-config'], '--base-libs']).split()
        except CalledProcessError as e:
            return False
        return super(Executor, cls).initialize(sandbox=sandbox)


# Module-level hook (presumably consumed by the executor loader -- verify).
initialize = Executor.initialize
|
from subprocess import check_output, CalledProcessError
from GCCExecutor import GCCExecutor
from judgeenv import env
class Executor(GCCExecutor):
    """Objective-C executor: compiles submissions with GNU gobjc, using the
    compile/link flags reported by ``gnustep-config``."""
    ext = '.m'                 # source-file extension handled by this executor
    objc_flags = []            # filled in by initialize() from gnustep-config
    objc_ldflags = []          # filled in by initialize() from gnustep-config
    command = env['runtime'].get('gobjc')
    name = 'OBJC'
    # Extra address-space grace for the runtime; consumed by the base
    # executor -- TODO confirm units against GCCExecutor.
    address_grace = 131072
    # Self-test program: echoes stdin to stdout via an NSAutoreleasePool.
    test_program = r'''
#import <Foundation/Foundation.h>
int main (int argc, const char * argv[]) {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    int ch;
    while ((ch = getchar()) != EOF)
        putchar(ch);
    [pool drain];
    return 0;
}
'''

    def get_flags(self):
        """Prepend the Objective-C compile flags to the base GCC flags."""
        return self.objc_flags + super(Executor, self).get_flags()

    def get_ldflags(self):
        """Prepend the GNUstep base libraries to the base link flags."""
        return self.objc_ldflags + super(Executor, self).get_ldflags()

    def get_fs(self):
        """Extend the sandbox filesystem whitelist with paths GNUstep needs."""
        return super(Executor, self).get_fs() + ['/proc/\d+/cmdline', '/usr/lib', '/dev/urandom$']

    @classmethod
    def initialize(cls, sandbox=True):
        """Probe gnustep-config for flags; report the executor unavailable
        when gnustep-config is missing or fails.

        Fix: accept and forward the ``sandbox`` flag so the base-class
        initialization chain receives it (previously it was dropped,
        breaking callers that pass ``sandbox``); default keeps the old
        call signature working.
        """
        if 'gnustep-config' not in env['runtime']:
            return False
        try:
            cls.objc_flags = check_output([env['runtime']['gnustep-config'], '--objc-flags']).split()
            cls.objc_ldflags = check_output([env['runtime']['gnustep-config'], '--base-libs']).split()
        except CalledProcessError:
            # gnustep-config exists but is misconfigured; disable the executor.
            return False
        return super(Executor, cls).initialize(sandbox=sandbox)


# Module-level hook (presumably consumed by the executor loader -- verify).
initialize = Executor.initialize
|
agpl-3.0
|
Python
|
47a7321682f3f47c638c133ccbcb3f9daea32e77
|
add help text for language
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
molo/core/models.py
|
molo/core/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch import index
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class HomePage(Page):
    """Plain wagtail page type with no extra fields."""
    pass
class Main(Page):
    """Site root: no parent allowed, children must be LanguagePage."""
    parent_page_types = []
    subpage_types = ['core.LanguagePage']
class LanguagePage(Page):
    """Per-language site root, identified by an ISO 639-2 code."""
    code = models.CharField(
        max_length=255,
        help_text=_('The language code as specified in iso639-2'))
    parent_page_types = ['core.Main']
    subpage_types = ['core.HomePage', 'core.SectionPage']

    class Meta:
        verbose_name = _('Language')


# Admin edit-form layout.
LanguagePage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('code'),
]
class SectionPage(Page):
    """Topic section holding articles; description is full-text searchable."""
    description = models.TextField(null=True, blank=True)
    subpage_types = ['core.ArticlePage']
    search_fields = Page.search_fields + (
        index.SearchField('description'),
    )

    class Meta:
        verbose_name = _('Section')


# Admin edit-form layout.
SectionPage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('description'),
]
class ArticlePage(Page):
    """Leaf content page with an optional subtitle and lead image."""
    subtitle = models.TextField(null=True, blank=True)
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,  # keep the article when its image is deleted
        related_name='+'            # no reverse accessor from Image
    )
    subpage_types = []              # articles are leaves
    search_fields = Page.search_fields + (
        index.SearchField('subtitle'),
    )

    class Meta:
        verbose_name = _('Article')


# Admin edit-form layout.
ArticlePage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('subtitle'),
    ImageChooserPanel('image'),
]
|
from django.db import models
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch import index
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class HomePage(Page):
    """Plain wagtail page type with no extra fields."""
    pass
class Main(Page):
    """Site root: no parent allowed, children must be LanguagePage."""
    parent_page_types = []
    subpage_types = ['core.LanguagePage']
class LanguagePage(Page):
    """Per-language site root identified by a language code."""
    # Presumably an ISO language code -- TODO confirm the expected scheme.
    code = models.CharField(max_length=255)
    parent_page_types = ['core.Main']
    subpage_types = ['core.HomePage', 'core.SectionPage']

    class Meta:
        verbose_name = 'Language'


# Admin edit-form layout.
LanguagePage.content_panels = [
    FieldPanel('title'),
    FieldPanel('code'),
]
class SectionPage(Page):
    """Topic section holding articles; description is full-text searchable."""
    description = models.TextField(null=True, blank=True)
    subpage_types = ['core.ArticlePage']
    search_fields = Page.search_fields + (
        index.SearchField('description'),
    )

    class Meta:
        verbose_name = 'Section'


# Admin edit-form layout.
SectionPage.content_panels = [
    FieldPanel('title'),
    FieldPanel('description'),
]
class ArticlePage(Page):
    """Leaf content page with an optional subtitle and lead image."""
    subtitle = models.TextField(null=True, blank=True)
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,  # keep the article when its image is deleted
        related_name='+'            # no reverse accessor from Image
    )
    subpage_types = []              # articles are leaves
    search_fields = Page.search_fields + (
        index.SearchField('subtitle'),
    )

    class Meta:
        verbose_name = 'Article'


# Admin edit-form layout.
ArticlePage.content_panels = [
    FieldPanel('title'),
    FieldPanel('subtitle'),
    ImageChooserPanel('image'),
]
|
bsd-2-clause
|
Python
|
9ffcb95f0475c9d3a090b8de62073049f999fcf0
|
update version
|
mongolab/mongoctl
|
mongoctl/version.py
|
mongoctl/version.py
|
__author__ = 'abdul'

# Current mongoctl release version string.
MONGOCTL_VERSION = '0.9.1'
|
__author__ = 'abdul'

# Current mongoctl release version string.
MONGOCTL_VERSION = '0.9.0'
|
mit
|
Python
|
b6b8ae74eabd69defcbf46a2e0bd46512872bd40
|
Add tools to write bidsignore and dataset_description.json
|
poldracklab/mriqc,oesteban/mriqc,oesteban/mriqc,poldracklab/mriqc,poldracklab/mriqc,poldracklab/mriqc,oesteban/mriqc,oesteban/mriqc
|
mriqc/utils/bids.py
|
mriqc/utils/bids.py
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""PyBIDS tooling"""
import os
import json
from pathlib import Path
from collections import defaultdict
DEFAULT_TYPES = ["bold", "T1w", "T2w"]
def collect_bids_data(
    layout, participant_label=None, session=None, run=None, task=None, bids_type=None
):
    """Collect NIfTI files from a BIDS layout, grouped by suffix.

    Parameters
    ----------
    layout : a pybids-style layout exposing ``.get()``
    participant_label, session, run, task : optional entity filters;
        falsy values (``None``, ``''``, ``0``, ``[]``) are ignored.
    bids_type : str or list/tuple of str
        Suffixes to query; defaults to ``DEFAULT_TYPES``.

    Returns
    -------
    collections.defaultdict(list)
        Maps each requested suffix to the list of matching file paths;
        unknown keys yield an empty list.
    """
    bids_type = bids_type or DEFAULT_TYPES
    if not isinstance(bids_type, (list, tuple)):
        bids_type = [bids_type]

    basequery = {
        "subject": participant_label,
        "session": session,
        "task": task,
        "run": run,
    }
    # Filter empty lists, strings, zero runs, and Nones
    basequery = {k: v for k, v in basequery.items() if v}

    # Start querying (defaultdict(list) -- the redundant seed dict was dropped)
    imaging_data = defaultdict(list)
    for btype in bids_type:
        imaging_data[btype] = layout.get(
            suffix=btype, return_type="file", extension=["nii", "nii.gz"], **basequery
        )

    return imaging_data
def write_bidsignore(deriv_dir):
    """Write a ``.bidsignore`` file into *deriv_dir* listing outputs that
    BIDS validators should skip."""
    patterns = [
        "*.html",        # reports
        "logs/",         # reports
        "*_T1w.json",    # outputs are not yet standardized
        "*_T2w.json",
        "*_bold.json",
    ]
    target = Path(deriv_dir) / ".bidsignore"
    target.write_text("\n".join(patterns) + "\n")
def write_derivative_description(bids_dir, deriv_dir):
    """Write a BIDS-Derivatives ``dataset_description.json`` into *deriv_dir*,
    carrying provenance (DOI, license) over from the source dataset in
    *bids_dir*."""
    from ..__about__ import __version__, __download__

    bids_dir = Path(bids_dir)
    deriv_dir = Path(deriv_dir)
    desc = {
        'Name': 'MRIQC - MRI Quality Control',
        'BIDSVersion': '1.4.0',
        'DatasetType': 'derivative',
        'GeneratedBy': [{
            'Name': 'MRIQC',
            'Version': __version__,
            'CodeURL': __download__,
        }],
        'HowToAcknowledge':
            'Please cite our paper (https://doi.org/10.1371/journal.pone.0184661).',
    }

    # Keys that can only be set by environment
    # XXX: This currently has no effect, but is a stand-in to remind us to figure out
    # how to detect the container
    if 'MRIQC_DOCKER_TAG' in os.environ:
        desc['GeneratedBy'][0]['Container'] = {
            "Type": "docker",
            "Tag": f"poldracklab/mriqc:{os.environ['MRIQC_DOCKER_TAG']}"
        }
    if 'MRIQC_SINGULARITY_URL' in os.environ:
        desc['GeneratedBy'][0]['Container'] = {
            "Type": "singularity",
            "URI": os.getenv('MRIQC_SINGULARITY_URL')
        }

    # Keys deriving from source dataset
    orig_desc = {}
    fname = bids_dir / 'dataset_description.json'
    if fname.exists():
        orig_desc = json.loads(fname.read_text())

    if 'DatasetDOI' in orig_desc:
        desc['SourceDatasets'] = [{
            'URL': f'https://doi.org/{orig_desc["DatasetDOI"]}',
            'DOI': orig_desc['DatasetDOI']
        }]
    if 'License' in orig_desc:
        desc['License'] = orig_desc['License']

    # Serialize into the derivatives directory.
    Path.write_text(deriv_dir / 'dataset_description.json', json.dumps(desc, indent=4))
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""PyBIDS tooling"""
from collections import defaultdict
DEFAULT_TYPES = ["bold", "T1w", "T2w"]
def collect_bids_data(
    layout, participant_label=None, session=None, run=None, task=None, bids_type=None
):
    """Query *layout* for NIfTI files of each requested BIDS suffix and
    return them grouped by suffix in a defaultdict(list)."""
    suffixes = bids_type or DEFAULT_TYPES
    if not isinstance(suffixes, (list, tuple)):
        suffixes = [suffixes]

    # Keep only the entity filters the caller actually supplied
    # (drops None, '', 0 and empty lists).
    raw_query = {
        "subject": participant_label,
        "session": session,
        "task": task,
        "run": run,
    }
    query = {key: val for key, val in raw_query.items() if val}

    found = defaultdict(list, {})
    for suffix in suffixes:
        found[suffix] = layout.get(
            suffix=suffix, return_type="file", extension=["nii", "nii.gz"], **query
        )

    return found
|
apache-2.0
|
Python
|
363bb6f409b4c3184a5b9e72c095b6aca207a11f
|
Cut 0.9.1 final
|
bspink/fabric,qinrong/fabric,ericholscher/fabric,getsentry/fabric,cgvarela/fabric,elijah513/fabric,MjAbuz/fabric,ploxiln/fabric,SamuelMarks/fabric,mathiasertl/fabric,haridsv/fabric,StackStorm/fabric,akaariai/fabric,rbramwell/fabric,jaraco/fabric,hrubi/fabric,TarasRudnyk/fabric,itoed/fabric,raimon49/fabric,tolbkni/fabric,felix-d/fabric,cmattoon/fabric,rane-hs/fabric-py3,askulkarni2/fabric,opavader/fabric,bitprophet/fabric,pgroudas/fabric,rodrigc/fabric,amaniak/fabric,fernandezcuesta/fabric,bitmonk/fabric,likesxuqiang/fabric,kxxoling/fabric,sdelements/fabric,pashinin/fabric,xLegoz/fabric,tekapo/fabric,kmonsoor/fabric
|
fabric/version.py
|
fabric/version.py
|
"""
Current Fabric version constant plus version pretty-print method.
This functionality is contained in its own module to prevent circular import
problems with ``__init__.py`` (which is loaded by setup.py during installation,
which in turn needs access to this version information.)
"""
# (major, minor, tertiary, release type e.g. "final"/"beta", release-type number)
VERSION = (0, 9, 1, "final", 0)


def get_version(form='short'):
    """
    Return a version string for this package, based on `VERSION`.

    Takes a single argument, ``form``, which should be one of the following
    strings:

    * ``branch``: just the major + minor, e.g. "0.9", "1.0".
    * ``short`` (default): compact, e.g. "0.9rc1", "0.9.0". For package
      filenames or SCM tag identifiers.
    * ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1". For e.g.
      documentation site headers.
    * ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final". For
      tag commit messages, or anywhere that it's important to remove ambiguity
      between a branch and the first final release within that branch.
    """
    # Setup
    versions = {}
    branch = "%s.%s" % (VERSION[0], VERSION[1])
    tertiary = VERSION[2]
    type_ = VERSION[3]
    final = (type_ == "final")
    type_num = VERSION[4]
    # Abbreviation of the release type, e.g. "release candidate" -> "rc".
    firsts = "".join([x[0] for x in type_.split()])

    # Branch
    versions['branch'] = branch

    # Short
    v = branch
    if (tertiary or final):
        v += "." + str(tertiary)
    if not final:
        v += firsts
        if type_num:
            v += str(type_num)
    versions['short'] = v

    # Normal
    v = branch
    if tertiary:
        v += "." + str(tertiary)
    if not final:
        v += " " + type_
        if type_num:
            v += " " + str(type_num)
    versions['normal'] = v

    # Verbose
    v = branch
    if tertiary:
        v += "." + str(tertiary)
    if not final:
        v += " " + type_
        if type_num:
            v += " " + str(type_num)
    else:
        v += " final"
    versions['verbose'] = v

    try:
        return versions[form]
    except KeyError:
        # Python 2 raise syntax; this module predates Python 3 support.
        raise TypeError, '"%s" is not a valid form specifier.' % form


__version__ = get_version('short')
|
"""
Current Fabric version constant plus version pretty-print method.
This functionality is contained in its own module to prevent circular import
problems with ``__init__.py`` (which is loaded by setup.py during installation,
which in turn needs access to this version information.)
"""
# (major, minor, tertiary, release type e.g. "final"/"beta", release-type number)
VERSION = (0, 9, 0, "final", 0)


def get_version(form='short'):
    """
    Return a version string for this package, based on `VERSION`.

    Takes a single argument, ``form``, which should be one of the following
    strings:

    * ``branch``: just the major + minor, e.g. "0.9", "1.0".
    * ``short`` (default): compact, e.g. "0.9rc1", "0.9.0". For package
      filenames or SCM tag identifiers.
    * ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1". For e.g.
      documentation site headers.
    * ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final". For
      tag commit messages, or anywhere that it's important to remove ambiguity
      between a branch and the first final release within that branch.
    """
    # Setup
    versions = {}
    branch = "%s.%s" % (VERSION[0], VERSION[1])
    tertiary = VERSION[2]
    type_ = VERSION[3]
    final = (type_ == "final")
    type_num = VERSION[4]
    # Abbreviation of the release type, e.g. "release candidate" -> "rc".
    firsts = "".join([x[0] for x in type_.split()])

    # Branch
    versions['branch'] = branch

    # Short
    v = branch
    if (tertiary or final):
        v += "." + str(tertiary)
    if not final:
        v += firsts
        if type_num:
            v += str(type_num)
    versions['short'] = v

    # Normal
    v = branch
    if tertiary:
        v += "." + str(tertiary)
    if not final:
        v += " " + type_
        if type_num:
            v += " " + str(type_num)
    versions['normal'] = v

    # Verbose
    v = branch
    if tertiary:
        v += "." + str(tertiary)
    if not final:
        v += " " + type_
        if type_num:
            v += " " + str(type_num)
    else:
        v += " final"
    versions['verbose'] = v

    try:
        return versions[form]
    except KeyError:
        # Python 2 raise syntax; this module predates Python 3 support.
        raise TypeError, '"%s" is not a valid form specifier.' % form


__version__ = get_version('short')
|
bsd-2-clause
|
Python
|
cbf5f26d4a1860082d9e29524146298ad0b4e0db
|
Support for pkgdb teams.
|
fedora-infra/fedmsg-genacls
|
fedmsg_genacls.py
|
fedmsg_genacls.py
|
# -*- coding: utf-8 -*-
""" A fedmsg consumer that listens to pkgdb messages to update gitosis acls
Authors: Janez Nemanič <[email protected]>
Ralph Bean <[email protected]>
"""
import pprint
import subprocess
import os
import fedmsg.consumers
import moksha.hub.reactor
class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer):
    """Debounces pkgdb/FAS messages and regenerates gitosis ACLs in batches."""

    # Because we are interested in a variety of topics, we tell moksha that
    # we're interested in all of them (it doesn't know how to do complicated
    # distinctions). But then we'll filter later in our consume() method.
    topic = '*'
    interesting_topics = [
        'org.fedoraproject.prod.pkgdb.acl.update',
        'org.fedoraproject.prod.fas.group.member.sponsor',
        'org.fedoraproject.prod.fas.group.member.remove',
    ]

    config_key = 'genacls.consumer.enabled'

    def __init__(self, hub):
        super(GenACLsConsumer, self).__init__(hub)

        # This is required. It is the number of seconds that we should wait
        # until we ultimately act on a pkgdb message.
        self.delay = self.hub.config['genacls.consumer.delay']

        # We use this to manage our state
        self.queued_messages = []

    def consume(self, msg):
        """Queue an interesting message and schedule a delayed batch flush."""
        if msg['topic'] not in self.interesting_topics:
            return

        msg = msg['body']
        self.log.info("Got a message %r" % msg['topic'])

        def delayed_consume():
            # Flush everything that accumulated during the delay window.
            if self.queued_messages:
                try:
                    self.action(self.queued_messages)
                finally:
                    # Empty our list at the end of the day.
                    self.queued_messages = []
            else:
                self.log.debug("Woke up, but there were no messages.")

        self.queued_messages.append(msg)

        moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume)

    def action(self, messages):
        """Regenerate the ACLs once for a whole batch of queued messages."""
        self.log.debug("Acting on %s" % pprint.pformat(messages))
        command = '/usr/bin/sudo -u gen-acls /usr/local/bin/genacls.sh'.split()

        self.log.info("Running %r" % command)
        process = subprocess.Popen(args=command)
        # NOTE(review): stdout/stderr are always None because Popen was
        # created without PIPEs, so the error log below cannot show output.
        stdout, stderr = process.communicate()

        if process.returncode == 0:
            self.log.info("%r was successful" % command)
        else:
            self.log.error("%r exited with %r, stdout: %s, stderr: %s" % (
                command, process.returncode, stdout, stderr))
|
# -*- coding: utf-8 -*-
""" A fedmsg consumer that listens to pkgdb messages to update gitosis acls
Authors: Janez Nemanič <[email protected]>
Ralph Bean <[email protected]>
"""
import pprint
import subprocess
import os
import fedmsg.consumers
import moksha.hub.reactor
class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer):
    """Debounces pkgdb ACL messages and regenerates gitosis ACLs in batches."""

    # Really, we want to use this specific topic to listen to.
    topic = 'org.fedoraproject.prod.pkgdb.acl.update'
    # But for testing, we'll just listen to all topics with this:
    #topic = '*'

    config_key = 'genacls.consumer.enabled'

    def __init__(self, hub):
        super(GenACLsConsumer, self).__init__(hub)

        # This is required. It is the number of seconds that we should wait
        # until we ultimately act on a pkgdb message.
        self.delay = self.hub.config['genacls.consumer.delay']

        # We use this to manage our state
        self.queued_messages = []

    def consume(self, msg):
        """Queue an incoming message and schedule a delayed batch flush."""
        msg = msg['body']
        self.log.info("Got a message %r" % msg['topic'])

        def delayed_consume():
            # Flush everything that accumulated during the delay window.
            if self.queued_messages:
                try:
                    self.action(self.queued_messages)
                finally:
                    # Empty our list at the end of the day.
                    self.queued_messages = []
            else:
                self.log.debug("Woke up, but there were no messages.")

        self.queued_messages.append(msg)

        moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume)

    def action(self, messages):
        """Regenerate the ACLs once for a whole batch of queued messages."""
        self.log.debug("Acting on %s" % pprint.pformat(messages))
        command = '/usr/bin/sudo -u gen-acls /usr/local/bin/genacls.sh'.split()

        self.log.info("Running %r" % command)
        process = subprocess.Popen(args=command)
        # NOTE(review): stdout/stderr are always None because Popen was
        # created without PIPEs, so the error log below cannot show output.
        stdout, stderr = process.communicate()

        if process.returncode == 0:
            self.log.info("%r was successful" % command)
        else:
            self.log.error("%r exited with %r, stdout: %s, stderr: %s" % (
                command, process.returncode, stdout, stderr))
|
lgpl-2.1
|
Python
|
5022dada0153bca2c99f2faf302827307078e91a
|
Remove timestamp from forms
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
fellowms/forms.py
|
fellowms/forms.py
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
    """Public form for Fellow; hides internal, derived and audit fields."""
    class Meta:
        model = Fellow
        exclude = [
            "user",
            "home_lon",
            "home_lat",
            "funding_notes",
            "inauguration_year",
            "fellowship_grant",
            "mentor",
            "added",
            "updated",
        ]
class EventForm(ModelForm):
    """Public form for Event; hides moderation and audit fields."""
    class Meta:
        model = Event
        exclude = [
            "status",
            "ad_status",
            "budget_approve",
            "report_url",
            "added",
            "updated",
        ]

        # We don't want to expose fellows' data
        # so we will request the email
        # and match on the database.
        labels = {
            'fellow': 'Fellow',
            'url': "Event's homepage url",
            'name': "Event's name",
        }
class ExpenseForm(ModelForm):
    """Public form for Expense; id/status/audit fields are set server-side."""
    class Meta:
        model = Expense
        exclude = [
            'id',
            'status',
            "added",
            "updated",
        ]
class BlogForm(ModelForm):
    """Public form for Blog; status/audit fields are set server-side."""
    class Meta:
        model = Blog
        exclude = [
            'status',
            "added",
            "updated",
        ]
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
    """Public form for Fellow; hides internal and derived fields."""
    class Meta:
        model = Fellow
        exclude = [
            "user",
            "home_lon",
            "home_lat",
            "funding_notes",
            "inauguration_year",
            "fellowship_grant",
            "mentor",
        ]
class EventForm(ModelForm):
    """Public form for Event; hides moderation-only fields."""
    class Meta:
        model = Event
        exclude = [
            "status",
            "ad_status",
            "budget_approve",
            "report_url",
        ]

        # We don't want to expose fellows' data
        # so we will request the email
        # and match on the database.
        labels = {
            'fellow': 'Fellow',
            'url': "Event's homepage url",
            'name': "Event's name",
        }
class ExpenseForm(ModelForm):
    """Public form for Expense; id and status are set server-side."""
    class Meta:
        model = Expense
        exclude = [
            'id',
            'status',
        ]
class BlogForm(ModelForm):
    """Public form for Blog; status is set server-side."""
    class Meta:
        model = Blog
        exclude = [
            'status',
        ]
|
bsd-3-clause
|
Python
|
e97a43f4558b19311eea9a5ef508502c7151256f
|
fix periodic task schedule_updates()
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
mygpo/data/tasks.py
|
mygpo/data/tasks.py
|
from operator import itemgetter
from datetime import datetime, timedelta
from celery.decorators import periodic_task
from mygpo.data.podcast import calc_similar_podcasts
from mygpo.celery import celery
from mygpo.podcasts.models import Podcast
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@celery.task
def update_podcasts(podcast_urls):
    """Update the podcasts identified by *podcast_urls* and return the
    updated podcast objects as a list."""
    # Imported inside the task (presumably to avoid an import cycle -- verify).
    from mygpo.data.feeddownloader import PodcastUpdater
    return list(PodcastUpdater().update_queue(podcast_urls))
@celery.task
def update_related_podcasts(podcast, max_related=20):
    """Attach up to *max_related* most-similar podcasts to *podcast*."""
    # calc_similar_podcasts yields (podcast, score) pairs, best first.
    for entry in calc_similar_podcasts(podcast)[:max_related]:
        podcast.related_podcasts.add(entry[0])
# interval in which podcast updates are scheduled
UPDATE_INTERVAL = timedelta(hours=1)


@periodic_task(run_every=UPDATE_INTERVAL)
def schedule_updates(interval=UPDATE_INTERVAL):
    """ Schedules podcast updates that are due within ``interval`` """
    now = datetime.utcnow()

    # fetch podcasts for which an update is due within the next hour
    podcasts = Podcast.objects.all()\
        .next_update_between(now, now+interval)\
        .prefetch_related('urls')\
        .only('pk')

    # NOTE(review): routine progress logged at ERROR level -- presumably to
    # guarantee visibility in production logs; confirm intent.
    logger.error('Scheduling %d podcasts for update', podcasts.count())

    # queue all those podcast updates
    for podcast in podcasts:
        update_podcasts.delay([podcast.url])
|
from operator import itemgetter
from datetime import datetime, timedelta
from celery.decorators import periodic_task
from mygpo.data.podcast import calc_similar_podcasts
from mygpo.celery import celery
from mygpo.podcasts.models import Podcast
@celery.task
def update_podcasts(podcast_urls):
    """Update the podcasts identified by *podcast_urls* and return the
    updated podcast objects as a list."""
    # Imported inside the task (presumably to avoid an import cycle -- verify).
    from mygpo.data.feeddownloader import PodcastUpdater
    return list(PodcastUpdater().update_queue(podcast_urls))
@celery.task
def update_related_podcasts(podcast, max_related=20):
    """Attach up to *max_related* most-similar podcasts to *podcast*."""
    # calc_similar_podcasts yields (podcast, score) pairs, best first.
    for entry in calc_similar_podcasts(podcast)[:max_related]:
        podcast.related_podcasts.add(entry[0])
# interval in which podcast updates are scheduled
UPDATE_INTERVAL = timedelta(hours=1)


@periodic_task(run_every=UPDATE_INTERVAL)
def schedule_updates(interval=UPDATE_INTERVAL):
    """Schedules podcast updates that are due within ``interval``.

    Fix: a plain @periodic_task function is not bound, so the stray
    ``self`` parameter meant the scheduler's no-argument invocation raised
    TypeError (or silently bound ``interval`` into ``self``); it is removed.
    """
    now = datetime.utcnow()

    # fetch podcasts for which an update is due within ``interval``
    podcasts = Podcast.objects.next_update_between(now, now+interval)\
        .prefetch_related('urls')\
        .only('pk')

    # queue all those podcast updates
    for podcast in podcasts:
        update_podcasts.delay([podcast.url])
|
agpl-3.0
|
Python
|
140b8b829f566a75a09a1ff7bc2d7d4ba4ea5272
|
fix version
|
Star2Billing/newfies-dialer,berinhard/newfies-dialer,laprice/newfies-dialer,newfies-dialer/newfies-dialer,romonzaman/newfies-dialer,saydulk/newfies-dialer,emartonline/newfies-dialer,romonzaman/newfies-dialer,berinhard/newfies-dialer,berinhard/newfies-dialer,romonzaman/newfies-dialer,newfies-dialer/newfies-dialer,newfies-dialer/newfies-dialer,Star2Billing/newfies-dialer,laprice/newfies-dialer,Star2Billing/newfies-dialer,saydulk/newfies-dialer,Star2Billing/newfies-dialer,newfies-dialer/newfies-dialer,emartonline/newfies-dialer,romonzaman/newfies-dialer,laprice/newfies-dialer,saydulk/newfies-dialer,saydulk/newfies-dialer,emartonline/newfies-dialer
|
newfies/__init__.py
|
newfies/__init__.py
|
# -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2012 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
# :copyright: (c) 2011 - 2012 by Arezqui Belaid.
# :license: MPL 2.0, see COPYING for more details.
# (major, minor, patch, suffix) -- suffix is appended verbatim when non-empty.
VERSION = (1, 2, 4, "")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Arezqui Belaid"
__contact__ = "[email protected]"
__homepage__ = "http://www.newfies-dialer.org"
__docformat__ = "restructuredtext"
|
# -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2012 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
# :copyright: (c) 2011 - 2012 by Arezqui Belaid.
# :license: MPL 2.0, see COPYING for more details.
# (major, minor, patch, suffix) -- suffix is appended verbatim when non-empty.
VERSION = (1, 2, 2, "")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Arezqui Belaid"
__contact__ = "[email protected]"
__homepage__ = "http://www.newfies-dialer.org"
__docformat__ = "restructuredtext"
|
mpl-2.0
|
Python
|
df5bcec8f6d05a27ba6be3ea0af401ab6045d636
|
Bump version to 0.5.2
|
neutralio/nio-cli,nioinnovation/nio-cli
|
nio_cli/__init__.py
|
nio_cli/__init__.py
|
# Package version string.
__version__ = '0.5.2'
|
# Package version string.
__version__ = '0.5.1'
|
apache-2.0
|
Python
|
8c5de29849ca061ed528c93c36fb3479aef34e41
|
Update ipc_lista1.15.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.15.py
|
lista1/ipc_lista1.15.py
|
#ipc_lista1.15
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#
qHora = input("Quanto você ganha por hora: ")
hT = input("Quantas horas você trabalhou: ")
SalBruto = qHora
ir = (11/100.0 * salBruto)
inss = (8/100.0m* SalBruto)
sindicato = (5/100.0 * SalBruto)
vT = ir + sindicato
SalLiq = SalBruto - vT
print "Seu salário bruto e: ",SalBruto
|
#ipc_lista1.15
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#
qHora = input("Quanto você ganha por hora: ")
hT = input("Quantas horas você trabalhou: ")
SalBruto = qHora
ir = (11/100.0 * salBruto)
inss = (8/100.0m* SalBruto)
sindicato = (5/100.0 * SalBruto)
vT = ir + sindicato
SalLiq = SalBruto - vT
print "Seu salário bruto e: ", SalBruto
|
apache-2.0
|
Python
|
6653e7d9289290b97f06e2b46314a12e194bfc6a
|
Update ipc_lista1.18.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.18.py
|
lista1/ipc_lista1.18.py
|
#ipc_lista1.18
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça o tamanho de um arquivo para download(em MB) e a velocidade de um link de internet (em Mbps), calcule e informe o tempo aproximado de downloado do arquivo usando este link (em minutos).
# (EN) Ask for a file size (MB) and a link speed (Mbps), then report the
# approximate download time in minutes.
# NOTE(review): only the first prompt is implemented; the speed prompt and
# the time computation are still missing.
arquivo = input("Informe o tamanho do arquivo para download (em MB): ")
|
#ipc_lista1.18
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça o tamanho de um arquivo para download(em MB) e a velocidade de um link de internet (em Mbps), calcule e informe o tempo aproximado de downloado do arquivo usando este link (em minutos).
#
|
apache-2.0
|
Python
|
6c4d6587939b6e186fdc7c76448803559a00d5ea
|
Update ipc_lista2.02.py
|
any1m1c/ipc20161
|
lista2/ipc_lista2.02.py
|
lista2/ipc_lista2.02.py
|
#ipc_lista2.02
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça um valor e mostre na tela
|
#ipc_lista2.02
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça um valor e mostre
|
apache-2.0
|
Python
|
b029ecf823cb97c1d70a7599787bd798b9a95ac6
|
Update ipc_lista2.06.py
|
any1m1c/ipc20161
|
lista2/ipc_lista2.06.py
|
lista2/ipc_lista2.06.py
|
#ipc_lista2.06
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que leia três números e mostre o maior deles.
number1 =
number2
|
#ipc_lista2.06
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que leia três números e mostre o maior deles.
|
apache-2.0
|
Python
|
37a55d4d0d4dd5827f1cd3e8cc62f3ac59f645fb
|
Remove old 'MIDDLEWARE_CLASSES'.
|
alexsilva/django-xadmin,alexsilva/django-xadmin,alexsilva/django-xadmin,alexsilva/django-xadmin
|
xadmin/plugins/language.py
|
xadmin/plugins/language.py
|
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
    """Renders the language-switcher block in the admin top navigation."""

    def block_top_navmenu(self, context, nodes):
        context = get_context_dict(context)
        # Return the user to the page they were on after switching language.
        context['redirect_to'] = self.request.get_full_path()
        nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))

    def get_media(self, media):
        # Add the plugin's stylesheet to the page media.
        media = media + self.vendor('xadmin.plugin.language.css')
        return media
class SetLangView(BaseAdminView):
    """POST endpoint that switches the active language via django's
    ``set_language`` view."""

    def post(self, request, *args, **kwargs):
        # Drop the cached nav menu so it is rebuilt in the new language.
        if 'nav_menu' in request.session:
            del request.session['nav_menu']
        return set_language(request)


# Register the plugin and view only when i18n is actually enabled.
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE:
    site.register_plugin(SetLangNavPlugin, CommAdminView)
    site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')
|
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
    """Renders the language-switcher block in the admin top navigation."""

    def block_top_navmenu(self, context, nodes):
        context = get_context_dict(context)
        # Return the user to the page they were on after switching language.
        context['redirect_to'] = self.request.get_full_path()
        nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))

    def get_media(self, media):
        # Add the plugin's stylesheet to the page media.
        media = media + self.vendor('xadmin.plugin.language.css')
        return media
class SetLangView(BaseAdminView):
    """POST endpoint that switches the active language via django's
    ``set_language`` view."""

    def post(self, request, *args, **kwargs):
        # Drop the cached nav menu so it is rebuilt in the new language.
        if 'nav_menu' in request.session:
            del request.session['nav_menu']
        return set_language(request)


# Prefer the modern MIDDLEWARE setting; fall back to the legacy
# MIDDLEWARE_CLASSES name used by pre-1.10 Django projects.
middleware = getattr(settings, "MIDDLEWARE", None) or \
    settings.MIDDLEWARE_CLASSES
# Register the plugin and view only when i18n is actually enabled.
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in middleware:
    site.register_plugin(SetLangNavPlugin, CommAdminView)
    site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')
|
bsd-3-clause
|
Python
|
5b0386d0872d4106902655ada78389503c62a95a
|
Add some default feedback types for item requests
|
yunity/yunity-core,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend
|
yunity/models/relations.py
|
yunity/models/relations.py
|
from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
    """A conversation between a set of users."""
    participants = ManyToManyField(User)
    messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
    """Associates a mappable with a location, optionally bounded in time."""
    mappable = ForeignKey(Mappable)
    location = ForeignKey(Location)
    startTime = DateTimeField(null=True)
    endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
    """Who is responsible for a mappable, plus the grant-workflow state."""
    @classproperty
    def TYPE(cls):
        # Allowed values for ``type``.
        return cls.create_constants('type', 'OWNER')

    @classproperty
    def STATUS(cls):
        # Allowed values for ``status``.
        return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')

    responsible = ForeignKey(User, null=True)
    mappable = ForeignKey(Mappable)
    status = MaxLengthCharField()
    date = DateTimeField(null=True, auto_now=True)  # refreshed on every save
    type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
@classproperty
def FEEDBACK(cls):
return cls.create_constants('feedback', 'OK', 'NO_SHOW', 'NOT_GRANTED')
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
|
from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User, null=True)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
|
agpl-3.0
|
Python
|
38b93a1c71aae112ec5a18234b1814b49d41066c
|
Add a test for operator / with an integer.
|
ymoch/pyjpmesh
|
jpmesh/test/test_angle.py
|
jpmesh/test/test_angle.py
|
"""
Tests for jpmesh.angle.
"""
import unittest
from jpmesh.angle import Angle
class TestAngle(unittest.TestCase):
    """
    Tests for jpmesh.angle.Angle.
    """
    def test_properties(self):
        """
        Test unit-conversion properties round-trip from milliseconds.
        """
        # 3,600,000 ms of arc == 1 degree.
        millisecond = 3600000
        angle = Angle.from_millisecond(millisecond)
        self.assertEqual(angle.degree, float(millisecond) / 60 / 60 / 1000)
        self.assertEqual(angle.minute, float(millisecond) / 60 / 1000)
        self.assertEqual(angle.second, float(millisecond) / 1000)
        self.assertEqual(angle.millisecond, float(millisecond))
    def test_operators(self):
        """
        Test arithmetic operators against plain-float equivalents.
        """
        angle1 = Angle.from_degree(1.0)
        angle2 = Angle.from_degree(2.0)
        self.assertEqual(
            (angle1 + angle2).degree, angle1.degree + angle2.degree)
        self.assertEqual(
            (angle1 - angle2).degree, angle1.degree - angle2.degree)
        self.assertEqual((angle1 * 2).degree, angle1.degree * 2)
        # True division with both an int and a float divisor.
        self.assertEqual((angle1 / 2).degree, angle1.degree / 2)
        self.assertEqual((angle1 / 2.0).degree, angle1.degree / 2.0)
        self.assertEqual((angle2 // 2).degree, angle2.degree // 2)
        self.assertEqual((+angle1).degree, +angle1.degree)
        self.assertEqual((-angle1).degree, -angle1.degree)
        self.assertEqual(abs(angle1).degree, abs(angle1.degree))
|
"""
Tests for jpmesh.angle.
"""
import unittest
from jpmesh.angle import Angle
class TestAngle(unittest.TestCase):
"""
Tests for jpmesh.angle.Angle.
"""
def test_properties(self):
"""
Test for properties.
"""
millisecond = 3600000
angle = Angle.from_millisecond(millisecond)
self.assertEqual(angle.degree, float(millisecond) / 60 / 60 / 1000)
self.assertEqual(angle.minute, float(millisecond) / 60 / 1000)
self.assertEqual(angle.second, float(millisecond) / 1000)
self.assertEqual(angle.millisecond, float(millisecond))
def test_operators(self):
"""
Test for operators.
"""
angle1 = Angle.from_degree(1.0)
angle2 = Angle.from_degree(2.0)
self.assertEqual(
(angle1 + angle2).degree, angle1.degree + angle2.degree)
self.assertEqual(
(angle1 - angle2).degree, angle1.degree - angle2.degree)
self.assertEqual((angle1 * 2).degree, angle1.degree * 2)
self.assertEqual((angle1 / 2.0).degree, angle1.degree / 2.0)
self.assertEqual((angle2 // 2).degree, angle2.degree // 2)
self.assertEqual((+angle1).degree, +angle1.degree)
self.assertEqual((-angle1).degree, -angle1.degree)
self.assertEqual(abs(angle1).degree, abs(angle1.degree))
|
mit
|
Python
|
af4c5a72afb80ff59662cc6992ce3084fed75dfe
|
Fix dedupe not preserving order
|
muddyfish/PYKE,muddyfish/PYKE
|
node/deduplicate.py
|
node/deduplicate.py
|
#!/usr/bin/env python
from nodes import Node
class Deduplicate(Node):
    """``}`` node: remove duplicates from a sequence, keeping first-seen order."""
    char = "}"
    args = 1
    results = 1

    @Node.test_func([2], [4])
    @Node.test_func([1.5], [3])
    def double(self, inp: Node.number):
        """inp*2"""
        return inp*2

    @Node.test_func([[1,2,3,1,1]], [[1,2,3]])
    @Node.test_func(["hi!!!"], ["hi!"])
    def func(self, seq:Node.indexable):
        """remove duplicates from seq"""
        # ``seen_add`` is hoisted so the dedup test stays a single cheap call;
        # ``seen_add(x)`` returns None, so ``or`` admits x exactly once.
        seen = set()
        seen_add = seen.add
        if isinstance(seq, str):
            # Strings are returned unwrapped; other sequences come back as a
            # one-element result list — presumably the Node framework wraps
            # bare returns itself (TODO confirm against Node).
            return "".join(x for x in seq if not (x in seen or seen_add(x)))
        return[type(seq)([x for x in seq if not (x in seen or seen_add(x))])]
|
#!/usr/bin/env python
from nodes import Node
class Deduplicate(Node):
char = "}"
args = 1
results = 2
@Node.test_func([2], [4])
@Node.test_func([1.5], [3])
def double(self, inp: Node.number):
"""inp*2"""
self.results = 1
return inp*2
def func(self, seq:Node.indexable):
"""remove duplicates from seq"""
if isinstance(seq, str):
return "".join(set(seq))
return [type(seq)(set(seq))]
|
mit
|
Python
|
2c41bcc21f01be159be384fe5fa30c824dfb345c
|
Bump version to 14.0.0a7
|
genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio
|
resolwe_bio/__about__.py
|
resolwe_bio/__about__.py
|
"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '14.0.0a7'
__author__ = 'Genialis, Inc.'
__email__ = '[email protected]'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
|
"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '14.0.0a6'
__author__ = 'Genialis, Inc.'
__email__ = '[email protected]'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
|
apache-2.0
|
Python
|
05c319f868215f832e97577f5e158edf82fab074
|
Change version for next release
|
zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown,zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown,zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown
|
markdown/__version__.py
|
markdown/__version__.py
|
#
# markdown/__version__.py
#
# version_info should conform to PEP 386
# (major, minor, micro, alpha/beta/rc/final, #)
# (1, 1, 2, 'alpha', 0) => "1.1.2.dev"
# (1, 2, 0, 'beta', 2) => "1.2b2"
version_info = (2, 6, 0, 'zds', 8)
def _get_version():
" Returns a PEP 386-compliant version number from version_info. "
assert len(version_info) == 5
assert version_info[3] in ('alpha', 'beta', 'rc', 'final', 'zds')
parts = 2 if version_info[2] == 0 else 3
main = '.'.join(map(str, version_info[:parts]))
sub = ''
if version_info[3] == 'alpha' and version_info[4] == 0:
# TODO: maybe append some sort of git info here??
sub = '.dev'
elif version_info[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c', 'zds': 'post'}
sub = mapping[version_info[3]] + str(version_info[4])
return str(main + sub)
version = _get_version()
|
#
# markdown/__version__.py
#
# version_info should conform to PEP 386
# (major, minor, micro, alpha/beta/rc/final, #)
# (1, 1, 2, 'alpha', 0) => "1.1.2.dev"
# (1, 2, 0, 'beta', 2) => "1.2b2"
version_info = (2, 6, 0, 'zds', 7)
def _get_version():
" Returns a PEP 386-compliant version number from version_info. "
assert len(version_info) == 5
assert version_info[3] in ('alpha', 'beta', 'rc', 'final', 'zds')
parts = 2 if version_info[2] == 0 else 3
main = '.'.join(map(str, version_info[:parts]))
sub = ''
if version_info[3] == 'alpha' and version_info[4] == 0:
# TODO: maybe append some sort of git info here??
sub = '.dev'
elif version_info[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c', 'zds': 'post'}
sub = mapping[version_info[3]] + str(version_info[4])
return str(main + sub)
version = _get_version()
|
bsd-3-clause
|
Python
|
ab10f3d134065047a7260662d3c39295904795b8
|
Add fkey constraints at the same time
|
openannotation/annotateit,openannotation/annotateit
|
migration/versions/001_initial_migration.py
|
migration/versions/001_initial_migration.py
|
from sqlalchemy import *
from migrate import *
import annotateit
from annotateit import db
from annotateit.model import Consumer, User
meta = MetaData()
consumer = Table('consumer', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String),
Column('secret', String),
Column('ttl', Integer),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('user_id', Integer, ForeignKey('user.id')),
)
user = Table('user', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('username', String),
Column('email', String),
Column('password_hash', String),
Column('created_at', DateTime),
Column('updated_at', DateTime),
)
def upgrade(migrate_engine):
    # Bind the shared MetaData to the live engine before issuing DDL.
    meta.bind = migrate_engine
    # ``user`` first: ``consumer.user_id`` declares a FK onto it inline.
    user.create()
    consumer.create()
def downgrade(migrate_engine):
    meta.bind = migrate_engine
    # Reverse of upgrade(): drop the referencing table before its target.
    consumer.drop()
    user.drop()
|
from __future__ import print_function
from getpass import getpass
import readline
import sys
from sqlalchemy import *
from migrate import *
from migrate.changeset.constraint import ForeignKeyConstraint
import annotateit
from annotateit import db
from annotateit.model import Consumer, User
meta = MetaData()
consumer = Table('consumer', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String),
Column('secret', String),
Column('ttl', Integer),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('user_id', Integer),
)
user = Table('user', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('username', String),
Column('email', String),
Column('password_hash', String),
Column('created_at', DateTime),
Column('updated_at', DateTime),
)
consumer_user_id_fkey = ForeignKeyConstraint([consumer.c.user_id], [user.c.id])
def upgrade(migrate_engine):
meta.bind = migrate_engine
consumer.create()
user.create()
consumer_user_id_fkey.create()
def downgrade(migrate_engine):
    """Reverse upgrade(): remove the FK constraint, then drop both tables."""
    meta.bind = migrate_engine
    # BUG FIX: this previously called ``.create()`` — a downgrade must remove
    # the constraint, and must do so before dropping the tables it links.
    consumer_user_id_fkey.drop()
    consumer.drop()
    user.drop()
|
agpl-3.0
|
Python
|
1421866ac3c4e4f1f09d17019d058aa903597df5
|
Add new feature: find out is current week menu created already
|
Jntz/RuokalistaCommandLine
|
modules/menus_reader.py
|
modules/menus_reader.py
|
# -*- coding: utf-8 -*-
from json_reader import *
from config import *
def get_menus_data():
    """Load the menus JSON file, guaranteeing a dict with a "menus" key.

    Missing or malformed data (``None`` or a non-dict top level) is replaced
    by a fresh structure; any other keys of an existing dict are preserved.

    Returns:
        dict: always contains at least ``{"menus": {...}}``.
    """
    data = read_json_from_file(filenames["menus"])
    if not isinstance(data, dict):
        # File absent/corrupt (None) or wrong top-level type: start over.
        data = {}
    # Ensure the "menus" section exists without clobbering other keys.
    data.setdefault("menus", {})
    return data
def get_menus():
    # Convenience accessor for just the "menus" section of the data file.
    data = get_menus_data()
    return data["menus"]
def get_menu(index):  # get the menu stored under a specific key (e.g. a week)
    return get_menus()[index]
def is_week_menu_created(week):
    # A menu exists for *week* iff it is a key in the menus mapping.
    return week in get_menus() # True/False
|
# -*- coding: utf-8 -*-
from json_reader import *
from config import *
def get_menus_data():
old_data = read_json_from_file(filenames["menus"])
if old_data == None or type(old_data) is not dict: # rewrite old_data and create new recipe dictionary
# initialize new dict
old_data = {}
old_data["menus"] = {}
elif "menus" not in old_data and type(old_data) is dict: # save other data (maybe worthless)
# add new row: recipes
old_data["menus"] = {}
return old_data
def get_menus():
data = get_menus_data()
return data["menus"]
def get_menu(index): #get recipe with spesific index
pass
#return get_menus()[index]
|
mit
|
Python
|
8fe2a8735b11a834895d8a71fffa11349f890847
|
Fix broken test.
|
pombreda/py2neo,fpieper/py2neo,nicolewhite/py2neo,fpieper/py2neo,nicolewhite/py2neo,technige/py2neo,technige/py2neo,fpieper/py2neo,nigelsmall/py2neo,pombreda/py2neo,nicolewhite/py2neo,nigelsmall/py2neo,technige/py2neo,pombreda/py2neo
|
test/cypher/cypher_record_test.py
|
test/cypher/cypher_record_test.py
|
#/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from py2neo.cypher.core import RecordProducer
def test_record_field_access(graph):
    # Records support positional, key and attribute access interchangeably.
    statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name as name,a.age as age"
    for record in graph.cypher.stream(statement):
        alice = record[0]
        assert record[1] == alice.properties["name"]
        assert record[2] == alice.properties["age"]
        assert record["name"] == alice.properties["name"]
        assert record["age"] == alice.properties["age"]
        assert record.name == alice.properties["name"]
        assert record.age == alice.properties["age"]
def test_record_representation(graph):
    statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name,a.age"
    for record in graph.cypher.stream(statement):
        alice_id = record[0]._id
        # repr differs between interpreters: Python 2 shows u'' prefixes.
        if sys.version_info >= (3,):
            assert repr(record) == ("Record(columns=('a', 'a.name', 'a.age'), "
                                    "values=((n%s {age:33,name:\"Alice\"}), "
                                    "'Alice', 33))" % alice_id)
        else:
            assert repr(record) == ("Record(columns=(u'a', u'a.name', u'a.age'), "
                                    "values=((n%s {age:33,name:\"Alice\"}), "
                                    "u'Alice', 33))" % alice_id)
def test_producer_representation():
    producer = RecordProducer(["apple", "banana", "carrot"])
    assert repr(producer) == "RecordProducer(columns=('apple', 'banana', 'carrot'))"
def test_producer_length():
    producer = RecordProducer(["apple", "banana", "carrot"])
    assert len(producer) == 3
def test_producer_column_indexes():
    # Column order maps to positional indexes.
    producer = RecordProducer(["apple", "banana", "carrot"])
    assert producer.column_indexes == {"apple": 0, "banana": 1, "carrot": 2}
|
#/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from py2neo.cypher.core import RecordProducer
def test_record_field_access(graph):
statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name as name,a.age as age"
for record in graph.cypher.stream(statement):
alice = record[0]
assert record[1] == alice.properties["name"]
assert record[2] == alice.properties["age"]
assert record["name"] == alice.properties["name"]
assert record["age"] == alice.properties["age"]
assert record.name == alice.properties["name"]
assert record.age == alice.properties["age"]
def test_record_representation(graph):
statement = "CREATE (a {name:'Alice',age:33}) RETURN a,a.name,a.age"
for record in graph.cypher.stream(statement):
alice_id = record[0]._id
assert repr(record) == ("Record(columns=('a', 'a.name', 'a.age'), "
"values=((n%s {age:33,name:\"Alice\"}), 'Alice', 33))" % alice_id)
def test_producer_representation():
producer = RecordProducer(["apple", "banana", "carrot"])
assert repr(producer) == "RecordProducer(columns=('apple', 'banana', 'carrot'))"
def test_producer_length():
producer = RecordProducer(["apple", "banana", "carrot"])
assert len(producer) == 3
def test_producer_column_indexes():
producer = RecordProducer(["apple", "banana", "carrot"])
assert producer.column_indexes == {"apple": 0, "banana": 1, "carrot": 2}
|
apache-2.0
|
Python
|
c4c2b7936a89ffbf97d63e4b5d33ea59201c94b0
|
Add tests for mine.send
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
tests/integration/modules/mine.py
|
tests/integration/modules/mine.py
|
'''
Test the salt mine system
'''
import integration
class MineTest(integration.ModuleCase):
    '''
    Test the mine system
    '''
    def test_get(self):
        '''
        test mine.get and mine.update
        '''
        self.assertTrue(self.run_function('mine.update'))
        self.assertTrue(self.run_function('mine.get', ['minion', 'test.ping']))
    def test_send(self):
        '''
        test mine.send
        '''
        # An unknown function must be rejected ...
        self.assertFalse(self.run_function('mine.send', ['foo.__spam_and_cheese']))
        # ... while a real one is stored and retrievable via mine.get.
        self.assertTrue(self.run_function('mine.send', ['test.retcode']))
        self.assertTrue(self.run_function('mine.get', ['minion', 'test.retcode']))
|
'''
Test the salt mine system
'''
import integration
class MineTest(integration.ModuleCase):
'''
Test the mine system
'''
def test_get(self):
'''
test mine.get
'''
self.assertTrue(self.run_function('mine.update'))
self.assertTrue(self.run_function('mine.get', ['minion', 'test.ping']))
|
apache-2.0
|
Python
|
77fed481efb6da1091033e66b93324586664c221
|
Cover new optimization for regression test
|
kayhayen/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka
|
tests/optimizations/Operations.py
|
tests/optimizations/Operations.py
|
# Copyright 2016, Kay Hayen, mailto:[email protected]
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Unary ``not`` on compile-time constants: should constant-fold.
print(not bool)
print(not {})
print(not 7)
# TODO: Needs some SSA now.
# print(bool or len)
# print(False or dict)
print(type(Ellipsis))
# Constant ``in`` / ``not in`` over a string literal.
print("a" in "abba")
print("a" not in "abba")
# TODO: Add support for functions
# def testInplaceOperations():
#     x = 2
#     x += 1
#     x *= 2
#     x **= 2
#     x -= 8
#     x //= 5
#     x %= 3
#     x &= 2
#     x |= 5
#     x ^= 1
#     x /= 2
#
#     print(x)
# len() of a constant string repetition — the optimization under test here.
print(len("a"*10000))
print(len(10000*"a"))
|
# Copyright 2016, Kay Hayen, mailto:[email protected]
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
print(not bool)
print(not {})
print(not 7)
# TODO: Needs some SSA now.
# print(bool or len)
# print(False or dict)
print(type(Ellipsis))
print("a" in "abba")
print("a" not in "abba")
# TODO: Add support for functions
# def testInplaceOperations():
# x = 2
# x += 1
# x *= 2
# x **= 2
# x -= 8
# x //= 5
# x %= 3
# x &= 2
# x |= 5
# x ^= 1
# x /= 2
#
# print(x)
|
apache-2.0
|
Python
|
08d1873e39531a8cb453fab91e0bb1c95a236cd2
|
Update Copyright
|
tanghaibao/goatools,tanghaibao/goatools
|
tests/test_cli_write_hierarchy.py
|
tests/test_cli_write_hierarchy.py
|
#!/usr/bin/env python
"""Test that hierarchy below specified GO terms is printed."""
from __future__ import print_function
__copyright__ = "Copyright (c) 2017-2019, DV Klopfenstein. Haiboa Tang. All rights reserved."
from goatools.cli.wr_hierarchy import WrHierCli
# --o Output file in ASCII text format
# --no_indent Do not indent GO terms
# --max_indent max depth for printing relative to GO Term
# --num_child Print count of total number of children for each GO
# --concise If a branch has already been printed, do not re-print.
# Print '===' instead of dashes to note the point of compression
def test_cli():
    """Check WrHierCli argument parsing against expected keyword dicts."""
    # pylint: disable=bad-whitespace
    args_exp = [
        # args exp_set expected_dict
        # -------- ------- ---------------------
        ([], {'dag':'go-basic.obo', 'dash_len':6}),
        (['--dag=go-basic.obo'], {'dag':'go-basic.obo', 'dash_len':6}),
        (['-o rpt.txt'], {'dag':'go-basic.obo', 'dash_len':6, 'o':'rpt.txt'}),
        (['--max_indent=7'], {'dag':'go-basic.obo', 'dash_len':6, 'max_indent':7}),
        (['--concise'], {'dag':'go-basic.obo', 'dash_len':6, 'concise':True}),
        (['--no_indent'], {'dag':'go-basic.obo', 'dash_len':6, 'no_indent':True}),
        (['--concise', '--no_indent'], {'dag':'go-basic.obo', 'dash_len':6,
                                        'concise':True, 'no_indent':True}),
    ]
    for args, exp_dict in args_exp:
        print("ARGS={ARGS}".format(ARGS=args))
        print("EXP={EXP}".format(EXP=exp_dict))
        obj = WrHierCli(args)
        print("DCT: {DCT}".format(DCT=obj.kws))
        print("WWWWWWWWWWWWWWWWWWW WrHierCli", obj.kws)
        assert obj.kws == exp_dict, "DCT: ACT({}) != EXP({})".format(obj.kws, exp_dict)
        print("")
if __name__ == '__main__':
    test_cli()
# Copyright (c) 2017-2019, DV Klopfenstein, Haibao Tang. All rights reserved.
|
#!/usr/bin/env python
"""Test that hierarchy below specified GO terms is printed."""
from __future__ import print_function
__copyright__ = "Copyright (c) 2017-2018, DV Klopfenstein. Haiboa Tang. All rights reserved."
from goatools.cli.wr_hierarchy import WrHierCli
# --o Output file in ASCII text format
# --no_indent Do not indent GO terms
# --max_indent max depth for printing relative to GO Term
# --num_child Print count of total number of children for each GO
# --concise If a branch has already been printed, do not re-print.
# Print '===' instead of dashes to note the point of compression
def test_cli():
"""Add and remove markers for a file."""
# pylint: disable=bad-whitespace
args_exp = [
# args exp_set expected_dict
# -------- ------- ---------------------
([], {'dag':'go-basic.obo', 'dash_len':6}),
(['--dag=go-basic.obo'], {'dag':'go-basic.obo', 'dash_len':6}),
(['-o rpt.txt'], {'dag':'go-basic.obo', 'dash_len':6, 'o':'rpt.txt'}),
(['--max_indent=7'], {'dag':'go-basic.obo', 'dash_len':6, 'max_indent':7}),
(['--concise'], {'dag':'go-basic.obo', 'dash_len':6, 'concise':True}),
(['--no_indent'], {'dag':'go-basic.obo', 'dash_len':6, 'no_indent':True}),
(['--concise', '--no_indent'], {'dag':'go-basic.obo', 'dash_len':6,
'concise':True, 'no_indent':True}),
]
for args, exp_dict in args_exp:
print("ARGS={ARGS}".format(ARGS=args))
print("EXP={EXP}".format(EXP=exp_dict))
obj = WrHierCli(args)
print("DCT: {DCT}".format(DCT=obj.kws))
print("WWWWWWWWWWWWWWWWWWW WrHierCli", obj.kws)
assert obj.kws == exp_dict, "DCT: ACT({}) != EXP({})".format(obj.kws, exp_dict)
print("")
if __name__ == '__main__':
test_cli()
# Copyright (c) 2017-2018, DV Klopfenstein, Haibao Tang. All rights reserved.
|
bsd-2-clause
|
Python
|
b7eefbea83768bd006c677aa5d69cf2a24e6e909
|
Fix shebang line.
|
izrik/aeneas,izrik/aeneas
|
aeneas.py
|
aeneas.py
|
#!/usr/bin/env python
import argparse
from os import environ
def bool_from_str(s):
    """Parse common true/false spellings; fall back to ``bool(s)``.

    Accepts 'true'/'t'/'1'/'y' and 'false'/'f'/'0'/'n' (case-insensitive).
    Non-string values, and strings matching neither set, pass through
    ``bool``.
    """
    # BUG FIX: ``basestring`` only exists on Python 2, but the shebang now
    # invokes plain ``python`` — test against ``str`` so Python 3 works.
    if isinstance(s, str):
        s = s.lower()
        if s in ('true', 't', '1', 'y'):
            return True
        if s in ('false', 'f', '0', 'n'):
            return False
    return bool(s)
# Defaults come from the environment; AENEAS_DEBUG accepts truthy strings.
AENEAS_DEBUG = bool_from_str(environ.get('AENEAS_DEBUG', False))
DEFAULT_AENEAS_PORT = 4935
AENEAS_PORT = environ.get('AENEAS_PORT', DEFAULT_AENEAS_PORT)
try:
    AENEAS_PORT = int(AENEAS_PORT)
except (TypeError, ValueError):
    # Was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt; only conversion failures should fall back.
    AENEAS_PORT = DEFAULT_AENEAS_PORT
if __name__ == '__main__':
    # CLI flags override the environment-derived defaults above.
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', help='Run Flask in debug mode, with '
                                        'auto-reload and debugger page on '
                                        'errors.',
                        action='store_true', default=AENEAS_DEBUG)
    parser.add_argument('--port', help='The port on which to accept incoming '
                                       'HTTP requests. Default is {}.'.format(
                                           AENEAS_PORT),
                        action='store', default=AENEAS_PORT, type=int)
    args = parser.parse_args()
    print('aeneas.py')
|
#!/usr/bin/env python2
import argparse
from os import environ
def bool_from_str(s):
if isinstance(s, basestring):
s = s.lower()
if s in ['true', 't', '1', 'y']:
return True
if s in ['false', 'f', '0', 'n']:
return False
return bool(s)
AENEAS_DEBUG = bool_from_str(environ.get('AENEAS_DEBUG', False))
DEFAULT_AENEAS_PORT = 4935
AENEAS_PORT = environ.get('AENEAS_PORT', DEFAULT_AENEAS_PORT)
try:
AENEAS_PORT = int(AENEAS_PORT)
except:
AENEAS_PORT = DEFAULT_AENEAS_PORT
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--debug', help='Run Flask in debug mode, with '
'auto-reload and debugger page on '
'errors.',
action='store_true', default=AENEAS_DEBUG)
parser.add_argument('--port', help='The port on which to accept incoming '
'HTTP requests. Default is {}.'.format(
AENEAS_PORT),
action='store', default=AENEAS_PORT, type=int)
args = parser.parse_args()
print('aeneas.py')
|
agpl-3.0
|
Python
|
600b054b950b26db8609d71a75350aaa995bb26e
|
Add import export to Geography admin
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
scorecard/admin.py
|
scorecard/admin.py
|
from django.contrib import admin
from django.conf import settings
from django_q.tasks import async_task
from constance import config
from import_export import resources
from import_export.admin import ImportExportModelAdmin
from .models import (
Geography,
MunicipalityProfilesCompilation,
)
@admin.register(Geography)
class GeographyAdmin(ImportExportModelAdmin):
    # ImportExportModelAdmin adds CSV/XLSX import & export to the change list.
    list_display = ("geo_code", "geo_level", "name",)
@admin.register(MunicipalityProfilesCompilation)
class MunicipalityProfilesCompilationAdmin(admin.ModelAdmin):
    list_display = (
        "datetime",
        "user",
        "last_audit_year",
        "last_opinion_year",
        "last_uifw_year",
        "last_audit_quarter",
    )
    readonly_fields = (
        "user",
    )
    def get_form(self, request, obj=None, **kwargs):
        # Year/quarter fields are shown for transparency but locked to the
        # site-wide constance settings so compilations stay consistent.
        form = super(MunicipalityProfilesCompilationAdmin,
                     self).get_form(request, obj, **kwargs)
        form.base_fields["last_audit_year"].disabled = True
        form.base_fields["last_opinion_year"].disabled = True
        form.base_fields["last_uifw_year"].disabled = True
        form.base_fields["last_audit_quarter"].disabled = True
        form.base_fields["last_audit_year"].initial = config.LAST_AUDIT_YEAR
        form.base_fields["last_opinion_year"].initial = config.LAST_OPINION_YEAR
        form.base_fields["last_uifw_year"].initial = config.LAST_UIFW_YEAR
        form.base_fields["last_audit_quarter"].initial = config.LAST_AUDIT_QUARTER
        return form
    def get_exclude(self, request, obj=None):
        # Hide the user field on the add form; it is set from the request.
        if obj is None:
            return ("user",)
        else:
            return super(MunicipalityProfilesCompilationAdmin, self).get_exclude(request, obj)
    def save_model(self, request, obj, form, change):
        # Set the user to the current user
        obj.user = request.user
        # Process default save behavior
        super(MunicipalityProfilesCompilationAdmin, self).save_model(
            request, obj, form, change)
        # Queue the (long-running) compilation as a background django-q task.
        async_task(
            "scorecard.compile_profiles.compile_data",
            settings.API_URL,
            obj.last_audit_year,
            obj.last_opinion_year,
            obj.last_uifw_year,
            obj.last_audit_quarter,
            task_name="Compile municipal profiles"
        )
|
from django.contrib import admin
from django.conf import settings
from django_q.tasks import async_task
from constance import config
from .models import (
Geography,
MunicipalityProfilesCompilation,
)
@admin.register(Geography)
class GeographyAdmin(admin.ModelAdmin):
list_display = ("geo_code", "geo_level", "name",)
@admin.register(MunicipalityProfilesCompilation)
class MunicipalityProfilesCompilationAdmin(admin.ModelAdmin):
list_display = (
"datetime",
"user",
"last_audit_year",
"last_opinion_year",
"last_uifw_year",
"last_audit_quarter",
)
readonly_fields = (
"user",
)
def get_form(self, request, obj=None, **kwargs):
form = super(MunicipalityProfilesCompilationAdmin,
self).get_form(request, obj, **kwargs)
form.base_fields["last_audit_year"].disabled = True
form.base_fields["last_opinion_year"].disabled = True
form.base_fields["last_uifw_year"].disabled = True
form.base_fields["last_audit_quarter"].disabled = True
form.base_fields["last_audit_year"].initial = config.LAST_AUDIT_YEAR
form.base_fields["last_opinion_year"].initial = config.LAST_OPINION_YEAR
form.base_fields["last_uifw_year"].initial = config.LAST_UIFW_YEAR
form.base_fields["last_audit_quarter"].initial = config.LAST_AUDIT_QUARTER
return form
def get_exclude(self, request, obj=None):
if obj is None:
return ("user",)
else:
return super(MunicipalityProfilesCompilationAdmin, self).get_exclude(request, obj)
def save_model(self, request, obj, form, change):
# Set the user to the current user
obj.user = request.user
# Process default save behavior
super(MunicipalityProfilesCompilationAdmin, self).save_model(
request, obj, form, change)
# Queue task
async_task(
"scorecard.compile_profiles.compile_data",
settings.API_URL,
obj.last_audit_year,
obj.last_opinion_year,
obj.last_uifw_year,
obj.last_audit_quarter,
task_name="Compile municipal profiles"
)
|
mit
|
Python
|
6fb4bf39fd460b5f2d69d665d9e4d09e4279d88b
|
Add maximum page constraint for Pagination results
|
Mbarak-Mbigo/cp2_bucketlist
|
common/utils.py
|
common/utils.py
|
# common/utils.py
from flask import url_for, current_app
class PaginateData():
    """Paginate a Flask-SQLAlchemy query and serialise one page of results.

    Reads the page number and an optional ``limit`` from the request query
    string; page size is capped at 100 rows.
    """
    def __init__(self, request, query, resource_for_url, key_name, schema):
        self.request = request
        self.query = query
        self.resource_for_url = resource_for_url  # endpoint name for url_for
        self.key_name = key_name                  # key holding the items in the payload
        self.schema = schema                      # marshmallow schema for serialisation
        self.page_argument_name = current_app.config['PAGINATION_PAGE_ARGUMENT_NAME']
        if request.args.get('limit'):
            # Cap client-requested page sizes at 100 rows.
            # NOTE(review): a non-numeric ``limit`` raises ValueError here —
            # confirm whether callers sanitise it upstream.
            if int(request.args.get('limit')) > 100:
                self.results_per_page = 100
            else:
                self.results_per_page = int(request.args.get('limit'))
        else:
            self.results_per_page = current_app.config['DEFAULT_PAGINATION_PAGE_SIZE']
    def paginate_query(self):
        """Return one page of serialised items plus prev/next URLs and total count."""
        page_number = self.request.args.get(self.page_argument_name, 1, type=int)
        # error_out=False: an out-of-range page yields an empty page, not a 404.
        paginated_objects = self.query.paginate(page_number, per_page=self.results_per_page, error_out=False)
        objects = paginated_objects.items
        if paginated_objects.has_prev:
            previous_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number - 1, _external=True)
        else:
            previous_page_url = None
        if paginated_objects.has_next:
            next_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number + 1, _external=True)
        else:
            next_page_url = None
        dumped_objects = self.schema.dump(objects, many=True).data
        return ({
            self.key_name: dumped_objects,
            'previous': previous_page_url,
            'next': next_page_url,
            'count': paginated_objects.total
        })
|
# common/utils.py
from flask import url_for, current_app
class PaginateData():
    """Pagination helper.

    Paginate query results and build previous/next page links.

    NOTE(review): the client-supplied ``limit`` argument is used
    unclamped here, so a client can request arbitrarily large pages —
    confirm whether a maximum should be enforced.
    """
    def __init__(self, request, query, resource_for_url, key_name, schema):
        self.request = request
        self.query = query
        self.resource_for_url = resource_for_url  # endpoint name for url_for
        self.key_name = key_name                  # response key for the page items
        self.schema = schema                      # marshmallow schema for dumping
        self.page_argument_name = current_app.config['PAGINATION_PAGE_ARGUMENT_NAME']
        # Page size: explicit ?limit= wins, otherwise the configured default.
        if request.args.get('limit'):
            self.results_per_page = int(request.args.get('limit'))
        else:
            self.results_per_page = current_app.config['DEFAULT_PAGINATION_PAGE_SIZE']
    def paginate_query(self):
        """Return one page of serialized results plus navigation metadata."""
        page_number = self.request.args.get(self.page_argument_name, 1, type=int)
        # error_out=False: an out-of-range page yields an empty page, not a 404.
        paginated_objects = self.query.paginate(page_number, per_page=self.results_per_page, error_out=False)
        objects = paginated_objects.items
        if paginated_objects.has_prev:
            previous_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number - 1, _external=True)
        else:
            previous_page_url = None
        if paginated_objects.has_next:
            next_page_url = url_for(
                self.resource_for_url, limit=self.results_per_page, page=page_number + 1, _external=True)
        else:
            next_page_url = None
        dumped_objects = self.schema.dump(objects, many=True).data
        return ({
            self.key_name: dumped_objects,
            'previous': previous_page_url,
            'next': next_page_url,
            'count': paginated_objects.total
        })
|
mit
|
Python
|
9c38604fa8e85efeed723b47e3486f9d09ea9858
|
Fix login cookie
|
phihag/adhocracy,DanielNeugebauer/adhocracy,SysTheron/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,alkadis/vcv,SysTheron/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,phihag/adhocracy,phihag/adhocracy,SysTheron/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv
|
adhocracy/lib/auth/instance_auth_tkt.py
|
adhocracy/lib/auth/instance_auth_tkt.py
|
import datetime
from repoze.who.plugins.auth_tkt import AuthTktCookiePlugin, _now
from pylons import config
from paste.deploy.converters import asbool
class InstanceAuthTktCookiePlugin(AuthTktCookiePlugin):
    """Auth-tkt cookie plugin that scopes the ticket cookie per instance.

    Chooses between a host-relative cookie (when
    ``adhocracy.relative_urls`` is enabled) and a wildcard-domain cookie
    derived from ``adhocracy.domain``.
    """

    def _get_cookies(self, environ, value, max_age=None):
        """Build the ``Set-Cookie`` header pair(s) for the ticket *value*.

        :param environ: WSGI environ; ``adhocracy.domain`` is consulted
            unless relative URLs are configured.
        :param value: serialized ticket value stored in the cookie.
        :param max_age: lifetime in seconds, or None for a session cookie.
        """
        if max_age is None:
            max_age = ''
        else:
            expiry = _now() + datetime.timedelta(seconds=int(max_age))
            # Wdy, DD-Mon-YY HH:MM:SS GMT
            expires = expiry.strftime('%a, %d %b %Y %H:%M:%S')
            # the Expires header is *required* at least for IE7 (IE7 does
            # not respect Max-Age)
            max_age = "; Max-Age=%s; Expires=%s" % (max_age, expires)
        if asbool(config.get('adhocracy.relative_urls', 'false')):
            # Serve the cookie for the current host, which may be
            # "localhost" or an IP address.
            header_value = '%s="%s"; Path=/; %s' % (
                self.cookie_name, value, max_age)
        else:
            # Widen the configured instance domain (sans port) with a
            # leading dot so subdomains share the cookie.
            cur_domain = environ.get('adhocracy.domain').split(':')[0]
            wild_domain = '.' + cur_domain
            header_value = '%s="%s"; Path=/; Domain=%s%s' % (
                self.cookie_name, value, wild_domain, max_age)
        return [('Set-Cookie', header_value)]
|
import datetime
from repoze.who.plugins.auth_tkt import AuthTktCookiePlugin, _now
class InstanceAuthTktCookiePlugin(AuthTktCookiePlugin):
    """Auth-tkt cookie plugin that issues the ticket cookie for the
    wildcard instance domain (e.g. ``.example.com``)."""
    def _get_cookies(self, environ, value, max_age=None):
        # Build the Set-Cookie header(s) for the ticket *value*.
        if max_age is not None:
            later = _now() + datetime.timedelta(seconds=int(max_age))
            # Wdy, DD-Mon-YY HH:MM:SS GMT
            expires = later.strftime('%a, %d %b %Y %H:%M:%S')
            # the Expires header is *required* at least for IE7 (IE7 does
            # not respect Max-Age)
            max_age = "; Max-Age=%s; Expires=%s" % (max_age, expires)
        else:
            max_age = ''
        # Cookie domain: configured instance domain stripped of any port,
        # widened with a leading dot so subdomains share the cookie.
        cur_domain = environ.get('adhocracy.domain').split(':')[0]
        wild_domain = '.' + cur_domain
        cookies = [
            #('Set-Cookie', '%s="%s"; Path=/%s' % (
            #self.cookie_name, value, max_age)),
            #('Set-Cookie', '%s="%s"; Path=/; Domain=%s%s' % (
            #self.cookie_name, value, cur_domain, max_age)),
            ('Set-Cookie', '%s="%s"; Path=/; Domain=%s%s' % (
                self.cookie_name, value, wild_domain, max_age))
        ]
        return cookies
|
agpl-3.0
|
Python
|
10dc027ee15428d7ca210e0b74e5ae9274de0fa8
|
Use raw_input instead of the unmodified words
|
YChrisZhang/PythonCrawler
|
lianXiangCi.py
|
lianXiangCi.py
|
#coding:utf-8
import urllib
import urllib2
import re
from random import choice
ipList=['120.76.115.134:80','222.83.14.145:3128','119.188.94.145:80']
thisIp=choice(ipList)
input = raw_input("Please input your key words:")
keyWord=urllib.quote(input)
url='http://search.sina.com.cn/iframe/suggest/index.php?q='+keyWord
headers={
'Get':url,
'Host':'search.sina.com.cn',
'Referer':'http://search.sina.com.cn/',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36'
}
proxy_support = urllib2.ProxyHandler({'http': 'http://'+thisIp})
opener=urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
req=urllib2.Request(url)
for key in headers:
req.add_header(key,headers[key])
html=urllib2.urlopen(req).read()
file=open('C:\Users\Ryan\Desktop\lianXC.txt','w')
file.write(html)
|
#coding:utf-8
import urllib
import urllib2
import re
from random import choice
ipList=['120.76.115.134:80','222.83.14.145:3128','119.188.94.145:80']
thisIp=choice(ipList)
keyWord=urllib.quote('科学')
url='http://search.sina.com.cn/iframe/suggest/index.php?q='+keyWord
headers={
'Get':url,
'Host':'search.sina.com.cn',
'Referer':'http://search.sina.com.cn/',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36'
}
proxy_support = urllib2.ProxyHandler({'http': 'http://'+thisIp})
opener=urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
req=urllib2.Request(url)
for key in headers:
req.add_header(key,headers[key])
html=urllib2.urlopen(req).read()
file=open('C:\Users\Ryan\Desktop\lianXC.txt','w')
file.write(html)
|
mit
|
Python
|
5e13528a3c2912b4238f8f531a8f2c2652292b0a
|
Add init_db
|
ForumOrganisation/forum-prod,ForumOrganisation/forum-fra,ForumOrganisation/forum-prod,ForumOrganisation/forum-prod
|
scripts/init_db.py
|
scripts/init_db.py
|
import os
from pymongo import MongoClient
def main():
    """One-off maintenance entry point: connect to Mongo and run data fixes."""
    try:
        client = MongoClient(host=os.environ.get('MONGODB_URI'))
        db = client.get_default_database()
        # tasks
        # db.users.create_index(keys='id', name='index_id', unique=True)
        # create_admin(db)
        # Backfill: mark users as registered for the 'joi' event if they
        # registered for either of its sub-events.
        db.users.update_many({'$or': [{'events.joi.conference.registered': True}, {'events.joi.table_ronde.registered': True}]},
                             {'$set': {'events.joi.registered': True}})
    except Exception as e:
        # Best-effort script: report the failure instead of crashing.
        print(e)
def create_admin(db):
    """Insert the admin account into ``db.companies``.

    Credentials come from the ``ADMIN_ID`` / ``ADMIN_PASSWORD``
    environment variables. Insertion failures (e.g. a duplicate key when
    the admin already exists) are reported but not raised, so the script
    stays idempotent.
    """
    admin_data = dict(id=os.environ.get('ADMIN_ID'), password=os.environ.get('ADMIN_PASSWORD'))
    try:
        db.companies.insert_one(admin_data)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('admin already exists')
if __name__ == '__main__':
main()
|
import os
from pymongo import MongoClient
def main():
try:
client = MongoClient(host=os.environ.get('MONGODB_URI'))
db = client.get_default_database()
# tasks
db.users.create_index(keys='id', name='index_id', unique=True)
create_admin(db)
except Exception as e:
print(e)
def create_admin(db):
admin_data = dict(id=os.environ.get('ADMIN_ID'), password=os.environ.get('ADMIN_PASSWORD'))
try:
db.companies.insert_one(admin_data)
except:
print('admin already exists')
if __name__ == '__main__':
main()
|
mit
|
Python
|
d85a27db790bb4bc1926fc78bcf0f2d54c876e48
|
bump version
|
SunPower/PVMismatch
|
pvmismatch/__init__.py
|
pvmismatch/__init__.py
|
# -*- coding: utf-8 -*-
"""
This is the PVMismatch Package. It contains :mod:`~pvmismatch.pvmismatch_lib`
and :mod:`~pvmismatch.pvmismatch_tk`.
:mod:`~pvmismatch.pvmismatch_lib`
=================================
This package contains the basic library modules, methods, classes and
attributes to model PV system mismatch.
.. note::
The main library classes and modules are exposed through this package for
convenience.
For example::
>>> from pvmismatch import PVcell # imports the PVcell class
>>> # import pvconstants, pvcell, pvmodule, pvstring and pvsystem
>>> from pvmismatch import *
:mod:`~pvmismatch.pvmismatch_tk`
================================
This package contains an application that can be run using
:mod:`pvmismatch.pv_tk`.
"""
# import pvmismatch_lib modules so to match old API
import pvmismatch.pvmismatch_lib.pvconstants as pvconstants
import pvmismatch.pvmismatch_lib.pvcell as pvcell
import pvmismatch.pvmismatch_lib.pvmodule as pvmodule
import pvmismatch.pvmismatch_lib.pvstring as pvstring
import pvmismatch.pvmismatch_lib.pvsystem as pvsystem
import pvmismatch.pvmismatch_lib.pvexceptions as pvexceptions
# expose constructors to package's top level
PVconstants = pvconstants.PVconstants
PVcell = pvcell.PVcell
PVmodule = pvmodule.PVmodule
PVstring = pvstring.PVstring
PVsystem = pvsystem.PVsystem
__author__ = 'mmikofski'
__version__ = '2.1'
__release__ = 'Himalayan Ham'
__all__ = ['pvconstants', 'pvcell', 'pvmodule', 'pvstring', 'pvsystem']
|
# -*- coding: utf-8 -*-
"""
This is the PVMismatch Package. It contains :mod:`~pvmismatch.pvmismatch_lib`
and :mod:`~pvmismatch.pvmismatch_tk`.
:mod:`~pvmismatch.pvmismatch_lib`
=================================
This package contains the basic library modules, methods, classes and
attributes to model PV system mismatch.
.. note::
The main library classes and modules are exposed through this package for
convenience.
For example::
>>> from pvmismatch import PVcell # imports the PVcell class
>>> # import pvconstants, pvcell, pvmodule, pvstring and pvsystem
>>> from pvmismatch import *
:mod:`~pvmismatch.pvmismatch_tk`
================================
This package contains an application that can be run using
:mod:`pvmismatch.pv_tk`.
"""
# import pvmismatch_lib modules so to match old API
import pvmismatch.pvmismatch_lib.pvconstants as pvconstants
import pvmismatch.pvmismatch_lib.pvcell as pvcell
import pvmismatch.pvmismatch_lib.pvmodule as pvmodule
import pvmismatch.pvmismatch_lib.pvstring as pvstring
import pvmismatch.pvmismatch_lib.pvsystem as pvsystem
import pvmismatch.pvmismatch_lib.pvexceptions as pvexceptions
# expose constructors to package's top level
PVconstants = pvconstants.PVconstants
PVcell = pvcell.PVcell
PVmodule = pvmodule.PVmodule
PVstring = pvstring.PVstring
PVsystem = pvsystem.PVsystem
__author__ = 'mmikofski'
__version__ = '2.0'
__release__ = 'Himalayan Honey'
__all__ = ['pvconstants', 'pvcell', 'pvmodule', 'pvstring', 'pvsystem']
|
bsd-3-clause
|
Python
|
45a624e18b9b870163a5852bd4d06c59b3bb5ac2
|
Fix base for got on pie binaries (#618)
|
pwndbg/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg,anthraxx/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg,pwndbg/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg
|
pwndbg/commands/got.py
|
pwndbg/commands/got.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import pwndbg.chain
import pwndbg.commands
import pwndbg.enhance
import pwndbg.file
import pwndbg.which
import pwndbg.wrappers.checksec
import pwndbg.wrappers.readelf
from pwndbg.color import message
parser = argparse.ArgumentParser(description='Show the state of the Global Offset Table')
parser.add_argument('name_filter', help='Filter results by passed name.',
type=str, nargs='?', default='')
@pwndbg.commands.ArgparsedCommand(parser)
@pwndbg.commands.OnlyWhenRunning
def got(name_filter=''):
    """Print the GOT JUMP_SLOT entries (optionally filtered by name),
    the binary's RELRO protection status, and the pointer chain each
    slot currently resolves to."""
    relro_status = pwndbg.wrappers.checksec.relro_status()
    pie_status = pwndbg.wrappers.checksec.pie_status()
    jmpslots = list(pwndbg.wrappers.readelf.get_jmpslots())
    if not len(jmpslots):
        print(message.error("NO JUMP_SLOT entries available in the GOT"))
        return
    if "PIE enabled" in pie_status:
        # For PIE binaries readelf reports offsets, not absolute
        # addresses; fetch the ELF's actual load address to rebase below.
        bin_base = pwndbg.elf.exe().address
    # Color the RELRO status: off (none) / warn (partial) / on (full).
    relro_color = message.off
    if 'Partial' in relro_status:
        relro_color = message.warn
    elif 'Full' in relro_status:
        relro_color = message.on
    print("\nGOT protection: %s | GOT functions: %d\n " % (relro_color(relro_status), len(jmpslots)))
    for line in jmpslots:
        # readelf relocation line: address, info, type, value, symbol name.
        address, info, rtype, value, name = line.split()[:5]
        if name_filter not in name:
            continue
        address_val = int(address, 16)
        if "PIE enabled" in pie_status: # if PIE, address is only the offset from the binary base address
            address_val = bin_base + address_val
        got_address = pwndbg.memory.pvoid(address_val)
        print("[0x%x] %s -> %s" % (address_val, message.hint(name), pwndbg.chain.format(got_address)))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import pwndbg.chain
import pwndbg.commands
import pwndbg.enhance
import pwndbg.file
import pwndbg.which
import pwndbg.wrappers.checksec
import pwndbg.wrappers.readelf
from pwndbg.color import message
parser = argparse.ArgumentParser(description='Show the state of the Global Offset Table')
parser.add_argument('name_filter', help='Filter results by passed name.',
type=str, nargs='?', default='')
@pwndbg.commands.ArgparsedCommand(parser)
@pwndbg.commands.OnlyWhenRunning
def got(name_filter=''):
relro_status = pwndbg.wrappers.checksec.relro_status()
pie_status = pwndbg.wrappers.checksec.pie_status()
jmpslots = list(pwndbg.wrappers.readelf.get_jmpslots())
if not len(jmpslots):
print(message.error("NO JUMP_SLOT entries available in the GOT"))
return
if "PIE enabled" in pie_status:
bin_text_base = pwndbg.vmmap.find(pwndbg.elf.entry()).start
relro_color = message.off
if 'Partial' in relro_status:
relro_color = message.warn
elif 'Full' in relro_status:
relro_color = message.on
print("\nGOT protection: %s | GOT functions: %d\n " % (relro_color(relro_status), len(jmpslots)))
for line in jmpslots:
address, info, rtype, value, name = line.split()[:5]
if name_filter not in name:
continue
address_val = int(address, 16)
if "PIE enabled" in pie_status: # if PIE, address is only the offset from the binary base address
address_val = bin_text_base + address_val
got_address = pwndbg.memory.pvoid(address_val)
print("[0x%x] %s -> %s" % (address_val, message.hint(name), pwndbg.chain.format(got_address)))
|
mit
|
Python
|
a735f36102b370415b9f49dae4ee3c5ceda30136
|
Update cycling_light.py
|
jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi
|
apps/hue/cycling_light.py
|
apps/hue/cycling_light.py
|
# -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
import httplib
import time
conn = httplib.HTTPConnection("10.xxx.xxx.xxxx")
hue_uid = "c274b3c285d19cfxxxxxxxxxx"
restcmd = "/api"+hue_uid+"/lights"
str = " "
xhue = [10000,25000,46000,56280]
def shifthue() :
global str
global xhue
xhue.insert(0,xhue[-1])
xhue = xhue[0:4]
print xhue
callurl = restcmd + "/4/state"
try:
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
except:
print "keep goging...."
time.sleep(2)
time.sleep(1)
for num in [3,2,1,4] :
callurl = restcmd + "/%s/state"%(num)
print callurl
huenumber = (xhue[4-num])
try :
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
time.sleep(1)
conn.request("PUT",callurl ,'{"on":true, "sat":254, "bri":254, "hue":%s}'%huenumber)
response = conn.getresponse()
data = response.read()
print data
time.sleep(1)
except:
print "exception conn.getresponse from Hue GW"
time.sleep(2)
if __name__ == "__main__":
# print web()
while True :
shifthue()
time.sleep(5
|
# -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
import httplib
import time
conn = httplib.HTTPConnection("10.xxx.xxx.xxxx")
hue_uid = "c274b3c285d19cfxxxxxxxxxx"
restcmd = "/api"+hue_uid+"/lights"
str = " "
xhue = [10000,25000,46000,56280]
def shifthue() :
global str
global xhue
xhue.insert(0,xhue[-1])
xhue = xhue[0:4]
print xhue
callurl = restcmd + "/4/state"
try:
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
except:
print "keep goging...."
time.sleep(2)
time.sleep(1)
for num in [3,2,1,4] :
callurl = restcmd + "/%s/state"%(num)
print callurl
huenumber = (xhue[4-num])
try :
conn.request("PUT",callurl ,'{"on":false}')
response = conn.getresponse()
data = response.read()
time.sleep(1)
conn.request("PUT",callurl ,'{"on":true, "sat":254, "bri":254, "hue":%s}'%huenumber)
response = conn.getresponse()
data = response.read()
print data
time.sleep(1)
except:
print "exception conn.getresponse from Hue GW"
time.sleep(2)
if __name__ == "__main__":
# print web()
while True :
shifthue()
time.sleep(5
|
bsd-2-clause
|
Python
|
1f53a12d60b22263f35a40bf0a7d4cb5396c14e8
|
add docstrings in Events
|
TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl
|
AlphaTwirl/Events/Events.py
|
AlphaTwirl/Events/Events.py
|
# Tai Sakuma <[email protected]>
##____________________________________________________________________________||
class Events(object):
    """An iterative object for events.

    Wraps a ROOT TTree so each entry can be visited with a plain ``for``
    loop, with branches readable as attributes.

    Examples
    --------
    inputFile = ROOT.TFile.Open(inputPath)
    tree = inputFile.Get(treeName)
    events = Events(tree)
    for event in events:
        event.jet_pt # a branch can be accessed as an attribute

    """
    def __init__(self, tree, maxEvents = -1):
        # Keep a reference to the owning file so it won't be closed (and
        # the tree invalidated) while this object is alive.
        self.file = tree.GetDirectory() # so a file won't close
        self.tree = tree
        # Cap the entry count at maxEvents unless it is -1 (no cap).
        self.nEvents = min(self.tree.GetEntries(), maxEvents) if (maxEvents > -1) else self.tree.GetEntries()
        # Index of the entry currently loaded; -1 means "not iterating".
        self.iEvent = -1
    def __iter__(self):
        # GetEntry loads the entry's data into the tree in place, so the
        # same object (self) is yielded for every event.
        for self.iEvent in xrange(self.nEvents):
            self.tree.GetEntry(self.iEvent)
            yield self
        self.iEvent = -1
    def __getattr__(self, name):
        # Fall back to the tree so branches read like event attributes.
        return getattr(self.tree, name)
##____________________________________________________________________________||
|
# Tai Sakuma <[email protected]>
##____________________________________________________________________________||
class Events(object):
def __init__(self, tree, maxEvents = -1):
self.file = tree.GetDirectory() # so a file won't close
self.tree = tree
self.nEvents = min(self.tree.GetEntries(), maxEvents) if (maxEvents > -1) else self.tree.GetEntries()
self.iEvent = -1
def __iter__(self):
for self.iEvent in xrange(self.nEvents):
self.tree.GetEntry(self.iEvent)
yield self
self.iEvent = -1
def __getattr__(self, name):
return getattr(self.tree, name)
##____________________________________________________________________________||
|
bsd-3-clause
|
Python
|
c8d88489f3a4ce01ea8936440daec378b983e768
|
Fix CLI typo.
|
peak6/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,peak6/st2,Plexxi/st2,peak6/st2,nzlosh/st2,StackStorm/st2,tonybaloney/st2,Plexxi/st2,tonybaloney/st2,Plexxi/st2,tonybaloney/st2,StackStorm/st2,nzlosh/st2
|
st2client/st2client/models/action_alias.py
|
st2client/st2client/models/action_alias.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2client.models import core
__all__ = [
'ActionAlias',
'ActionAliasMatch'
]
class ActionAlias(core.Resource):
    """CLI resource model for the ``/actionalias`` API endpoint."""
    _alias = 'Action-Alias'
    _display_name = 'Action Alias'
    _plural = 'ActionAliases'
    _plural_display_name = 'Action Aliases'
    _url_path = 'actionalias'
    # Attributes shown in the resource's repr/listing output.
    _repr_attributes = ['name', 'pack', 'action_ref']
class ActionAliasMatch(core.Resource):
    """CLI resource model for matching a command string against aliases
    (shares the ``/actionalias`` URL path)."""
    _alias = 'Action-Alias-Match'
    _display_name = 'ActionAlias Match'
    _plural = 'ActionAliasMatches'
    _plural_display_name = 'Action Alias Matches'
    _url_path = 'actionalias'
    # Attributes shown in the resource's repr/listing output.
    _repr_attributes = ['command']
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2client.models import core
__all__ = [
'ActionAlias',
'ActionAliasMatch'
]
class ActionAlias(core.Resource):
_alias = 'Action-Alias'
_display_name = 'Action Alias'
_plural = 'ActionAliases'
_plural_display_name = 'Runners'
_url_path = 'actionalias'
_repr_attributes = ['name', 'pack', 'action_ref']
class ActionAliasMatch(core.Resource):
_alias = 'Action-Alias-Match'
_display_name = 'ActionAlias Match'
_plural = 'ActionAliasMatches'
_plural_display_name = 'Action Alias Matches'
_url_path = 'actionalias'
_repr_attributes = ['command']
|
apache-2.0
|
Python
|
03fec45fa269a8badbe047b4911c655c3c952404
|
Use consistent CLI command names.
|
alfasin/st2,Itxaka/st2,StackStorm/st2,StackStorm/st2,alfasin/st2,punalpatel/st2,punalpatel/st2,dennybaa/st2,alfasin/st2,punalpatel/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,peak6/st2,pixelrebel/st2,armab/st2,tonybaloney/st2,grengojbo/st2,Plexxi/st2,grengojbo/st2,emedvedev/st2,Plexxi/st2,nzlosh/st2,pixelrebel/st2,StackStorm/st2,dennybaa/st2,dennybaa/st2,armab/st2,emedvedev/st2,lakshmi-kannan/st2,tonybaloney/st2,pixelrebel/st2,tonybaloney/st2,Itxaka/st2,grengojbo/st2,nzlosh/st2,emedvedev/st2,Itxaka/st2,nzlosh/st2,armab/st2,lakshmi-kannan/st2,peak6/st2,lakshmi-kannan/st2,peak6/st2
|
st2client/st2client/models/action_alias.py
|
st2client/st2client/models/action_alias.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2client.models import core
__all__ = [
'ActionAlias'
]
class ActionAlias(core.Resource):
    """CLI resource model for the ``/actionalias`` API endpoint."""
    _alias = 'Action-Alias'
    _display_name = 'Action Alias'
    _plural = 'ActionAliases'
    # Fixed copy/paste typo: this read 'Runners', the plural display name
    # of a different resource; CLI output now says 'Action Aliases'.
    _plural_display_name = 'Action Aliases'
    _url_path = 'actionalias'
    # Attributes shown in the resource's repr/listing output.
    _repr_attributes = ['name', 'pack', 'action_ref']
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2client.models import core
__all__ = [
'ActionAlias'
]
class ActionAlias(core.Resource):
_alias = 'ActionAlias'
_display_name = 'Action Alias'
_plural = 'ActionAliases'
_plural_display_name = 'Runners'
_url_path = 'actionalias'
_repr_attributes = ['name', 'pack', 'action_ref']
|
apache-2.0
|
Python
|
be6237173274c98d646f50ad87d4b7bcd38a5707
|
Use Unicode strings for file paths
|
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
|
pylinks/links/utils.py
|
pylinks/links/utils.py
|
import six
from django.conf import settings
from django.core.exceptions import ValidationError
from pyuploadcare.dj import FileField
from pyuploadcare.api_resources import File
from pyuploadcare.exceptions import InvalidRequestError
# Patch File to pass through S3 URLs
class LinkFile(File):
    """File wrapper that passes legacy S3 paths (``links/...``) through
    unchanged instead of treating them as Uploadcare UUIDs/CDN URLs."""
    def __init__(self, cdn_url_or_file_id):
        matches = cdn_url_or_file_id.startswith('links/')
        if matches:
            # Legacy S3 object: remember the path; there is no UUID.
            self.s3_path = cdn_url_or_file_id
            self.uuid = None
        else:
            self.s3_path = None
            File.__init__(self, cdn_url_or_file_id)
    @property
    def cdn_url(self):
        """Public URL: built from the S3 bucket for legacy files,
        otherwise delegated to the Uploadcare CDN URL."""
        if self.s3_path is not None:
            return u'http://s3.amazonaws.com/{bucket}/media/{path}'.format(bucket=settings.AWS_STORAGE_BUCKET_NAME, path=self.s3_path)
        else:
            return File.cdn_url.fget(self)
    def __repr__(self):
        if self.uuid is None:
            return u'<LinkFile {s3_path}>'.format(s3_path=self.s3_path)
        else:
            # u-prefixed for consistency with the s3_path branch above —
            # the commit's intent ("Use Unicode strings") missed this one.
            return u'<LinkFile {uuid}>'.format(uuid=self.uuid)
# Patch FileField to return LinkFile instances
class LinkFileField(FileField):
    """FileField variant whose ``to_python`` yields :class:`LinkFile`
    instances, so legacy S3 paths are handled alongside Uploadcare IDs."""
    def to_python(self, value):
        # Preserve empty values untouched (None / empty string).
        if value is None or value == '':
            return value
        # Already a (Link)File instance: pass through unchanged.
        if isinstance(value, File):
            return value
        if not isinstance(value, six.string_types):
            raise ValidationError(
                'Invalid value for a field: string was expected'
            )
        try:
            # Strings become LinkFile (accepts S3 paths and CDN URLs/UUIDs).
            return LinkFile(value)
        except InvalidRequestError as exc:
            raise ValidationError(
                'Invalid value for a field: {exc}'.format(exc=exc)
            )
|
import six
from django.conf import settings
from django.core.exceptions import ValidationError
from pyuploadcare.dj import FileField
from pyuploadcare.api_resources import File
from pyuploadcare.exceptions import InvalidRequestError
# Patch File to pass through S3 URLs
class LinkFile(File):
def __init__(self, cdn_url_or_file_id):
matches = cdn_url_or_file_id.startswith('links/')
if matches:
self.s3_path = cdn_url_or_file_id
self.uuid = None
else:
self.s3_path = None
File.__init__(self, cdn_url_or_file_id)
@property
def cdn_url(self):
if self.s3_path is not None:
return 'http://s3.amazonaws.com/{bucket}/media/{path}'.format(bucket=settings.AWS_STORAGE_BUCKET_NAME, path=self.s3_path)
else:
return File.cdn_url.fget(self)
def __repr__(self):
if self.uuid is None:
return '<LinkFile {s3_path}>'.format(s3_path=self.s3_path)
else:
return '<LinkFile {uuid}>'.format(uuid=self.uuid)
# Patch FileField to return LinkFile instances
class LinkFileField(FileField):
def to_python(self, value):
if value is None or value == '':
return value
if isinstance(value, File):
return value
if not isinstance(value, six.string_types):
raise ValidationError(
'Invalid value for a field: string was expected'
)
try:
return LinkFile(value)
except InvalidRequestError as exc:
raise ValidationError(
'Invalid value for a field: {exc}'.format(exc=exc)
)
|
mit
|
Python
|
92ef6fda87d70434bc6875062779c72bffd2d723
|
Check field type
|
mangal-wg/pymangal
|
pymangal/makeschema.py
|
pymangal/makeschema.py
|
import json
def makeschema(infos=None, name=None, title="Autogenerated JSON schema"):
    """ Generates a JSON schema from a dict representing the schema sent by the API

    :param infos: A ``dict`` with the resource schema (must contain a ``fields`` mapping)
    :param name: The name of the resource
    :param title: A description of the object
    :returns: A JSON-schema (draft-04) ``dict`` with ``required`` and ``properties``
    :raises TypeError: if ``infos``, ``name`` or ``title`` have the wrong type
    :raises ValueError: if ``name`` is not given
    """
    if not isinstance(infos, dict):
        raise TypeError("The infos must be passed as a dict")
    if name is None:
        raise ValueError("You must provide a name")
    if not isinstance(name, str):
        raise TypeError("The name must be a string")
    if not isinstance(title, str):
        raise TypeError("The title must be given as a string")
    # These are the top-level objects
    schema = {'title': title, 'type': 'object', '$schema': 'http://json-schema.org/draft-04/schema#'}
    required = []
    properties = {}
    fields = infos['fields']
    for field in fields.keys():
        # Is the field nullable? (``id`` is server-generated, never required)
        if (not fields[field]['nullable']) and (field not in ['id']):
            required.append(field)
        properties[field] = {}
        # The help_text is the description
        properties[field]['description'] = fields[field]['help_text']
        # The type of the field depends on whether it's related
        if fields[field]['type'] == 'related':
            if fields[field]['related_type'] == 'to_one':
                properties[field]['type'] = 'string'
            else:
                properties[field]['type'] = 'array'
                properties[field]['items'] = {'type': 'string'}
        elif fields[field]['type'] == 'float':
            # JSON Schema has no ``float`` type; draft-04 calls it ``number``.
            # The original mutated ``fields`` *after* copying the type into
            # ``properties``, so the conversion never reached the output
            # (and it clobbered the caller's input dict).
            properties[field]['type'] = 'number'
        else:
            properties[field]['type'] = fields[field]['type']
        # Fields with a choices key are enums (``in`` replaces the
        # Python-2-only ``dict.has_key``)
        if 'choices' in fields[field]:
            properties[field]['enum'] = fields[field]['choices']
    schema['required'] = required
    schema['properties'] = properties
    return schema
|
import json
def makeschema(infos=None, name=None, title="Autogenerated JSON schema"):
""" Generates a JSON scheme from a dict representing the schema sent by the API
:param infos: A ``dict`` with the resource schema
:param name: The name of the resource
:param title: A description of the object
"""
if not isinstance(infos, dict):
raise TypeError("The infos must be passed as a dict")
if name == None :
raise ValueError("You must provide a name")
if not isinstance(name, str):
raise TypeError("The name must be a string")
if not isinstance(title, str):
raise TypeError("The title must be given as a string")
# These are the top-level objects
schema = {'title': title, 'type': 'object', '$schema': 'http://json-schema.org/draft-04/schema#'}
required = []
properties = {}
fields = infos['fields']
for field in fields.keys():
# Is the field nullable?
if (not fields[field]['nullable']) and (not field in ['id']) :
required.append(field)
properties[field] = {}
# The help_text is the description
properties[field]['description'] = fields[field]['help_text']
# The type of the field depends on whether it's related
if fields[field]['type'] == 'related':
if fields[field]['related_type'] == 'to_one':
properties[field]['type'] = 'string'
else :
properties[field]['type'] = 'array'
properties[field]['items'] = {'type': 'string'}
else :
properties[field]['type'] = fields[field]['type']
# Fields with a choice key are enum
if fields[field].has_key('choices'):
properties[field]['enum'] = fields[field]['choices']
schema['required'] = required
schema['properties'] = properties
return schema
|
bsd-2-clause
|
Python
|
59c1c893375f6fdf2308385b2c4d331e5af7961b
|
Prepare 0.5.1 release
|
curzona/pytest-bdd,spinus/pytest-bdd,pytest-dev/pytest-bdd
|
pytest_bdd/__init__.py
|
pytest_bdd/__init__.py
|
from pytest_bdd.steps import given, when, then # pragma: no cover
from pytest_bdd.scenario import scenario # pragma: no cover
__version__ = '0.5.1'
__all__ = [given.__name__, when.__name__, then.__name__, scenario.__name__] # pragma: no cover
|
# Public API of pytest-bdd: the given/when/then step decorators and the
# scenario factory are re-exported here for convenient top-level import.
from pytest_bdd.steps import given, when, then # pragma: no cover
from pytest_bdd.scenario import scenario # pragma: no cover

# Package version, bumped on each release.
__version__ = '0.5.0'

# Names exported by `from pytest_bdd import *`.
__all__ = [given.__name__, when.__name__, then.__name__, scenario.__name__] # pragma: no cover
|
mit
|
Python
|
95d3b17ca32781b4ce81110aa406ab6d15b1ae1c
|
Update metadata with new tutorials
|
Kaggle/learntools,Kaggle/learntools
|
notebooks/feature_engineering/track_meta.py
|
notebooks/feature_engineering/track_meta.py
|
# See also examples/example_track/example_meta.py for a longer, commented example

# Course-level metadata for the Feature Engineering track.
track = dict(
    author_username='matleonard',
    course_name='Feature Engineering',
    course_url='https://www.kaggle.com/learn/feature-engineering'
)

_topics = [
    'Baseline Model',
    'Categorical Encodings',
    'Feature Generation',
    'Feature Selection',
]
lessons = [{'topic': name} for name in _topics]

# Tutorials exist for the first three lessons only; every lesson has an
# exercise. Notebooks are listed tutorial-then-exercise per lesson.
notebooks = []
for idx in range(len(_topics)):
    if idx < 3:
        notebooks.append(dict(filename='tut%d.ipynb' % (idx + 1),
                              lesson_idx=idx,
                              type='tutorial'))
    notebooks.append(dict(filename='ex%d.ipynb' % (idx + 1),
                          lesson_idx=idx,
                          type='exercise'))

# Every notebook draws on the same pair of Kaggle datasets.
for nb in notebooks:
    nb['dataset_sources'] = ["matleonard/feature-engineering-data",
                             "kemical/kickstarter-projects"]
|
# See also examples/example_track/example_meta.py for a longer, commented example

# Course-level metadata for the Feature Engineering track.
track = dict(
    author_username='matleonard',
    course_name='Feature Engineering',
    course_url='https://www.kaggle.com/learn/feature-engineering'
)

_topics = [
    'Baseline Model',
    'Categorical Encodings',
    'Feature Generation',
    'Feature Selection',
]
lessons = [{'topic': name} for name in _topics]

# Only the first lesson has a tutorial notebook so far; every lesson has an
# exercise notebook.
notebooks = [dict(filename='tut1.ipynb', lesson_idx=0, type='tutorial')]
notebooks.extend(
    dict(filename='ex%d.ipynb' % (i + 1), lesson_idx=i, type='exercise')
    for i in range(len(_topics))
)

# Every notebook draws on the same pair of Kaggle datasets.
for nb in notebooks:
    nb['dataset_sources'] = ["matleonard/feature-engineering-data",
                             "kemical/kickstarter-projects"]
|
apache-2.0
|
Python
|
64275164f9f7c226695774ce1d3a234005ab23a6
|
Solve Code Fights digits product problem
|
HKuz/Test_Code
|
CodeFights/digitsProduct.py
|
CodeFights/digitsProduct.py
|
#!/usr/local/bin/python
# Code Fights Digits Product Problem
def digitsProduct(product):
    """Return the smallest positive integer whose digits multiply to ``product``.

    Special cases: product == 0 yields 10 (smallest number containing a 0
    digit); a single-digit product is its own answer. Returns -1 when no
    such integer exists (product has a prime factor >= 11).
    """
    def get_single_dig_factors(product):
        # Helper: greedily strip the largest single-digit factors (9 down
        # to 2); sorting them ascending then forms the smallest number.
        n = product
        factors = []
        for i in range(9, 1, -1):
            while n % i == 0 and n > 1:
                factors.append(i)
                # Floor division keeps n an int; '/' would make it a float
                # under Python 3 and lose precision for large products.
                n //= i
        if n > 9:
            # Leftover residue is a prime >= 11, impossible to express
            # with single-digit factors.
            return None
        return sorted(factors)

    if product == 0:
        return 10
    elif product < 10:
        return product
    factors = get_single_dig_factors(product)
    if factors:
        return int(''.join([str(i) for i in factors]))
    else:
        return -1
def main():
    """Exercise digitsProduct against known (input, expected) pairs."""
    cases = [
        (12, 26), (19, -1), (450, 2559), (0, 10), (13, -1), (1, 1),
        (243, 399), (576, 889), (360, 589), (24, 38), (120, 358),
        (168, 378), (192, 388), (216, 389), (600, 3558), (33, -1),
        (81, 99),
    ]
    for arg, expected in cases:
        actual = digitsProduct(arg)
        if expected == actual:
            print("PASSED: digitsProduct({}) returned {}".format(arg, actual))
        else:
            print("FAILED: digitsProduct({}) returned {}, answer: {}".format(arg, actual, expected))


if __name__ == '__main__':
    main()
|
#!/usr/local/bin/python
# Code Fights Digits Product Problem
def digitsProduct(product):
    """Return the smallest positive integer whose digits multiply to ``product``.

    Special cases: product == 0 yields 10 (smallest number containing a 0
    digit); a single-digit product is its own answer. Returns -1 when no
    such integer exists (product has a prime factor >= 11).
    """
    def get_single_dig_factors(product):
        # Helper: greedily strip the largest single-digit factors (9 down
        # to 2); sorting them ascending then forms the smallest number.
        #
        # BUGFIX: the previous while/for loop spun forever when the residue
        # had no single-digit factor but differed from the original product
        # (e.g. product=33 leaves n=11). Scanning each digit once with an
        # inner while-loop always terminates.
        n = product
        factors = []
        for i in range(9, 1, -1):
            while n % i == 0 and n > 1:
                factors.append(i)
                # Floor division keeps n an int; '/' would make it a float
                # under Python 3 and lose precision for large products.
                n //= i
        if n > 9:
            # Leftover residue is a prime >= 11, impossible to express
            # with single-digit factors.
            return None
        return sorted(factors)

    if product == 0:
        return 10
    elif product < 10:
        return product
    factors = get_single_dig_factors(product)
    if factors:
        return int(''.join([str(i) for i in factors]))
    else:
        return -1
def main():
    """Exercise digitsProduct against known (input, expected) pairs."""
    cases = [
        (12, 26), (19, -1), (450, 2559), (0, 10), (13, -1), (1, 1),
        (243, 399), (576, 889), (360, 589),
    ]
    for arg, expected in cases:
        actual = digitsProduct(arg)
        if expected == actual:
            print("PASSED: digitsProduct({}) returned {}".format(arg, actual))
        else:
            print("FAILED: digitsProduct({}) returned {}, answer: {}".format(arg, actual, expected))


if __name__ == '__main__':
    main()
|
mit
|
Python
|
5d4b1b16b787dd529a92c76ff2f5e0736443782f
|
mark external connection to EDW tests xfail
|
4dn-dcic/fourfront,ENCODE-DCC/encoded,philiptzou/clincoded,4dn-dcic/fourfront,T2DREAM/t2dream-portal,ENCODE-DCC/snovault,ClinGen/clincoded,4dn-dcic/fourfront,hms-dbmi/fourfront,ENCODE-DCC/encoded,philiptzou/clincoded,kidaa/encoded,kidaa/encoded,kidaa/encoded,hms-dbmi/fourfront,hms-dbmi/fourfront,hms-dbmi/fourfront,philiptzou/clincoded,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/encoded,ENCODE-DCC/snovault,4dn-dcic/fourfront,ClinGen/clincoded,kidaa/encoded,kidaa/encoded,ENCODE-DCC/snovault,ClinGen/clincoded,T2DREAM/t2dream-portal,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,ENCODE-DCC/encoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,T2DREAM/t2dream-portal,ClinGen/clincoded
|
src/encoded/tests/test_edw_file.py
|
src/encoded/tests/test_edw_file.py
|
import pytest
import json
from sqlalchemy.engine.base import Engine
import encoded.edw_file
import edw_test_data
# Mark every test in this module so the EDW suite can be selected/deselected
# as a group.
pytestmark = [pytest.mark.edw_file]

## TODO: This should be converted to a smoke-type test after build/install is complete
@pytest.mark.xfail
# travis cannot connect to EDW
def test_make_edw():
    # Smoke-test that make_edw() hands back a SQLAlchemy Engine object.
    edw = encoded.edw_file.make_edw()
    assert(type(edw)==Engine)
@pytest.mark.xfail
# travis cannot connect to EDW
def test_get_edw_files():
    ''' test connectivity schema integrity basic query not all options '''
    # Fetch a bounded batch of file records from a live EDW connection and
    # check that each record carries the required fields.
    edw = encoded.edw_file.make_edw()
    files = encoded.edw_file.get_edw_fileinfo(edw, limit=50)
    assert(len(files) == 50)
    for f in files:
        ## quasi validate required fields
        assert(f['accession'])
        # 'key in dict' replaces dict.has_key(), which was removed in
        # Python 3; 'in' behaves identically on Python 2.
        assert('biological_replicate' in f)
        assert('technical_replicate' in f)
        assert(f['md5sum'])
        assert(f['dataset'])
|
import pytest
import json
from sqlalchemy.engine.base import Engine
import encoded.edw_file
import edw_test_data
# Mark every test in this module so the EDW suite can be selected/deselected
# as a group.
pytestmark = [pytest.mark.edw_file]

## edw_file
# def format_edw_fileinfo(file_dict, exclude=None):
# def make_edw(data_host=None):
# def dump_filelist(fileaccs, header=True, typeField=None):
# def dump_fileinfo(fileinfos, header=True, typeField=None, exclude=None):
# def get_edw_filelist(edw, limit=None, experiment=True, phase=ENCODE_PHASE_ALL):
# def get_edw_max_id(edw):
# def get_edw_fileinfo(edw, limit=None, experiment=True, start_id=0,
def test_make_edw():
    # Smoke-test that make_edw() hands back a SQLAlchemy Engine object.
    edw = encoded.edw_file.make_edw()
    assert(type(edw)==Engine)
def test_get_edw_files():
    ''' test connectivity schema integrity basic query not all options '''
    # Fetch a bounded batch of file records from a live EDW connection and
    # check that each record carries the required fields.
    edw = encoded.edw_file.make_edw()
    files = encoded.edw_file.get_edw_fileinfo(edw, limit=50)
    assert(len(files) == 50)
    for f in files:
        ## quasi validate required fields
        assert(f['accession'])
        # 'key in dict' replaces dict.has_key(), which was removed in
        # Python 3; 'in' behaves identically on Python 2.
        assert('biological_replicate' in f)
        assert('technical_replicate' in f)
        assert(f['md5sum'])
        assert(f['dataset'])
|
mit
|
Python
|
9b16c980a0b3ac627e8ba2c7ea88231a0f308bd9
|
Upgrade celery config
|
stefanw/seriesly,stefanw/seriesly
|
seriesly/celery.py
|
seriesly/celery.py
|
from __future__ import absolute_import

import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'seriesly.settings')

# Module-level Celery application instance for the project.
app = Celery('seriesly')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
# namespace='CELERY' means Celery-related settings in the Django settings
# module are expected to carry a 'CELERY_' prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Auto-discover tasks.py modules in installed Django apps.
app.autodiscover_tasks()
|
from __future__ import absolute_import

import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'seriesly.settings')

from django.conf import settings  # noqa

# Module-level Celery application instance for the project.
app = Celery('seriesly')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')

# Auto-discover tasks.py modules in the apps listed in Django settings.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
|
agpl-3.0
|
Python
|
3449534933e5d1e33c4f6b1017d68c804ba1f91f
|
Allow overriding of bgen pathnames in an optional module bgenlocationcustomize. Editing of bgenlocations.py isn't easy if your Python was supplied by Apple.
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Lib/plat-mac/bgenlocations.py
|
Lib/plat-mac/bgenlocations.py
|
#
# Local customizations for generating the Carbon interface modules.
# Edit this file to reflect where things should be on your system.
# Note that pathnames are unix-style for OSX MachoPython/unix-Python,
# but mac-style for MacPython, whether running on OS9 or OSX.
#
import sys, os

# Module-level error sentinel: a string exception, as was legal in the
# Python 2 era this module targets; raised below when a configured
# directory is missing.
Error = "bgenlocations.Error"

#
# Where bgen is. For unix-Python bgen isn't installed, so you have to refer to
# the source tree here.
BGENDIR="/Users/jack/src/python/Tools/bgen/bgen"

#
# Where to find the Universal Header include files. If you have CodeWarrior
# installed you can use the Universal Headers from there, otherwise you can
# download them from the Apple website. Bgen can handle both unix- and mac-style
# end of lines, so don't worry about that.
#
INCLUDEDIR="/Users/jack/src/Universal/Interfaces/CIncludes"

#
# Where to put the python definitions files. Note that, on unix-Python,
# if you want to commit your changes to the CVS repository this should refer to
# your source directory, not your installed directory.
#
TOOLBOXDIR="/Users/jack/src/python/Lib/plat-mac/Carbon"

# Creator for C files:
CREATOR="CWIE"

# The previous definitions can be overridden by creating a module
# bgenlocationscustomize.py and putting it in site-packages (or anywhere else
# on sys.path, actually). This avoids editing this file directly, which may
# not be possible when Python was supplied pre-installed.
try:
    from bgenlocationscustomize import *
except ImportError:
    # No user overrides installed; stick with the defaults above.
    pass

# Validate the (possibly overridden) locations before anyone uses them.
if not os.path.exists(BGENDIR):
    raise Error, "Please fix bgenlocations.py, BGENDIR does not exist: %s" % BGENDIR
if not os.path.exists(INCLUDEDIR):
    raise Error, "Please fix bgenlocations.py, INCLUDEDIR does not exist: %s" % INCLUDEDIR
if not os.path.exists(TOOLBOXDIR):
    raise Error, "Please fix bgenlocations.py, TOOLBOXDIR does not exist: %s" % TOOLBOXDIR

# Sigh, due to the way these are used make sure they end with : or /.
if BGENDIR[-1] != os.sep:
    BGENDIR = BGENDIR + os.sep
if INCLUDEDIR[-1] != os.sep:
    INCLUDEDIR = INCLUDEDIR + os.sep
if TOOLBOXDIR[-1] != os.sep:
    TOOLBOXDIR = TOOLBOXDIR + os.sep
|
#
# Local customizations for generating the Carbon interface modules.
# Edit this file to reflect where things should be on your system.
# Note that pathnames are unix-style for OSX MachoPython/unix-Python,
# but mac-style for MacPython, whether running on OS9 or OSX.
#
import sys, os
# Module-level error sentinel: a string exception, as was legal in the
# Python 2 era this module targets; raised below when a configured
# directory is missing.
Error = "bgenlocations.Error"

#
# Where bgen is. For unix-Python bgen isn't installed, so you have to refer to
# the source tree here.
BGENDIR="/Users/jack/src/python/Tools/bgen/bgen"

#
# Where to find the Universal Header include files. If you have CodeWarrior
# installed you can use the Universal Headers from there, otherwise you can
# download them from the Apple website. Bgen can handle both unix- and mac-style
# end of lines, so don't worry about that.
#
INCLUDEDIR="/Users/jack/src/Universal/Interfaces/CIncludes"

#
# Where to put the python definitions files. Note that, on unix-Python,
# if you want to commit your changes to the CVS repository this should refer to
# your source directory, not your installed directory.
#
TOOLBOXDIR="/Users/jack/src/python/Lib/plat-mac/Carbon"

# Creator for C files:
CREATOR="CWIE"

# Validate the configured locations before anyone uses them.
if not os.path.exists(BGENDIR):
    raise Error, "Please fix bgenlocations.py, BGENDIR does not exist: %s" % BGENDIR
if not os.path.exists(INCLUDEDIR):
    raise Error, "Please fix bgenlocations.py, INCLUDEDIR does not exist: %s" % INCLUDEDIR
if not os.path.exists(TOOLBOXDIR):
    raise Error, "Please fix bgenlocations.py, TOOLBOXDIR does not exist: %s" % TOOLBOXDIR

# Sigh, due to the way these are used make sure they end with : or /.
if BGENDIR[-1] != os.sep:
    BGENDIR = BGENDIR + os.sep
if INCLUDEDIR[-1] != os.sep:
    INCLUDEDIR = INCLUDEDIR + os.sep
if TOOLBOXDIR[-1] != os.sep:
    TOOLBOXDIR = TOOLBOXDIR + os.sep
|
mit
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.