commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---|
633c40d365acd390190e3eef0089cc1e00925e46
|
Revert to alpha test mode; fix grammar
|
Contextualist/Quip4AHA,Contextualist/Quip4AHA
|
NewDoc.py
|
NewDoc.py
|
import time
import datetime
import quip
class NewDoc(object):
def __init__(self):
NextWednesday = datetime.datetime.today() + datetime.timedelta(days = 5)
self.NextWednesdayN = NextWednesday.strftime("%m%d")
self.NextWednesdayS = NextWednesday.strftime("%B %d")
if self.NextWednesdayS[-2] == "0":
self.NextWednesdayS = self.NextWednesdayS[:-2] + self.NextWednesdayS[-1];
self.ctx = """<p class='line'>Good Morning AHA!<br/>
It is Wednesday, %s. The weather for today is __. There is a(n) __%% chance of rain. The high temperature today will be __ degrees Celsius, which is __ degrees Fahrenheit.</p>
<p class='line'>​</p>
<p class='line'><b>Now for this week in history:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>In World News:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>Now for the fun facts:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>In AHA News:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'>We will close with this/these verse(s) from</p>
<p class='line'>​</p>
<p class='line'>That is all for today AHA broadcasting. Thank you for listening, and as always stay classy AHA!</p>
""" % (self.NextWednesdayS) # ​ (or ​) stands for a place-holder for a blank <p>
self.FolderID = "LHEAOAhm7YS" # my desktop
#self.FolderID = "PCeAOAQx6sO" # AHA BC
self.client = quip.QuipClient(access_token="Wk9EQU1BcDZFS04=|1483091850|CF037JVoITJPnAET8aHWnZwEZACvrIm7jtkRIQCaX3g=")
def do(self):
self.client.new_document(content=self.ctx, format="html", title=self.NextWednesdayN, member_ids=[self.FolderID])
return "Done!"
if __name__=="__main__":
NewDocAction = NewDoc()
NewDocAction.do()
|
import time
import datetime
import quip
class NewDoc(object):
def __init__(self):
NextWednesday = datetime.datetime.today() + datetime.timedelta(days = 5)
self.NextWednesdayN = NextWednesday.strftime("%m%d")
self.NextWednesdayS = NextWednesday.strftime("%B %d")
if self.NextWednesdayS[-2] == "0":
self.NextWednesdayS = self.NextWednesdayS[:-2] + self.NextWednesdayS[-1];
self.ctx = """<p class='line'>Good Morning AHA!<br/>
It is Wednesday, %s. The weather for today is __. There is __%% chance of rain. The high temperature today will be __ degrees Celsius, which is __ degrees Fahrenheit.</p>
<p class='line'>​</p>
<p class='line'><b>Now for this week in history:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>In World News:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>Now for the fun facts:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'><b>In AHA News:</b></p>
<p class='line'>​</p>
<p class='line'>​</p>
<p class='line'>We will close with this/these verse(s) from</p>
<p class='line'>​</p>
<p class='line'>This is all for today AHA broadcasting thank you for listening, and as always stay classy AHA!</p>
""" % (self.NextWednesdayS) # ​ (or ​) stands for a place-holder for a blank <p>
#self.FolderID = "LHEAOAhm7YS" # my desktop
self.FolderID = "PCeAOAQx6sO" # AHA BC
self.client = quip.QuipClient(access_token="Wk9EQU1BcDZFS04=|1483091850|CF037JVoITJPnAET8aHWnZwEZACvrIm7jtkRIQCaX3g=")
def do(self):
self.client.new_document(content=self.ctx, format="html", title=self.NextWednesdayN, member_ids=[self.FolderID])
return "Done!"
if __name__=="__main__":
NewDocAction = NewDoc()
NewDocAction.do()
|
apache-2.0
|
Python
|
6a88e390467326f1e53f7b7a10bab777186b3418
|
replace post_syncdb with post_migrate
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
corehq/preindex/models.py
|
corehq/preindex/models.py
|
from couchdbkit.ext.django import syncdb
from django.db.models import signals
from corehq.preindex import get_preindex_plugin
def catch_signal(sender, using=None, **kwargs):
"""Function used by syncdb signal"""
if using != 'default':
# only sync for the default DB
return
app_name = sender.label.rsplit('.', 1)[0]
app_label = app_name.split('.')[-1]
plugin = get_preindex_plugin(app_label)
if plugin:
plugin.sync_design_docs()
signals.pre_migrate.connect(catch_signal)
signals.post_migrate.disconnect(syncdb)
|
from couchdbkit.ext.django import syncdb
from django.db.models import signals
from corehq.preindex import get_preindex_plugin
def catch_signal(sender, using=None, **kwargs):
"""Function used by syncdb signal"""
if using != 'default':
# only sync for the default DB
return
app_name = sender.label.rsplit('.', 1)[0]
app_label = app_name.split('.')[-1]
plugin = get_preindex_plugin(app_label)
if plugin:
plugin.sync_design_docs()
signals.pre_migrate.connect(catch_signal)
signals.post_syncdb.disconnect(syncdb)
|
bsd-3-clause
|
Python
|
4c2e81adfc21b7857d9c01ae70f8f022bfe430ba
|
print to check thread exit normally
|
cyh24/multicpu
|
test/test.py
|
test/test.py
|
import time
import requests
import numpy as np
import multiprocessing
from concurrent import futures
from multicpu import multi_cpu
def process_job(job):
#time.sleep(1)
count = 10000000
while count>0:
count -= 1
print "ok"
return job
jobs = [i for i in range(10)]
def test_multi_cpu_thread(cpu_num, thread_num):
print "multi_cpu_thread: cpu_num=%d, thread_num=%d"%(cpu_num, thread_num)
result = multi_cpu(process_job, jobs, cpu_num, thread_num)
print result
def test_multi_cpu(cpu_num):
print "multi_cpu: cpu_num=", cpu_num
cpu_pool = multiprocessing.Pool(processes=cpu_num)
result = cpu_pool.map(process_job, jobs)
print result
def test_multi_thread(thread_num):
print "multi_thread: thread_num=", thread_num
thread_pool = futures.ThreadPoolExecutor(max_workers=thread_num)
result = thread_pool.map(process_job, jobs)
print [r for r in result]
def test_no_thread():
print "no thread."
result = []
for job in jobs:
result.append(process_job(job))
print result
def test_multi_cpu_thread_timeout(cpu_num, thread_num, timeout):
print "multi_cpu_thread: cpu_num=%d, thread_num=%d"%(cpu_num, thread_num)
result = multi_cpu(process_job, jobs, cpu_num, thread_num, timeout)
print result
def aa():
start = time.time()
test_no_thread()
end = time.time()
print "Time: %f seconds\n" % (end - start)
start = time.time()
test_multi_thread(5)
end = time.time()
print "Time: %f seconds\n" % (end - start)
start = time.time()
test_multi_cpu(5)
end = time.time()
print "Time: %f seconds\n" % (end - start)
if __name__ == "__main__":
start = time.time()
test_multi_cpu_thread(3, 1)
end = time.time()
print "Time: %f seconds\n" % (end - start)
import concurrent
try:
test_multi_cpu_thread_timeout(3, 1, 1)
except concurrent.futures.TimeoutError:
print("this took too long")
|
import time
import requests
import numpy as np
import multiprocessing
from concurrent import futures
from multicpu import multi_cpu
def process_job(job):
#time.sleep(1)
count = 10000000
while count>0:
count -= 1
return job
jobs = [i for i in range(10)]
def test_multi_cpu_thread(cpu_num, thread_num):
print "multi_cpu_thread: cpu_num=%d, thread_num=%d"%(cpu_num, thread_num)
result = multi_cpu(process_job, jobs, cpu_num, thread_num)
print result
def test_multi_cpu(cpu_num):
print "multi_cpu: cpu_num=", cpu_num
cpu_pool = multiprocessing.Pool(processes=cpu_num)
result = cpu_pool.map(process_job, jobs)
print result
def test_multi_thread(thread_num):
print "multi_thread: thread_num=", thread_num
thread_pool = futures.ThreadPoolExecutor(max_workers=thread_num)
result = thread_pool.map(process_job, jobs)
print [r for r in result]
def test_no_thread():
print "no thread."
result = []
for job in jobs:
result.append(process_job(job))
print result
def test_multi_cpu_thread_timeout(cpu_num, thread_num, timeout):
print "multi_cpu_thread: cpu_num=%d, thread_num=%d"%(cpu_num, thread_num)
result = multi_cpu(process_job, jobs, cpu_num, thread_num, timeout)
print result
def aa():
start = time.time()
test_no_thread()
end = time.time()
print "Time: %f seconds\n" % (end - start)
start = time.time()
test_multi_thread(5)
end = time.time()
print "Time: %f seconds\n" % (end - start)
start = time.time()
test_multi_cpu(5)
end = time.time()
print "Time: %f seconds\n" % (end - start)
if __name__ == "__main__":
start = time.time()
test_multi_cpu_thread(3, 1)
end = time.time()
print "Time: %f seconds\n" % (end - start)
import concurrent
try:
test_multi_cpu_thread_timeout(3, 1, 1)
except concurrent.futures.TimeoutError:
print("this took too long")
|
mit
|
Python
|
62641f9d0c3c9260217703f80066bbb434697d6e
|
Change variable name
|
SerSamgy/trapper-acg
|
test_dice.py
|
test_dice.py
|
import pytest
import dice
expected_dice_faces = [
(dice.d4, tuple(range(1, 5))),
(dice.d6, tuple(range(1, 7))),
(dice.d8, tuple(range(1, 9))),
(dice.d10, tuple(range(1, 11))),
(dice.d12, tuple(range(1, 13))),
]
@pytest.mark.parametrize('dice,expected', expected_dice_faces)
def test_dice_has_proper_num_of_faces(dice, expected):
assert dice == expected
|
import pytest
import dice
expected_factory_method_data = [
(dice.d4, tuple(range(1, 5))),
(dice.d6, tuple(range(1, 7))),
(dice.d8, tuple(range(1, 9))),
(dice.d10, tuple(range(1, 11))),
(dice.d12, tuple(range(1, 13))),
]
@pytest.mark.parametrize('dice,expected', expected_factory_method_data)
def test_dice_has_proper_num_of_faces(dice, expected):
assert dice == expected
|
mit
|
Python
|
d75dc845a456782549d00364e2637c7c32b8507a
|
add additional gate tests
|
cjwfuller/quantum-circuits
|
test_gate.py
|
test_gate.py
|
import numpy as np
import unittest
import gate
class TestGate(unittest.TestCase):
def test_standard_gates_implemented(self):
gate.QuantumGate('paulix')
gate.QuantumGate('pauliy')
gate.QuantumGate('pauliz')
gate.QuantumGate('swap')
gate.QuantumGate('cnot')
gate.QuantumGate('toffoli')
gate.QuantumGate('hadamard')
def test_implemented_gates_only_constraint(self):
self.assertRaises(Exception, gate.QuantumGate, 'foo')
def test_gates_have_alphabetic_symbols(self):
paulix = gate.QuantumGate('paulix')
self.assertEquals('X', paulix.symbol)
pauliy = gate.QuantumGate('pauliy')
self.assertEquals('Y', pauliy.symbol)
pauliz = gate.QuantumGate('pauliz')
self.assertEquals('Z', pauliz.symbol)
swap = gate.QuantumGate('swap')
self.assertEquals('S', swap.symbol)
cnot = gate.QuantumGate('cnot')
self.assertEquals('C', cnot.symbol)
toffoli = gate.QuantumGate('toffoli')
self.assertEquals('T', toffoli.symbol)
hadamard = gate.QuantumGate('hadamard')
self.assertEquals('H', hadamard.symbol)
if __name__ == '__main__':
unittest.main()
|
import numpy as np
import unittest
import gate
class TestGate(unittest.TestCase):
def test_standard_gates_implemented(self):
gate.QuantumGate('paulix')
gate.QuantumGate('pauliy')
gate.QuantumGate('pauliz')
gate.QuantumGate('swap')
gate.QuantumGate('cnot')
gate.QuantumGate('toffoli')
gate.QuantumGate('hadamard')
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
fb13b04e09a837faba2f3ac6310cc36073727637
|
use nose for testing now (That was easy)
|
nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments,nex3/pygments
|
tests/run.py
|
tests/run.py
|
# -*- coding: utf-8 -*-
"""
Pygments unit tests
~~~~~~~~~~~~~~~~~~
Usage::
python run.py [testfile ...]
:copyright: 2006-2007 by Georg Brandl.
:license: GNU GPL, see LICENSE for more details.
"""
import sys
try:
import nose
except ImportError:
print >> sys.stderr, "nose is required to run the test suites"
sys.exit(1)
nose.main()
|
# -*- coding: utf-8 -*-
"""
Pygments unit tests
~~~~~~~~~~~~~~~~~~
Usage::
python run.py [testfile ...]
:copyright: 2006-2007 by Georg Brandl.
:license: GNU GPL, see LICENSE for more details.
"""
import sys, os, new
import unittest
from os.path import dirname, basename, join, abspath
import pygments
try:
import coverage
except ImportError:
coverage = None
testdir = abspath(dirname(__file__))
failed = []
total_test_count = 0
error_test_count = 0
def err(file, what, exc):
print >>sys.stderr, file, 'failed %s:' % what,
print >>sys.stderr, exc
failed.append(file[:-3])
class QuietTestRunner(object):
"""Customized test runner for relatively quiet output"""
def __init__(self, testname, stream=sys.stderr):
self.testname = testname
self.stream = unittest._WritelnDecorator(stream)
def run(self, test):
global total_test_count
global error_test_count
result = unittest._TextTestResult(self.stream, True, 1)
test(result)
if not result.wasSuccessful():
self.stream.write(' FAIL:')
result.printErrors()
failed.append(self.testname)
else:
self.stream.write(' ok\n')
total_test_count += result.testsRun
error_test_count += len(result.errors) + len(result.failures)
return result
def run_tests(with_coverage=False):
# needed to avoid confusion involving atexit handlers
import logging
if sys.argv[1:]:
# test only files given on cmdline
files = [entry + '.py' for entry in sys.argv[1:] if entry.startswith('test_')]
else:
files = [entry for entry in os.listdir(testdir)
if (entry.startswith('test_') and entry.endswith('.py'))]
files.sort()
WIDTH = 85
print >>sys.stderr, \
('Pygments %s Test Suite running%s, stand by...' %
(pygments.__version__,
with_coverage and " with coverage analysis" or "")).center(WIDTH)
print >>sys.stderr, ('(using Python %s)' % sys.version.split()[0]).center(WIDTH)
print >>sys.stderr, '='*WIDTH
if with_coverage:
coverage.erase()
coverage.start()
for testfile in files:
globs = {}
try:
execfile(join(testdir, testfile), globs)
except Exception, exc:
raise
err(testfile, 'execfile', exc)
continue
sys.stderr.write(testfile[:-3] + ': ')
try:
runner = QuietTestRunner(testfile[:-3])
# make a test suite of all TestCases in the file
tests = []
for name, thing in globs.iteritems():
if name.endswith('Test'):
tests.append((name, unittest.makeSuite(thing)))
tests.sort()
suite = unittest.TestSuite()
suite.addTests([x[1] for x in tests])
runner.run(suite)
except Exception, exc:
err(testfile, 'running test', exc)
print >>sys.stderr, '='*WIDTH
if failed:
print >>sys.stderr, '%d of %d tests failed.' % \
(error_test_count, total_test_count)
print >>sys.stderr, 'Tests failed in:', ', '.join(failed)
ret = 1
else:
if total_test_count == 1:
print >>sys.stderr, '1 test happy.'
else:
print >>sys.stderr, 'All %d tests happy.' % total_test_count
ret = 0
if with_coverage:
coverage.stop()
modules = [mod for name, mod in sys.modules.iteritems()
if name.startswith('pygments.') and mod]
coverage.report(modules)
return ret
if __name__ == '__main__':
with_coverage = False
if sys.argv[1:2] == ['-C']:
with_coverage = bool(coverage)
del sys.argv[1]
sys.exit(run_tests(with_coverage))
|
bsd-2-clause
|
Python
|
4b18b48e93186a5ea22b684e0392bf5cba387525
|
fix sleep between photos
|
dvl/raspberry-pi_timelapse,dvl/raspberry-pi_timelapse
|
timelapse.py
|
timelapse.py
|
import os
import datetime
import time
import picamera
from PIL import Image, ImageStat, ImageFont, ImageDraw
with picamera.PiCamera() as camera:
camera.resolution = (1024, 768)
camera.rotation = 180
time.sleep(2) # camera warm-up time
for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'):
image = Image.open(filename)
stat = ImageStat.Stat(image)
r, g, b, _ = stat.mean
if r < 50 and g < 50 and b < 50:
print('[!] Lights must be powered off, sleeping...')
try:
os.unlink(filename)
except:
pass
time.sleep(60 * 5)
else:
annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y')
draw = ImageDraw.Draw(image)
font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24)
draw.text((10, 730), annotate_text, (255, 255, 0), font=font)
image.save(filename)
print('[!] Taken: {}'.format(filename))
time.sleep(60 / 2)
image.close()
|
import os
import datetime
import time
import picamera
from PIL import Image, ImageStat, ImageFont, ImageDraw
with picamera.PiCamera() as camera:
camera.resolution = (1024, 768)
camera.rotation = 180
time.sleep(2) # camera warm-up time
for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'):
image = Image.open(filename)
stat = ImageStat.Stat(image)
r, g, b, _ = stat.mean
if r < 50 and g < 50 and b < 50:
print('[!] Lights must be powered off, sleeping...')
try:
os.unlink(filename)
except:
pass
time.sleep(60 * 5)
else:
annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y')
draw = ImageDraw.Draw(image)
font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24)
draw.text((10, 730), annotate_text, (255, 255, 0), font=font)
image.save(filename)
print('[!] Taken: {}'.format(filename))
time.sleep(60 / 30)
image.close()
|
mit
|
Python
|
af6394bca3ee6686ec018bdcb032b8a97ef3e831
|
Use config-directive for channel
|
Thor77/TeamspeakIRC
|
tsversion.py
|
tsversion.py
|
import irc3
from irc3.plugins.command import command
from irc3.plugins.cron import cron
from teamspeak_web_utils import latest_version
@irc3.plugin
class TSVersion(object):
def __init__(self, bot):
self.bot = bot
self.client_version = None
self.server_version = None
config = bot.config.get('tsversion', {})
self.channel = config.get('channel')
@cron('1 * * * *')
def fetch_version(self):
new_client, new_server = latest_version()
if self.channel:
# Notify channel
if self.client_version is not None and \
self.client_version != new_client:
self.bot.privmsg(self.channel,
'New client release: {}'.format(new_client))
if self.server_version is not None and \
self.server_version != new_server:
self.bot.privmsg(self.channel,
'New server release: {}'.format(new_server))
self.client_version = new_client
self.server_version = new_server
@command(permission='view')
def tsversion(self, mask, target, args):
'''Check latest Teamspeak3 Server/Client-version
%%tsversion
'''
if not self.client_version or not self.server_version:
self.fetch_version()
return 'Client: {} Server: {}'.format(
self.client_version, self.server_version)
|
import irc3
from irc3.plugins.command import command
from irc3.plugins.cron import cron
from teamspeak_web_utils import latest_version
@irc3.plugin
class TSVersion(object):
def __init__(self, bot):
self.bot = bot
self.client_version = None
self.server_version = None
self.target_channel = '#teamspeak'
@cron('1 * * * *')
def fetch_version(self):
new_client, new_server = latest_version()
if self.client_version is not None and \
self.client_version != new_client:
self.bot.privmsg(self.target_channel,
'New client release: {}'.format(new_client))
if self.server_version is not None and \
self.server_version != new_server:
self.bot.privmsg(self.target_channel,
'New server release: {}'.format(new_server))
self.client_version = new_client
self.server_version = new_server
@command(permission='view')
def tsversion(self, mask, target, args):
'''Check latest Teamspeak3 Server/Client-version
%%tsversion
'''
if not self.client_version or not self.server_version:
self.fetch_version()
return 'Client: {} Server: {}'.format(
self.client_version, self.server_version)
|
mit
|
Python
|
70181b3069649eddacac86dbcb49cb43733be0ec
|
Add code comments for begins example
|
aliles/cmdline_examples
|
tw_begins.py
|
tw_begins.py
|
#!/usr/bin/env python
import begin
import twitterlib
# sub-command definitions using subcommand decorator for each sub-command that
# implements a timeline display
@begin.subcommand
def timeline():
"Display recent tweets from users timeline"
for status in begin.context.api.timeline:
print u"%s: %s" % (status.user.screen_name, status.text)
@begin.subcommand
def mentions():
"Display recent tweets mentioning user"
for status in begin.context.api.mentions:
print u"%s: %s" % (status.user.screen_name, status.text)
@begin.subcommand
def retweets():
"Display recent retweets from user's timeline"
for status in begin.context.api.retweets:
print u"%s: %s" % (status.user.screen_name, status.text)
# program main definition replace __name__ === '__main__' magic
# sub-commands are registered and loaded automatically
@begin.start(env_prefix='', short_args=False)
def main(api_key='', api_secret='', access_token='', access_secret=''):
"""Minimal Twitter client
Demonstrate the use of the begins command line application framework by
implementing a simple Twitter command line client.
"""
api = twitterlib.API(api_key, api_secret, access_token, access_secret)
begin.context.api = api
|
#!/usr/bin/env python
import begin
import twitterlib
@begin.subcommand
def timeline():
"Display recent tweets from users timeline"
for status in begin.context.api.timeline:
print u"%s: %s" % (status.user.screen_name, status.text)
@begin.subcommand
def mentions():
"Display recent tweets mentioning user"
for status in begin.context.api.mentions:
print u"%s: %s" % (status.user.screen_name, status.text)
@begin.subcommand
def retweets():
"Display recent retweets from user's timeline"
for status in begin.context.api.retweets:
print u"%s: %s" % (status.user.screen_name, status.text)
@begin.start(env_prefix='', short_args=False)
def main(api_key='', api_secret='', access_token='', access_secret=''):
"""Minimal Twitter client
Demonstrate the use of the begins command line application framework by
implementing a simple Twitter command line client.
"""
api = twitterlib.API(api_key, api_secret, access_token, access_secret)
begin.context.api = api
|
mit
|
Python
|
3c4ba4bb89babdb2345dd01c5ddb79d30e7afa02
|
bump version to 1.9.8.3
|
vialectrum/vialectrum,pooler/electrum-ltc,pooler/electrum-ltc,vertcoin/electrum-vtc,vertcoin/electrum-vtc,vialectrum/vialectrum,pknight007/electrum-vtc,pooler/electrum-ltc,pooler/electrum-ltc,pknight007/electrum-vtc,vertcoin/electrum-vtc,pknight007/electrum-vtc,pknight007/electrum-vtc,vialectrum/vialectrum,vertcoin/electrum-vtc
|
lib/version.py
|
lib/version.py
|
ELECTRUM_VERSION = "1.9.8.3" # version of the client package
PROTOCOL_VERSION = '0.9' # protocol version requested
NEW_SEED_VERSION = 7 # bip32 wallets
OLD_SEED_VERSION = 4 # old electrum deterministic generation
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
|
ELECTRUM_VERSION = "1.9.8.1" # version of the client package
PROTOCOL_VERSION = '0.9' # protocol version requested
NEW_SEED_VERSION = 7 # bip32 wallets
OLD_SEED_VERSION = 4 # old electrum deterministic generation
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
|
mit
|
Python
|
c95bff54d8ff6534c40d60f34484f864cc04754a
|
Add example for when there is no book cover url
|
DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat
|
lib/web/web.py
|
lib/web/web.py
|
import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":None}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
|
import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
|
mit
|
Python
|
5698dd61695371b792ba65262ac503e25b1bbb19
|
Exclude root and alias pages from search results.
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
localore/search/views.py
|
localore/search/views.py
|
from django.contrib.contenttypes.models import ContentType
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
from localore_admin.models import PageAlias
# override per-site cache for search
@cache_page(30) # thirty seconds
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
page_alias_content_type = ContentType.objects.get_for_model(PageAlias)
search_results = (
Page.objects.live().
# exclude root
filter(depth__gt=1).
# exclude PageAlias pages
exclude(content_type=page_alias_content_type).
search(search_query)
)
# log the query so Wagtail can suggest promoted results
Query.get(search_query).add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
# override per-site cache for search
@cache_page(30) # thirty seconds
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
mpl-2.0
|
Python
|
84fa46196865295c68cf8bf6de225503ca0e2a37
|
bump version
|
wesokes/django-manager-utils,ambitioninc/django-manager-utils
|
manager_utils/version.py
|
manager_utils/version.py
|
__version__ = '1.3.0'
|
__version__ = '1.2.0'
|
mit
|
Python
|
ecc471f94dc2ca2931370e53948d9f674dd673d4
|
Add parenthesis to print statement
|
mathemage/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-3,mathemage/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,spennihana/h2o-3
|
h2o-py/tests/testdir_munging/unop/pyunit_cor.py
|
h2o-py/tests/testdir_munging/unop/pyunit_cor.py
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
##
# Test out the cor() functionality
# If NAs in the frame, they are skipped in calculation unless na.rm = F
# If any categorical columns, throw an error
##
import numpy as np
def cor_test():
iris_h2o = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris.csv"))
iris_np = np.genfromtxt(pyunit_utils.locate("smalldata/iris/iris.csv"),
delimiter=',',
skip_header=1,
usecols=(0, 1, 2, 3))
cor_np = h2o.H2OFrame(np.corrcoef(iris_np,rowvar=0))
cor_h2o = iris_h2o[0:4].cor()
cor_diff = abs(cor_h2o - cor_np)
print("Correlation matrix with H2O: ")
print(cor_h2o)
print("Correlation matrix with Numpy: ")
print(cor_np)
print("Correlation differences between H2O and Numpy: ")
print(cor_diff)
print("Max difference in correlation calculation between H2O and Numpy: ")
print(cor_diff.max())
max = cor_diff.max()
assert max < .006, "expected equal correlations"
if __name__ == "__main__":
pyunit_utils.standalone_test(cor_test)
else:
cor_test()
|
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
##
# Test out the cor() functionality
# If NAs in the frame, they are skipped in calculation unless na.rm = F
# If any categorical columns, throw an error
##
import numpy as np
def cor_test():
iris_h2o = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris.csv"))
iris_np = np.genfromtxt(pyunit_utils.locate("smalldata/iris/iris.csv"),
delimiter=',',
skip_header=1,
usecols=(0, 1, 2, 3))
cor_np = h2o.H2OFrame(np.corrcoef(iris_np,rowvar=0))
cor_h2o = iris_h2o[0:4].cor()
cor_diff = abs(cor_h2o - cor_np)
print("Correlation matrix with H2O: ")
print cor_h2o
print("Correlation matrix with Numpy: ")
print cor_np
print("Correlation differences between H2O and Numpy: ")
print cor_diff
print("Max difference in correlation calculation between H2O and Numpy: ")
print cor_diff.max()
max = cor_diff.max()
assert max < .006, "expected equal correlations"
if __name__ == "__main__":
pyunit_utils.standalone_test(cor_test)
else:
cor_test()
|
apache-2.0
|
Python
|
c027c5a5542adbe43792455a283df1b0a6f61f33
|
Add developer tool versions.
|
hlin117/statsmodels,gef756/statsmodels,phobson/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,YihaoLu/statsmodels,jseabold/statsmodels,wzbozon/statsmodels,bashtage/statsmodels,adammenges/statsmodels,detrout/debian-statsmodels,rgommers/statsmodels,musically-ut/statsmodels,wwf5067/statsmodels,musically-ut/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,yarikoptic/pystatsmodels,edhuckle/statsmodels,huongttlan/statsmodels,cbmoore/statsmodels,yl565/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,wwf5067/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,waynenilsen/statsmodels,astocko/statsmodels,ChadFulton/statsmodels,hlin117/statsmodels,yl565/statsmodels,waynenilsen/statsmodels,kiyoto/statsmodels,YihaoLu/statsmodels,kiyoto/statsmodels,jstoxrocky/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,saketkc/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,wwf5067/statsmodels,bert9bert/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,nvoron23/statsmodels,nguyentu1602/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,bert9bert/statsmodels,gef756/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,josef-pkt/statsmodels,bsipocz/statsmodels,adammenges/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,gef756/statsmodels,bzero/statsmodels,rgommers/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,bashtage/statsmodels,alekz112/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,bzero/statsmodels,wkfwkf/statsmodels,phobson/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,kiyoto/statsmodels,DonBeo/statsmodels,wzbozon/statsmodels,hainm/statsmodels,rgommers/statsmodels,wdurhamh/statsmodels,Averroes/statsmodels,yl565/statsmodels,bavardage/statsmodels,cbmoore/statsmodels,yarikoptic/pystatsmodels,ChadFulton/statsmodels,astocko/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,detrout/debian-statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,saketkc/statsmodels,astocko/statsmodels,huongttlan/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,bavardage/statsmodels,Averroes/statsmodels,jseabold/statsmodels,waynenilsen/statsmodels,cbmoore/statsmodels,jseabold/statsmodels,saketkc/statsmodels,wzbozon/statsmodels,wkfwkf/statsmodels,DonBeo/statsmodels,YihaoLu/statsmodels,jseabold/statsmodels,bashtage/statsmodels,wdurhamh/statsmodels,statsmodels/statsmodels,astocko/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,kiyoto/statsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,waynenilsen/statsmodels,phobson/statsmodels,bzero/statsmodels,wdurhamh/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,statsmodels/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,bavardage/statsmodels,phobson/statsmodels,alekz112/statsmodels,yl565/statsmodels,bavardage/statsmodels,nguyentu1602/statsmodels,nvoron23/statsmodels,adammenges/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,rgommers/statsmodels,phobson/statsmodels,edhuckle/statsmodels,bzero/statsmodels,Averroes/statsmodels,gef756/statsmodels,nvoron23/statsmodels,bsipocz/statsmodels,hainm/statsmodels,statsmodels/statsmodels,bzero/statsmodels,yarikoptic/pystatsmodels,edhuckle/statsmodels,hainm/statsmodels,alekz112/statsmodels
|
statsmodels/tools/print_version.py
|
statsmodels/tools/print_version.py
|
#!/usr/bin/env python
import sys
def show_versions():
print("\nINSTALLED VERSIONS")
print("------------------")
print("Python: %d.%d.%d.%s.%s" % sys.version_info[:])
try:
import os
(sysname, nodename, release, version, machine) = os.uname()
print("OS: %s %s %s %s" % (sysname, release, version,machine))
print("byteorder: %s" % sys.byteorder)
print("LC_ALL: %s" % os.environ.get('LC_ALL',"None"))
print("LANG: %s" % os.environ.get('LANG',"None"))
except:
pass
print("\nRequired Dependencies\n=====================\n")
try:
import Cython
print("cython: %s" % Cython.__version__)
except ImportError:
print("cython: Not installed")
try:
import numpy
print("numpy: %s" % numpy.version.version)
except ImportError:
print("numpy: Not installed")
try:
import scipy
print("scipy: %s" % scipy.version.version)
except ImportError:
print("scipy: Not installed")
try:
import pandas
print("pandas: %s" % pandas.version.version)
except ImportError:
print("pandas: Not installed")
try:
import patsy
print("patsy: %s" % patsy.__version__)
except ImportError:
print("patsy: Not installed")
print("\nOptional Dependencies\n=====================\n")
try:
import matplotlib as mpl
print("matplotlib: %s" % mpl.__version__)
except ImportError:
print("matplotlib: Not installed")
try:
from cvxopt import info
print("cvxopt: %s" % info.version)
except ImportError:
print("cvxopt: Not installed")
print("\nDeveloper Tools\n================\n")
try:
import IPython
print("IPython: %s" % IPython.__version__)
except ImportError:
print("IPython: Not installed")
try:
import jinja2
print(" jinja2: %s" % jinja2.__version__)
except ImportError:
print(" jinja2: Not installed")
try:
import sphinx
print("sphinx: %s" % sphinx.__version__)
except ImportError:
print("sphinx: Not installed")
try:
import pygments
print(" pygments: %s" % pygments.__version__)
except ImportError:
print(" pygments: Not installed")
try:
import nose
print("nose: %s" % nose.__version__)
except ImportError:
print("nose: Not installed")
try:
import virtualenv
print("virtualenv: %s" % virtualenv.__version__)
except ImportError:
print("virtualenv: Not installed")
print("\n")
if __name__ == "__main__":
show_versions()
|
#!/usr/bin/env python
import sys
def show_versions():
print("\nINSTALLED VERSIONS")
print("------------------")
print("Python: %d.%d.%d.%s.%s" % sys.version_info[:])
try:
import os
(sysname, nodename, release, version, machine) = os.uname()
print("OS: %s %s %s %s" % (sysname, release, version,machine))
print("byteorder: %s" % sys.byteorder)
print("LC_ALL: %s" % os.environ.get('LC_ALL',"None"))
print("LANG: %s" % os.environ.get('LANG',"None"))
except:
pass
print("\nRequired Dependencies\n=====================\n")
try:
import Cython
print("cython: %s" % Cython.__version__)
except ImportError:
print("cython: Not installed")
try:
import numpy
print("numpy: %s" % numpy.version.version)
except ImportError:
print("numpy: Not installed")
try:
import scipy
print("scipy: %s" % scipy.version.version)
except ImportError:
print("scipy: Not installed")
try:
import pandas
print("pandas: %s" % pandas.version.version)
except ImportError:
print("pandas: Not installed")
try:
import patsy
print("patsy: %s" % patsy.__version__)
except ImportError:
print("patsy: Not installed")
print ("\nOptional Dependencies\n=====================\n")
try:
import matplotlib as mpl
print("matplotlib: %s" % mpl.__version__)
except ImportError:
print("matplotlib: Not installed")
try:
from cvxopt import info
print("cvxopt: %s" % info.version)
except ImportError:
print("cvxopt: Not installed")
print("\n")
if __name__ == "__main__":
show_versions()
|
bsd-3-clause
|
Python
|
76f1640b2ebb5c85e5989d798bf09277943ff6e2
|
Bump suffix
|
orome/crypto-enigma-py
|
crypto_enigma/_version.py
|
crypto_enigma/_version.py
|
#!/usr/bin/env python
# encoding: utf8
"""
Description
.. note::
Any additional note.
"""
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b1' # aN | bN | cN |
__suffix__ = '.dev3' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
#!/usr/bin/env python
# encoding: utf8
"""
Description
.. note::
Any additional note.
"""
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b1' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
bsd-3-clause
|
Python
|
4adb78fde502faed78350233896f3efd3f42816e
|
Define a default interpreter rather than using shutil.copyfile.
|
startling/cytoplasm
|
cytoplasm/interpreters.py
|
cytoplasm/interpreters.py
|
'''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
@SaveReturned
def default_interpreter(source, **kwargs):
f = open(source)
source_string = f.read()
f.close()
return source_string
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = file.split(".")[-1]
interpreters.get(ending, default_interpreter)(file, destination, **kwargs)
|
'''
These are some utilites used when writing and handling interpreters.
'''
import shutil
from cytoplasm import configuration
from cytoplasm.errors import InterpreterError
def SaveReturned(fn):
'''Some potential interpreters, like Mako, don't give you an easy way to save to a destination.
In these cases, simply use this function as a decorater.'''
def InterpreterWithSave(source, destination, **kwargs):
# under some circumstances, this should be able to write to file-like objects;
# so if destination is a string, assume it's a path; otherwise, it's a file-like object
if isinstance(destination, str):
f = open(destination, "w")
else:
f = destination
# pass **kwargs to this function.
f.write(fn(source, **kwargs))
f.close()
return InterpreterWithSave
def interpret(file, destination, **kwargs):
"Interpret a file with an interpreter according to its suffix."
# get the list of interpreters from the configuration
interpreters = configuration.get_config().interpreters
# figure out the suffix of the file, to use to determine which interpreter to use
ending = ".".join(file.split(".")[:-1])
try:
interpreters.get(ending, shutil.copyfile)(file, destination, **kwargs)
except Exception as exception:
# if the interpreter chokes, raise an InterpreterError with some useful information.
raise InterpreterError("%s on file '%s': %s" %(ending, file, exception))
|
mit
|
Python
|
34a5ac0d0581e45e79eb1ef5c1172e1bd3e4c9b0
|
change id to lowercase d
|
d120/pyophase,d120/pyophase,d120/pyophase,d120/pyophase
|
d120_provider/provider.py
|
d120_provider/provider.py
|
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class D120Account(ProviderAccount):
pass
class D120Provider(OAuth2Provider):
id = 'd120'
name = 'D120 OAuth2 Provider'
account_class = D120Account
def extract_uid(self, data):
return str(data['preferred_username'])
def extract_common_fields(self, data):
return dict(username=data['preferred_username'],
email=data.get('email', ''),
first_name=data.get('given_name', ''),
last_name=data.get('family_name', ''), )
def get_default_scope(self):
scope = ['read']
return scope
providers.registry.register(D120Provider)
|
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class D120Account(ProviderAccount):
pass
class D120Provider(OAuth2Provider):
id = 'D120'
name = 'D120 OAuth2 Provider'
account_class = D120Account
def extract_uid(self, data):
return str(data['preferred_username'])
def extract_common_fields(self, data):
return dict(username=data['preferred_username'],
email=data.get('email', ''),
first_name=data.get('given_name', ''),
last_name=data.get('family_name', ''), )
def get_default_scope(self):
scope = ['read']
return scope
providers.registry.register(D120Provider)
|
agpl-3.0
|
Python
|
e58781d10d930addc911a2e9b86370ba997cddfc
|
Make STRtree work with empty input sequence.
|
abali96/Shapely,mouadino/Shapely,jdmcbr/Shapely,jdmcbr/Shapely,mouadino/Shapely,abali96/Shapely,mindw/shapely,mindw/shapely
|
shapely/strtree.py
|
shapely/strtree.py
|
from shapely.geos import lgeos
import ctypes
class STRtree:
"""
STRtree is an R-tree that is created using the Sort-Tile-Recursive
algorithm. STRtree takes a sequence of geometry objects as initialization
parameter. After initialization the query method can be used to make a
spatial query over those objects.
>>> from shapely.geometry import Polygon
>>> polys = [ Polygon(((0, 0), (1, 0), (1, 1))), Polygon(((0, 1), (0, 0), (1, 0))), Polygon(((100, 100), (101, 100), (101, 101))) ]
>>> s = STRtree(polys)
>>> query_geom = Polygon(((-1, -1), (2, 0), (2, 2), (-1, 2)))
>>> result = s.query(query_geom)
>>> polys[0] in result
True
>>> polys[1] in result
True
>>> polys[2] in result
False
>>> # Test empty tree
>>> s = STRtree([])
>>> s.query(query_geom)
[]
>>> # Test tree with one object
>>> s = STRtree([polys[0]])
>>> result = s.query(query_geom)
>>> polys[0] in result
True
"""
def __init__(self, geoms):
self._n_geoms = len(geoms)
# GEOS STRtree capacity has to be > 1
self._tree_handle = lgeos.GEOSSTRtree_create(max(2, len(geoms)))
for geom in geoms:
lgeos.GEOSSTRtree_insert(self._tree_handle, geom._geom, ctypes.py_object(geom))
def __del__(self):
lgeos.GEOSSTRtree_destroy(self._tree_handle)
def query(self, geom):
if self._n_geoms == 0:
return []
result = []
def callback(item, userdata):
geom = ctypes.cast(item, ctypes.py_object).value
result.append(geom)
lgeos.GEOSSTRtree_query(self._tree_handle, geom._geom, lgeos.GEOSQueryCallback(callback), None)
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
|
from shapely.geos import lgeos
import ctypes
class STRtree:
"""
STRtree is an R-tree that is created using the Sort-Tile-Recursive
algorithm. STRtree takes a sequence of geometry objects as initialization
parameter. After initialization the query method can be used to make a
spatial query over those objects.
>>> from shapely.geometry import Polygon
>>> polys = [ Polygon(((0, 0), (1, 0), (1, 1))), Polygon(((0, 1), (0, 0), (1, 0))), Polygon(((100, 100), (101, 100), (101, 101))) ]
>>> s = STRtree(polys)
>>> query_geom = Polygon(((-1, -1), (2, 0), (2, 2), (-1, 2)))
>>> result = s.query(query_geom)
>>> polys[0] in result
True
>>> polys[1] in result
True
>>> polys[2] in result
False
"""
def __init__(self, geoms):
self._tree_handle = lgeos.GEOSSTRtree_create(len(geoms))
for geom in geoms:
lgeos.GEOSSTRtree_insert(self._tree_handle, geom._geom, ctypes.py_object(geom))
def __del__(self):
lgeos.GEOSSTRtree_destroy(self._tree_handle)
def query(self, geom):
result = []
def callback(item, userdata):
geom = ctypes.cast(item, ctypes.py_object).value
result.append(geom)
lgeos.GEOSSTRtree_query(self._tree_handle, geom._geom, lgeos.GEOSQueryCallback(callback), None)
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
|
bsd-3-clause
|
Python
|
b4810235dd91e326b088f0ea4fc479ad6b9b7900
|
Improve session backend compatibility with Django.
|
wtanaka/google-app-engine-helper-for-django,clones/google-app-engine-django
|
appengine_django/sessions/backends/db.py
|
appengine_django/sessions/backends/db.py
|
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from django.contrib.sessions.backends import base
from django.core.exceptions import SuspiciousOperation
from appengine_django.sessions.models import Session
class SessionStore(base.SessionBase):
"""A key-based session store for Google App Engine."""
def load(self):
session = self._get_session(self.session_key)
if session:
try:
return self.decode(session.session_data)
except SuspiciousOperation:
# Create a new session_key for extra security.
pass
self.create()
return {}
def save(self, must_create=False):
if must_create:
if Session.get_by_key_name('k:' + self._session_key):
raise base.CreateError
session = Session(
key_name='k:' + self.session_key,
session_data = self.encode(self._session),
expire_date = self.get_expiry_date())
session.put()
def exists(self, session_key):
return self._get_session(session_key) is not None
def delete(self, session_key=None):
if session_key is None:
session_key = self._session_key
session = self._get_session(session_key)
if session:
session.delete()
def _get_session(self, session_key):
session = Session.get_by_key_name('k:' + session_key)
if session:
if session.expire_date > datetime.now():
return session
session.delete()
return None
def create(self):
while True:
self.session_key = self._get_new_session_key()
try:
# Save immediately to ensure we have a unique entry in the
# database.
self.save(must_create=True)
except base.CreateError:
# Key wasn't unique. Try again.
continue
self.modified = True
self._session_cache = {}
return
|
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from django.contrib.sessions.backends import base
from django.core.exceptions import SuspiciousOperation
from appengine_django.sessions.models import Session
from google.appengine.ext import db
class SessionStore(base.SessionBase):
"""A key-based session store for Google App Engine."""
def load(self):
session = self._get_session(self.session_key)
if session:
try:
return self.decode(session.session_data)
except SuspiciousOperation:
# Create a new session_key for extra security.
pass
self.session_key = self._get_new_session_key()
self._session_cache = {}
self.save()
# Ensure the user is notified via a new cookie.
self.modified = True
return {}
def save(self):
session = Session(
key_name='k:' + self.session_key,
session_data = self.encode(self._session),
expire_date = self.get_expiry_date())
session.put()
def exists(self, session_key):
return self._get_session(session_key) is not None
def delete(self, session_key):
session = self._get_session(session_key)
if session:
session.delete()
def _get_session(self, session_key):
session = Session.get_by_key_name('k:' + session_key)
if session:
if session.expire_date > datetime.now():
return session
session.delete()
return None
|
apache-2.0
|
Python
|
5e63e3b86623aed099383fa96c7ee079fdce8905
|
Update to setup.py so that generated SWIG library is copied over to python installation appropriately. Somehow, I did not include this change in my previous commit.
|
opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core
|
OpenSim/Wrapping/Python/setup.py
|
OpenSim/Wrapping/Python/setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
if os.name == 'posix':
# Linux, etc.
lib_name = '_opensim.so'
elif os.name == 'nt':
# Windows.
lib_name = '_opensim.pyd'
setup(name='opensim',
version='3.1',
description='OpenSim Simulation Framework',
author='OpenSim Team',
author_email='[email protected]',
url='http://opensim.stanford.edu/',
license='Apache 2.0',
packages=find_packages(),
data_files=[('opensim', ['opensim/%s' % lib_name])],
classifiers=[
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Physics',
],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='pyOpenSim',
version='3.1',
description='OpenSim Simulation Framework',
author='OpenSim Team',
author_email='[email protected]',
url='http://opensim.stanford.edu/'
)
|
apache-2.0
|
Python
|
08f46549f7ce70416bb0f605fb9da7334b465e50
|
Fix attributes.
|
PyBossa/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,OpenNewsLabs/pybossa,OpenNewsLabs/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,jean/pybossa,geotagx/pybossa,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,stefanhahmann/pybossa
|
test/factories/auditlog_factory.py
|
test/factories/auditlog_factory.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.auditlog import Auditlog
from . import BaseFactory, factory, auditlog_repo
app = factory.SubFactory('factories.AppFactory')
class AuditlogFactory(BaseFactory):
class Meta:
model = Auditlog
@classmethod
def _create(cls, model_class, *args, **kwargs):
log = model_class(*args, **kwargs)
auditlog_repo.save(log)
return log
id = factory.Sequence(lambda n: n)
action = 'update'
caller = 'web'
attribute = 'attribute'
old_value ='old'
new_value = 'new'
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model.auditlog import Auditlog
from . import BaseFactory, factory, auditlog_repo
app = factory.SubFactory('factories.AppFactory')
class AuditlogFactory(BaseFactory):
class Meta:
model = Auditlog
@classmethod
def _create(cls, model_class, *args, **kwargs):
log = model_class(*args, **kwargs)
auditlog_repo.save(log)
return log
id = factory.Sequence(lambda n: n)
action = 'update'
caller = 'web'
log = "msg"
|
agpl-3.0
|
Python
|
5934669c0edbd914d14612e16be7c88641b50bee
|
Fix test for eq and test eq with other classes
|
Pytwitcher/pytwitcherapi,Pytwitcher/pytwitcherapi
|
test/test_chat_chatserverstatus.py
|
test/test_chat_chatserverstatus.py
|
from pytwitcherapi import chat
def test_eq_str(servers):
assert servers[0] == '192.16.64.11:80',\
"Server should be equal to the same address."
def test_noteq_str(servers):
assert servers[0] != '192.16.64.50:89',\
"""Server should not be equal to a different address"""
def test_eq(servers):
s1 = chat.ChatServerStatus('192.16.64.11:80')
assert servers[0] == s1,\
"""Servers with same address should be equal"""
assert not (s1 == 123),\
"""Servers should not be eqaul to other classes with different id"""
def test_noteq(servers):
assert not (servers[0] == servers[1]),\
"""Servers with different address should not be equal"""
def test_lt(servers):
sortedservers = sorted(servers)
expected = [servers[2], servers[3], servers[0], servers[1]]
assert sortedservers == expected,\
"""Server should be sorted like this: online, then offline,
little errors, then more errors, little lag, then more lag."""
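# A plausible sketch of the `servers` fixture these tests rely on, assuming
# ChatServerStatus takes the address string; the real fixture presumably also
# varies online status, errors and lag so that test_lt's ordering holds.
import pytest

@pytest.fixture
def servers():
    addresses = ['192.16.64.11:80', '192.16.64.12:80',
                 '192.16.64.13:80', '192.16.64.14:80']
    return [chat.ChatServerStatus(a) for a in addresses]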
|
from pytwitcherapi import chat
def test_eq_str(servers):
assert servers[0] == '192.16.64.11:80',\
"Server should be equal to the same address."
def test_noteq_str(servers):
assert servers[0] != '192.16.64.50:89',\
"""Server should not be equal to a different address"""
def test_eq(servers):
s1 = chat.ChatServerStatus('192.16.64.11:80')
assert servers[0] == s1,\
"""Servers with same address should be equal"""
def test_noteq(servers):
assert servers[0] != servers[1],\
"""Servers with different address should not be equal"""
def test_lt(servers):
sortedservers = sorted(servers)
expected = [servers[2], servers[3], servers[0], servers[1]]
assert sortedservers == expected,\
"""Server should be sorted like this: online, then offline,
little errors, then more errors, little lag, then more lag."""
|
bsd-3-clause
|
Python
|
a004abd76af602192704cf4d01d9daf3903d6477
|
Remove unused code
|
ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland
|
child_switzerland/models/child_compassion.py
|
child_switzerland/models/child_compassion.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields
class CompassionChild(models.Model):
_inherit = 'compassion.child'
desc_fr = fields.Text('French description', readonly=True)
desc_de = fields.Text('German description', readonly=True)
desc_it = fields.Text('Italian description', readonly=True)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class CompassionChild(models.Model):
_inherit = 'compassion.child'
desc_fr = fields.Text('French description', readonly=True)
desc_de = fields.Text('German description', readonly=True)
desc_it = fields.Text('Italian description', readonly=True)
@api.model
def correct_old_children(self):
old_children = self.search([('global_id', '=', False)]).filtered(
lambda c: len(c.local_id) < 11)
for child in old_children:
if child.code:
child.local_id = child.code[0:2] + '0' + \
child.code[2:5] + '0' + child.code[5:]
return True
@api.model
def find_missing_global_id(self):
missing_gids = self.search([('global_id', '=', False)])
global_search = self.env['compassion.childpool.search'].create({
'take': 1,
})
cpt = 0
for child in missing_gids:
try:
global_search.local_id = child.local_id
global_search.do_search()
if global_search.global_child_ids:
child.global_id = global_search.global_child_ids.global_id
# Commit at each fix
self.env.cr.commit() # pylint: disable=invalid-commit
except:
self.env.invalidate_all()
finally:
cpt += 1
|
agpl-3.0
|
Python
|
92f5cff9edfbeb2219fc2fb714364dc590bd912f
|
Fix too long line in soc.cache.logic module.
|
SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange
|
app/soc/cache/logic.py
|
app/soc/cache/logic.py
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module contains logic memcaching functions.
"""
__authors__ = [
'"Sverre Rabbelier" <[email protected]>',
]
from google.appengine.api import memcache
from google.appengine.ext import db
import soc.cache.base
def key(model, filter, order):
"""Returns the memcache key for this query.
"""
new_filter = {}
for filter_key, value in filter.iteritems():
if isinstance(value, db.Model):
new_value = value.key().id_or_name()
else:
new_value = value
new_filter[filter_key] = new_value
return 'query_for_%(kind)s_%(filter)s_%(order)s' % {
'kind': repr(model.kind()),
'filter': repr(new_filter),
'order': repr(order),
}
def get(model, filter, order, *args, **kwargs):
"""Retrieves the data for the specified query from the memcache.
"""
memcache_key = key(model, filter, order)
return memcache.get(memcache_key), memcache_key
def put(data, memcache_key, *args, **kwargs):
"""Sets the data for the specified query in the memcache.
Args:
data: the data to be cached
"""
# Store data for fifteen minutes to force a refresh every so often
retention = 15*60
memcache.add(memcache_key, data, retention)
def flush(model, filter):
"""Removes the data for the current user from the memcache.
"""
memcache_key = key(model, filter)
memcache.delete(memcache_key)
# define the cache function
cache = soc.cache.base.getCacher(get, put)
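# A self-contained illustration of the key format built by key(), using
# hypothetical stand-ins for a datastore model (no App Engine required):
class FakeKey(object):
    def id_or_name(self):
        return 42

class FakeModel(object):
    def key(self):
        return FakeKey()

# key() flattens model values in the filter to their id_or_name():
new_filter = {'scope': FakeModel().key().id_or_name()}
print('query_for_%(kind)s_%(filter)s_%(order)s' % {
    'kind': repr('Student'),
    'filter': repr(new_filter),
    'order': repr(['name']),
})  # -> query_for_'Student'_{'scope': 42}_['name']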
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module contains logic memcaching functions.
"""
__authors__ = [
'"Sverre Rabbelier" <[email protected]>',
]
from google.appengine.api import memcache
from google.appengine.ext import db
import soc.cache.base
def key(model, filter, order):
"""Returns the memcache key for this query.
"""
new_filter = {}
for filter_key, value in filter.iteritems():
new_value = value.key().id_or_name() if isinstance(value, db.Model) else value
new_filter[filter_key] = new_value
return 'query_for_%(kind)s_%(filter)s_%(order)s' % {
'kind': repr(model.kind()),
'filter': repr(new_filter),
'order': repr(order),
}
def get(model, filter, order, *args, **kwargs):
"""Retrieves the data for the specified query from the memcache.
"""
memcache_key = key(model, filter, order)
return memcache.get(memcache_key), memcache_key
def put(data, memcache_key, *args, **kwargs):
"""Sets the data for the specified query in the memcache.
Args:
data: the data to be cached
"""
# Store data for fifteen minutes to force a refresh every so often
retention = 15*60
memcache.add(memcache_key, data, retention)
def flush(model, filter):
"""Removes the data for the current user from the memcache.
"""
memcache_key = key(model, filter)
memcache.delete(memcache_key)
# define the cache function
cache = soc.cache.base.getCacher(get, put)
|
apache-2.0
|
Python
|
bed9390490ad0c9d8d8319ea017f13d075284450
|
Make sure SQL file gets closed
|
california-civic-data-coalition/django-calaccess-processed-data,california-civic-data-coalition/django-calaccess-processed-data
|
calaccess_processed_filings/managers.py
|
calaccess_processed_filings/managers.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom manager for loading raw data into "filings" models.
"""
from __future__ import unicode_literals
import itertools
# Django tricks
from django.db.models import Q
from django.db import connection
# Managers
from calaccess_processed.managers import BulkLoadSQLManager
# Logging
import logging
logger = logging.getLogger(__name__)
class FilingsManager(BulkLoadSQLManager):
"""
Utilities for more quickly loading bulk data.
"""
app_name = "calaccess_processed_filings"
def get_sql(self):
"""
Return string of raw sql for loading the model.
"""
fp = open(self.sql_path, 'r')
sql = fp.read()
fp.close()
return sql
@property
def sql_path(self):
"""
Return the path to the .sql file with the model's loading query.
"""
file_name = 'load_%s_model' % self.model._meta.model_name
return self.get_sql_path(file_name)
def load(self):
"""
Load the model by executing its corresponding raw SQL query.
Temporarily drops any constraints or indexes on the model.
"""
# Drop constraints and indexes to speed loading
self.get_queryset().drop_constraints()
self.get_queryset().drop_indexes()
# Run the actual loader SQL
with connection.cursor() as c:
c.execute(self.get_sql())
# Restore the constraints and index that were dropped
self.get_queryset().restore_constraints()
self.get_queryset().restore_indexes()
class Form501FilingManager(FilingsManager):
"""
A custom manager for Form 501 filings.
"""
def without_candidacy(self):
"""
Returns Form 501 filings that do not have an OCD Candidacy yet.
"""
from calaccess_processed_elections.proxies import OCDCandidacyProxy
matched_qs = OCDCandidacyProxy.objects.matched_form501_ids()
matched_list = [i for i in itertools.chain.from_iterable(matched_qs)]
return self.get_queryset().exclude(
Q(filing_id__in=matched_list) | Q(office__icontains='RETIREMENT')
)
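# An equivalent sketch using a context manager, which closes the handle even
# if read() raises; FilingsManager.get_sql above closes it explicitly instead.
def get_sql_via_context_manager(manager):
    with open(manager.sql_path, 'r') as fp:
        return fp.read()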
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom manager for loading raw data into "filings" models.
"""
from __future__ import unicode_literals
import itertools
# Django tricks
from django.db.models import Q
from django.db import connection
# Managers
from calaccess_processed.managers import BulkLoadSQLManager
# Logging
import logging
logger = logging.getLogger(__name__)
class FilingsManager(BulkLoadSQLManager):
"""
Utilities for more quickly loading bulk data.
"""
app_name = "calaccess_processed_filings"
def get_sql(self):
"""
Return string of raw sql for loading the model.
"""
return open(self.sql_path, 'r').read()
@property
def sql_path(self):
"""
Return the path to the .sql file with the model's loading query.
"""
file_name = 'load_%s_model' % self.model._meta.model_name
return self.get_sql_path(file_name)
def load(self):
"""
Load the model by executing its corresponding raw SQL query.
Temporarily drops any constraints or indexes on the model.
"""
# Drop constraints and indexes to speed loading
self.get_queryset().drop_constraints()
self.get_queryset().drop_indexes()
# Run the actual loader SQL
with connection.cursor() as c:
c.execute(self.get_sql())
# Restore the constraints and index that were dropped
self.get_queryset().restore_constraints()
self.get_queryset().restore_indexes()
class Form501FilingManager(FilingsManager):
"""
A custom manager for Form 501 filings.
"""
def without_candidacy(self):
"""
Returns Form 501 filings that do not have an OCD Candidacy yet.
"""
from calaccess_processed_elections.proxies import OCDCandidacyProxy
matched_qs = OCDCandidacyProxy.objects.matched_form501_ids()
matched_list = [i for i in itertools.chain.from_iterable(matched_qs)]
return self.get_queryset().exclude(
Q(filing_id__in=matched_list) | Q(office__icontains='RETIREMENT')
)
|
mit
|
Python
|
a2398d77f550a5b73c5bbd76d30131c88c64caa4
|
Make the example a bit more exciting.
|
amorilia/formast,amorilia/formast,amorilia/formast
|
swig/test.py
|
swig/test.py
|
import formast
e = formast.Expr()
formast.parse_xml("test.txt", e)
class Printer(formast.Visitor):
def expr_uint(self, v):
print v,
def expr_add(self, left, right):
print "(",
self.expr(left)
print "+",
self.expr(right)
print ")",
def expr_sub(self, left, right):
print "(",
self.expr(left)
print "-",
self.expr(right)
print ")",
def expr_mul(self, left, right):
print "(",
self.expr(left)
print "*",
self.expr(right)
print ")",
def expr_div(self, left, right):
print "(",
self.expr(left)
print "/",
self.expr(right)
print ")",
def expr_neg(self, right):
print "- (",
self.expr(right)
print ")",
def expr_pos(self, right):
print "+",
self.expr(right)
class Evaluator(formast.Visitor):
def __init__(self):
formast.Visitor.__init__(self)
self.stack = []
def expr(self, e):
formast.Visitor.expr(self, e)
print self.stack
def expr_uint(self, v):
self.stack.append(v)
def expr_add(self, left, right):
self.expr(left)
self.expr(right)
self.stack.append(self.stack.pop() + self.stack.pop())
def expr_sub(self, left, right):
self.expr(left)
self.expr(right)
self.stack.append(self.stack.pop() - self.stack.pop())
def expr_mul(self, left, right):
self.expr(left)
self.expr(right)
self.stack.append(self.stack.pop() * self.stack.pop())
def expr_div(self, left, right):
self.expr(right)
self.expr(left) # will pop first!
self.stack.append(self.stack.pop() // self.stack.pop())
def expr_neg(self, right):
self.expr(right)
self.stack.append(-self.stack.pop())
def expr_pos(self, right):
pass
printer = Printer()
printer.expr(e)
print
evaluator = Evaluator()
evaluator.expr(e)
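# Why expr_div visits the right operand first ("will pop first!"): the left
# value then sits on top of the stack and is popped first. In isolation:
stack = []
stack.append(4)    # right operand pushed first
stack.append(12)   # left operand pushed last
print(stack.pop() // stack.pop())  # 12 // 4 -> 3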
|
import formast
e = formast.Expr()
formast.parse_xml("test.txt", e)
class Visitor(formast.Visitor):
def expr_uint(self, v):
print(v)
def expr_add(self, left, right):
print "("
self.expr(left)
print "+"
self.expr(right)
print ")"
def expr_sub(self, left, right):
print "("
self.expr(left)
print "+"
self.expr(right)
print ")"
def expr_mul(self, left, right):
print "("
self.expr(left)
print "*"
self.expr(right)
print ")"
def expr_div(self, left, right):
print "("
self.expr(left)
print "/"
self.expr(right)
print ")"
def expr_neg(self, right):
print "-"
self.expr(right)
def expr_pos(self, right):
print "+"
self.expr(right)
visitor = Visitor()
visitor.expr(e)
|
bsd-3-clause
|
Python
|
df9b8428d6575bf68699534c37425bd1bc1c6ae8
|
decrease the cache time for the block
|
rezometz/django-paiji2-shoutbox,rezometz/django-paiji2-shoutbox
|
paiji2_shoutbox/modular.py
|
paiji2_shoutbox/modular.py
|
from django.conf.urls import url, include
from modular_blocks import ModuleApp, TemplateTagBlock, modules
from . import urls
class ShoutboxModule(ModuleApp):
app_name = 'bulletin_board'
name = 'bulletin-board'
urls = url(r'^shoutbox/', include(urls))
templatetag_blocks = [
TemplateTagBlock(
name='bulletin-board',
library='shoutbox',
tag='display_bulletin_board',
cache_time=1,
kwargs={
'nb': 10,
},
),
]
modules.register(ShoutboxModule)
|
from django.conf.urls import url, include
from modular_blocks import ModuleApp, TemplateTagBlock, modules
from . import urls
class ShoutboxModule(ModuleApp):
app_name = 'bulletin_board'
name = 'bulletin-board'
urls = url(r'^shoutbox/', include(urls))
templatetag_blocks = [
TemplateTagBlock(
name='bulletin-board',
library='shoutbox',
tag='display_bulletin_board',
cache_time=30 * 60,
kwargs={
'nb': 10,
},
),
]
modules.register(ShoutboxModule)
|
agpl-3.0
|
Python
|
c47d11fbe4e09dcec8d0c40d778c38b04b8ccc7b
|
Add List-Id and List-Unsubscribe headers
|
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
|
uchicagohvz/users/mailing_list.py
|
uchicagohvz/users/mailing_list.py
|
# Mailing list configuration
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from uchicagohvz import secrets
from .tasks import smtp_localhost_send
from .models import Profile
from rest_framework.response import Response
from rest_framework.views import APIView
import email
import hashlib
import hmac
def _verify(token, timestamp, signature):
return signature == hmac.new(
key=secrets.MAILGUN_API_KEY,
msg='{}{}'.format(timestamp, token),
digestmod=hashlib.sha256).hexdigest()
class ChatterMailgunHook(APIView):
authentication_classes = []
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
FIELDS = (
'recipient', 'sender', 'from',
'subject', 'body-mime',
'timestamp', 'token', 'signature'
)
verified = _verify(request.data['token'], request.data['timestamp'], request.data['signature'])
if all([x in request.data for x in FIELDS]) and verified:
msg = email.message_from_string(request.data['body-mime'])
for x in ('From', 'Sender', 'To', 'Reply-To', 'Subject'):
del msg[x]
listhost_addr = '[email protected]'
msg['From'] = request.data['from']
msg['Sender'] = listhost_addr
msg['To'] = listhost_addr
msg['Reply-To'] = listhost_addr
msg['Subject'] = "[HvZ-Chatter] " + request.data['subject']
msg['List-Id'] = 'HvZ-Chatter <https://www.uchicagohvz.org>'
msg['List-Unsubscribe'] = '<https://www.uchicagohvz.org/users/update_profile/>'
to_addrs = tuple(Profile.objects.filter(
user__is_active=True, subscribe_chatter_listhost=True).values_list('user__email', flat=True))
smtp_localhost_send(listhost_addr, to_addrs, msg.as_string())
return Response()
else:
return Response(status=406)
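# For reference, the Mailgun-style signature checked by _verify() above;
# the key and values here are stand-ins, not real credentials.
example_key = b'example-api-key'
example_msg = '{}{}'.format('1428422000', 'a1b2c3').encode()
example_sig = hmac.new(key=example_key, msg=example_msg,
                       digestmod=hashlib.sha256).hexdigest()
print(example_sig)  # _verify() compares this digest to request.data['signature']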
|
# Mailing list configuration
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from uchicagohvz import secrets
from .tasks import smtp_localhost_send
from .models import Profile
from rest_framework.response import Response
from rest_framework.views import APIView
import email
import hashlib
import hmac
def _verify(token, timestamp, signature):
return signature == hmac.new(
key=secrets.MAILGUN_API_KEY,
msg='{}{}'.format(timestamp, token),
digestmod=hashlib.sha256).hexdigest()
class ChatterMailgunHook(APIView):
authentication_classes = []
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
FIELDS = (
'recipient', 'sender', 'from',
'subject', 'body-mime',
'timestamp', 'token', 'signature'
)
verified = _verify(request.data['token'], request.data['timestamp'], request.data['signature'])
if all([x in request.data for x in FIELDS]) and verified:
msg = email.message_from_string(request.data['body-mime'])
for x in ('From', 'Sender', 'To', 'Reply-To', 'Subject'):
del msg[x]
listhost_addr = '[email protected]'
msg['From'] = request.data['from']
msg['Sender'] = listhost_addr
msg['To'] = listhost_addr
msg['Reply-To'] = listhost_addr
msg['Subject'] = "[HvZ-Chatter] " + request.data['subject']
to_addrs = tuple(Profile.objects.filter(
user__is_active=True, subscribe_chatter_listhost=True).values_list('user__email', flat=True))
smtp_localhost_send(listhost_addr, to_addrs, msg.as_string())
return Response()
else:
return Response(status=406)
|
mit
|
Python
|
0cc04e9a486fb7dcf312a5c336f8f529f8b1f32d
|
Update version 1.0.4 -> 1.0.5
|
TamiaLab/PySkCode
|
skcode/__init__.py
|
skcode/__init__.py
|
"""
SkCode (Python implementation of BBcode syntax) parser library.
"""
# Package information
__author__ = "Fabien Batteix (@skywodd)"
__copyright__ = "Copyright 2015, TamiaLab"
__credits__ = ["Fabien Batteix", "TamiaLab"]
__license__ = "GPLv3"
__version__ = "1.0.5"
__maintainer__ = "Fabien Batteix"
__email__ = "[email protected]"
__status__ = "Development" # "Production"
# User friendly imports
from .treebuilder import parse_skcode
from .render import (render_to_html,
render_to_skcode,
render_to_text)
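# A minimal usage sketch of the public API re-exported above; the exact
# argument and return conventions are assumptions based on the names alone.
tree = parse_skcode('[b]Hello[/b] world')
print(render_to_html(tree))
print(render_to_text(tree))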
|
"""
SkCode (Python implementation of BBcode syntax) parser library.
"""
# Package information
__author__ = "Fabien Batteix (@skywodd)"
__copyright__ = "Copyright 2015, TamiaLab"
__credits__ = ["Fabien Batteix", "TamiaLab"]
__license__ = "GPLv3"
__version__ = "1.0.4"
__maintainer__ = "Fabien Batteix"
__email__ = "[email protected]"
__status__ = "Development" # "Production"
# User friendly imports
from .treebuilder import parse_skcode
from .render import (render_to_html,
render_to_skcode,
render_to_text)
|
agpl-3.0
|
Python
|
f343a8bc7592ab9befb5c03ccd09db61439e3f76
|
remove extra buttons labelled Make Maintenance Visit
|
gangadharkadam/verveerp,mbauskar/phrerp,BhupeshGupta/erpnext,indictranstech/erpnext,gangadharkadam/saloon_erp_install,indictranstech/phrerp,saurabh6790/alert-med-app,suyashphadtare/vestasi-update-erp,gangadhar-kadam/verve_test_erp,saurabh6790/med_new_app,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/erpnext_smart,Tejal011089/paypal_erpnext,shft117/SteckerApp,gmarke/erpnext,netfirms/erpnext,suyashphadtare/sajil-final-erp,Tejal011089/trufil-erpnext,MartinEnder/erpnext-de,saurabh6790/test-erp,suyashphadtare/vestasi-update-erp,indictranstech/vestasi-erpnext,Tejal011089/trufil-erpnext,Tejal011089/fbd_erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,indictranstech/trufil-erpnext,suyashphadtare/vestasi-erp-final,MartinEnder/erpnext-de,saurabh6790/test_final_med_app,gmarke/erpnext,indictranstech/vestasi-erpnext,saurabh6790/medsynaptic1-app,gangadharkadam/contributionerp,pawaranand/phrerp,indictranstech/reciphergroup-erpnext,gangadhar-kadam/hrerp,hanselke/erpnext-1,suyashphadtare/vestasi-erp-jan-end,saurabh6790/aimobilize-app-backup,gangadhar-kadam/smrterp,Drooids/erpnext,hatwar/buyback-erpnext,suyashphadtare/sajil-erp,mbauskar/alec_frappe5_erpnext,aruizramon/alec_erpnext,4commerce-technologies-AG/erpnext,mbauskar/omnitech-demo-erpnext,njmube/erpnext,geekroot/erpnext,gangadhar-kadam/helpdesk-erpnext,treejames/erpnext,dieface/erpnext,suyashphadtare/vestasi-erp-1,shft117/SteckerApp,saurabh6790/medsynaptic1-app,gangadharkadam/v5_erp,gangadhar-kadam/laganerp,indictranstech/biggift-erpnext,SPKian/Testing2,mahabuber/erpnext,gangadhar-kadam/latestchurcherp,gangadharkadam/v5_erp,mbauskar/internal-hr,gangadharkadam/smrterp,meisterkleister/erpnext,hernad/erpnext,saurabh6790/OFF-RISAPP,gangadharkadam/saloon_erp_install,Tejal011089/digitales_erpnext,gangadharkadam/contributionerp,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,gangadharkadam/smrterp,rohitwaghchaure/digitales_erpnext,gangadharkadam/v4_erp,mbauskar/internal-hr,indictranstech/internal-erpnext,mahabuber/erpnext,gangadharkadam/verveerp,rohitwaghchaure/GenieManager-erpnext,indictranstech/trufil-erpnext,fuhongliang/erpnext,mbauskar/internal-hr,rohitwaghchaure/erpnext-receipher,indictranstech/internal-erpnext,mbauskar/alec_frappe5_erpnext,suyashphadtare/vestasi-erp-1,saurabh6790/omni-apps,saurabh6790/pow-app,indictranstech/vestasi-erpnext,gangadharkadam/office_erp,hanselke/erpnext-1,netfirms/erpnext,saurabh6790/ON-RISAPP,gangadharkadam/verveerp,gangadharkadam/letzerp,suyashphadtare/test,4commerce-technologies-AG/erpnext,rohitwaghchaure/erpnext_smart,pawaranand/phrerp,shitolepriya/test-erp,mahabuber/erpnext,treejames/erpnext,hernad/erpnext,gangadharkadam/contributionerp,gangadhar-kadam/mic-erpnext,geekroot/erpnext,gmarke/erpnext,sagar30051991/ozsmart-erp,Tejal011089/trufil-erpnext,BhupeshGupta/erpnext,suyashphadtare/vestasi-update-erp,ThiagoGarciaAlves/erpnext,saurabh6790/omn-app,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,saurabh6790/med_app_rels,indictranstech/Das_Erpnext,Yellowen/Owrang,meisterkleister/erpnext,gangadhar-kadam/mtn-erpnext,gangadhar-kadam/latestchurcherp,mbauskar/helpdesk-erpnext,rohitwaghchaure/GenieManager-erpnext,Tejal011089/digitales_erpnext,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/v5_erp,saurabh6790/test-med-app,gangadhar-kadam/latestchurcherp,indictranstech/focal-erpnext,gangadhar-kadam/powapp,suyashphadtare/gd-erp,mbauskar/Das_Erpnext,Tejal011089/paypal_erpnext,saurabh6790/alert-med-app,suyashphadtare/sajil-final-erp,sagar30051991/ozsmart-erp,indictranstech/erpnext,hanselke/erpnext-1,ga
ngadhar-kadam/verve_live_erp,Tejal011089/fbd_erpnext,gangadharkadam/vlinkerp,anandpdoshi/erpnext,shitolepriya/test-erp,gangadharkadam/saloon_erp_install,mbauskar/omnitech-erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/Medsyn2_app,Drooids/erpnext,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/digitales_erpnext,susuchina/ERPNEXT,sagar30051991/ozsmart-erp,suyashphadtare/sajil-erp,Aptitudetech/ERPNext,mbauskar/helpdesk-erpnext,indictranstech/vestasi-erpnext,Tejal011089/huntercamp_erpnext,gangadhar-kadam/verve-erp,MartinEnder/erpnext-de,indictranstech/phrerp,gangadhar-kadam/hrerp,indictranstech/fbd_erpnext,aruizramon/alec_erpnext,saurabh6790/aimobilize,saurabh6790/trufil_app,rohitwaghchaure/erpnext-receipher,tmimori/erpnext,indictranstech/osmosis-erpnext,gangadharkadam/vlinkerp,gangadhar-kadam/adb-erp,indictranstech/buyback-erp,saurabh6790/test_final_med_app,saurabh6790/omni-apps,saurabh6790/ON-RISAPP,saurabh6790/med_new_app,indictranstech/buyback-erp,hanselke/erpnext-1,hatwar/buyback-erpnext,njmube/erpnext,mbauskar/Das_Erpnext,saurabh6790/omnitech-apps,gangadhar-kadam/verve_test_erp,suyashphadtare/test,mbauskar/phrerp,njmube/erpnext,indictranstech/osmosis-erpnext,saurabh6790/test-med-app,mbauskar/omnitech-erpnext,indictranstech/tele-erpnext,aruizramon/alec_erpnext,indictranstech/fbd_erpnext,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,gangadhar-kadam/sapphire_app,indictranstech/phrerp,anandpdoshi/erpnext,hatwar/buyback-erpnext,Suninus/erpnext,gangadharkadam/saloon_erp,gangadhar-kadam/latestchurcherp,indictranstech/fbd_erpnext,gangadharkadam/v4_erp,hatwar/Das_erpnext,hatwar/Das_erpnext,saurabh6790/medsyn-app1,gangadharkadam/letzerp,saurabh6790/aimobilize,hatwar/focal-erpnext,dieface/erpnext,sheafferusa/erpnext,hatwar/Das_erpnext,saurabh6790/med_app_rels,suyashphadtare/vestasi-erp-1,saurabh6790/medapp,anandpdoshi/erpnext,indictranstech/focal-erpnext,saurabh6790/medsyn-app,netfirms/erpnext,mbauskar/helpdesk-erpnext,saurabh6790/test-erp,gangadharkadam/contributionerp,saurabh6790/medsyn-app,ShashaQin/erpnext,rohitwaghchaure/digitales_erpnext,gsnbng/erpnext,SPKian/Testing2,saurabh6790/medsynaptic-app,mbauskar/omnitech-demo-erpnext,SPKian/Testing,geekroot/erpnext,saurabh6790/omnisys-app,gangadhar-kadam/church-erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/osmosis_erpnext,gangadhar-kadam/laganerp,gsnbng/erpnext,sheafferusa/erpnext,BhupeshGupta/erpnext,SPKian/Testing2,indictranstech/biggift-erpnext,mahabuber/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/johnerp,indictranstech/phrerp,mbauskar/omnitech-erpnext,saurabh6790/aimobilize-app-backup,gsnbng/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/New_Theme_Erp,gangadhar-kadam/prjapp,suyashphadtare/sajil-erp,gangadhar-kadam/powapp,SPKian/Testing2,Tejal011089/huntercamp_erpnext,gmarke/erpnext,gangadharkadam/vlinkerp,indictranstech/Das_Erpnext,gangadhar-kadam/verve_test_erp,gangadharkadam/vlinkerp,shitolepriya/test-erp,gangadhar-kadam/verve_erp,pawaranand/phrerp,gangadhar-kadam/verve_live_erp,sheafferusa/erpnext,gangadhar-kadam/nassimapp,saurabh6790/tru_app_back,pawaranand/phrerp,saurabh6790/trufil_app,Tejal011089/Medsyn2_app,gangadharkadam/sterp,suyashphadtare/vestasi-erp-jan-end,indictranstech/biggift-erpnext,indictranstech/tele-erpnext,saurabh6790/OFF-RISAPP,shitolepriya/test-erp,netfirms/erpnext,Tejal011089/fbd_erpnext,tmimori/erpnext,meisterkleister/erpnext,saurabh6790/omnit-app,suyashphadtare/test,gangadharkadam/saloon_erp,Drooids/erpnext,gangadharkadam/verveerp,rohitwaghchaure/New_Theme_Erp,ThiagoGarciaAlves/erpnext,gangadharkadam/ste
rp,mbauskar/alec_frappe5_erpnext,Tejal011089/paypal_erpnext,indictranstech/biggift-erpnext,Tejal011089/med2-app,gangadhar-kadam/verve_live_erp,gangadhar-kadam/laganerp,indictranstech/buyback-erp,pombredanne/erpnext,ShashaQin/erpnext,saurabh6790/omn-app,indictranstech/erpnext,indictranstech/reciphergroup-erpnext,rohitwaghchaure/New_Theme_Erp,mbauskar/phrerp,suyashphadtare/gd-erp,Tejal011089/digitales_erpnext,SPKian/Testing,shft117/SteckerApp,pombredanne/erpnext,Tejal011089/huntercamp_erpnext,njmube/erpnext,sagar30051991/ozsmart-erp,Tejal011089/paypal_erpnext,BhupeshGupta/erpnext,Tejal011089/digitales_erpnext,Suninus/erpnext,gangadhar-kadam/verve_erp,gangadharkadam/v4_erp,gangadharkadam/v6_erp,gangadharkadam/saloon_erp,gangadharkadam/letzerp,indictranstech/reciphergroup-erpnext,mbauskar/sapphire-erpnext,gangadhar-kadam/helpdesk-erpnext,anandpdoshi/erpnext,mbauskar/phrerp,Tejal011089/huntercamp_erpnext,gangadharkadam/letzerp,mbauskar/helpdesk-erpnext,rohitwaghchaure/New_Theme_Erp,Drooids/erpnext,susuchina/ERPNEXT,meisterkleister/erpnext,gangadhar-kadam/church-erpnext,suyashphadtare/sajil-final-erp,gangadharkadam/saloon_erp_install,gangadhar-kadam/powapp,SPKian/Testing,hatwar/focal-erpnext,saurabh6790/omnisys-app,MartinEnder/erpnext-de,indictranstech/internal-erpnext,rohitwaghchaure/erpnext_smart,rohitwaghchaure/GenieManager-erpnext,Suninus/erpnext,suyashphadtare/gd-erp,gangadhar-kadam/verve-erp,indictranstech/tele-erpnext,sheafferusa/erpnext,saurabh6790/medsynaptic-app,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/sms-erpnext,indictranstech/trufil-erpnext,gangadharkadam/tailorerp,dieface/erpnext,gangadharkadam/sher,indictranstech/osmosis-erpnext,hatwar/focal-erpnext,gangadharkadam/office_erp,gangadhar-kadam/sms-erpnext,hernad/erpnext,fuhongliang/erpnext,shft117/SteckerApp,gangadhar-kadam/verve_erp,gsnbng/erpnext,gangadhar-kadam/sapphire_app,mbauskar/sapphire-erpnext,SPKian/Testing,gangadharkadam/johnerp,indictranstech/focal-erpnext,gangadhar-kadam/verve-erp,gangadharkadam/v5_erp,4commerce-technologies-AG/erpnext,geekroot/erpnext,ShashaQin/erpnext,pombredanne/erpnext,gangadhar-kadam/sapphire_app,mbauskar/omnitech-erpnext,Tejal011089/osmosis_erpnext,gangadharkadam/v6_erp,gangadhar-kadam/mtn-erpnext,indictranstech/focal-erpnext,saurabh6790/omnit-app,treejames/erpnext,gangadharkadam/v4_erp,saurabh6790/tru_app_back,tmimori/erpnext,Tejal011089/med2-app,saurabh6790/pow-app,fuhongliang/erpnext,gangadharkadam/sher,pombredanne/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/office_erp,gangadhar-kadam/smrterp,rohitwaghchaure/erpnext-receipher,indictranstech/fbd_erpnext,gangadharkadam/v6_erp,suyashphadtare/vestasi-erp-final,suyashphadtare/vestasi-erp-final,saurabh6790/test-erp,gangadhar-kadam/verve_test_erp,mbauskar/omnitech-demo-erpnext,Suninus/erpnext,fuhongliang/erpnext,hatwar/focal-erpnext,Tejal011089/osmosis_erpnext,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/v6_erp,Yellowen/Owrang,gangadharkadam/saloon_erp,tmimori/erpnext,hernad/erpnext,dieface/erpnext,susuchina/ERPNEXT,Tejal011089/fbd_erpnext,gangadharkadam/tailorerp,treejames/erpnext,gangadhar-kadam/prjapp,gangadhar-kadam/helpdesk-erpnext,saurabh6790/medsyn-app1,ShashaQin/erpnext,hatwar/Das_erpnext,saurabh6790/omnitech-apps,gangadhar-kadam/mic-erpnext,indictranstech/erpnext,indictranstech/internal-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,Tejal011089/osmosis_erpnext,susuchina/ERPNEXT,saurabh6790/test-erp,mbauskar/Das_Erpnext,gangadhar-kadam/adb-erp,saurabh6790/medapp,indictranstech/reciphergroup-erpnext,in
dictranstech/buyback-erp,gangadhar-kadam/verve_live_erp,suyashphadtare/gd-erp,indictranstech/tele-erpnext,gangadhar-kadam/nassimapp,aruizramon/alec_erpnext
|
erpnext/patches/jan_mar_2012/allocated_to_profile.py
|
erpnext/patches/jan_mar_2012/allocated_to_profile.py
|
def execute():
"""
Changes allocated_to option to Profile in
DocType Customer Issue
"""
import webnotes
webnotes.conn.sql("""
UPDATE `tabDocField`
SET options='Profile'
WHERE fieldname='allocated_to'
""")
webnotes.conn.sql("""
DELETE from `tabDocField`
WHERE parent='Customer Issue'
AND label='Make Maintenance Visit'
""")
from webnotes.modules.module_manager import reload_doc
reload_doc('support', 'doctype', 'customer_issue')
|
def execute():
"""
Changes allocated_to option to Profile in
DocType Customer Issue
"""
import webnotes
webnotes.conn.sql("""
UPDATE `tabDocField`
SET options='Profile'
WHERE fieldname='allocated_to'
""")
from webnotes.modules.module_manager import reload_doc
reload_doc('support', 'doctype', 'customer_issue')
|
agpl-3.0
|
Python
|
96176bb223f9971311a0a42c6c9845ca1c0170cc
|
Add base class to throttling
|
incuna/django-user-management,incuna/django-user-management
|
user_management/api/throttling.py
|
user_management/api/throttling.py
|
from rest_framework.throttling import ScopedRateThrottle
class DefaultRateMixin(object):
def get_rate(self):
try:
return self.THROTTLE_RATES[self.scope]
except KeyError:
return self.default_rate
class PostRequestThrottleMixin(object):
def allow_request(self, request, view):
"""
Throttle POST requests only.
"""
if request.method != 'POST':
return True
return super(PostRequestThrottleMixin, self).allow_request(request, view)
class ScopedRateThrottleBase(
DefaultRateMixin, PostRequestThrottleMixin, ScopedRateThrottle):
"""Base class to define a scoped rate throttle on POST request."""
class LoginRateThrottle(ScopedRateThrottleBase):
default_rate = '10/hour'
class UsernameLoginRateThrottle(LoginRateThrottle):
def get_cache_key(self, request, view):
if request.user.is_authenticated():
return None # Only throttle unauthenticated requests
ident = request.POST.get('username')
if ident is None:
return None # Only throttle username requests
return self.cache_format % {
'scope': self.scope,
'ident': ident.strip().lower(),
}
class PasswordResetRateThrottle(ScopedRateThrottleBase):
default_rate = '3/hour'
class ResendConfirmationEmailRateThrottle(ScopedRateThrottleBase):
default_rate = '3/hour'
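# A wiring sketch for one of the throttles above; the scope name and the
# settings override are illustrative, not part of this module.
from rest_framework.views import APIView

class LoginView(APIView):
    throttle_classes = (LoginRateThrottle,)
    throttle_scope = 'logins'  # looked up in THROTTLE_RATES, else default_rate

# settings.py (hypothetical override of the 10/hour default):
# REST_FRAMEWORK = {'DEFAULT_THROTTLE_RATES': {'logins': '5/hour'}}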
|
from rest_framework.throttling import ScopedRateThrottle
class DefaultRateMixin(object):
def get_rate(self):
try:
return self.THROTTLE_RATES[self.scope]
except KeyError:
return self.default_rate
class PostRequestThrottleMixin(object):
def allow_request(self, request, view):
"""
Throttle POST requests only.
"""
if request.method != 'POST':
return True
return super(PostRequestThrottleMixin, self).allow_request(request, view)
class LoginRateThrottle(
DefaultRateMixin,
PostRequestThrottleMixin,
ScopedRateThrottle):
default_rate = '10/hour'
class UsernameLoginRateThrottle(LoginRateThrottle):
def get_cache_key(self, request, view):
if request.user.is_authenticated():
return None # Only throttle unauthenticated requests
ident = request.POST.get('username')
if ident is None:
return None # Only throttle username requests
return self.cache_format % {
'scope': self.scope,
'ident': ident.strip().lower(),
}
class PasswordResetRateThrottle(
DefaultRateMixin,
PostRequestThrottleMixin,
ScopedRateThrottle):
default_rate = '3/hour'
class ResendConfirmationEmailRateThrottle(
DefaultRateMixin,
PostRequestThrottleMixin,
ScopedRateThrottle):
default_rate = '3/hour'
|
bsd-2-clause
|
Python
|
70fdc88e73e52a800dd86504bab7fbf9ad89e1d8
|
Add partial_path property explicitly to the Work model.
|
MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging
|
app/soc/models/work.py
|
app/soc/models/work.py
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Work Model."""
__authors__ = [
'"Todd Larsen" <[email protected]>',
'"Sverre Rabbelier" <[email protected]>',
]
from google.appengine.ext import db
from django.utils.translation import ugettext_lazy
import polymodel
class Work(polymodel.PolyModel):
"""Model of a Work created by one or more Persons in Roles.
Work is a "base entity" of other more specific "works" created by Persons
serving in "roles".
authors) a many:many relationship with Roles, stored in a separate
WorksAuthors model, used to represent authorship of the Work. See
the WorksAuthors model class for details.
reviews) a 1:many relationship between a Work and the zero or more
Reviews of that Work. This relation is implemented as the 'reviews'
back-reference Query of the Review model 'reviewed' reference.
"""
#: Required field indicating the "title" of the work, which may have
#: different uses depending on the specific type of the work. Works
#: can be indexed, filtered, and sorted by 'title'.
title = db.StringProperty(required=True,
verbose_name=ugettext_lazy('Title'))
title.help_text = ugettext_lazy(
'title of the document; often used in the window title')
#: optional, indexed plain text field used for different purposes,
#: depending on the specific type of the work
abstract = db.StringProperty(multiline=True)
abstract.help_text = ugettext_lazy(
'short abstract, summary, or snippet;'
' 500 characters or less, plain text displayed publicly')
#: Required path, prepended to a "link name" to form the document URL.
#: The combined path and link name must be globally unique on the
#: site. Except in /site/docs (Developer) forms, this field is not
#: usually directly editable by the User, but is instead set by controller
#: logic to match the "scope" of the document.
partial_path = db.StringProperty(required=True,
verbose_name=ugettext_lazy('Partial path'))
partial_path.help_text = ugettext_lazy(
'path portion of URLs, prepended to link name')
#: Required link name, appended to a "path" to form the document URL.
#: The combined path and link name must be globally unique on the
#: site (but, unlike some link names, a Work link name can be reused,
#: as long as the combination with the preceding path is unique).
link_name = db.StringProperty(required=True,
verbose_name=ugettext_lazy('Link name'))
link_name.help_text = ugettext_lazy('link name used in URLs')
#: short name used in places such as the sidebar menu and breadcrumb trail
#: (optional: title will be used if short_name is not present)
short_name = db.StringProperty(verbose_name=ugettext_lazy('Short name'))
short_name.help_text = ugettext_lazy(
'short name used, for example, in the sidebar menu')
#: date when the work was created
created = db.DateTimeProperty(auto_now_add=True)
#: date when the work was last modified
modified = db.DateTimeProperty(auto_now=True)
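# A hypothetical construction sketch showing the three required properties;
# in practice controller logic sets partial_path to match the document scope.
work = Work(title='Getting Started',
            partial_path='site/docs',
            link_name='getting-started')
work.put()  # standard datastore write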
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Work Model."""
__authors__ = [
'"Todd Larsen" <[email protected]>',
'"Sverre Rabbelier" <[email protected]>',
]
from google.appengine.ext import db
from django.utils.translation import ugettext_lazy
import polymodel
class Work(polymodel.PolyModel):
"""Model of a Work created by one or more Persons in Roles.
Work is a "base entity" of other more specific "works" created by Persons
serving in "roles".
authors) a many:many relationship with Roles, stored in a separate
WorksAuthors model, used to represent authorship of the Work. See
the WorksAuthors model class for details.
reviews) a 1:many relationship between a Work and the zero or more
Reviews of that Work. This relation is implemented as the 'reviews'
back-reference Query of the Review model 'reviewed' reference.
"""
#: Required field indicating the "title" of the work, which may have
#: different uses depending on the specific type of the work. Works
#: can be indexed, filtered, and sorted by 'title'.
title = db.StringProperty(required=True,
verbose_name=ugettext_lazy('Title'))
title.help_text = ugettext_lazy(
'title of the document; often used in the window title')
#: optional, indexed plain text field used for different purposes,
#: depending on the specific type of the work
abstract = db.StringProperty(multiline=True)
abstract.help_text = ugettext_lazy(
'short abstract, summary, or snippet;'
' 500 characters or less, plain text displayed publicly')
#: Required link name, appended to a "path" to form the document URL.
#: The combined "path" and link name must be globally unique on the
#: site (but, unlike some link names, a Work link name can be reused,
#: as long as the combination with the preceding path is unique).
link_name = db.StringProperty(required=True,
verbose_name=ugettext_lazy('Link name'))
link_name.help_text = ugettext_lazy('link name used in URLs')
#: short name used in places such as the sidebar menu and breadcrumb trail
#: (optional: title will be used if short_name is not present)
short_name = db.StringProperty(verbose_name=ugettext_lazy('Short name'))
short_name.help_text = ugettext_lazy(
'short name used, for example, in the sidebar menu')
#: date when the work was created
created = db.DateTimeProperty(auto_now_add=True)
#: date when the work was last modified
modified = db.DateTimeProperty(auto_now=True)
|
apache-2.0
|
Python
|
226639f98dee43e21db70070108d0c3131d75729
|
move to version 0.2.1b
|
bpow/gemini,bw2/gemini,bgruening/gemini,bpow/gemini,udp3f/gemini,bw2/gemini,udp3f/gemini,udp3f/gemini,xuzetan/gemini,bgruening/gemini,bw2/gemini,brentp/gemini,arq5x/gemini,heuermh/gemini,bgruening/gemini,heuermh/gemini,brentp/gemini,xuzetan/gemini,arq5x/gemini,brentp/gemini,arq5x/gemini,arq5x/gemini,bpow/gemini,bpow/gemini,bgruening/gemini,brentp/gemini,xuzetan/gemini,xuzetan/gemini,bw2/gemini,heuermh/gemini,heuermh/gemini,udp3f/gemini
|
gemini/version.py
|
gemini/version.py
|
__version__="0.2.1b"
|
__version__="0.2.0b"
|
mit
|
Python
|
e01eccd8af27ad97a20b784b81ddde5cc8515e4b
|
fix incorrect coding utf8 to utf-8 (#903)
|
joke2k/faker,danhuss/faker,joke2k/faker
|
faker/providers/internet/hu_HU/__init__.py
|
faker/providers/internet/hu_HU/__init__.py
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'gmail.com',
'hotmail.com',
'yahoo.com',
)
tlds = (
'hu',
'com',
'com.hu',
'info',
'org',
'net',
'biz',
)
replacements = (
('ö', 'o'),
('ü', 'u'),
('á', 'a'),
('é', 'e'),
('í', 'i'),
('ó', 'i'),
('ő', 'o'),
('ú', 'u'),
('ű', 'u'),
)
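# The replacements table reads as an accented-to-ASCII transliteration map;
# a sketch of how such a table is typically applied (the helper is hypothetical):
def asciify(value, table=Provider.replacements):
    for search, replace in table:
        value = value.replace(search, replace)
    return value

print(asciify('kovács zoltán'))  # -> 'kovacs zoltan'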
|
# coding=utf8
from __future__ import unicode_literals
from .. import Provider as InternetProvider
class Provider(InternetProvider):
free_email_domains = (
'gmail.com',
'hotmail.com',
'yahoo.com',
)
tlds = (
'hu',
'com',
'com.hu',
'info',
'org',
'net',
'biz',
)
replacements = (
('ö', 'o'),
('ü', 'u'),
('á', 'a'),
('é', 'e'),
('í', 'i'),
('ó', 'i'),
('ő', 'o'),
('ú', 'u'),
('ű', 'u'),
)
|
mit
|
Python
|
0352f542341fe25be74c0130e7e50394c6f0bb6d
|
add interactive message colorization
|
balabit/git-magic,balabit/git-magic
|
gitmagic/fixup.py
|
gitmagic/fixup.py
|
import gitmagic
import git.cmd
import tempfile
def fixup(repo, destination_picker, change_finder, args={}):
repo.index.reset()
for change in change_finder(repo):
_apply_change(repo, change)
destination_commits = destination_picker.pick(change)
if not destination_commits:
repo.index.commit( message = "WARNING: no destination commit")
continue
destination = destination_commits[0]
gitmagic.checkpoint( _colorize_change(change, destination), args)
repo.index.commit( message = "fixup! {}".format(destination.message))
def _apply_change(repo, change):
file_name = ""
with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
f.write(change.diff)
file_name = f.name
git_ = git.cmd.Git(repo.working_dir)
git_.execute(['git', 'apply', '--cache', file_name])
NO_COLOR = "\033[0m"
YELLOW = "\033[1;33m"
WHITE = "\033[1;37m"
GREEN = "\033[1;32m"
RED = "\033[1;31m"
BLUE = "\033[1;36m"
def _colorize(message, color):
return "{}{}{}".format(color, message, NO_COLOR)
def _colorize_change(change, commit):
message = _colorize("Should I create fixup commit for {} -> {}:{}\n".format(
change.a_file_name,
commit.hexsha[:7],
commit.summary), YELLOW)
message += _diff_colorizer(change.diff)
return message
def _is_diff_header_line(line):
return line[:1] == "@" or line[:3] == "+++" or line[:3] == "---"
def _diff_colorizer(diff):
colorized = ""
for line in diff.splitlines(keepends=True):
color = WHITE
first_char = line[:1]
if first_char == "-":
color = RED
if first_char == "+":
color = GREEN
if _is_diff_header_line(line):
color = BLUE
colorized += _colorize(line, color)
return colorized
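# A quick standalone demonstration of the ANSI escape scheme used above:
print("{}{}{}".format(GREEN, "+ added line", NO_COLOR))
print("{}{}{}".format(RED, "- removed line", NO_COLOR))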
|
import gitmagic
import git.cmd
import tempfile
def fixup(repo, destination_picker, change_finder, args={}):
repo.index.reset()
for change in change_finder(repo):
_apply_change(repo, change)
destination_commits = destination_picker.pick(change)
if not destination_commits:
repo.index.commit( message = "WARNING: no destination commit")
continue
destination = destination_commits[0]
gitmagic.checkpoint("Should I create fixup commit for {} -> {}:{}\n{}".format(
change.a_file_name,
destination.hexsha[:7],
destination.summary,
change.diff), args)
repo.index.commit( message = "fixup! {}".format(destination.message))
def _apply_change(repo, change):
file_name = ""
with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
f.write(change.diff)
file_name = f.name
git_ = git.cmd.Git(repo.working_dir)
git_.execute(['git', 'apply', '--cache', file_name])
|
mit
|
Python
|
835b1ff03d517c4a621237d3cd1682df1322e0e8
|
add missing build dependency to py-execnet (#6443)
|
tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,mfherbst/spack,EmreAtes/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,LLNL/spack,tmerrick1/spack,iulian787/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,mfherbst/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack
|
var/spack/repos/builtin/packages/py-execnet/package.py
|
var/spack/repos/builtin/packages/py-execnet/package.py
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyExecnet(PythonPackage):
"""execnet provides a share-nothing model with channel-send/receive
communication for distributing execution across many Python interpreters
across version, platform and network barriers."""
homepage = "http://codespeak.net/execnet"
url = "https://pypi.io/packages/source/e/execnet/execnet-1.4.1.tar.gz"
version('1.4.1', '0ff84b6c79d0dafb7e2971629c4d127a')
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('[email protected]:', type=('build', 'run'))
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyExecnet(PythonPackage):
"""execnet provides a share-nothing model with channel-send/receive
communication for distributing execution across many Python interpreters
across version, platform and network barriers."""
homepage = "http://codespeak.net/execnet"
url = "https://pypi.io/packages/source/e/execnet/execnet-1.4.1.tar.gz"
version('1.4.1', '0ff84b6c79d0dafb7e2971629c4d127a')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
|
lgpl-2.1
|
Python
|
baed814d73ea645794d172614bb79f456730b42c
|
Fix auth providers to work around Python's broken import system.
|
wevoice/wesub,wevoice/wesub,ofer43211/unisubs,pculture/unisubs,norayr/unisubs,ujdhesa/unisubs,eloquence/unisubs,ujdhesa/unisubs,wevoice/wesub,norayr/unisubs,eloquence/unisubs,ReachingOut/unisubs,ofer43211/unisubs,wevoice/wesub,pculture/unisubs,ReachingOut/unisubs,ofer43211/unisubs,ReachingOut/unisubs,norayr/unisubs,norayr/unisubs,eloquence/unisubs,ujdhesa/unisubs,eloquence/unisubs,ofer43211/unisubs,pculture/unisubs,pculture/unisubs,ReachingOut/unisubs,ujdhesa/unisubs
|
apps/auth/providers.py
|
apps/auth/providers.py
|
# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.conf import settings
if not hasattr(settings, 'AUTHENTICATION_PROVIDER_REGISTRY'):
settings.AUTHENTICATION_PROVIDER_REGISTRY = {}
def add_authentication_provider(ap_instance):
if ap_instance.code in settings.AUTHENTICATION_PROVIDER_REGISTRY:
if settings.AUTHENTICATION_PROVIDER_REGISTRY[ap_instance.code] != ap_instance:
assert False, "Authentication provider code collision!"
settings.AUTHENTICATION_PROVIDER_REGISTRY[ap_instance.code] = ap_instance
def get_authentication_provider(key):
return settings.AUTHENTICATION_PROVIDER_REGISTRY.get(key)
def get_authentication_provider_choices():
choices = []
for provider in settings.AUTHENTICATION_PROVIDER_REGISTRY.values():
choices.append((provider.code, provider.verbose_name))
return choices
class AuthenticationProvider(object):
"""The base class that other authentication providers should implement.
In a nutshell, an AuthenticationProvider is a simple class that has:
* A code attribute. This should be a unique string less than
24 characters long that will be stored as an attribute of Teams.
* A verbose_name attribute, for admin labels.
* A url() method, which takes a "next" URL, and returns the URL we should
send the user to where they can log in with the provider.
* An image_url() method, which returns the URL for an image we should
display to the user when they're deciding whether or not to continue and
log in.
"""
code = None
verbose_name = None
def url(self, member, next=None):
"""Return the URL someone should be sent to where they will log in."""
assert False, "Not Implemented"
def image_url(self):
"""Return the URL of an image to display (probably a logo) or None."""
assert False, "Not Implemented"
class SampleAuthProvider(AuthenticationProvider):
code = 'sample'
verbose_name = 'Sample Provider'
def url(self, next=None):
return 'http://example.com/'
def image_url(self):
return 'http://placekitten.com/200/200/'
# add_authentication_provider(SampleAuthProvider)
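# A registration sketch against the instance-based registry above; the
# provider is hypothetical and mirrors SampleAuthProvider.
class ExampleProvider(AuthenticationProvider):
    code = 'example'
    verbose_name = 'Example Provider'

    def url(self, next=None):
        return 'http://example.com/login'

    def image_url(self):
        return None

# The registry now stores instances, not classes (assumes Django settings
# are configured before this module is imported):
add_authentication_provider(ExampleProvider())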
|
# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
authentication_provider_registry = {}
def add_authentication_provider(ap_class):
if ap_class.code in authentication_provider_registry:
if authentication_provider_registry[ap_class.code] != ap_class:
assert False, "Authentication provider code collision!"
authentication_provider_registry[ap_class.code] = ap_class
def get_authentication_provider(key):
return authentication_provider_registry.get(key)
def get_authentication_provider_choices():
choices = []
for provider in authentication_provider_registry.values():
choices.append((provider.code, provider.verbose_name))
return choices
class AuthenticationProvider(object):
"""The base class that other authentication providers should implement.
In a nutshell, an AuthenticationProvider is a simple class that has:
* A code attribute. This should be a unique string less than
24 characters long that will be stored as an attribute of Teams.
* A verbose_name attribute, for admin labels.
* A url() method, which takes a TeamMember object and a "next" URL, and
returns the URL we should send the user to where they can log in with the
provider.
* An image_url() method, which returns the URL for an image we should
display to the user when they're deciding whether or not to continue and
log in.
"""
code = None
verbose_name = None
def url(self, member, next=None):
"""Return the URL someone should be sent to where they will log in."""
assert False, "Not Implemented"
def image_url(self):
"""Return the URL of an image to display (probably a logo) or None."""
assert False, "Not Implemented"
class SampleAuthProvider(AuthenticationProvider):
code = 'sample'
verbose_name = 'Sample Provider'
def url(self, member, next=None):
return 'http://example.com/'
def image_url(self):
return 'http://placekitten.com/200/200/'
# add_authentication_provider(SampleAuthProvider)
|
agpl-3.0
|
Python
|
8f429a41f3541c5f32a9809a529dd800f7dafa0a
|
Fix log output when daemonised under Docker
|
ps-jay/temp2dash
|
temp2dash.py
|
temp2dash.py
|
import json
import os
import requests
import sys
import time
import traceback
from temperusb import TemperHandler
URL = os.environ['DASHING_URL']
SCALE = float(os.environ['TEMP_SCALE'])
OFFSET = float(os.environ['TEMP_OFFSET'])
SENSOR = int(os.environ['TEMP_SENSOR'])
SLEEP = int(os.environ['SLEEP_TIME'])
th = TemperHandler()
devs = th.get_devices()
if len(devs) != 1:
print "Expected exactly one TEMPer device, found %d" % len(devs)
sys.exit(1)
dev = devs[0]
dev.set_calibration_data(scale=SCALE, offset=OFFSET)
chars = 0
while True:
try:
temperature = dev.get_temperature(sensor=SENSOR)
except Exception, err:
print "\nException on getting temperature\n"
print traceback.format_exc()
payload = {
'auth_token': 'abcdefghijklmnopqrstuvwxyz',
'temperature': '%0.0f%s' % (
temperature,
u'\N{DEGREE SIGN}',
),
}
string = ""
if chars > 72:
chars = 0
string = "\n"
string += '%0.1f, ' % temperature
chars += len(string)
sys.stdout.write(string)
sys.stdout.flush()
try:
post = requests.post(URL, data=json.dumps(payload))
except Exception, err:
print "\nException on posting temperature to dashing\n"
print traceback.format_exc()
if post.status_code != 204:
print "\nHTTP status from POST was %s (expected 204)\n" % post.status_code
time.sleep(SLEEP)
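# The chars counter above implements a crude 72-column wrap of the log
# stream; the same logic in isolation, with synthetic readings (illustration
# only, since the loop above never exits):
demo_chars = 0
for demo_temp in [21.0 + 0.1 * i for i in range(40)]:
    demo_string = ""
    if demo_chars > 72:
        demo_chars = 0
        demo_string = "\n"
    demo_string += '%0.1f, ' % demo_temp
    demo_chars += len(demo_string)
    sys.stdout.write(demo_string)
sys.stdout.write("\n")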
|
import json
import os
import requests
import sys
import time
import traceback
from temperusb import TemperHandler
URL = os.environ['DASHING_URL']
SCALE = float(os.environ['TEMP_SCALE'])
OFFSET = float(os.environ['TEMP_OFFSET'])
SENSOR = int(os.environ['TEMP_SENSOR'])
SLEEP = int(os.environ['SLEEP_TIME'])
th = TemperHandler()
devs = th.get_devices()
if len(devs) != 1:
print "Expected exactly one TEMPer device, found %d" % len(devs)
sys.exit(1)
dev = devs[0]
dev.set_calibration_data(scale=SCALE, offset=OFFSET)
while True:
try:
temperature = dev.get_temperature(sensor=SENSOR)
except Exception, err:
print "\nException on getting temperature\n"
print traceback.format_exc()
payload = {
'auth_token': 'abcdefghijklmnopqrstuvwxyz',
'temperature': '%0.0f%s' % (
temperature,
u'\N{DEGREE SIGN}',
),
}
sys.stdout.write(u'%0.1f%s, ' % (
temperature,
u'\N{DEGREE SIGN}',
))
sys.stdout.flush()
try:
post = requests.post(URL, data=json.dumps(payload))
except Exception, err:
print "\nException on posting temperature to dashing\n"
print traceback.format_exc()
if post.status_code != 204:
print "\nHTTP status from POST was %s (expected 204)\n" % post.status_code
time.sleep(SLEEP)
|
mit
|
Python
|
6e9f329f5a770955370e93c926c25d511ba8b981
|
Update the_ends_test/FunctionsUnitTest.py
|
Kevincavender/the-ends
|
the_ends_test/FunctionsUnitTest.py
|
the_ends_test/FunctionsUnitTest.py
|
import sys
sys.path.insert(0, '/the_ends')  # must extend the path before importing from the_ends
import unittest
from the_ends.functions import function_finder
class TheEndsTestCases(unittest.TestCase):
def setUp(self):
pass
# before test cases
def tearDown(self):
pass
# after test cases
def test_isupper(self):
# example test
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
def test_split(self):
# example test
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
def test_empty_test(self):
        # checks that empty input passes through (function_finder returns falsy)
equ = ' '
fun_test = function_finder(equ)
self.assertFalse(fun_test)
def test_multiple_single_line(self):
# checks for multiple functions called in a single line
ans = [
['function1', 'function2'],
[{1: '( x', 2: 'x', 3: ' 5/4', 4: ' 12^(2-1))', 5: ' 8'}, {1: 'y', 2: ' 7', 3: ' 11*(y-3)'}]
]
equ = 'x=2*5+function1(( x=x, 5/4, 12^(2-1)), 8) * function2(y, 7, 11*(y-3))'
fun_test = function_finder(equ)
self.assertEqual(fun_test, ans)
if __name__ == '__main__':
unittest.main()
|
import unittest
from the_ends.functions import function_finder
class TheEndsTestCases(unittest.TestCase):
def setUp(self):
pass
# before test cases
def tearDown(self):
pass
# after test cases
def test_isupper(self):
# example test
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
def test_split(self):
# example test
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
def test_empty_test(self):
        # checks that empty input passes through (function_finder returns falsy)
equ = ' '
fun_test = function_finder(equ)
self.assertFalse(fun_test)
def test_multiple_single_line(self):
# checks for multiple functions called in a single line
ans = [
['function1', 'function2'],
[{1: '( x', 2: 'x', 3: ' 5/4', 4: ' 12^(2-1))', 5: ' 8'}, {1: 'y', 2: ' 7', 3: ' 11*(y-3)'}]
]
equ = 'x=2*5+function1(( x=x, 5/4, 12^(2-1)), 8) * function2(y, 7, 11*(y-3))'
fun_test = function_finder(equ)
self.assertEqual(fun_test, ans)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
Python
|
76829380376c31ea3f1e899770d1edffd1afc047
|
Change gravatar url to use https
|
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
|
apps/profiles/utils.py
|
apps/profiles/utils.py
|
import hashlib
def get_gravatar_url(email):
email_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
return "https://www.gravatar.com/avatar/{}".format(email_hash)
|
import hashlib
def get_gravatar_url(email):
email_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
return "http://www.gravatar.com/avatar/{}".format(email_hash)
|
mit
|
Python
|
c7f50eb666423ce3cc08d5e0714f4d18d672d326
|
clean up test
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
corehq/apps/hqadmin/tests/test_utils.py
|
corehq/apps/hqadmin/tests/test_utils.py
|
from django.test import TestCase, override_settings
from pillowtop.listener import BasicPillow
from corehq.apps.domain.models import Domain
from ..utils import pillow_seq_store, EPSILON
from ..models import PillowCheckpointSeqStore
class DummyPillow(BasicPillow):
document_class = Domain
def run(self):
pass
@override_settings(PILLOWTOPS={'test': ['corehq.apps.hqadmin.tests.test_utils.DummyPillow']})
class TestPillowCheckpointSeqStore(TestCase):
def setUp(self):
self.pillow = DummyPillow()
def test_basic_cloudant_seq(self):
seq = '1-blahblah'
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.checkpoint_manager.checkpoint_id)
self.assertEquals(store.seq, seq)
def test_basic_couchdb_seq(self):
seq = 100
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.checkpoint_manager.checkpoint_id)
self.assertEquals(store.seq, str(seq))
def test_small_rewind(self):
"""
We should not notify if the seq is not significantly less than the previous
"""
seq = '10-blahblah'
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
seq_rewind = '9-blahblah'
self.pillow.set_checkpoint({'seq': seq_rewind})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.checkpoint_manager.checkpoint_id)
self.assertEquals(store.seq, seq_rewind)
def test_large_rewind(self):
"""
We should notify if the seq is significantly less than the previous and not update the seq
"""
seq = '{}-blahblah'.format(EPSILON + 10)
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
seq_rewind = '9-blahblah'
self.pillow.set_checkpoint({'seq': seq_rewind})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.checkpoint_manager.checkpoint_id)
self.assertEquals(store.seq, seq)
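# A rough sketch of the rewind check the two tests above exercise (an
# assumption about pillow_seq_store's logic, not its actual implementation):
# take the integer prefix of a '<n>-<hash>' seq and only persist it when it
# has not moved backwards by more than EPSILON.
def _seq_number(seq):
    return int(str(seq).split('-')[0])

def _should_update_seq(old_seq, new_seq, epsilon=EPSILON):
    return _seq_number(new_seq) >= _seq_number(old_seq) - epsilon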
|
from django.test import TestCase
from pillowtop.listener import BasicPillow
from corehq.apps.domain.models import Domain
from ..utils import pillow_seq_store, EPSILON
from ..models import PillowCheckpointSeqStore
def import_settings():
class MockSettings(object):
PILLOWTOPS = {'test': ['corehq.apps.hqadmin.tests.test_utils.DummyPillow']}
return MockSettings()
class DummyPillow(BasicPillow):
document_class = Domain
def run(self):
pass
class TestPillowCheckpointSeqStore(TestCase):
def setUp(self):
import pillowtop.run_pillowtop
pillowtop.utils.import_settings = import_settings
self.pillow = DummyPillow()
def test_basic_cloudant_seq(self):
seq = '1-blahblah'
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.get_checkpoint()['_id'])
self.assertEquals(store.seq, seq)
def test_basic_couchdb_seq(self):
seq = 100
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.get_checkpoint()['_id'])
self.assertEquals(store.seq, str(seq))
def test_small_rewind(self):
"""
We should not notify if the seq is not significantly less than the previous
"""
seq = '10-blahblah'
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
seq_rewind = '9-blahblah'
self.pillow.set_checkpoint({'seq': seq_rewind})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.get_checkpoint()['_id'])
self.assertEquals(store.seq, seq_rewind)
def test_large_rewind(self):
"""
We should notify if the seq is significantly less than the previous and not update the seq
"""
seq = '{}-blahblah'.format(EPSILON + 10)
self.pillow.set_checkpoint({'seq': seq})
pillow_seq_store()
seq_rewind = '9-blahblah'
self.pillow.set_checkpoint({'seq': seq_rewind})
pillow_seq_store()
store = PillowCheckpointSeqStore.objects.get(checkpoint_id=self.pillow.get_checkpoint()['_id'])
self.assertEquals(store.seq, seq)
|
bsd-3-clause
|
Python
|
11ef828a8180ba17f522e03ac198440feab40aa0
|
Update version
|
jblakeman/apt-select,jblakeman/apt-select
|
apt_select/__init__.py
|
apt_select/__init__.py
|
__version__ = '1.0.2'
|
__version__ = '1.0.1'
|
mit
|
Python
|
a3a408b9345291ca9a1999a779879afe0296f0a3
|
Update grayscale.py
|
userdw/RaspberryPi_3_Starter_Kit
|
08_Image_Processing/Color_Spaces/grayscale/grayscale.py
|
08_Image_Processing/Color_Spaces/grayscale/grayscale.py
|
import os, cv2
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
_projectDirectory = os.path.dirname(__file__)
_imagesDirectory = os.path.join(_projectDirectory, "images")
_images = []
for _root, _dirs, _files in os.walk(_imagesDirectory):
for _file in _files:
if _file.endswith(".jpg"):
_images.append(os.path.join(_imagesDirectory, _file))
_imageIndex = 0
_imageTotal = len(_images)
_img1 = cv2.imread(_images[_imageIndex], cv2.IMREAD_UNCHANGED)
_fig = plt.figure("Color Spaces")
_gs = GridSpec(2, 2)
_fig1 = plt.subplot(_gs[0:2, 0])
_fig1.set_title("RGB Space")
_img1Show = cv2.cvtColor(_img1, cv2.COLOR_BGR2RGB)  # for display purposes
plt.imshow(_img1Show)
_img2, _img3, _img4 = cv2.split(_img1)
_fig2 = plt.subplot(_gs[0:2, 1])
_fig2.set_title("Grayscale Space")
_img2 = cv2.cvtColor(_img1, cv2.COLOR_BGR2GRAY)
plt.imshow(_img2, cmap = "gray")
plt.tight_layout()
plt.show()
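# For reference (a sketch added to this document, not part of the original
# script): cv2.COLOR_BGR2GRAY applies the BT.601 luma weights, so the
# conversion above is equivalent to the per-pixel sum
# 0.299*R + 0.587*G + 0.114*B, which could be computed manually as:
# _b, _g, _r = cv2.split(_img1)
# _manualGray = (0.299 * _r + 0.587 * _g + 0.114 * _b).astype(np.uint8)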
|
import os, cv2
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
_projectDirectory = os.path.dirname(__file__)
_imagesDirectory = os.path.join(_projectDirectory, "images")
_images = []
for _root, _dirs, _files in os.walk(_imagesDirectory):
for _file in _files:
if _file.endswith(".jpg"):
_images.append(os.path.join(_imagesDirectory, _file))
_imageIndex = 0
_imageTotal = len(_images)
_img1 = cv2.imread(_images[_imageIndex], cv2.IMREAD_UNCHANGED)
_fig = plt.figure("Grayscale Space")
_gs = GridSpec(2, 2)
_fig1 = plt.subplot(_gs[0:2, 0])
_fig1.set_title("RGB Space")
_img1Show = cv2.cvtColor(_img1, cv2.COLOR_BGR2RGB)  # for display purposes
plt.imshow(_img1Show)
_img2, _img3, _img4 = cv2.split(_img1)
_fig2 = plt.subplot(_gs[0:2, 1])
_fig2.set_title("Grayscale Space")
_img2 = cv2.cvtColor(_img1, cv2.COLOR_BGR2GRAY)
plt.imshow(_img2, cmap = "gray")
plt.tight_layout()
plt.show()
|
mit
|
Python
|
37167a9473a99931efbc60a8e46400ed017c8fa4
|
set up initial condition arrays
|
KayaBaber/Computational-Physics
|
Assignment_5_partial_differentials/P440_Assign5_Exp2.py
|
Assignment_5_partial_differentials/P440_Assign5_Exp2.py
|
'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 2 - Parabolic PDEs: The Wave Equation
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
import cmath
L = 2.*math.pi #set the x range to (0->2pi)
N = 1000 #number of spatial intervals and points (since it loops)
steps = 1000 #number of timesteps
stepSize = 0.1 #temporal step size
#make initial velocity array in real space
velPhysGauss = np.exp(-10 * ( (np.linspace(0,L,N+1)[:-1]-math.pi) ** 2)) + [0J]*N
#make initial density array in real space
denPhysFlat = [0 + 0J]*N
print denPhysFlat
print denPhysFlat[2]+2.
#fft both to fourier space
#make a column vector of density_f appended to velocity_f
#make derivative operator matrix
#i(diag (0,1,2,3,4,.. -4, -3, -2, -1)), where i is imaginary i
#make quad matrix [[I][op],[op][I]] and negative quad [[I][-op],[-op][I]]
#step forward
#matrix multiply the negative quad by the FFT column vector
#linear algebra solve the pos_quad*newFFTvector = above-result for newFFTvector
#make separate copies of the velocity_f and density_f components of newFFTvector
#inverse FFT the components and append the real parts of velocity and density to a log
#repeat stepping for num steps
#plot the velocity and density logs in 3D
#maybe make an animation
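#sketch of the derivative-operator step above (an illustration added to this
#document; the ordering follows numpy's FFT convention 0, 1, ..., N/2-1, -N/2, ..., -1)
def make_derivative_operator(N):
    k = np.fft.fftfreq(N, d=1.0 / N)  # wavenumbers 0, 1, ..., -2, -1
    return 1j * np.diag(k)  # d/dx in Fourier space is multiplication by i*k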
|
'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 5 - PDEs
Exploration 2 - Parabolic PDEs: The Wave Equation
'''
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import math
#make initial velocity array in real space
#make initial density array in real space
#fft both to fourier space
#make a column vector of density_f appended to velocity_f
#set the x range to (0->2pi)
#make derivative operator matrix
#i(diag (0,1,2,3,4,.. -4, -3, -2, -1)), where i is imaginary i
#make quad matrix [[I][op],[op][I]] and negative quad [[I][-op],[-op][I]]
#matrix multiply the negative quad by the FFT column vector
#linear algebra solve the pos_quad*newFFTvector = above-result for newFFTvector
#make separate copies of the
#inverse FFT newFFTvector and append the real parts of velocity and density
def make_banded(N,M):
bandTopBot = [-1.]*(N-1)
bandMid = [2. + (4.*M)/(N**2) ]*N
banded = np.diag(bandMid)
banded = np.add(banded,np.diag(bandTopBot,1))
banded = np.add(banded,np.diag(bandTopBot,-1))
return banded
def make_operator(N,M):
bandedCrank = make_banded(N,M)
negativeCrank = bandedCrank * (-1)
bandedCrank[0] = [1] + [0]*(N-1)
bandedCrank[-1] = [0]*(N-1) + [1]
invertedCrank = LA.inv(bandedCrank)
operatorCrank = invertedCrank.dot(negativeCrank)
return operatorCrank
|
mit
|
Python
|
60b2c0db865fcf09636359888ead82ffc7666ae3
|
Add test for failed login when user is not active
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core
|
yunity/userauth/tests/test_api.py
|
yunity/userauth/tests/test_api.py
|
from django.contrib import auth
from rest_framework import status
from rest_framework.test import APITestCase
from yunity.users.factories import UserFactory
class TestUserAuthAPI(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = UserFactory()
cls.disabled_user = UserFactory(is_active=False)
cls.url = '/api/auth/'
def test_login(self):
data = {'email': self.user.email, 'password': self.user.display_name}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['email'], self.user.email)
user = auth.get_user(self.client)
self.assertTrue(user.is_authenticated())
def test_no_credentials(self):
data = {}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {'email': ['This field is required.'],
'password': ['This field is required.']})
def test_wrong_credentials(self):
data = {'email': self.user.email, 'password': 'wrong_password'}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data['non_field_errors'], ['Unable to login with provided credentials.', ])
def test_login_as_disabled_user_fails(self):
data = {'email': self.disabled_user.email, 'password': self.disabled_user.display_name}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
user = auth.get_user(self.client)
self.assertFalse(user.is_authenticated())
def test_status_not_logged_in(self):
response = self.client.get('/api/auth/status/')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response.data['error'], 'not_authed')
def test_status_as_user(self):
self.client.force_login(user=self.user)
response = self.client.get('/api/auth/status/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['display_name'], self.user.display_name)
|
from django.contrib import auth
from rest_framework import status
from rest_framework.test import APITestCase
from yunity.users.factories import UserFactory
class TestUserAuthAPI(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = UserFactory()
cls.url = '/api/auth/'
def test_login(self):
data = {'email': self.user.email, 'password': self.user.display_name}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['email'], self.user.email)
user = auth.get_user(self.client)
self.assertTrue(user.is_authenticated())
def test_no_credentials(self):
data = {}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, {'email': ['This field is required.'],
'password': ['This field is required.']})
def test_wrong_credentials(self):
data = {'email': self.user.email, 'password': 'wrong_password'}
response = self.client.post(self.url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data['non_field_errors'], ['Unable to login with provided credentials.', ])
def test_status_not_logged_in(self):
response = self.client.get('/api/auth/status/')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEqual(response.data['error'], 'not_authed')
def test_status_as_user(self):
self.client.force_login(user=self.user)
response = self.client.get('/api/auth/status/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['display_name'], self.user.display_name)
|
agpl-3.0
|
Python
|
67773a4b848d14bf6e6b160eb918e036971b7f0e
|
Use Python 3 type syntax in zerver/webhooks/semaphore/view.py.
|
timabbott/zulip,jackrzhang/zulip,rishig/zulip,rishig/zulip,hackerkid/zulip,andersk/zulip,synicalsyntax/zulip,rht/zulip,mahim97/zulip,eeshangarg/zulip,jackrzhang/zulip,showell/zulip,tommyip/zulip,mahim97/zulip,synicalsyntax/zulip,kou/zulip,showell/zulip,dhcrzf/zulip,rishig/zulip,rishig/zulip,punchagan/zulip,kou/zulip,punchagan/zulip,jackrzhang/zulip,punchagan/zulip,jackrzhang/zulip,synicalsyntax/zulip,showell/zulip,shubhamdhama/zulip,zulip/zulip,showell/zulip,kou/zulip,tommyip/zulip,jackrzhang/zulip,zulip/zulip,brainwane/zulip,showell/zulip,andersk/zulip,rht/zulip,shubhamdhama/zulip,brainwane/zulip,eeshangarg/zulip,synicalsyntax/zulip,shubhamdhama/zulip,mahim97/zulip,timabbott/zulip,rishig/zulip,tommyip/zulip,brainwane/zulip,zulip/zulip,mahim97/zulip,kou/zulip,rishig/zulip,dhcrzf/zulip,hackerkid/zulip,rht/zulip,brainwane/zulip,timabbott/zulip,brainwane/zulip,synicalsyntax/zulip,hackerkid/zulip,shubhamdhama/zulip,andersk/zulip,tommyip/zulip,dhcrzf/zulip,timabbott/zulip,rht/zulip,tommyip/zulip,jackrzhang/zulip,mahim97/zulip,tommyip/zulip,zulip/zulip,mahim97/zulip,zulip/zulip,hackerkid/zulip,kou/zulip,kou/zulip,dhcrzf/zulip,eeshangarg/zulip,synicalsyntax/zulip,eeshangarg/zulip,shubhamdhama/zulip,punchagan/zulip,timabbott/zulip,andersk/zulip,shubhamdhama/zulip,zulip/zulip,showell/zulip,andersk/zulip,eeshangarg/zulip,shubhamdhama/zulip,rht/zulip,tommyip/zulip,hackerkid/zulip,jackrzhang/zulip,timabbott/zulip,dhcrzf/zulip,brainwane/zulip,punchagan/zulip,kou/zulip,showell/zulip,eeshangarg/zulip,rishig/zulip,synicalsyntax/zulip,rht/zulip,timabbott/zulip,punchagan/zulip,hackerkid/zulip,andersk/zulip,hackerkid/zulip,brainwane/zulip,eeshangarg/zulip,dhcrzf/zulip,zulip/zulip,dhcrzf/zulip,punchagan/zulip,andersk/zulip,rht/zulip
|
zerver/webhooks/semaphore/view.py
|
zerver/webhooks/semaphore/view.py
|
# Webhooks for external integrations.
from typing import Any, Dict
import ujson
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_stream_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.models import UserProfile, get_client
@api_key_only_webhook_view('Semaphore')
@has_request_variables
def api_semaphore_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
stream: str=REQ(default='builds')) -> HttpResponse:
# semaphore only gives the last commit, even if there were multiple commits
# since the last build
branch_name = payload["branch_name"]
project_name = payload["project_name"]
result = payload["result"]
event = payload["event"]
commit_id = payload["commit"]["id"]
commit_url = payload["commit"]["url"]
author_email = payload["commit"]["author_email"]
message = payload["commit"]["message"]
if event == "build":
build_url = payload["build_url"]
build_number = payload["build_number"]
content = u"[build %s](%s): %s\n" % (build_number, build_url, result)
elif event == "deploy":
build_url = payload["build_html_url"]
build_number = payload["build_number"]
deploy_url = payload["html_url"]
deploy_number = payload["number"]
server_name = payload["server_name"]
content = u"[deploy %s](%s) of [build %s](%s) on server %s: %s\n" % \
(deploy_number, deploy_url, build_number, build_url, server_name, result)
else: # should never get here
content = u"%s: %s\n" % (event, result)
content += "!avatar(%s) [`%s`](%s): %s" % (author_email, commit_id[:7],
commit_url, message)
subject = u"%s/%s" % (project_name, branch_name)
check_send_stream_message(user_profile, request.client, stream, subject, content)
return json_success()
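# The syntax change above in miniature (illustrative only): the same
# signature in the old comment-style annotation versus the Python 3 inline form.
#
#     def handler(request, user_profile):
#         # type: (HttpRequest, UserProfile) -> HttpResponse
#         ...
#
#     def handler(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
#         ...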
|
# Webhooks for external integrations.
from typing import Any, Dict
import ujson
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_stream_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.models import UserProfile, get_client
@api_key_only_webhook_view('Semaphore')
@has_request_variables
def api_semaphore_webhook(request, user_profile,
payload=REQ(argument_type='body'),
stream=REQ(default='builds')):
# type: (HttpRequest, UserProfile, Dict[str, Any], str) -> HttpResponse
# semaphore only gives the last commit, even if there were multiple commits
# since the last build
branch_name = payload["branch_name"]
project_name = payload["project_name"]
result = payload["result"]
event = payload["event"]
commit_id = payload["commit"]["id"]
commit_url = payload["commit"]["url"]
author_email = payload["commit"]["author_email"]
message = payload["commit"]["message"]
if event == "build":
build_url = payload["build_url"]
build_number = payload["build_number"]
content = u"[build %s](%s): %s\n" % (build_number, build_url, result)
elif event == "deploy":
build_url = payload["build_html_url"]
build_number = payload["build_number"]
deploy_url = payload["html_url"]
deploy_number = payload["number"]
server_name = payload["server_name"]
content = u"[deploy %s](%s) of [build %s](%s) on server %s: %s\n" % \
(deploy_number, deploy_url, build_number, build_url, server_name, result)
else: # should never get here
content = u"%s: %s\n" % (event, result)
content += "!avatar(%s) [`%s`](%s): %s" % (author_email, commit_id[:7],
commit_url, message)
subject = u"%s/%s" % (project_name, branch_name)
check_send_stream_message(user_profile, request.client, stream, subject, content)
return json_success()
|
apache-2.0
|
Python
|
8d8f470ad0788b1e6e91155f07b351de04051824
|
add test for search by name and pagination
|
andela-brotich/CP2-bucket-list-api,brotich/CP2-bucket-list-api
|
app/mod_bucketlists/tests/test_bucketlist.py
|
app/mod_bucketlists/tests/test_bucketlist.py
|
from app.test_config import BaseTestCase
class BucketListTestCase(BaseTestCase):
def test_creates_new_bucketlist_with_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.token, follow_redirects=True)
self.assertEqual(201, response.status_code)
response = response.data.decode('utf-8')
self.assertIn(data['bucket_name'], response)
self.assertIn('date_created', response)
def test_gets_bucketlist_names_for_the_user(self):
response = self.client.get('/bucketlists/', headers=self.token, follow_redirects=True)
response = response.data.decode('utf-8')
self.assertIn('Checkpoint', response)
self.assertIn('created_by', response)
self.assertIn('date_created', response)
def test_search_bucketlist_by_name(self):
response = self.client.get('/bucketlists/?q=Check', headers=self.token, follow_redirects=True)
response = response.data.decode('utf-8')
self.assertIn('Checkpoint', response)
self.assertIn('created_by', response)
self.assertIn('date_created', response)
self.assertIn('next', response)
self.assertIn('prev', response)
def test_error_on_bucketlist_creation_with_invalid_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.invalid_token, follow_redirects=True)
self.assertEqual(403, response.status_code)
response = response.data.decode('utf-8')
self.assertIn('error', response)
self.assertIn('invalid token', response)
def test_error_on_bucketlist_creation_with_expired_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.expired_token, follow_redirects=True)
self.assertEqual(403, response.status_code)
response = response.data.decode('utf-8')
self.assertIn('error', response)
self.assertIn('expired token', response)
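# Request/response sketch for the search test above (shapes inferred only
# from the assertions, not from the API implementation):
#   GET /bucketlists/?q=Check -> 200 with the matching 'Checkpoint' entry
#   plus 'next'/'prev' pagination links in the payload.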
|
from app.test_config import BaseTestCase
class BucketListTestCase(BaseTestCase):
def test_creates_new_bucketlist_with_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.token, follow_redirects=True)
self.assertEqual(201, response.status_code)
response = response.data.decode('utf-8')
self.assertIn(data['bucket_name'], response)
self.assertIn('date_created', response)
def test_gets_bucketlist_names_for_the_user(self):
response = self.client.get('/bucketlists/', headers=self.token, follow_redirects=True)
response = response.data.decode('utf-8')
self.assertIn('Checkpoint', response)
self.assertIn('created_by', response)
self.assertIn('date_created', response)
def test_error_on_bucketlist_creation_with_invalid_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.invalid_token, follow_redirects=True)
self.assertEqual(403, response.status_code)
response = response.data.decode('utf-8')
self.assertIn('error', response)
self.assertIn('invalid token', response)
def test_error_on_bucketlist_creation_with_expired_token(self):
data = {
'bucket_name': 'Christmas'
}
response = self.client.post('/bucketlists/', data=data, headers=self.expired_token, follow_redirects=True)
self.assertEqual(403, response.status_code)
response = response.data.decode('utf-8')
self.assertIn('error', response)
self.assertIn('expired token', response)
|
mit
|
Python
|
7855d8a4a4c3151f0b3f4da04696322cca92ee06
|
fix tests
|
ministryofjustice/cla_frontend,ministryofjustice/cla_frontend,ministryofjustice/cla_frontend,ministryofjustice/cla_frontend
|
cla_frontend/apps/cla_auth/tests/urls.py
|
cla_frontend/apps/cla_auth/tests/urls.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import http
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from . import base
from django.core.urlresolvers import reverse_lazy
@login_required
def test_view(request):
return http.HttpResponse('logged in')
zone_url = patterns('',
url(r'^login/$', 'cla_auth.views.login', {
'zone_name': base.DEFAULT_ZONE_NAME
}, name='login'),
url(r'^test/$',
test_view,
name=base.DEFAULT_ZONE_PROFILE['LOGIN_REDIRECT_URL'].split(':')[1]
),
)
global_urls = patterns(
'',
url(r'^logout/$', 'django.contrib.auth.views.logout',
{'next_page': reverse_lazy('login')},
name='global_logout'),
)
urlpatterns = patterns('',
url(r'^test_zone/', include(zone_url, namespace=base.DEFAULT_ZONE_NAME)),
url(r'^auth/', include(global_urls, namespace='auth')),
)
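# Illustrative note: with the namespaces above, the views resolve through
# names such as '<DEFAULT_ZONE_NAME>:login' and 'auth:global_logout'.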
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import http
from django.conf.urls import patterns, include, url
from django.contrib.auth.decorators import login_required
from . import base
@login_required
def test_view(request):
return http.HttpResponse('logged in')
zone_url = patterns('',
url(r'^login/$', 'cla_auth.views.login', {
'zone_name': base.DEFAULT_ZONE_NAME
}, name='login'),
url(r'^test/$',
test_view,
name=base.DEFAULT_ZONE_PROFILE['LOGIN_REDIRECT_URL'].split(':')[1]
)
)
urlpatterns = patterns('',
url(r'^test_zone/', include(zone_url, namespace=base.DEFAULT_ZONE_NAME)),
)
|
mit
|
Python
|
4bc0b7981f6eaa1744c90d0c080b9678af52d624
|
fix bug in picture specs
|
ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople
|
apps/project_sheet/project_pictures_specs.py
|
apps/project_sheet/project_pictures_specs.py
|
"""
Specification for image manipulation through imagekit
"""
from imagekit.specs import ImageSpec
from imagekit import processors
from imagekit.processors import ImageProcessor
from imagekit.lib import ImageColor, Image
class Center(ImageProcessor):
"""
Generic image centering processor
"""
width = None
height = None
background_color = '#000000'
@classmethod
def process(cls, img, fmt, obj):
if cls.width and cls.height:
background_color = ImageColor.getrgb(cls.background_color)
            # Image is now imported from imagekit.lib, so Image.new below is safe
bg_picture = Image.new("RGB", (cls.width, cls.height), background_color)
## paste it
bg_w, bg_h = bg_picture.size
img_w, img_h = img.size
coord_x, coord_y = (bg_w - img_w) / 2, (bg_h - img_h) / 2
bg_picture.paste(img, (coord_x, coord_y, coord_x + img_w, coord_y + img_h))
return bg_picture, fmt
class ResizeThumb(processors.Resize):
"""
Resizing processor providing media thumbnail
"""
width = 95
height = 65
crop = True
class ResizeIDCard(processors.Resize):
"""
Resizing processor providing profile ID card
"""
width = 137
height = 71
crop = True
class ResizeDisplay(processors.Resize):
"""
Resizing processor for media gallery
"""
width = 700
class PreResizeMosaic(processors.Resize):
"""
Resizing processor for mosaic
"""
width = 200
class CenterMosaic(processors.Resize):
#FIXME : semantic ? Center or Resize ?
width = 40
height = 40
crop = True
class CenterDisplay(Center):
"""
Image centering processor for media gallery
"""
width = 700
height = 460
class EnhanceThumb(processors.Adjustment):
"""
Adjustment processor to enhance the image at small sizes
"""
contrast = 1.2
sharpness = 1.1
class Thumbnail(ImageSpec):
access_as = 'thumbnail_image'
pre_cache = True
processors = [ResizeThumb, EnhanceThumb]
class Display(ImageSpec):
access_as = 'display'
increment_count = True
processors = [ResizeDisplay, CenterDisplay]
class MosaicTile(ImageSpec):
"""
For the Homepage
"""
access_as = 'mosaic_tile'
processors = [PreResizeMosaic, CenterMosaic]
class IDCard(ImageSpec):
"""
Preview when displaying a project sheet card
"""
access_as = 'thumbnail_idcard'
pre_cache = True
processors = [ResizeIDCard, EnhanceThumb]
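# Worked example of the centering math in Center.process (added for
# illustration): pasting a 700x350 image onto the 700x460 CenterDisplay
# canvas gives coord_x = (700 - 700) / 2 = 0 and coord_y = (460 - 350) / 2 = 55,
# i.e. 55px background bars above and below the image.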
|
"""
Specification for image manipulation through imagekit
"""
from imagekit.specs import ImageSpec
from imagekit import processors
from imagekit.processors import ImageProcessor
from imagekit.lib import ImageColor
class Center(ImageProcessor):
"""
Generic image centering processor
"""
width = None
height = None
background_color = '#000000'
@classmethod
def process(cls, img, fmt, obj):
if cls.width and cls.height:
background_color = ImageColor.getrgb(cls.background_color)
#FIXME : Image is not imported but it never raises exception so ...
bg_picture = Image.new("RGB", (cls.width, cls.height), background_color)
## paste it
bg_w, bg_h = bg_picture.size
img_w, img_h = img.size
coord_x, coord_y = (bg_w - img_w) / 2, (bg_h - img_h) / 2
bg_picture.paste(img, (coord_x, coord_y, coord_x + img_w, coord_y + img_h))
return bg_picture, fmt
class ResizeThumb(processors.Resize):
"""
Resizing processor providing media thumbnail
"""
width = 95
height = 65
crop = True
class ResizeIDCard(processors.Resize):
"""
Resizing processor providing profile ID card
"""
width = 137
height = 71
crop = True
class ResizeDisplay(processors.Resize):
"""
Resizing processor for media gallery
"""
width = 700
class PreResizeMosaic(processors.Resize):
"""
Resizing processor for mosaic
"""
width = 200
class CenterMosaic(processors.Resize):
#FIXME : semantic ? Center or Resize ?
width = 40
height = 40
crop = True
class CenterDisplay(Center):
"""
Image centering processor for media gallery
"""
width = 700
height = 460
class EnhanceThumb(processors.Adjustment):
"""
Adjustment processor to enhance the image at small sizes
"""
contrast = 1.2
sharpness = 1.1
class Thumbnail(ImageSpec):
access_as = 'thumbnail_image'
pre_cache = True
processors = [ResizeThumb, EnhanceThumb]
class Display(ImageSpec):
access_as = 'display'
increment_count = True
processors = [ResizeDisplay, CenterDisplay]
class MosaicTile(ImageSpec):
"""
For the Homepage
"""
access_as = 'mosaic_tile'
processors = [PreResizeMosaic, CenterMosaic]
class IDCard(ImageSpec):
"""
Preview when displaying a project sheet card
"""
access_as = 'thumbnail_idcard'
pre_cache = True
processors = [ResizeIDCard, EnhanceThumb]
|
agpl-3.0
|
Python
|
354eea19773b652e705f68648c68c235bfa27dd7
|
Fix weird naming
|
nanonyme/nanoplay
|
twisted/plugins/nanoplay_plugin.py
|
twisted/plugins/nanoplay_plugin.py
|
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.internet import reactor
from twisted.application import service, strports
from nanoplay import PayloadProtocol, ControlProtocol, CustomServer, Player
class Options(usage.Options):
optParameters = [
["payload", "p",
"tcp:port=5000", "Endpoint to listen for files on"],
["control", "c",
"tcp:port=5001", "Endpoint to listen for control commands on"]
]
class NanoplayMaker(object):
implements(service.IServiceMaker, IPlugin)
tapname = "nanoplay"
description = "nanoplay, trivial music player"
options = Options
def makeService(self, options):
"""
Construct a TCPServer from a factory defined in myproject.
"""
player = Player(reactor)
reactor.addSystemEventTrigger("before", "shutdown", player.kill)
s = service.MultiService()
payload_service = strports.service(options["payload"],
CustomServer(PayloadProtocol, player))
payload_service.setServiceParent(s)
control_service = strports.service(options["control"],
CustomServer(ControlProtocol, player))
control_service.setServiceParent(s)
return s
serviceMaker = NanoplayMaker()
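# Hypothetical invocation sketch: twistd discovers the plugin through the
# IPlugin/IServiceMaker pair and its tapname, and the Options parameters
# become command-line flags, e.g.:
#   twistd -n nanoplay --payload tcp:port=5000 --control tcp:port=5001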
|
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.internet import reactor
from twisted.application import service, strports
from nanoplay import PayloadProtocol, ControlProtocol, CustomServer, Player
class Options(usage.Options):
optParameters = [
["payload", "p",
"tcp:port=5000", "Endpoint to listen for files on"],
["control", "c",
"tcp:port=5001", "Endpoint to listen for control commands on"]
]
class NanoplayMaker(object):
implements(service.IServiceMaker, IPlugin)
tapname = "nanoplay"
description = "nanoplay, trivial music player"
options = Options
def makeService(self, options):
"""
Construct a TCPServer from a factory defined in myproject.
"""
player = Player(reactor)
reactor.addSystemEventTrigger("before", "shutdown", player.kill)
s = service.MultiService()
payload_service = strports.service(options["payload"],
CustomServer(PayloadProtocol, player))
payload_service.setServiceParent(s)
payload_service = strports.service(options["control"],
CustomServer(ControlProtocol, player))
payload_service.setServiceParent(s)
return s
serviceMaker = NanoplayMaker()
|
mit
|
Python
|
5eb11aa2a41e2d2448cf81d3ef4416a7aaf3a537
|
change db location to match reinit.sh script
|
ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide
|
calebasse/settings/local_settings_example.py
|
calebasse/settings/local_settings_example.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'calebasse/calebasse.sqlite3',
}
}
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'calebasse.sqlite3',
}
}
|
agpl-3.0
|
Python
|
26d104b5758d41954d0da4a3447cc22c089c1cf0
|
fix migrations
|
misli/cmsplugin-iframe2,misli/cmsplugin-iframe2
|
cmsplugin_iframe2/migrations/0001_initial.py
|
cmsplugin_iframe2/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2017-04-01 18:26
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from ..conf import settings
class Migration(migrations.Migration):
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='IFramePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='cmsplugin_iframe2_iframeplugin', serialize=False, to='cms.CMSPlugin')),
('style', models.CharField(blank=True, choices=settings.CMSPLUGIN_IFRAME_CLASSES, help_text='value of HTML attribute class', max_length=50, null=True, verbose_name='style')),
('width', models.CharField(blank=True, choices=settings.CMSPLUGIN_IFRAME_WIDTHS, max_length=10, null=True, verbose_name='width')),
('height', models.CharField(blank=True, choices=settings.CMSPLUGIN_IFRAME_HEIGHTS, max_length=10, null=True, verbose_name='height')),
('align', models.CharField(blank=True, choices=[('left', 'align left'), ('right', 'align right')], max_length=10, null=True, verbose_name='align')),
('src', models.TextField(verbose_name='url')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
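# A plausible settings sketch (values taken from the superseded migration
# below; the setting names are exactly what the fields above reference).
# Keeping the choices in settings means changing them no longer churns out
# a new migration, since the migration reads them at import time:
# CMSPLUGIN_IFRAME_CLASSES = [(None, 'no class')]
# CMSPLUGIN_IFRAME_WIDTHS = [('200', '200 pixels'), ('400', '400 pixels'), ('800', '800 pixels'), ('100%', '100 %')]
# CMSPLUGIN_IFRAME_HEIGHTS = [('150', '150 pixels'), ('300', '300 pixels'), ('600', '600 pixels'), ('1200', '1200 pixels'), ('100%', '100 %')]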
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2017-04-01 18:26
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='IFramePlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='cmsplugin_iframe2_iframeplugin', serialize=False, to='cms.CMSPlugin')),
('style', models.CharField(blank=True, choices=[(None, 'no class')], help_text='value of HTML attribute class', max_length=50, null=True, verbose_name='style')),
('width', models.CharField(blank=True, choices=[('200', '200 pixels'), ('400', '400 pixels'), ('800', '800 pixels'), ('100%', '100 %')], max_length=10, null=True, verbose_name='width')),
('height', models.CharField(blank=True, choices=[('150', '150 pixels'), ('300', '300 pixels'), ('600', '600 pixels'), ('1200', '1200 pixels'), ('100%', '100 %')], max_length=10, null=True, verbose_name='height')),
('align', models.CharField(blank=True, choices=[('left', 'align left'), ('right', 'align right')], max_length=10, null=True, verbose_name='align')),
('src', models.TextField(verbose_name='url')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
bsd-3-clause
|
Python
|
7ef6132194ccd207c554521209ba3472bf523940
|
Make factories return unicode data
|
devs1991/test_edx_docmode,Shrhawk/edx-platform,LICEF/edx-platform,nanolearning/edx-platform,shubhdev/openedx,dsajkl/123,carsongee/edx-platform,mtlchun/edx,hkawasaki/kawasaki-aio8-0,MSOpenTech/edx-platform,andyzsf/edx,Lektorium-LLC/edx-platform,torchingloom/edx-platform,TeachAtTUM/edx-platform,polimediaupv/edx-platform,shabab12/edx-platform,rue89-tech/edx-platform,syjeon/new_edx,LICEF/edx-platform,nanolearningllc/edx-platform-cypress,beni55/edx-platform,mtlchun/edx,hkawasaki/kawasaki-aio8-0,MSOpenTech/edx-platform,analyseuc3m/ANALYSE-v1,mjirayu/sit_academy,mcgachey/edx-platform,Edraak/circleci-edx-platform,etzhou/edx-platform,eemirtekin/edx-platform,bitifirefly/edx-platform,vasyarv/edx-platform,MakeHer/edx-platform,UXE/local-edx,gymnasium/edx-platform,jamiefolsom/edx-platform,nttks/edx-platform,pku9104038/edx-platform,WatanabeYasumasa/edx-platform,kmoocdev2/edx-platform,abdoosh00/edraak,kamalx/edx-platform,chand3040/cloud_that,kmoocdev/edx-platform,Stanford-Online/edx-platform,fly19890211/edx-platform,rhndg/openedx,msegado/edx-platform,jolyonb/edx-platform,atsolakid/edx-platform,miptliot/edx-platform,cyanna/edx-platform,vasyarv/edx-platform,ZLLab-Mooc/edx-platform,chauhanhardik/populo,nanolearningllc/edx-platform-cypress-2,lduarte1991/edx-platform,ZLLab-Mooc/edx-platform,nagyistoce/edx-platform,ampax/edx-platform,jazkarta/edx-platform,cecep-edu/edx-platform,devs1991/test_edx_docmode,ampax/edx-platform,zubair-arbi/edx-platform,pabloborrego93/edx-platform,nttks/jenkins-test,peterm-itr/edx-platform,etzhou/edx-platform,IndonesiaX/edx-platform,jazkarta/edx-platform,jswope00/GAI,kxliugang/edx-platform,dsajkl/123,hkawasaki/kawasaki-aio8-0,mtlchun/edx,kursitet/edx-platform,rationalAgent/edx-platform-custom,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-2,defance/edx-platform,Edraak/edx-platform,AkA84/edx-platform,arifsetiawan/edx-platform,kamalx/edx-platform,JCBarahona/edX,zadgroup/edx-platform,playm2mboy/edx-platform,benpatterson/edx-platform,motion2015/a3,xingyepei/edx-platform,gsehub/edx-platform,zhenzhai/edx-platform,sameetb-cuelogic/edx-platform-test,arbrandes/edx-platform,jswope00/GAI,yokose-ks/edx-platform,nttks/jenkins-test,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-2,wwj718/ANALYSE,xingyepei/edx-platform,olexiim/edx-platform,shabab12/edx-platform,jazkarta/edx-platform-for-isc,LearnEra/LearnEraPlaftform,mbareta/edx-platform-ft,stvstnfrd/edx-platform,jazztpt/edx-platform,procangroup/edx-platform,xuxiao19910803/edx,appsembler/edx-platform,synergeticsedx/deployment-wipro,CourseTalk/edx-platform,zhenzhai/edx-platform,y12uc231/edx-platform,cognitiveclass/edx-platform,syjeon/new_edx,synergeticsedx/deployment-wipro,mbareta/edx-platform-ft,nttks/jenkins-test,rationalAgent/edx-platform-custom,jswope00/GAI,kxliugang/edx-platform,dsajkl/123,hkawasaki/kawasaki-aio8-0,mtlchun/edx,kursitet/edx-platform,waheedahmed/edx-platform,Edraak/edraak-platform,procangroup/edx-platform,vismartltd/edx-platform,edx/edx-platform,tanmaykm/edx-platform,cyanna/edx-platform,devs1991/test_edx_docmode,y12uc231/edx-platform,ferabra/edx-platform,playm2mboy/edx-platform,Edraak/circleci-edx-platform,IndonesiaX/edx-platform,PepperPD/edx-pepper-platform,pelikanchik/edx-platform,don-github/edx-platform,y12uc231/edx-platform,miptliot/edx-platform,halvertoluke/edx-platform,mbareta/edx-platform-ft,rue89-tech/edx-platform,UXE/local-edx,AkA84/edx-platform,zofuthan/edx-platform,cselis86/edx-platform,fintech-circle/edx-platform,jswope00/griffinx,UXE/local-edx,ampax/edx-platform-backup,wwj718/ANALYSE,xingyepei/edx-platform,olexiim/edx-platform,shabab12/edx-platform,jazkarta/edx-platform-for-isc,LearnEra/LearnEraPlaftform,mbareta/edx-platform-ft,stvstnfrd/edx-platform,jazztpt/edx-platform,procangroup/edx-platform,xuxiao19910803/edx,appsembler/edx-platform,synergeticsedx/deployment-wipro,CourseTalk/edx-platform,zhenzhai/edx-platform,y12uc231/edx-platform,cognitiveclass/edx-platform,syjeon/new_edx,synergeticsedx/deployment-wipro,mbareta/edx-platform-ft,appliedx/edx-platform,carsongee/edx-platform,pabloborrego93/edx-platform,Shrhawk/edx-platform,tiagochiavericosta/edx-platform,cognitiveclass/edx-platform,inares/edx-platform,Ayub-Khan/edx-platform,cyanna/edx-platform,morenopc/edx-platform,prarthitm/edxplatform,shubhdev/openedx,kursitet/edx-platform,bigdatauniversity/edx-platform,mahendra-r/edx-platform,devs1991/test_edx_docmode,appliedx/edx-platform,shubhdev/edx-platform,benpatterson/edx-platform,antonve/s4-project-mooc,Unow/edx-platform,yokose-ks/edx-platform,polimediaupv/edx-platform,appsembler/edx-platform,IndonesiaX/edx-platform,naresh21/synergetics-edx-platform,IONISx/edx-platform,kursitet/edx-platform,msegado/edx-platform,EduPepperPD/pepper2013,eemirtekin/edx-platform,doismellburning/edx-platform,TsinghuaX/edx-platform,waheedahmed/edx-platform,xingyepei/edx-platform,jswope00/griffinx,nanolearningllc/edx-platform-cypress-2,ampax/edx-platform-backup,B-MOOC/edx-platform,dsajkl/reqiop,solashirai/edx-platform,ferabra/edx-platform,waheedahmed/edx-platform,deepsrijit1105/edx-platform,zadgroup/edx-platform,mitocw/edx-platform,marcore/edx-platform,OmarIthawi/edx-platform,openfun/edx-platform,Livit/Livit.Learn.EdX,Edraak/edx-platform,dkarakats/edx-platform,Softmotions/edx-platform,caesar2164/edx-platform,raccoongang/edx-platform,4eek/edx-platform,CredoReference/edx-platform,eduNEXT/edunext-platform,utecuy/edx-platform,Semi-global/edx-platform,nanolearningllc/edx-platform-cypress,valtech-mooc/edx-platform,kalebhartje/schoolboost,doganov/edx-platform,waheedahmed/edx-platform,Edraak/edraak-platform,procangroup/edx-platform,vismartltd/edx-platform,edx/edx-platform,tanmaykm/edx-platform,cyanna/edx-platform,devs1991/test_edx_docmode,y12uc231/edx-platform,ferabra/edx-platform,playm2mboy/edx-platform,Edraak/circleci-edx-platform,IndonesiaX/edx-platform,PepperPD/edx-pepper-platform,pelikanchik/edx-platform,don-github/edx-platform,y12uc231/edx-platform,miptliot/edx-platform,halvertoluke/edx-platform,mbareta/edx-platform-ft,rue89-tech/edx-platform,UXE/local-edx,AkA84/edx-platform,zofuthan/edx-platform,cselis86/edx-platform,fintech-circle/edx-platform,jswope00/griffinx,UXE/local-edx,ampax/edx-platform-backup,motion2015/a3,leansoft/edx-platform,MSOpenTech/edx-platform,ampax/edx-platform,Shrhawk/edx-platform,Ayub-Khan/edx-platform,prarthitm/edxplatform,bitifirefly/edx-platform,kursitet/edx-platform,beacloudgenius/edx-platform,nanolearningllc/edx-platform-cypress,martynovp/edx-platform,arbrandes/edx-platform,Lektorium-LLC/edx-platform,pku9104038/edx-platform,ahmadiga/min_edx,itsjeyd/edx-platform,motion2015/a3,motion2015/a3,shubhdev/edx-platform,xuxiao19910803/edx-platform,nanolearningllc/edx-platform-cypress,martynovp/edx-platform,arbrandes/edx-platform,Lektorium-LLC/edx-platform,pku9104038/edx-platform,ahmadiga/min_edx,stvstnfrd/edx-platform,proversity-org/edx-platform,xinjiguaike/edx-platform,motion2015/edx-platform,BehavioralInsightsTeam/edx-platform,jazztpt/edx-platform,wwj718/edx-platform,Edraak/edraak-platform,CredoReference/edx-platform,fly19890211/edx-platform,UOMx/edx-platform,franosincic/edx-platform,morpheby/levelup-by,vikas1885/test1,jswope00/griffinx,chudaol/edx-platform,fly19890211/edx-platform,jonathan-beard/edx-platform,defance/edx-platform,hastexo/edx-platform,solashirai/edx-platform,atsolakid/edx-platform,mushtaqak/edx-platform,amir-qayyum-khan/edx-platform,jamiefolsom/edx-platform,beacloudgenius/edx-platform,bitifirefly/edx-platform,EduPepperPD/pepper2013,eemirtekin/edx-platform,nikolas/edx-platform,zubair-arbi/edx-platform,zhenzhai/edx-platform,andyzsf/edx,raccoongang/edx-platform,vismartltd/edx-platform,raccoongang/edx-platform,chauhanhardik/populo_2,antoviaque/edx-platform,rismalrv/edx-platform,amir-qayyum-khan/edx-platform,shabab12/edx-platform,morenopc/edx-platform,shashank971/edx-platform,dcosentino/edx-platform,Ayub-Khan/edx-platform,jbassen/edx-platform,chudaol/edx-platform,y12uc231/edx-platform,kursitet/edx-platform,dkarakats/edx-platform,yokose-ks/edx-platform,knehez/edx-platform,alexthered/kienhoc-platform,unicri/edx-platform,shubhdev/edxOnBaadal,polimediaupv/edx-platform,benpatterson/edx-platform,msegado/edx-platform,Edraak/circleci-edx-platform,bigdatauniversity/edx-platform,UOMx/edx-platform,IndonesiaX/edx-platform,dsajkl/reqiop,cselis86/edx-platform,ovnicraft/edx-platform,morpheby/levelup-by,JioEducation/edx-platform,eestay/edx-platform,rationalAgent/edx-platform-custom,jruiperezv/ANALYSE,pelikanchik/edx-platform,ak2703/edx-platform,shubhdev/edx-platform,vismartltd/edx-platform,kxliugang/edx-platform,UOMx/edx-platform,beacloudgenius/edx-platform,bitifirefly/edx-platform,ahmadio/edx-platform,wwj718/edx-platform,mjg2203/edx-platform-seas,praveen-pal/edx-platform,ahmadiga/min_edx,olexiim/edx-platform,nikolas/edx-platform,apigee/edx-platform,rationalAgent/edx-platform-custom,chand3040/cloud_that,ZLLab-Mooc/edx-platform,JCBarahona/edX,polimediaupv/edx-platform,cecep-edu/edx-platform,kmoocdev2/edx-platform,vasyarv/edx-platform,EduPepperPDTesting/pepper2013-testing,beni55/edx-platform,naresh21/synergetics-edx-platform,edry/edx-platform,MakeHer/edx-platform,carsongee/edx-platform,jazztpt/edx-platform,dcosentino/edx-platform,a-parhom/edx-platform,utecuy/edx-platform,jamiefolsom/edx-platform,olexiim/edx-platform,shubhdev/openedx,dsajkl/reqiop,mushtaqak/edx-platform,B-MOOC/edx-platform,doismellburning/edx-platform,jswope00/griffinx,ahmadio/edx-platform,IITBinterns13/edx-platform-dev,motion2015/edx-platform,sudheerchintala/LearnEraPlatForm,xuxiao19910803/edx,atsolakid/edx-platform,jelugbo/tundex,iivic/BoiseStateX,gymnasium/edx-platform,alu042/edx-platform,auferack08/edx-platform,ovnicraft/edx-platform,marcore/edx-platform,antoviaque/edx-platform,hkawasaki/kawasaki-aio8-1,mtlchun/edx,ahmadiga/min_edx,fly19890211/edx-platform,angelapper/edx-platform,ferabra/edx-platform,mjirayu/sit_academy,jolyonb/edx-platform,xinjiguaike/edx-platform,motion2015/edx-platform,chauhanhardik/populo_2,rue89-tech/edx-platform,zubair-arbi/edx-platform,mjirayu/sit_academy,rismalrv/edx-platform,SravanthiSinha/edx-platform,jelugbo/tundex,edry/edx-platform,bigdatauniversity/edx-platform,SravanthiSinha/edx-platform,jjmiranda/edx-platform,DNFcode/edx-platform,halvertoluke/edx-platform,jonathan-beard/edx-platform,adoosii/edx-platform,PepperPD/edx-pepper-platform,jbzdak/edx-platform,y12uc231/edx-platform,hkawasaki/kawasaki-aio8-0,mushtaqak/edx-platform,bigdatauniversity/edx-platform,shashank971/edx-platform,OmarIthawi/edx-platform,naresh21/synergetics-edx-platform,knehez/edx-platform,xinjiguaike/edx-platform,motion2015/edx-platform,chrisndodge/edx-platform,andyzsf/edx,jbassen/edx-platform,edry/edx-platform,ampax/edx-platform-backup,shubhdev/openedx,pepeportela/edx-platform,rationalAgent/edx-platform-custom,eduNEXT/edunext-platform,vasyarv/edx-platform,rue89-tech/edx-platform,rismalrv/edx-platform,kmoocdev/edx-platform,auferack08/edx-platform,Semi-global/edx-platform,shurihell/testasia,stvstnfrd/edx-platform,TsinghuaX/edx-platform,mtlchun/edx,EduPepperPD/pepper2013,kmoocdev2/edx-platform,pomegranited/edx-platform,AkA84/edx-platform,adoosii/edx-platform,bdero/edx-platform,shashank971/edx-platform,ahmedaljazzar/edx-platform,franosincic/edx-platform,playm2mboy/edx-platform,sudheerchintala/LearnEraPlatForm,jamesblunt/edx-platform,dsajkl/123,philanthropy-u/edx-platform,pdehaye/theming-edx-platform,raccoongang/edx-platform,vikas1885/test1,ubc/edx-platform,jelugbo/tundex,pomegranited/edx-platform,Unow/edx-platform,tiagochiavericosta/edx-platform,msegado/edx-platform,yokose-ks/edx-platform,zofuthan/edx-platform,Edraak/edx-platform,LICEF/edx-platform,hastexo/edx-platform,edx/edx-platform,openfun/edx-platform,ovnicraft/edx-platform,simbs/edx-platform,zerobatu/edx-platform,rismalrv/edx-platform,beni55/edx-platform,shubhdev/openedx,hkawasaki/kawasaki-aio8-2,AkA84/edx-platform,jzoldak/edx-platform,unicri/edx-platform,edx-solutions/edx-platform,mushtaqak/edx-platform,chrisndodge/edx-platform,zhenzhai/edx-platform,Unow/edx-platform,tiagochiavericosta/edx-platform,IONISx/edx-platform,EDUlib/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,openfun/edx-platform,knehez/edx-platform,chand3040/cloud_that,jazkarta/edx-platform-for-isc,Semi-global/edx-platform,cpennington/edx-platform,cselis86/edx-platform,romain-li/edx-platform,teltek/edx-platform,jbzdak/edx-platform,marcore/edx-platform,pelikanchik/edx-platform,cyanna/edx-platform,ZLLab-Mooc/edx-platform,wwj718/ANALYSE,philanthropy-u/edx-platform,ESOedX/edx-platform,fintech-circle/edx-platform,martynovp/edx-platform,jazkarta/edx-platform,rismalrv/edx-platform,jamesblunt/edx-platform,ahmadiga/min_edx,ak2703/edx-platform,morpheby/levelup-by,simbs/edx-platform,benpatterson/edx-platform,Livit/Livit.Learn.EdX,chudaol/edx-platform,ahmadio/edx-platform,franosincic/edx-platform,BehavioralInsightsTeam/edx-platform,mahendra-r/edx-platform,hkawasaki/kawasaki-aio8-0,hmcmooc/muddx-platform,zerobatu/edx-platform,IONISx/edx-platform,dsajkl/123,Edraak/edraak-platform,dsajkl/reqiop,zofuthan/edx-platform,proversity-org/edx-platform,antonve/s4-project-mooc,hmcmooc/muddx-platform,xuxiao19910803/edx,rhndg/openedx,deepsrijit1105/edx-platform,pdehaye/theming-edx-platform,miptliot/edx-platform,valtech-mooc/edx-platform,halvertoluke/edx-platform,hastexo/edx-platform,eemirtekin/edx-platform,nikolas/edx-platform,praveen-pal/edx-platform,chauhanhardik/populo_2,AkA84/edx-platform,nagyistoce/edx-platform,mitocw/edx-platform,JioEducation/edx-platform,xingyepei/edx-platform,EduPepperPDTesting/pepper2013-testing,ubc/edx-platform,xuxiao19910803/edx,eduNEXT/edx-platform,RPI-OPENEDX/edx-platform,chrisndodge/edx-platform,philanthropy-u/edx-platform,jzoldak/edx-platform,LICEF/edx-platform,Edraak/circleci-edx-platform,knehez/edx-platform,antonve/s4-project-mooc,hamzehd/edx-platform,4eek/edx-platform,eduNEXT/edx-platform,doganov/edx-platform,chauhanhardik/populo,eestay/edx-platform,mjg2203/edx-platform-seas,nanolearning/edx-platform,hamzehd/edx-platform,rue89-tech/edx-platform,TsinghuaX/edx-platform,mcgachey/edx-platform,zofuthan/edx-platform,jazkarta/edx-platform,kamalx/edx-platform,ESOedX/edx-platform,Endika/edx-platform,louyihua/edx-platform,doismellburning/edx-platform,eemirtekin/edx-platform,pomegranited/edx-platform,halvertoluke/edx-platform,pomegranited/edx-platform,xuxiao19910803/edx-platform,alu042/edx-platform,chudaol/edx-platform,zhenzhai/edx-platform,nanolearning/edx-platform,caesar2164/edx-platform,SravanthiSinha/edx-platform,hkawasaki/kawasaki-aio8-2,kalebhartje/schoolboost,wwj718/edx-platform,alexthered/kienhoc-platform,10clouds/edx-platform,procangroup/edx-platform,peterm-itr/edx-platform,WatanabeYasumasa/edx-platform,prarthitm/edxplatform,Softmotions/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,nagyistoce/edx-platform,jonathan-beard/edx-platform,TeachAtTUM/edx-platform,sameetb-cuelogic/edx-platform-test,shashank971/edx-platform,edx/edx-platform,xingyepei/edx-platform,4eek/edx-platform,unicri/edx-platform,morpheby/levelup-by,romain-li/edx-platform,beacloudgenius/edx-platform,kmoocdev/edx-platform,OmarIthawi/edx-platform,Stanford-Online/edx-platform,jolyonb/edx-platform,arbrandes/edx-platform,fintech-circle/edx-platform,Edraak/edx-platform,kalebhartje/schoolboost,synergeticsedx/deployment-wipro,J861449197/edx-platform,nttks/edx-platform,torchingloom/edx-platform,morenopc/edx-platform,kxliugang/edx-platform,DefyVentures/edx-platform,synergeticsedx/deployment-wipro,ESOedX/edx-platform,angelapper/edx-platform,kmoocdev2/edx-platform,CourseTalk/edx-platform,hkawasaki/kawasaki-aio8-1,simbs/edx-platform,mahendra-r/edx-platform,Softmotions/edx-platform,andyzsf/edx,gymnasium/edx-platform,mtlchun/edx,procangroup/edx-platform,Kalyzee/edx-platform,hmcmooc/muddx-platform,MakeHer/edx-platform,wwj718/ANALYSE,iivic/BoiseStateX,J861449197/edx-platform,J861449197/edx-platform,chauhanhardik/populo_2,EduPepperPDTesting/pepper2013-testing,leansoft/edx-platform,shubhdev/edx-platform,SivilTaram/edx-platform,hamzehd/edx-platform,defance/edx-platform,naresh21/synergetics-edx-platform,DefyVentures/edx-platform,MakeHer/edx-platform,cecep-edu/edx-platform,rhndg/openedx,gsehub/edx-platform,pdehaye/theming-edx-platform,EduPepperPDTesting/pepper2013-testing,jzoldak/edx-platform,waheedahmed/edx-platform,jruiperezv/ANALYSE,ferabra/edx-platform,RPI-OPENEDX/edx-platform,arbrandes/edx-platform,antoviaque/edx-platform,iivic/BoiseStateX,Stanford-Online/edx-platform,romain-li/edx-platform,shashank971/edx-platform,jolyonb/edx-platform,louyihua/edx-platform,teltek/edx-platform,mitocw/edx-platform,apigee/edx-platform,DNFcode/edx-platform,jbassen/edx-platform,martynovp/edx-platform,ubc/edx-platform,stvstnfrd/edx-platform,jbzdak/edx-platform,mcgachey/edx-platform,dcosentino/edx-platform,longmen21/edx-platform,DNFcode/edx-platform,appsembler/edx-platform,eduNEXT/edx-platform,rationalAgent/edx-platform-custom,morenopc/edx-platform,dcosentino/edx-platform,syjeon/new_edx,Semi-global/edx-platform,openfun/edx-platform,nanolearning/edx-platform,nttks/jenkins-test,bitifirefly/edx-platform,valtech-mooc/edx-platform,Lektorium-LLC/edx-platform,mjg2203/edx-platform-seas,fly19890211/edx-platform,shubhdev/edxOnBaadal,gymnasium/edx-platform,Softmotions/edx-platform,rhndg/openedx,edx-solutions/edx-platform,Lektorium-LLC/edx-platform,EDUlib/edx-platform,martynovp/edx-platform,dkarakats/edx-platform,mjirayu/sit_academy,WatanabeYasumasa/edx-platform,ampax/edx-platform-backup,Livit/Livit.Learn.EdX,alexthered/kienhoc-platform,alexthered/kienhoc-platform,playm2mboy/edx-platform,arifsetiawan/edx-platform,motion2015/edx-platform,ovnicraft/edx-platform,shurihell/testasia,sameetb-cuelogic/edx-platform-test,cecep-edu/edx-platform,gsehub/edx-platform,DefyVentures/edx-platform,RPI-OPENEDX/edx-platform,a-parhom/edx-platform,apigee/edx-platform,analyseuc3m/ANALYSE-v1,playm2mboy/edx-platform,inares/edx-platform,hkawasaki/kawasaki-aio8-1,bdero/edx-platform,marcore/edx-platform,praveen-pal/edx-platform,doganov/edx-platform,Shrhawk/edx-platform,peterm-itr/edx-platform,jswope00/GAI,DNFcode/edx-platform,CredoReference/edx-platform,tanmaykm/edx-platform,hkawasaki/kawasaki-aio8-1,mcgachey/edx-platform,tiagochiavericosta/edx-platform,edry/edx-platform,nttks/jenkins-test,pepeportela/edx-platform,jruiperezv/ANALYSE,franosincic/edx-platform,wwj718/edx-platform,kxliugang/edx-platform,cselis86/edx-platform,IITBinterns13/edx-platform-dev,OmarIthawi/edx-platform,nttks/edx-platform,doganov/edx-platform,lduarte1991/edx-platform,BehavioralInsightsTeam/edx-platform,pelikanchik/edx-platform,jamesblunt/edx-platform,olexiim/edx-platform,PepperPD/edx-pepper-platform,DNFcode/edx-platform,EduPepperPD/pepper2013,jjmiranda/edx-platform,EDUlib/edx-platform,Endika/edx-platform,LICEF/edx-platform,TsinghuaX/edx-platform,jzoldak/edx-platform,TeachAtTUM/edx-platform,mjg2203/edx-platform-seas,SravanthiSinha/edx-platform,eduNEXT/edunext-platform,jruiperezv/ANALYSE,abdoosh00/edx-rtl-final,peterm-itr/edx-platform,nanolearningllc/edx-platform-cypress-2,leansoft/edx-platform,don-github/edx-platform,abdoosh00/edraak,don-github/edx-platform,JCBarahona/edX,4eek/edx-platform,Endika/edx-platform,jbzdak/edx-platform,IITBinterns13/edx-platform-dev,Kalyzee/edx-platform,edry/edx-platform,mcgachey/edx-platform,arifsetiawan/edx-platform,antonve/s4-proje
ct-mooc,hastexo/edx-platform,SivilTaram/edx-platform,utecuy/edx-platform,chrisndodge/edx-platform,amir-qayyum-khan/edx-platform,Edraak/edx-platform,itsjeyd/edx-platform,itsjeyd/edx-platform,arifsetiawan/edx-platform,UXE/local-edx,atsolakid/edx-platform,cognitiveclass/edx-platform,chudaol/edx-platform,cyanna/edx-platform,solashirai/edx-platform,eduNEXT/edx-platform,Shrhawk/edx-platform,beni55/edx-platform,adoosii/edx-platform,cognitiveclass/edx-platform,sudheerchintala/LearnEraPlatForm,etzhou/edx-platform,Kalyzee/edx-platform,valtech-mooc/edx-platform,dsajkl/123,zerobatu/edx-platform,msegado/edx-platform,don-github/edx-platform,simbs/edx-platform,angelapper/edx-platform,utecuy/edx-platform,PepperPD/edx-pepper-platform,nanolearning/edx-platform,kxliugang/edx-platform,teltek/edx-platform,iivic/BoiseStateX,mushtaqak/edx-platform,defance/edx-platform,devs1991/test_edx_docmode,MSOpenTech/edx-platform,jazkarta/edx-platform,eemirtekin/edx-platform,B-MOOC/edx-platform,lduarte1991/edx-platform,shurihell/testasia,dkarakats/edx-platform,DefyVentures/edx-platform,Edraak/edraak-platform,auferack08/edx-platform,rhndg/openedx,romain-li/edx-platform,xuxiao19910803/edx-platform,jelugbo/tundex,solashirai/edx-platform,Kalyzee/edx-platform,EduPepperPDTesting/pepper2013-testing,kalebhartje/schoolboost,alexthered/kienhoc-platform,mitocw/edx-platform,SravanthiSinha/edx-platform,torchingloom/edx-platform,ak2703/edx-platform,zubair-arbi/edx-platform,gsehub/edx-platform,syjeon/new_edx,kamalx/edx-platform,Semi-global/edx-platform,zofuthan/edx-platform,nanolearningllc/edx-platform-cypress-2,tanmaykm/edx-platform,appsembler/edx-platform,hmcmooc/muddx-platform,carsongee/edx-platform,pku9104038/edx-platform,Livit/Livit.Learn.EdX,Endika/edx-platform,teltek/edx-platform,ahmedaljazzar/edx-platform,morenopc/edx-platform,longmen21/edx-platform,jbassen/edx-platform,IONISx/edx-platform,simbs/edx-platform,UOMx/edx-platform,J861449197/edx-platform,lduarte1991/edx-platform,xuxiao19910803/edx-platform,pku9104038/edx-platform,shubhdev/edxOnBaadal,shubhdev/edxOnBaadal,jazztpt/edx-platform,chand3040/cloud_that,kmoocdev/edx-platform,kalebhartje/schoolboost,alu042/edx-platform,Stanford-Online/edx-platform,LearnEra/LearnEraPlaftform,vismartltd/edx-platform,jazkarta/edx-platform-for-isc,nikolas/edx-platform,edx-solutions/edx-platform,knehez/edx-platform,openfun/edx-platform,zadgroup/edx-platform,torchingloom/edx-platform,a-parhom/edx-platform,ahmadio/edx-platform,pabloborrego93/edx-platform,LearnEra/LearnEraPlaftform,adoosii/edx-platform,10clouds/edx-platform,xinjiguaike/edx-platform,fintech-circle/edx-platform,jamesblunt/edx-platform,mahendra-r/edx-platform,pepeportela/edx-platform,JioEducation/edx-platform,chauhanhardik/populo,edx/edx-platform,longmen21/edx-platform,appliedx/edx-platform,jswope00/griffinx,mahendra-r/edx-platform,MakeHer/edx-platform,dkarakats/edx-platform,louyihua/edx-platform,JCBarahona/edX,ahmedaljazzar/edx-platform,eestay/edx-platform,Unow/edx-platform,shabab12/edx-platform,jruiperezv/ANALYSE,CredoReference/edx-platform,shurihell/testasia,abdoosh00/edraak,nagyistoce/edx-platform,apigee/edx-platform,tanmaykm/edx-platform,jbzdak/edx-platform,torchingloom/edx-platform,bdero/edx-platform,inares/edx-platform,proversity-org/edx-platform,chauhanhardik/populo_2,abdoosh00/edx-rtl-final,vasyarv/edx-platform,abdoosh00/edx-rtl-final,antoviaque/edx-platform,WatanabeYasumasa/edx-platform,alu042/edx-platform,EduPepperPD/pepper2013,antonve/s4-project-mooc,devs1991/test_edx_docmode,eestay/edx-platform,tiagochiavericosta/edx-platform,B
-MOOC/edx-platform,longmen21/edx-platform,jamesblunt/edx-platform,eestay/edx-platform,jjmiranda/edx-platform,adoosii/edx-platform,jazztpt/edx-platform,ZLLab-Mooc/edx-platform,beni55/edx-platform,leansoft/edx-platform
|
common/djangoapps/student/tests/factories.py
|
common/djangoapps/student/tests/factories.py
|
from student.models import (User, UserProfile, Registration,
CourseEnrollmentAllowed, CourseEnrollment)
from django.contrib.auth.models import Group
from datetime import datetime
from factory import DjangoModelFactory, SubFactory, PostGenerationMethodCall, post_generation, Sequence
from uuid import uuid4
class GroupFactory(DjangoModelFactory):
FACTORY_FOR = Group
name = u'staff_MITx/999/Robot_Super_Course'
class UserProfileFactory(DjangoModelFactory):
FACTORY_FOR = UserProfile
user = None
name = u'Robot Test'
level_of_education = None
gender = u'm'
mailing_address = None
goals = u'World domination'
class RegistrationFactory(DjangoModelFactory):
FACTORY_FOR = Registration
user = None
activation_key = uuid4().hex.decode('ascii')
class UserFactory(DjangoModelFactory):
FACTORY_FOR = User
username = Sequence(u'robot{0}'.format)
email = Sequence(u'robot+test+{0}@edx.org'.format)
password = PostGenerationMethodCall('set_password',
'test')
first_name = Sequence(u'Robot{0}'.format)
last_name = 'Test'
is_staff = False
is_active = True
is_superuser = False
last_login = datetime(2012, 1, 1)
date_joined = datetime(2011, 1, 1)
@post_generation
def profile(obj, create, extracted, **kwargs):
if create:
obj.save()
return UserProfileFactory.create(user=obj, **kwargs)
elif kwargs:
raise Exception("Cannot build a user profile without saving the user")
else:
return None
class AdminFactory(UserFactory):
is_staff = True
class CourseEnrollmentFactory(DjangoModelFactory):
FACTORY_FOR = CourseEnrollment
user = SubFactory(UserFactory)
course_id = u'edX/toy/2012_Fall'
class CourseEnrollmentAllowedFactory(DjangoModelFactory):
FACTORY_FOR = CourseEnrollmentAllowed
email = '[email protected]'
course_id = 'edX/test/2012_Fall'
|
from student.models import (User, UserProfile, Registration,
CourseEnrollmentAllowed, CourseEnrollment)
from django.contrib.auth.models import Group
from datetime import datetime
from factory import DjangoModelFactory, SubFactory, PostGenerationMethodCall, post_generation, Sequence
from uuid import uuid4
class GroupFactory(DjangoModelFactory):
FACTORY_FOR = Group
name = 'staff_MITx/999/Robot_Super_Course'
class UserProfileFactory(DjangoModelFactory):
FACTORY_FOR = UserProfile
user = None
name = 'Robot Test'
level_of_education = None
gender = 'm'
mailing_address = None
goals = 'World domination'
class RegistrationFactory(DjangoModelFactory):
FACTORY_FOR = Registration
user = None
activation_key = uuid4().hex
class UserFactory(DjangoModelFactory):
FACTORY_FOR = User
username = Sequence('robot{0}'.format)
email = Sequence('robot+test+{0}@edx.org'.format)
password = PostGenerationMethodCall('set_password',
'test')
first_name = Sequence('Robot{0}'.format)
last_name = 'Test'
is_staff = False
is_active = True
is_superuser = False
last_login = datetime(2012, 1, 1)
date_joined = datetime(2011, 1, 1)
@post_generation
def profile(obj, create, extracted, **kwargs):
if create:
obj.save()
return UserProfileFactory.create(user=obj, **kwargs)
elif kwargs:
raise Exception("Cannot build a user profile without saving the user")
else:
return None
class AdminFactory(UserFactory):
is_staff = True
class CourseEnrollmentFactory(DjangoModelFactory):
FACTORY_FOR = CourseEnrollment
user = SubFactory(UserFactory)
course_id = 'edX/toy/2012_Fall'
class CourseEnrollmentAllowedFactory(DjangoModelFactory):
FACTORY_FOR = CourseEnrollmentAllowed
email = '[email protected]'
course_id = 'edX/test/2012_Fall'
|
agpl-3.0
|
Python
|
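A minimal sketch of how these factories are typically exercised in a Django test case, assuming a configured test database (the import path follows the file's location; the username override below is hypothetical):
from student.tests.factories import UserFactory, CourseEnrollmentFactory
user = UserFactory.create(username=u'robot-tester')
enrollment = CourseEnrollmentFactory.create(user=user)
# the post_generation hook saved the user and attached a UserProfile
assert enrollment.course_id == u'edX/toy/2012_Fall'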
6a58541a0fe1a942c3a2c187eb0358bd8350a51f
|
Change default output folder of minimize-content-pack.py.
|
fle-internal/content-pack-maker
|
minimize-content-pack.py
|
minimize-content-pack.py
|
"""
minimize-content-pack
Remove assessment items, subtitles and po files from a content pack.
Usage:
minimize-content-pack.py <old-content-pack-path> <out-path>
"""
import zipfile
from pathlib import Path
from docopt import docopt
ITEMS_TO_TRANSFER = [
"metadata.json",
"content.db",
"backend.mo",
"frontend.mo",
]
def minimize_content_pack(oldpackpath: Path, outpath: Path):
with zipfile.ZipFile(str(oldpackpath)) as oldzf,\
zipfile.ZipFile(str(outpath), "w") as newzf:
items = list(i for i in oldzf.namelist()
for will_be_transferred in ITEMS_TO_TRANSFER
if will_be_transferred in i)
for item in items:
bytes = oldzf.read(item)
newzf.writestr(item, bytes)
def main():
args = docopt(__doc__)
contentpackpath = Path(args["<old-content-pack-path>"])
outpath = Path(args["<out-path>"] or
"out/minimal.zip")
outpath = outpath.expanduser()
minimize_content_pack(contentpackpath, outpath)
if __name__ == "__main__":
main()
|
"""
minimize-content-pack
Remove assessment items, subtitles and po files from a content pack.
Usage:
minimize-content-pack.py <old-content-pack-path> <out-path>
"""
import zipfile
from pathlib import Path
from docopt import docopt
ITEMS_TO_TRANSFER = [
"metadata.json",
"content.db",
"backend.mo",
"frontend.mo",
]
def minimize_content_pack(oldpackpath: Path, outpath: Path):
with zipfile.ZipFile(str(oldpackpath)) as oldzf,\
zipfile.ZipFile(str(outpath), "w") as newzf:
items = list(i for i in oldzf.namelist()
for will_be_transferred in ITEMS_TO_TRANSFER
if will_be_transferred in i)
for item in items:
bytes = oldzf.read(item)
newzf.writestr(item, bytes)
def main():
args = docopt(__doc__)
contentpackpath = Path(args["<old-content-pack-path>"])
outpath = Path(args["<out-path>"] or
"minimal.zip")
outpath = outpath.expanduser()
minimize_content_pack(contentpackpath, outpath)
if __name__ == "__main__":
main()
|
bsd-2-clause
|
Python
|
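A hypothetical invocation of the docopt script, kept in Python via subprocess (both pack paths are placeholders); with this change, the fallback output path becomes out/minimal.zip instead of minimal.zip in the working directory:
import subprocess
subprocess.run(["python", "minimize-content-pack.py",
                "packs/en.zip", "out/en-minimal.zip"], check=True)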
35e6559bd13f46679333e72b6356a82a0657cce4
|
fix thinko in kepler test
|
adrn/gala,adrn/gala,adrn/gala,adrn/gary,adrn/gary,adrn/gary
|
gala/potential/potential/tests/test_against_galpy.py
|
gala/potential/potential/tests/test_against_galpy.py
|
"""Test some builtin potentials against galpy"""
# Third-party
import numpy as np
from astropy.constants import G
import astropy.units as u
import pytest
# This project
from ...._cconfig import GSL_ENABLED
from ....units import galactic
from ..builtin import (KeplerPotential, MiyamotoNagaiPotential,
NFWPotential, PowerLawCutoffPotential,
BovyMWPotential2014)
try:
import galpy
import galpy.orbit
import galpy.potential
GALPY_INSTALLED = True
except ImportError:
GALPY_INSTALLED = False
# Set to arbitrary values for testing
ro = 8.1 * u.kpc
vo = 240 * u.km/u.s
ntest = 128
def helper(gala_pot, galpy_pot):
Rs = np.random.uniform(1, 15, size=ntest) * u.kpc
zs = np.random.uniform(1, 15, size=ntest) * u.kpc
xyz = np.zeros((3, Rs.size)) * u.kpc
xyz[0] = Rs
assert np.allclose(gala_pot.circular_velocity(xyz).to_value(u.km/u.s),
galpy_pot.vcirc(R=Rs.to_value(ro)))
xyz[2] = zs
assert np.allclose(gala_pot.density(xyz).to_value(u.Msun/u.pc**3),
galpy_pot.dens(R=Rs.to_value(ro), z=zs.to_value(ro)))
assert np.allclose(gala_pot.energy(xyz).to_value((u.km / u.s)**2),
galpy_pot(R=Rs.to_value(ro), z=zs.to_value(ro)))
assert np.allclose(gala_pot.gradient(xyz).to_value((u.km/u.s) * u.pc/u.Myr / u.pc)[2],
-galpy_pot.zforce(R=Rs.to_value(ro), z=zs.to_value(ro)))
@pytest.mark.skipif(not GALPY_INSTALLED,
reason="requires galpy to run this test")
def test_kepler():
from galpy.potential import KeplerPotential as BovyKeplerPotential
M = 5e10 * u.Msun
gala_pot = KeplerPotential(m=M, units=galactic)
amp = (G*M).to_value(vo**2 * ro)
bovy_pot = BovyKeplerPotential(amp=amp, ro=ro, vo=vo)
helper(gala_pot, bovy_pot)
|
"""Test some builtin potentials against galpy"""
# Third-party
import numpy as np
from astropy.constants import G
import astropy.units as u
import pytest
# This project
from ...._cconfig import GSL_ENABLED
from ....units import galactic
from ..builtin import (KeplerPotential, MiyamotoNagaiPotential,
NFWPotential, PowerLawCutoffPotential,
BovyMWPotential2014)
try:
import galpy
import galpy.orbit
import galpy.potential
GALPY_INSTALLED = True
except ImportError:
GALPY_INSTALLED = False
# Set to arbitrary values for testing
ro = 8.1 * u.kpc
vo = 240 * u.km/u.s
ntest = 128
def helper(gala_pot, galpy_pot):
Rs = np.random.uniform(1, 15, size=ntest) * u.kpc
zs = np.random.uniform(1, 15, size=ntest) * u.kpc
xyz = np.zeros((3, Rs.size)) * u.kpc
xyz[0] = Rs
assert np.allclose(gala_pot.circular_velocity(xyz).to_value(u.km/u.s),
galpy_pot.vcirc(R=Rs.to_value(ro)))
xyz[2] = zs
assert np.allclose(gala_pot.density(xyz).to_value(u.Msun/u.pc**3),
galpy_pot.dens(R=Rs.to_value(ro), z=zs.to_value(ro)))
assert np.allclose(gala_pot.energy(xyz).to_value((u.km / u.s)**2),
galpy_pot(R=Rs.to_value(ro), z=zs.to_value(ro)))
assert np.allclose(gala_pot.gradient(xyz).to_value((u.km/u.s) * u.pc/u.Myr / u.pc),
-galpy_pot.Rforce(R=Rs.to_value(ro), z=zs.to_value(ro)))
@pytest.mark.skipif(not GALPY_INSTALLED,
reason="requires galpy to run this test")
def test_kepler():
from galpy.potential import KeplerPotential as BovyKeplerPotential
M = 5e10 * u.Msun
gala_pot = KeplerPotential(m=M, units=galactic)
amp = (G*M).to_value(vo**2 * ro)
bovy_pot = BovyKeplerPotential(amp=amp, ro=ro, vo=vo)
helper(gala_pot, bovy_pot)
|
mit
|
Python
|
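The thinko: the old test compared the full gradient array against galpy's radial force. The fix takes the z-component (row 2) and compares it with minus zforce, since a force is the negative gradient of the potential. A numpy illustration of that sign convention, with made-up values:
import numpy as np
grad = np.array([[0.1, 0.2],   # dPhi/dx at two test points
                 [0.0, 0.0],   # dPhi/dy
                 [0.3, 0.4]])  # dPhi/dz
zforce = -grad[2]              # vertical force is minus the z-gradient
assert np.allclose(grad[2], -zforce)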
20f7102daf411a07ec922fceb2fac6c00356a84b
|
Revert "Version in function"
|
django/asgi_redis
|
asgi_redis/__init__.py
|
asgi_redis/__init__.py
|
import pkg_resources
from .core import RedisChannelLayer
from .local import RedisLocalChannelLayer
__version__ = pkg_resources.require('asgi_redis')[0].version
|
import pkg_resources
from .core import RedisChannelLayer
from .local import RedisLocalChannelLayer
def get_version():
return pkg_resources.require('asgi_redis')[0].version
|
bsd-3-clause
|
Python
|
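After the revert, the version is resolved once at import time rather than through a function call; usage, assuming asgi_redis is installed (the printed value depends on the installed release):
import asgi_redis
print(asgi_redis.__version__)  # get_version() no longer exists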
ea0847a1c509b2eba1e652b597f2921b0c19da2d
|
Add field for name in mail dict
|
Nedgang/adt_project
|
mail_parser.py
|
mail_parser.py
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import os, sys
from email.parser import Parser
import json
import re
def parse_mail(file_in):
"""
Extract the Subject and Body of a mail file.
Headers must be formatted as a block of RFC 2822 style headers.
"""
# filename_out = os.path.splitext(os.path.basename(file_in))[0] + ".json"
# infile_path = os.path.dirname(file_in)
# dirname = infile_path.split('/').pop()
#
# PATH_out = infile_path + '/' + dirname + '_' + filename_out
with open(file_in, 'r') as INFILE:
raw_mail = Parser().parse(INFILE)
formated_mail = {
"body": raw_mail.get_payload(),
"subject": raw_mail['subject'],
}
# small correction of text: remove email addresses from the text
reg = re.compile("[^@|\s]+@[^@]+\.[^@|\s]+") # black magic
formated_mail['body'] = re.sub(reg, "",formated_mail['body'])
date = os.path.dirname(file_in).split('/').pop() + '-'
name = os.path.splitext(os.path.basename(file_in))[0]
formated_mail['name'] = date+name
return formated_mail
def write_json(dico, fileout):
"""
Write dict into json-styled file
Je collectionne les canards...
... vivants !
"""
with open(fileout, "w") as OUTFILE:
json.dump(dico, OUTFILE, ensure_ascii=False)
def correct_mail(file_in):
"""
Remove all blank lines in mail files; they break the parsing otherwise
"""
new_body = ""
with open(file_in, 'r') as INFILE:
# select only non-blank lines, use generator to avoid memory storage
for line in (l for l in INFILE if(len(l.strip()) > 1)):
new_body += line
# regenerate the mail file
with open(file_in, 'w') as INFILE:
INFILE.write(new_body)
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import os, sys
from email.parser import Parser
import json
import re
def parse_mail(file_in):
"""
Extract the Subject and Body of a mail file.
Headers must be formatted as a block of RFC 2822 style headers.
"""
# filename_out = os.path.splitext(os.path.basename(file_in))[0] + ".json"
# infile_path = os.path.dirname(file_in)
# dirname = infile_path.split('/').pop()
#
# PATH_out = infile_path + '/' + dirname + '_' + filename_out
with open(file_in, 'r') as INFILE:
raw_mail = Parser().parse(INFILE)
formated_mail = {
"body": raw_mail.get_payload(),
"subject": raw_mail['subject'],
}
# small correction of text: remove email addresses from the text
reg = re.compile("[^@|\s]+@[^@]+\.[^@|\s]+") # black magic
formated_mail['body'] = re.sub(reg, "",formated_mail['body'])
return formated_mail
def write_json(dico, fileout):
"""
Write dict into json-styled file
Je collectionne les canards...
... vivants !
"""
with open(fileout, "w") as OUTFILE:
json.dump(dico, OUTFILE, ensure_ascii=False)
def correct_mail(file_in):
"""
Remove all blank lines in mail files; they break the parsing otherwise
"""
new_body = ""
with open(file_in, 'r') as INFILE:
# select only non-blank lines, use generator to avoid memory storage
for line in (l for l in INFILE if(len(l.strip()) > 1)):
new_body += line
# regenerate the mail file
with open(file_in, 'w') as INFILE:
INFILE.write(new_body)
|
mit
|
Python
|
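The added 'name' field joins the parent directory (typically a date) with the file stem; a stdlib-only sketch of that construction, using a hypothetical path:
import os
file_in = "mails/2016-03-01/message42.eml"  # hypothetical layout
date = os.path.dirname(file_in).split('/').pop() + '-'
name = os.path.splitext(os.path.basename(file_in))[0]
print(date + name)  # 2016-03-01-message42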
ae916c1ee52941bb5a1ccf87abe2a9758897bd08
|
Add deprecation warnings and message to getlines function
|
ipython/ipython,ipython/ipython
|
IPython/utils/ulinecache.py
|
IPython/utils/ulinecache.py
|
"""
This module has been deprecated since IPython 6.0.
Wrapper around linecache which decodes files to unicode according to PEP 263.
"""
import functools
import linecache
import sys
from warnings import warn
from IPython.utils import py3compat
from IPython.utils import openpy
getline = linecache.getline
# getlines has to be looked up at runtime, because doctests monkeypatch it.
@functools.wraps(linecache.getlines)
def getlines(filename, module_globals=None):
"""
Deprecated since IPython 6.0
"""
warn(("`IPython.utils.ulinecache.getlines` is deprecated since"
" IPython 6.0 and will be removed in future versions."),
DeprecationWarning, stacklevel=2)
return linecache.getlines(filename, module_globals=module_globals)
|
"""
Wrapper around linecache which decodes files to unicode according to PEP 263.
"""
import functools
import linecache
import sys
from IPython.utils import py3compat
from IPython.utils import openpy
getline = linecache.getline
# getlines has to be looked up at runtime, because doctests monkeypatch it.
@functools.wraps(linecache.getlines)
def getlines(filename, module_globals=None):
return linecache.getlines(filename, module_globals=module_globals)
|
bsd-3-clause
|
Python
|
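The same deprecate-and-delegate shape in isolation, stdlib-only (the wrapper name and message here are illustrative, not IPython's):
import functools
import linecache
from warnings import warn
@functools.wraps(linecache.getlines)
def deprecated_getlines(filename, module_globals=None):
    warn("deprecated; call linecache.getlines directly",
         DeprecationWarning, stacklevel=2)
    return linecache.getlines(filename, module_globals=module_globals)
lines = deprecated_getlines(__file__)  # run with -W error::DeprecationWarning to surface it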
04065919be55d8e4371cc1e7fec1a0148298ccf7
|
throw if obj is not serializable
|
Geotab/mygeotab-python
|
mygeotab/serializers.py
|
mygeotab/serializers.py
|
# -*- coding: utf-8 -*-
"""
mygeotab.serializers
~~~~~~~~~~~~~~~~~~~~
JSON serialization and deserialization helper objects for the MyGeotab API.
"""
import re
import arrow
import six
use_rapidjson = False
try:
import rapidjson
DATETIME_MODE = rapidjson.DM_SHIFT_TO_UTC | rapidjson.DM_ISO8601
use_rapidjson = True
except ImportError:
pass
import json
from mygeotab import dates
DATETIME_REGEX = re.compile(r"^\d{4}\-\d{2}\-\d{2}")
def json_serialize(obj):
if use_rapidjson:
return rapidjson.dumps(obj, default=object_serializer)
return json.dumps(obj, default=object_serializer, separators=(",", ":"))
def json_deserialize(json_str):
if use_rapidjson:
return rapidjson.loads(json_str, datetime_mode=DATETIME_MODE)
return json.loads(json_str, object_hook=object_deserializer)
def object_serializer(obj):
"""Helper to serialize a field into a compatible MyGeotab object.
:param obj: The object.
"""
if (type(obj) is dict) and 'isoformat' in obj:
return dates.format_iso_datetime(obj)
raise TypeError("Unserializable object {} of type {}".format(obj, type(obj)))
def object_deserializer(obj):
"""Helper to deserialize a raw result dict into a proper dict.
:param obj: The dict.
"""
for key, val in obj.items():
if isinstance(val, six.string_types) and DATETIME_REGEX.search(val):
try:
obj[key] = dates.localize_datetime(arrow.get(val).datetime)
except (ValueError, arrow.parser.ParserError):
obj[key] = val
return obj
|
# -*- coding: utf-8 -*-
"""
mygeotab.serializers
~~~~~~~~~~~~~~~~~~~~
JSON serialization and deserialization helper objects for the MyGeotab API.
"""
import re
import arrow
import six
use_rapidjson = False
try:
import rapidjson
DATETIME_MODE = rapidjson.DM_SHIFT_TO_UTC | rapidjson.DM_ISO8601
use_rapidjson = True
except ImportError:
pass
import json
from mygeotab import dates
DATETIME_REGEX = re.compile(r"^\d{4}\-\d{2}\-\d{2}")
def json_serialize(obj):
if use_rapidjson:
return rapidjson.dumps(obj, default=object_serializer)
return json.dumps(obj, default=object_serializer, separators=(",", ":"))
def json_deserialize(json_str):
if use_rapidjson:
return rapidjson.loads(json_str, datetime_mode=DATETIME_MODE)
return json.loads(json_str, object_hook=object_deserializer)
def object_serializer(obj):
"""Helper to serialize a field into a compatible MyGeotab object.
:param obj: The object.
"""
return dates.format_iso_datetime(obj) if ((type(obj) is dict) and 'isoformat' in obj) else obj
def object_deserializer(obj):
"""Helper to deserialize a raw result dict into a proper dict.
:param obj: The dict.
"""
for key, val in obj.items():
if isinstance(val, six.string_types) and DATETIME_REGEX.search(val):
try:
obj[key] = dates.localize_datetime(arrow.get(val).datetime)
except (ValueError, arrow.parser.ParserError):
obj[key] = val
return obj
|
apache-2.0
|
Python
|
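The changed contract: anything the helper cannot turn into a compatible value now surfaces as a TypeError instead of passing through silently. A stdlib-only sketch of the same contract with a simplified serializer (the hasattr check is a stand-in, not the MyGeotab condition):
import datetime
import json
def serialize(obj):  # hypothetical stand-in for object_serializer
    if hasattr(obj, "isoformat"):
        return obj.isoformat()
    raise TypeError("Unserializable object {} of type {}".format(obj, type(obj)))
print(json.dumps({"when": datetime.datetime(2020, 1, 1)}, default=serialize))
json.dumps({"bad": object()}, default=serialize)  # raises TypeError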
c2c4e47f5cdae6e683e87dcc8c7b536633755c5a
|
fix with black formatter
|
mbeacom/locust,locustio/locust,mbeacom/locust,locustio/locust,locustio/locust,locustio/locust,mbeacom/locust,mbeacom/locust
|
examples/distribuited_execution_terraform/aws/plan/basic.py
|
examples/distribuited_execution_terraform/aws/plan/basic.py
|
import time
from locust import HttpUser, task, between
class Quickstart(HttpUser):
wait_time = between(1, 5)
@task
def google(self):
self.client.request_name = "google"
self.client.get("https://google.com/")
@task
def microsoft(self):
self.client.request_name = "microsoft"
self.client.get("https://microsoft.com/")
@task
def facebook(self):
self.client.request_name = "facebook"
self.client.get("https://facebook.com/")
|
import time
from locust import HttpUser, task, between
class Quickstart(HttpUser):
wait_time = between(1, 5)
@task
def google(self):
self.client.request_name = "google"
self.client.get("https://google.com/")
@task
def microsoft(self):
self.client.request_name = "microsoft"
self.client.get("https://microsoft.com/")
@task
def facebook(self):
self.client.request_name = "facebook"
self.client.get("https://facebook.com/")
|
mit
|
Python
|
a967fbb3b38e0788ccbde0650076ab05e693806a
|
Bump version number.
|
GreatFruitOmsk/nativeconfig
|
nativeconfig/version.py
|
nativeconfig/version.py
|
VERSION = '3.0.0'
|
VERSION = '2.9.1'
|
mit
|
Python
|
1bfab9dd43fc52bfdea0943703ee530e3b0f98de
|
remove SpecsParser
|
kaczmarj/neurodocker,kaczmarj/neurodocker
|
neurodocker/__init__.py
|
neurodocker/__init__.py
|
# Author: Jakub Kaczmarzyk <[email protected]>
from __future__ import absolute_import
import logging
import sys
LOG_FORMAT = '[NEURODOCKER %(asctime)s %(levelname)s]: %(message)s'
logging.basicConfig(stream=sys.stdout, datefmt='%H:%M:%S', level=logging.INFO,
format=LOG_FORMAT)
from neurodocker.docker import DockerContainer, DockerImage
from neurodocker.dockerfile import Dockerfile
def _get_version():
"""Return version string."""
import os
BASE_PATH = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(BASE_PATH, "VERSION"), 'r') as fp:
return fp.read().strip()
__version__ = _get_version()
|
# Author: Jakub Kaczmarzyk <[email protected]>
from __future__ import absolute_import
import logging
import sys
LOG_FORMAT = '[NEURODOCKER %(asctime)s %(levelname)s]: %(message)s'
logging.basicConfig(stream=sys.stdout, datefmt='%H:%M:%S', level=logging.INFO,
format=LOG_FORMAT)
from neurodocker import interfaces
SUPPORTED_SOFTWARE = {'afni': interfaces.AFNI,
'ants': interfaces.ANTs,
'freesurfer': interfaces.FreeSurfer,
'fsl': interfaces.FSL,
'miniconda': interfaces.Miniconda,
'mrtrix3': interfaces.MRtrix3,
'neurodebian': interfaces.NeuroDebian,
'spm': interfaces.SPM,
}
from neurodocker.docker import DockerContainer, DockerImage
from neurodocker.dockerfile import Dockerfile
from neurodocker.parser import SpecsParser
def _get_version():
"""Return version string."""
import os
BASE_PATH = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(BASE_PATH, "VERSION"), 'r') as fp:
return fp.read().strip()
__version__ = _get_version()
|
apache-2.0
|
Python
|
44d74984bd4168eddb4cc5f9c0e77aad4e498a02
|
fix broken plots
|
saketkc/moca,saketkc/moca,saketkc/moca
|
moca/plotter/__init__.py
|
moca/plotter/__init__.py
|
from .plotter import create_plot
|
from .seqstats import perform_t_test
from .seqstats import get_pearson_corr
from .plotter import create_plot
|
isc
|
Python
|
d2f1595fbb9e8d29e2126aa9453f4159e9b85a0d
|
add event to receive panel on focus
|
SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview
|
guicomm/events.py
|
guicomm/events.py
|
import wx.lib.newevent
# plot data
(NewPlotEvent, EVT_NEW_PLOT) = wx.lib.newevent.NewEvent()
# print the messages on statusbar
(StatusEvent, EVT_STATUS) = wx.lib.newevent.NewEvent()
#create a panel slicer
(SlicerPanelEvent, EVT_SLICER_PANEL) = wx.lib.newevent.NewEvent()
#print update parameters for panel slicer
(SlicerParamUpdateEvent, EVT_SLICER_PARS_UPDATE) = wx.lib.newevent.NewEvent()
#update the slicer from the panel
(SlicerParameterEvent, EVT_SLICER_PARS) = wx.lib.newevent.NewEvent()
#slicer event
(SlicerEvent, EVT_SLICER) = wx.lib.newevent.NewEvent()
# event containing a dictionary of names and errors of selected data
(ErrorDataEvent, ERR_DATA) = wx.lib.newevent.NewEvent()
## event that destroys a page associated with a Data1D removed from the graph
(RemoveDataEvent, EVT_REMOVE_DATA) = wx.lib.newevent.NewEvent()
## event that allows adding more data to the same plot
(AddManyDataEvent, EVT_ADD_MANY_DATA) = wx.lib.newevent.NewEvent()
|
import wx.lib.newevent
# plot data
(NewPlotEvent, EVT_NEW_PLOT) = wx.lib.newevent.NewEvent()
# print the messages on statusbar
(StatusEvent, EVT_STATUS) = wx.lib.newevent.NewEvent()
#create a panel slicer
(SlicerPanelEvent, EVT_SLICER_PANEL) = wx.lib.newevent.NewEvent()
#print update parameters for panel slicer
(SlicerParamUpdateEvent, EVT_SLICER_PARS_UPDATE) = wx.lib.newevent.NewEvent()
#update the slicer from the panel
(SlicerParameterEvent, EVT_SLICER_PARS) = wx.lib.newevent.NewEvent()
#slicer event
(SlicerEvent, EVT_SLICER) = wx.lib.newevent.NewEvent()
# event containing a dictionary of names and errors of selected data
(ErrorDataEvent, ERR_DATA) = wx.lib.newevent.NewEvent()
## event that destroys a page associated with a Data1D removed from the graph
(RemoveDataEvent, EVT_REMOVE_DATA) = wx.lib.newevent.NewEvent()
|
bsd-3-clause
|
Python
|
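Each pair returned by NewEvent is an event class plus its binder; a minimal sketch of producing and consuming one, assuming wxPython is installed (the frame and handler are hypothetical):
import wx
import wx.lib.newevent
(AddManyDataEvent, EVT_ADD_MANY_DATA) = wx.lib.newevent.NewEvent()
class PlotFrame(wx.Frame):
    def __init__(self):
        wx.Frame.__init__(self, None)
        self.Bind(EVT_ADD_MANY_DATA, self.on_add_many)
    def on_add_many(self, event):
        print(event.data)  # attributes come from the kwargs given at post time
# elsewhere: wx.PostEvent(frame, AddManyDataEvent(data=[plot1, plot2]))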
a0e07c3ecf84219b79889509e29da0b800e36a97
|
fix angle normalization in get_draw_angles()
|
mozman/ezdxf,mozman/ezdxf,mozman/ezdxf,mozman/ezdxf,mozman/ezdxf
|
src/ezdxf/addons/drawing/utils.py
|
src/ezdxf/addons/drawing/utils.py
|
# Created: 06.2020
# Copyright (c) 2020, Matthew Broadway
# License: MIT License
import enum
import math
from math import tau
from typing import Union, List
from ezdxf.addons.drawing.type_hints import Radians
from ezdxf.entities import Face3d, Solid, Trace
from ezdxf.math import Vector, Z_AXIS, OCS
def normalize_angle(theta: Radians) -> Radians:
# have to mod tau twice to obtain [0, tau), because some angles once normalised become exactly equal to tau
# e.g. (-1e-16 % tau) == tau
# so (-1e-16 % tau) % tau == 0.0
return (theta % tau) % tau
def get_draw_angles(start: float, end: float, extrusion: Vector):
if extrusion.isclose(Z_AXIS):
return start, end
else:
ocs = OCS(extrusion)
s = ocs.to_wcs(Vector.from_angle(start))
e = ocs.to_wcs(Vector.from_angle(end))
return normalize_angle(e.angle), normalize_angle(s.angle)
def get_tri_or_quad_points(shape: Union[Face3d, Solid, Trace]) -> List[Vector]:
d = shape.dxf
vertices: List[Vector] = [d.vtx0, d.vtx1, d.vtx2]
if d.vtx3 != d.vtx2: # when the face is a triangle, vtx2 == vtx3
vertices.append(d.vtx3)
if not vertices[0].isclose(vertices[-1]):
vertices.append(vertices[0])
return vertices
|
# Created: 06.2020
# Copyright (c) 2020, Matthew Broadway
# License: MIT License
import enum
import math
from math import tau
from typing import Union, List
from ezdxf.addons.drawing.type_hints import Radians
from ezdxf.entities import Face3d, Solid, Trace
from ezdxf.math import Vector, Z_AXIS, OCS
def normalize_angle(theta: Radians) -> Radians:
# have to mod tau twice to obtain [0, tau), because some angles once normalised become exactly equal to tau
# e.g. (-1e-16 % tau) == tau
# so (-1e-16 % tau) % tau == 0.0
return (theta % tau) % tau
def get_draw_angles(start: float, end: float, extrusion: Vector):
if extrusion.isclose(Z_AXIS):
return start, end
else:
ocs = OCS(extrusion)
s = ocs.to_wcs(Vector.from_angle(start))
e = ocs.to_wcs(Vector.from_angle(end))
return e.angle % math.tau, s.angle % math.tau
def get_tri_or_quad_points(shape: Union[Face3d, Solid, Trace]) -> List[Vector]:
d = shape.dxf
vertices: List[Vector] = [d.vtx0, d.vtx1, d.vtx2]
if d.vtx3 != d.vtx2: # when the face is a triangle, vtx2 == vtx3
vertices.append(d.vtx3)
if not vertices[0].isclose(vertices[-1]):
vertices.append(vertices[0])
return vertices
|
mit
|
Python
|
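The double modulo matters for tiny negative angles, where a single % tau can round to exactly tau; a stdlib-only check:
import math
theta = -1e-16
print(theta % math.tau)                # 6.283185307179586, i.e. tau itself
print((theta % math.tau) % math.tau)   # 0.0, as get_draw_angles now relies on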
ad42d5df34074bfb21229a962d4b2a548a796e9a
|
Update data_validation/jellyfish_distance.py
|
GoogleCloudPlatform/professional-services-data-validator,GoogleCloudPlatform/professional-services-data-validator
|
data_validation/jellyfish_distance.py
|
data_validation/jellyfish_distance.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jellyfish
def extract_closest_match(search_key, target_list, score_cutoff=0):
"""Return str value from target list with highest score using Jaro
for String distance.
search_key (str): A string used to search for cloest match.
target_list (list): A list of strings for comparison.
score_cutoff (float): A scorre cutoff (betwen 0 and 1) to be met.
"""
highest_score = score_cutoff
highest_value_key = None
for target_key in target_list:
score = jellyfish.jaro_distance(search_key, target_key)
if score >= highest_score:
highest_score = score
highest_value_key = target_key
return highest_value_key
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jellyfish
def extractClosestMatch(search_key, target_list, score_cutoff=0):
"""Return str value from target list with highest score using Jaro
for String distance.
search_key (str): A string used to search for cloest match.
target_list (list): A list of strings for comparison.
score_cutoff (float): A scorre cutoff (betwen 0 and 1) to be met.
"""
highest_score = score_cutoff
highest_value_key = None
for target_key in target_list:
score = jellyfish.jaro_distance(search_key, target_key)
if score >= highest_score:
highest_score = score
highest_value_key = target_key
return highest_value_key
|
apache-2.0
|
Python
|
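A usage sketch of the renamed helper, assuming the jellyfish package is installed (the candidate keys are made up):
import jellyfish
from data_validation.jellyfish_distance import extract_closest_match
columns = ["customer_id", "customer_name", "order_id"]
match = extract_closest_match("cust_id", columns, score_cutoff=0.5)
print(match, jellyfish.jaro_distance("cust_id", match))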
7c2f915b0ca89db2c44a73af8db3f803687f068b
|
reimplement load_library and backup_library
|
j39m/katowice,j39m/katowice,j39m/katowice
|
unskipper.py
|
unskipper.py
|
#! /usr/bin/env python3
# unskipper.py will prune all skipcounts from your Quod Libet library;
# the resulting lack of '~#skipcount' in your per-song entries will all
# be interpreted by QL as being skipcount 0.
import os
import sys
import shutil
import pickle
import quodlibet.library
HOME = os.getenv("HOME")
QLDIR = ".quodlibet"
SONGS_PATH = os.path.join(
HOME,
QLDIR,
"songs",
)
BKUP_PATH = os.path.join(
HOME,
QLDIR,
"songs.bk",
)
def load_library():
songs = quodlibet.library.init()
songs.load(SONGS_PATH)
return songs
def backup_library():
with open(SONGS_PATH, "rb") as spt, open(BKUP_PATH, "wb") as bpt:
shutil.copyfileobj(spt, bpt)
def prune_skips(song_pickle):
"""Main function for pruning skips from a pickle."""
raise NotImplementedError
found_skips = False
skipfmt = "prune {:d} skips on ``{:s}.''"
for song in song_pickle:
try:
skipmsg = skipfmt.format(song.pop("~#skipcount"), song["title"])
found_skips = True
print(skipmsg)
except KeyError:
continue
# write the finished pickle down
try:
pickle.dump(song_pickle, open(SONGS_PATH, "wb"))
except pickle.PicklingError:
print ("NANISORE?")
return 1
return 0
def main():
"""The main entry point."""
raise NotImplementedError
songs = load_library()
backup_library()
return prune_skips(songs)
##### EXECUTION BEGINS HEEEERREEEEE #####
if __name__ == "__main__":
ret = main()
sys.exit(ret)
|
#! /usr/bin/env python3
# unskipper.py will prune all skipcounts from your Quod Libet library;
# the resulting lack of '~#skipcount' in your per-song entries will all
# be interpreted by QL as being skipcount 0.
import os
import sys
import shutil
import pickle
HOME = os.getenv("HOME")
QLDIR = ".quodlibet"
PATH_TO_SONGS = os.path.join(
HOME,
QLDIR,
"songs",
)
PATH_TO_BKUP = os.path.join(
HOME,
QLDIR,
"unpruned",
)
def load_library():
raise NotImplementedError
sfh = open(PATH_TO_SONGS, 'r')
songs = pickle.load(sfh)
sfh.close()
return songs
def backup_library():
raise NotImplementedError
sfh = open(PATH_TO_SONGS, "rb")
bfh = open(PATH_TO_BKUP, "wb")
shutil.copyfileobj(sfh, bfh)
sfh.close()
bfh.close()
return 0
def prune_skips(song_pickle):
"""Main function for pruning skips from a pickle."""
raise NotImplementedError
found_skips = False
skipfmt = "prune {:d} skips on ``{:s}.''"
for song in song_pickle:
try:
skipmsg = skipfmt.format(song.pop("~#skipcount"), song["title"])
found_skips = True
print(skipmsg)
except KeyError:
continue
# write the finished pickle down
try:
pickle.dump(song_pickle, open(PATH_TO_SONGS, "w"))
except pickle.PicklingError:
print ("NANISORE?")
return 1
return 0
def query_library_by_tag(lib, val, tag="artist", corr="~#playcount", rkey="title"):
"""
query the library "lib" by the tag "tag," searching for entries with tag
value "val."
returns a dictionary of the results.
"""
raise NotImplementedError
retv = {}
for song in lib:
if tag in song and val in song[tag] and corr in song:
try:
lkey = song[rkey]
except KeyError:
lkey = None
if lkey in retv:
if not isinstance(retv[lkey], list):
retv[lkey] = [retv[lkey],]
retv[lkey].append(song[corr])
else:
retv[lkey] = song[corr]
return retv
def main():
"""The main entry point."""
raise NotImplementedError
songs = load_library()
backup_library()
return prune_skips(songs)
##### EXECUTION BEGINS HEEEERREEEEE #####
if __name__ == "__main__":
ret = main()
sys.exit(ret)
|
bsd-2-clause
|
Python
|
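The rewritten backup_library reduces to a single stdlib idiom; the same pattern in isolation, with placeholder paths:
import shutil
with open("songs", "rb") as src, open("songs.bk", "wb") as dst:
    shutil.copyfileobj(src, dst)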
a889d4726189d1a7c9a9fbd074ca2c1d6eca9d98
|
delete unnecessary constraint
|
yuyu2172/chainercv,pfnet/chainercv,yuyu2172/chainercv,chainer/chainercv,chainer/chainercv
|
chainercv/links/model/extraction_chain.py
|
chainercv/links/model/extraction_chain.py
|
import chainer
import collections
class ExtractionChain(chainer.Chain):
def __init__(self, layers, layer_names=None):
super(ExtractionChain, self).__init__()
if not isinstance(layers, collections.OrderedDict):
layers = collections.OrderedDict(
[(str(i), function) for i, function in enumerate(layers)])
self._layers = layers
if layer_names is None:
layer_names = self._layers.keys()[-1]
if (not isinstance(layer_names, str) and
all([isinstance(name, str) for name in layer_names])):
return_tuple = True
else:
return_tuple = False
layer_names = [layer_names]
self._return_tuple = return_tuple
self._layer_names = list(layer_names)
with self.init_scope():
for name, function in self._layers.items():
if isinstance(function, chainer.Link):
setattr(self, name, function)
def __call__(self, x):
features = {}
h = x
for name, function in self._layers.items():
h = function(h)
if name in self._layer_names:
features[name] = h
if self._return_tuple:
features = tuple(
[features[name] for name in self._layer_names])
else:
features = list(features.values())[0]
return features
def copy(self):
ret = super(ExtractionChain, self).copy()
layers = []
for name, function in self._layers.items():
if name in self._children:
function = ret[name]
layers.append((name, function))
ret.layers = collections.OrderedDict(layers)
return ret
|
import chainer
import collections
class ExtractionChain(chainer.Chain):
def __init__(self, layers, layer_names=None):
super(ExtractionChain, self).__init__()
if not isinstance(layers, collections.OrderedDict):
if layer_names is not None:
raise ValueError('`layer_names` needs to be `None` unless '
'`layers` is OrderedDict.')
layers = collections.OrderedDict(
[(str(i), function) for i, function in enumerate(layers)])
self._layers = layers
if layer_names is None:
layer_names = self._layers.keys()[-1]
if (not isinstance(layer_names, str) and
all([isinstance(name, str) for name in layer_names])):
return_tuple = True
else:
return_tuple = False
layer_names = [layer_names]
self._return_tuple = return_tuple
self._layer_names = list(layer_names)
with self.init_scope():
for name, function in self._layers.items():
if isinstance(function, chainer.Link):
setattr(self, name, function)
def __call__(self, x):
features = {}
h = x
for name, function in self._layers.items():
h = function(h)
if name in self._layer_names:
features[name] = h
if self._return_tuple:
features = tuple(
[features[name] for name in self._layer_names])
else:
features = list(features.values())[0]
return features
def copy(self):
ret = super(ExtractionChain, self).copy()
layers = []
for name, function in self._layers.items():
if name in self._children:
function = ret[name]
layers.append((name, function))
ret.layers = collections.OrderedDict(layers)
return ret
|
mit
|
Python
|
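With the constraint removed, layer_names may now accompany a plain list, whose entries are auto-named '0', '1', ...; a construction sketch, assuming chainer is installed:
import chainer.functions as F
import chainer.links as L
from chainercv.links.model.extraction_chain import ExtractionChain
model = ExtractionChain(
    [L.Linear(None, 16), F.relu, L.Linear(16, 4)],
    layer_names=['1', '2'])  # a combination rejected before this change
# model(x) returns a tuple of the relu activation and the final output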
ce143f40f3131bbd04e40cacec50cae3e725b598
|
use new package module
|
sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary
|
updatecmd.py
|
updatecmd.py
|
#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import package
import files
import shutil
import pwd
import grp
import files
def doUpdate(cfg, root, pkgName, binaries = 1, sources = 0):
if root == "/":
print "using srs to update to your actual system is dumb."
import sys
sys.exit(0)
if pkgName[0] != "/":
pkgName = cfg.packagenamespace + "/" + pkgName
pkgSet = package.PackageSet(cfg.reppath, pkgName)
if (not len(pkgSet.versionList())):
raise KeyError, "no versions exist of %s" % pkgName
pkg = pkgSet.getLatestPackage(cfg.defaultbranch)
fileList = []
packageFiles = []
if binaries:
packageFiles = packageFiles + pkg.fileList()
if sources:
packageFiles = packageFiles + pkg.sourceList()
for (fileName, version) in packageFiles:
infoFile = files.FileDB(cfg.reppath, cfg.reppath + fileName)
fileList.append(infoFile)
for infoFile in fileList:
f = infoFile.getVersion(version)
f.restore(cfg.reppath, cfg.sourcepath, root)
|
#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import package
import files
import shutil
import pwd
import grp
import files
def doUpdate(cfg, root, pkgName, binaries = 1, sources = 0):
if root == "/":
print "using srs to update to your actual system is dumb."
import sys
sys.exit(0)
if pkgName[0] != "/":
pkgName = cfg.packagenamespace + "/" + pkgName
pkgSet = package.PackageSet(cfg.reppath, pkgName)
if (not len(pkgSet.versionList())):
raise KeyError, "no versions exist of %s" % pkgName
(version, pkg) = pkgSet.getLatest()
fileList = []
packageFiles = []
if binaries:
packageFiles = packageFiles + pkg.fileList()
if sources:
packageFiles = packageFiles + pkg.sourceList()
for (fileName, version) in packageFiles:
infoFile = files.FileDB(cfg.reppath, cfg.reppath + fileName)
fileList.append(infoFile)
for infoFile in fileList:
f = infoFile.getVersion(version)
f.restore(cfg.reppath, cfg.sourcepath, root)
|
apache-2.0
|
Python
|
6ebf6e6f2e8c4e2be5e4778089a8d4a66432c88b
|
update ProgressHook
|
yuyu2172/chainercv,chainer/chainercv,pfnet/chainercv,chainer/chainercv,yuyu2172/chainercv
|
chainercv/utils/iterator/progress_hook.py
|
chainercv/utils/iterator/progress_hook.py
|
from __future__ import division
import sys
import time
class ProgressHook(object):
"""A hook class reporting the progress of iteration.
This is a hook class designed for
:func:`~chainercv.utils.apply_prediction_to_iterator`.
Args:
n_total (int): The number of samples. This argument is optional.
"""
def __init__(self, n_total=None):
self.n_total = n_total
self.start = time.time()
self.n_processed = 0
def __call__(self, in_values, out_values, rest_values):
self.n_processed += len(in_values[0])
fps = self.n_processed / (time.time() - self.start)
if self.n_total is not None:
sys.stdout.write(
'\r{:d} of {:d} samples, {:.2f} samples/sec'.format(
self.n_processed, self.n_total, fps))
else:
sys.stdout.write(
'\r{:d} samples, {:.2f} samples/sec'.format(
self.n_processed, fps))
sys.stdout.flush()
|
from __future__ import division
import sys
import time
class ProgressHook(object):
"""A hook class reporting the progress of iteration.
This is a hook class designed for
:func:`~chainercv.utils.apply_prediction_to_iterator`.
Args:
n_total (int): The number of images. This argument is optional.
"""
def __init__(self, n_total=None):
self.n_total = n_total
self.start = time.time()
self.n_processed = 0
def __call__(self, imgs, pred_values, gt_values):
self.n_processed += len(imgs)
fps = self.n_processed / (time.time() - self.start)
if self.n_total is not None:
sys.stdout.write(
'\r{:d} of {:d} images, {:.2f} FPS'.format(
self.n_processed, self.n_total, fps))
else:
sys.stdout.write(
'\r{:d} images, {:.2f} FPS'.format(
self.n_processed, fps))
sys.stdout.flush()
|
mit
|
Python
|
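The hook now counts generic samples taken from in_values rather than images; a driving sketch with dummy batches:
import time
from chainercv.utils.iterator.progress_hook import ProgressHook
hook = ProgressHook(n_total=100)
for _ in range(10):
    batch = [None] * 10      # one batch of ten inputs
    hook((batch,), (), ())   # prints '... of 100 samples, ... samples/sec'
    time.sleep(0.01)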
22f293ff16dd977c6a37b64566b37405d81cb767
|
Make the KeyIdentifier.key_id field a property.
|
atlassian/asap-authentication-python
|
atlassian_jwt_auth/key.py
|
atlassian_jwt_auth/key.py
|
import os
import re
import requests
class KeyIdentifier(object):
""" This class represents a key identifier """
def __init__(self, identifier):
self.__key_id = validate_key_identifier(identifier)
@property
def key_id(self):
return self.__key_id
def validate_key_identifier(identifier):
""" returns a validated key identifier. """
regex = re.compile('^[\w.\-\+/]*$')
_error_msg = 'Invalid key identifier %s' % identifier
if not identifier:
raise ValueError(_error_msg)
if not regex.match(identifier):
raise ValueError(_error_msg)
normalised = os.path.normpath(identifier)
if normalised != identifier:
raise ValueError(_error_msg)
if normalised.startswith('/'):
raise ValueError(_error_msg)
if '..' in normalised:
raise ValueError(_error_msg)
return identifier
class HTTPSPublicKeyRetriever(object):
""" This class retrieves public key from a https location based upon the
given key id.
"""
def __init__(self, base_url):
if not base_url.startswith('https://'):
raise ValueError('The base url must start with https://')
if not base_url.endswith('/'):
base_url += '/'
self.base_url = base_url
def retrieve(self, key_identifier):
""" returns the public key for given key_identifier. """
if not isinstance(key_identifier, KeyIdentifier):
key_identifier = KeyIdentifier(key_identifier)
PEM_FILE_TYPE = 'application/x-pem-file'
url = self.base_url + key_identifier.key_id
resp = requests.get(url,
headers={'accept': PEM_FILE_TYPE})
resp.raise_for_status()
if resp.headers['content-type'] != PEM_FILE_TYPE:
raise ValueError("Invalid content-type, '%s', for url '%s' ." %
(resp.headers['content-type'], url))
return resp.text
|
import os
import re
import requests
class KeyIdentifier(object):
""" This class represents a key identifier """
def __init__(self, identifier):
self.key_id = validate_key_identifier(identifier)
def validate_key_identifier(identifier):
""" returns a validated key identifier. """
regex = re.compile('^[\w.\-\+/]*$')
_error_msg = 'Invalid key identifier %s' % identifier
if not identifier:
raise ValueError(_error_msg)
if not regex.match(identifier):
raise ValueError(_error_msg)
normalised = os.path.normpath(identifier)
if normalised != identifier:
raise ValueError(_error_msg)
if normalised.startswith('/'):
raise ValueError(_error_msg)
if '..' in normalised:
raise ValueError(_error_msg)
return identifier
class HTTPSPublicKeyRetriever(object):
""" This class retrieves public key from a https location based upon the
given key id.
"""
def __init__(self, base_url):
if not base_url.startswith('https://'):
raise ValueError('The base url must start with https://')
if not base_url.endswith('/'):
base_url += '/'
self.base_url = base_url
def retrieve(self, key_identifier):
""" returns the public key for given key_identifier. """
if not isinstance(key_identifier, KeyIdentifier):
key_identifier = KeyIdentifier(key_identifier)
PEM_FILE_TYPE = 'application/x-pem-file'
url = self.base_url + key_identifier.key_id
resp = requests.get(url,
headers={'accept': PEM_FILE_TYPE})
resp.raise_for_status()
if resp.headers['content-type'] != PEM_FILE_TYPE:
raise ValueError("Invalid content-type, '%s', for url '%s' ." %
(resp.headers['content-type'], url))
return resp.text
|
mit
|
Python
|
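Backed by a name-mangled attribute, key_id is now effectively read-only; a quick check (the identifier value is arbitrary):
from atlassian_jwt_auth.key import KeyIdentifier
k = KeyIdentifier('service/key-1')
print(k.key_id)     # 'service/key-1'
k.key_id = 'other'  # AttributeError: can't set attribute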
fab58f03eaf09b9f286a10f5a91a945f53a92a29
|
Drop native specification
|
nanshe-org/splauncher,DudLab/splauncher,jakirkham/splauncher,DudLab/splauncher,jakirkham/splauncher,nanshe-org/splauncher
|
splauncher/core.py
|
splauncher/core.py
|
from __future__ import print_function
__author__ = "John Kirkham <[email protected]>"
__date__ = "$May 18, 2015 16:52:18 EDT$"
import datetime
import os
import logging
drmaa_logger = logging.getLogger(__name__)
try:
import drmaa
except ImportError:
# python-drmaa is not installed.
drmaa_logger.error(
"Was not able to import drmaa. " +
"If this is meant to be run using the OpenGrid submission " +
"system, then drmaa needs to be installed via pip or " +
"easy_install."
)
raise
except RuntimeError:
# The drmaa library was not specified, but python-drmaa is installed.
drmaa_logger.error(
"Was able to import drmaa. " +
"However, the drmaa library could not be found. Please " +
"either specify the location of libdrmaa.so using the " +
"DRMAA_LIBRARY_PATH environment variable or disable/remove " +
"use_drmaa from the config file."
)
raise
def main(*argv):
job_time = datetime.datetime.utcnow()
job_time_str = job_time.isoformat().replace(":", ".")
job_name = "splaunch_" + argv[1].replace("/", "-") + "_" + job_time_str
s = drmaa.Session()
s.initialize()
session_name = s.contact
job_template = s.createJobTemplate()
job_template.jobName = job_name
job_template.remoteCommand = argv[1]
job_template.args = argv[2:]
job_template.jobEnvironment = os.environ
job_template.inputPath = "localhost:" + os.devnull
job_template.outputPath = "localhost:" + job_name + ".out"
job_template.errorPath = "localhost:" + job_name + ".err"
job_template.workingDirectory = os.getcwd()
process_id = s.runJob(job_template)
s.deleteJobTemplate(job_template)
s.exit()
print(
"From context \"%s\" launched job \"%s\" with process ID \"%s\"." % (
session_name, job_name, process_id
)
)
return(0)
|
from __future__ import print_function
__author__ = "John Kirkham <[email protected]>"
__date__ = "$May 18, 2015 16:52:18 EDT$"
import datetime
import os
import logging
drmaa_logger = logging.getLogger(__name__)
try:
import drmaa
except ImportError:
# python-drmaa is not installed.
drmaa_logger.error(
"Was not able to import drmaa. " +
"If this is meant to be run using the OpenGrid submission " +
"system, then drmaa needs to be installed via pip or " +
"easy_install."
)
raise
except RuntimeError:
# The drmaa library was not specified, but python-drmaa is installed.
drmaa_logger.error(
"Was able to import drmaa. " +
"However, the drmaa library could not be found. Please " +
"either specify the location of libdrmaa.so using the " +
"DRMAA_LIBRARY_PATH environment variable or disable/remove " +
"use_drmaa from the config file."
)
raise
def main(*argv):
job_time = datetime.datetime.utcnow()
job_time_str = job_time.isoformat().replace(":", ".")
job_name = "splaunch_" + argv[1].replace("/", "-") + "_" + job_time_str
s = drmaa.Session()
s.initialize()
session_name = s.contact
job_template = s.createJobTemplate()
job_template.jobName = job_name
job_template.remoteCommand = argv[1]
job_template.args = argv[2:]
job_template.jobEnvironment = os.environ
job_template.inputPath = "localhost:" + os.devnull
job_template.outputPath = "localhost:" + job_name + ".out"
job_template.errorPath = "localhost:" + job_name + ".err"
job_template.workingDirectory = os.getcwd()
job_template.nativeSpecification = "-pe batch " + str(1)
process_id = s.runJob(job_template)
s.deleteJobTemplate(job_template)
s.exit()
print(
"From context \"%s\" launched job \"%s\" with process ID \"%s\"." % (
session_name, job_name, process_id
)
)
return(0)
|
bsd-3-clause
|
Python
|
e61247230b291bcf9f9dcc3050876b9f812c6541
|
Change URL for methodcheck; thanks to Steve Steiner (http://www.atxconsulting.com/blog/tjfontaine/2010/02/09/updated-linode-api#comment-195)
|
ryanshawty/linode-python,tjfontaine/linode-python
|
methodcheck.py
|
methodcheck.py
|
#!/usr/bin/python
"""
A quick script to verify that api.py is in sync with Linode's
published list of methods.
Copyright (c) 2009 Ryan Tucker <[email protected]>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# URL of API documentation
apidocurl = 'http://www.linode.com/api/autodoc.cfm'
import api
import re
import urllib
tmpfile, httpheaders = urllib.urlretrieve(apidocurl)
tmpfd = open(tmpfile)
local_methods = api.Api.valid_commands()
remote_methods = []
# Read in the list of methods Linode has
rg = re.compile('.*?\\?method=((?:[a-z][a-z\\.\\d\\-]+)\\.(?:[a-z][a-z\\-]+))(?![\\w\\.])')
for i in tmpfd.readlines():
m = rg.search(i)
if m:
remote_methods.append(m.group(1).replace('.','_'))
# Cross-check!
for i in local_methods:
if i not in remote_methods:
print 'REMOTE Missing: ' + i
for i in remote_methods:
if i not in local_methods:
print 'LOCAL Missing: ' + i
|
#!/usr/bin/python
"""
A quick script to verify that api.py is in sync with Linode's
published list of methods.
Copyright (c) 2009 Ryan Tucker <[email protected]>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# URL of API documentation
apidocurl = 'http://beta.linode.com/api/autodoc.cfm'
import api
import re
import urllib
tmpfile, httpheaders = urllib.urlretrieve(apidocurl)
tmpfd = open(tmpfile)
local_methods = api.Api.valid_commands()
remote_methods = []
# Read in the list of methods Linode has
rg = re.compile('.*?\\?method=((?:[a-z][a-z\\.\\d\\-]+)\\.(?:[a-z][a-z\\-]+))(?![\\w\\.])')
for i in tmpfd.readlines():
m = rg.search(i)
if m:
remote_methods.append(m.group(1).replace('.','_'))
# Cross-check!
for i in local_methods:
if i not in remote_methods:
print 'REMOTE Missing: ' + i
for i in remote_methods:
if i not in local_methods:
print 'LOCAL Missing: ' + i
|
mit
|
Python
|
cc413b49ce9dd63fcbe9396a5ac1c8c68872a6c1
|
Update information in pkginfo, including the version information.
|
PyCQA/astroid
|
astroid/__pkginfo__.py
|
astroid/__pkginfo__.py
|
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""astroid packaging information"""
distname = 'astroid'
modname = 'astroid'
numversion = (1, 3, 0)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0', 'six']
license = 'LGPL'
author = 'Logilab'
author_email = '[email protected]'
mailinglist = "mailto://%s" % author_email
web = 'http://bitbucket.org/logilab/astroid'
description = "A abstract syntax tree for Python with inference support."
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
]
|
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""astroid packaging information"""
distname = 'astroid'
modname = 'astroid'
numversion = (1, 2, 1)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0', 'six']
license = 'LGPL'
author = 'Logilab'
author_email = '[email protected]'
mailinglist = "mailto://%s" % author_email
web = 'http://bitbucket.org/logilab/astroid'
description = "rebuild a new abstract syntax tree from Python's ast"
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
]
|
lgpl-2.1
|
Python
|
a891594150a4456e0894f2b5b70f2bd4b650bd77
|
use debug to log dqn_freeze model update to prevent log overflow
|
kengz/openai_lab,kengz/openai_gym,kengz/openai_gym,kengz/openai_lab,kengz/openai_lab,kengz/openai_gym
|
rl/agent/dqn_freeze.py
|
rl/agent/dqn_freeze.py
|
import os
import numpy as np
from rl.agent.double_dqn import DoubleDQN
from rl.agent.dqn import DQN
from keras.models import load_model
from rl.util import logger
class DQNFreeze(DoubleDQN):
'''
Extends DQN agent to freeze target Q network
    and periodically update it to the weights of the
exploration model
Avoids oscillations and breaks correlation
between Q-network and target
http://www0.cs.ucl.ac.uk/staff/d.silver/web/Resources_files/deep_rl.pdf
Exploration model periodically saved and loaded into target Q network
'''
def compute_Q_states(self, minibatch):
Q_states = np.clip(self.model.predict(minibatch['states']),
-self.clip_val, self.clip_val)
Q_next_states = np.clip(self.model2.predict(minibatch['next_states']),
-self.clip_val, self.clip_val)
Q_next_states_max = np.amax(Q_next_states, axis=1)
return (Q_states, Q_next_states, Q_next_states_max)
def train_an_epoch(self):
# Should call DQN to train an epoch, not DoubleDQN
return DQN.train_an_epoch(self)
def update_target_model(self):
pid = os.getpid()
name = 'temp_Q_model_freeze_' + str(pid) + '.h5'
self.model.save(name)
self.model2 = load_model(name)
logger.debug("Updated target model weights")
def update(self, sys_vars):
'''
Agent update apart from training the Q function
'''
done = sys_vars['done']
timestep_check = sys_vars['t'] == (self.env_spec['timestep_limit'] - 1)
if done or timestep_check:
self.update_target_model()
super(DQNFreeze, self).update(sys_vars)
|
import os
import numpy as np
from rl.agent.double_dqn import DoubleDQN
from rl.agent.dqn import DQN
from keras.models import load_model
from rl.util import logger
class DQNFreeze(DoubleDQN):
'''
Extends DQN agent to freeze target Q network
    and periodically update it to the weights of the
exploration model
Avoids oscillations and breaks correlation
between Q-network and target
http://www0.cs.ucl.ac.uk/staff/d.silver/web/Resources_files/deep_rl.pdf
Exploration model periodically saved and loaded into target Q network
'''
def compute_Q_states(self, minibatch):
Q_states = np.clip(self.model.predict(minibatch['states']),
-self.clip_val, self.clip_val)
Q_next_states = np.clip(self.model2.predict(minibatch['next_states']),
-self.clip_val, self.clip_val)
Q_next_states_max = np.amax(Q_next_states, axis=1)
return (Q_states, Q_next_states, Q_next_states_max)
def train_an_epoch(self):
# Should call DQN to train an epoch, not DoubleDQN
return DQN.train_an_epoch(self)
def update_target_model(self):
pid = os.getpid()
name = 'temp_Q_model_freeze_' + str(pid) + '.h5'
self.model.save(name)
self.model2 = load_model(name)
logger.info("Updated target model weights")
def update(self, sys_vars):
'''
Agent update apart from training the Q function
'''
done = sys_vars['done']
timestep_check = sys_vars['t'] == (self.env_spec['timestep_limit'] - 1)
if done or timestep_check:
self.update_target_model()
super(DQNFreeze, self).update(sys_vars)
|
mit
|
Python
|
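The freeze trick above boils down to keeping two copies of the weights and only syncing them at episode boundaries. A framework-free sketch of that pattern with invented toy weights (the Keras save/load round-trip in the agent is just one way to perform the copy):

import numpy as np

class TinyFreezeAgent:
    """Online weights train every step; the target copy stays frozen
    and is only refreshed at episode boundaries."""
    def __init__(self):
        self.online_weights = np.zeros(4)
        self.target_weights = self.online_weights.copy()  # frozen copy

    def train_step(self):
        # Online weights move every step; target stays fixed in between.
        self.online_weights += np.random.randn(4) * 0.01

    def update_target(self):
        # Mirrors update_target_model(): copy online -> target.
        self.target_weights = self.online_weights.copy()

agent = TinyFreezeAgent()
for episode in range(3):
    for t in range(10):
        agent.train_step()
    agent.update_target()  # like the done/timestep_check branch in update()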
38efa9aa11f949fc8bd0b6c4d1a673ca3416dd3c
|
Fix up iterator implementation in LISTALLOBJECTs
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
groundstation/transfer/request_handlers/listallobjects.py
|
groundstation/transfer/request_handlers/listallobjects.py
|
import groundstation.transfer.request
from groundstation import settings
from groundstation import logger
log = logger.getLogger(__name__)
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i+n]
def handle_listallobjects(self):
if not self.station.recently_queried(self.origin):
log.info("%s not up to date, issuing LISTALLOBJECTS" % (self.origin))
# Pass in the station for gizmo_factory in the constructor
listobjects = groundstation.transfer.request.Request("LISTALLOBJECTS", station=self.station)
self.stream.enqueue(listobjects)
else:
log.info("object cache for %s still valid" % (self.origin))
log.info("Handling LISTALLOBJECTS")
payload = self.station.objects()
if len(payload) > settings.LISTALLOBJECTS_CHUNK_THRESHOLD:
log.info("Lots of objects to send, registering an iterator")
@self.station.register_iter
def iterator():
for chunk in chunks(payload, settings.LISTALLOBJECTS_CHUNK_THRESHOLD):
log.info("Sending %i object descriptions" % (len(chunk)))
response = self._Response(self.id, "DESCRIBEOBJECTS",
chr(0).join(chunk))
self.stream.enqueue(response)
yield
self.TERMINATE()
else:
log.info("Sending %i object descriptions" % (len(payload)))
response = self._Response(self.id, "DESCRIBEOBJECTS",
chr(0).join(payload))
self.stream.enqueue(response)
self.TERMINATE()
|
import groundstation.transfer.request
from groundstation import settings
from groundstation import logger
log = logger.getLogger(__name__)
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i+n]
def handle_listallobjects(self):
if not self.station.recently_queried(self.origin):
log.info("%s not up to date, issuing LISTALLOBJECTS" % (self.origin))
# Pass in the station for gizmo_factory in the constructor
listobjects = groundstation.transfer.request.Request("LISTALLOBJECTS", station=self.station)
self.stream.enqueue(listobjects)
else:
log.info("object cache for %s still valid" % (self.origin))
log.info("Handling LISTALLOBJECTS")
payload = self.station.objects()
if len(payload) > settings.LISTALLOBJECTS_CHUNK_THRESHOLD:
log.info("Lots of objects to send, registering an iterator")
def _():
for chunk in chunks(payload, settings.LISTALLOBJECTS_CHUNK_THRESHOLD):
log.info("Sending %i object descriptions" % (len(payload)))
response = self._Response(self.id, "DESCRIBEOBJECTS",
chr(0).join(payload))
self.stream.enqueue(response)
yield
self.TERMINATE()
self.station.register_iter(_())
else:
log.info("Sending %i object descriptions" % (len(payload)))
response = self._Response(self.id, "DESCRIBEOBJECTS",
chr(0).join(payload))
self.stream.enqueue(response)
self.TERMINATE()
|
mit
|
Python
|
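A quick usage sketch of the chunks() generator above (Python 3, so range instead of xrange; the payload values are invented):

def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    for i in range(0, len(l), n):
        yield l[i:i + n]

payload = ['obj%d' % i for i in range(7)]   # pretend object descriptions
print(list(chunks(payload, 3)))
# [['obj0', 'obj1', 'obj2'], ['obj3', 'obj4', 'obj5'], ['obj6']]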
1fc0026aa72f7fcf66c221de402971023361e6c3
|
implement memo logger
|
arskom/spyne,arskom/spyne,arskom/spyne
|
spyne/util/memo.py
|
spyne/util/memo.py
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
"""The module for memoization stuff.
When you have memory leaks in your daemon, the reason could very well be
reckless usage of the tools here.
"""
import logging
logger = logging.getLogger(__name__)
import functools
MEMOIZATION_STATS_LOG_INTERVAL = 60.0
def _do_log():
logger.debug("%d memoizers", len(memoize.registry))
for memo in memoize.registry:
logger.debug("%r: %d entries.", memo.func, len(memo.memo))
def start_memoization_stats_logger():
import threading
_do_log()
t = threading.Timer(MEMOIZATION_STATS_LOG_INTERVAL,
start_memoization_stats_logger)
t.daemon = True
t.start()
class memoize(object):
"""A memoization decorator that keeps caching until reset."""
registry = []
def __init__(self, func):
self.func = func
self.memo = {}
memoize.registry.append(self)
def __call__(self, *args, **kwargs):
key = self.get_key(args, kwargs)
        if key not in self.memo:
self.memo[key] = self.func(*args, **kwargs)
return self.memo[key]
def get_key(self, args, kwargs):
return tuple(args), tuple(kwargs.items())
def reset(self):
self.memo = {}
class memoize_id(memoize):
"""A memoization decorator that keeps caching until reset for unhashable
types. It works on id()'s of objects instead."""
def get_key(self, args, kwargs):
return tuple([id(a) for a in args]), \
tuple([(k, id(v)) for k, v in kwargs.items()])
class memoize_id_method(memoize_id):
"""A memoization decorator that keeps caching until reset for unhashable
types on instance methods. It works on id()'s of objects instead."""
def __get__(self, obj, objtype):
"""Support instance methods."""
fn = functools.partial(self.__call__, obj)
fn.reset = self.reset
return fn
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
"""The module for memoization stuff.
When you have memory leaks in your daemon, the reason could very well be
reckless usage of the tools here.
"""
import functools
class memoize(object):
"""A memoization decorator that keeps caching until reset."""
def __init__(self, func):
self.func = func
self.memo = {}
def __call__(self, *args, **kwargs):
key = self.get_key(args, kwargs)
        if key not in self.memo:
self.memo[key] = self.func(*args, **kwargs)
return self.memo[key]
def get_key(self, args, kwargs):
return tuple(args), tuple(kwargs.items())
def reset(self):
self.memo = {}
class memoize_id(memoize):
"""A memoization decorator that keeps caching until reset for unhashable
types. It works on id()'s of objects instead."""
def get_key(self, args, kwargs):
return tuple([id(a) for a in args]), \
tuple([(k, id(v)) for k, v in kwargs.items()])
class memoize_id_method(memoize_id):
"""A memoization decorator that keeps caching until reset for unhashable
types on instance methods. It works on id()'s of objects instead."""
def __get__(self, obj, objtype):
"""Support instance methods."""
fn = functools.partial(self.__call__, obj)
fn.reset = self.reset
return fn
|
lgpl-2.1
|
Python
|
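A compact usage sketch of the memoize decorator above; the class body is trimmed to the caching essentials and the fib example is invented:

class memoize(object):
    """Trimmed-down version of the decorator above: cache until reset."""
    def __init__(self, func):
        self.func = func
        self.memo = {}
    def __call__(self, *args, **kwargs):
        key = (tuple(args), tuple(kwargs.items()))
        if key not in self.memo:
            self.memo[key] = self.func(*args, **kwargs)
        return self.memo[key]

@memoize
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))        # fast: each n is computed exactly once
print(len(fib.memo))  # 31 cached entries, one per n in 0..30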
ae044f507f3bcf508648b1a73a802b657009cd48
|
fix nxos_reboot command format (#30549)
|
thaim/ansible,thaim/ansible
|
lib/ansible/modules/network/nxos/nxos_reboot.py
|
lib/ansible/modules/network/nxos/nxos_reboot.py
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_reboot
extends_documentation_fragment: nxos
version_added: 2.2
short_description: Reboot a network device.
description:
- Reboot a network device.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- The module will fail due to timeout issues, but the reboot will be
performed anyway.
options:
confirm:
description:
- Safeguard boolean. Set to true if you're sure you want to reboot.
required: false
default: false
'''
EXAMPLES = '''
- nxos_reboot:
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
rebooted:
description: Whether the device was instructed to reboot.
returned: success
type: boolean
sample: true
'''
from ansible.module_utils.nxos import run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def reboot(module):
cmds = [
{'command': 'terminal dont-ask', 'output': 'text'},
{'command': 'reload', 'output': 'text'}
]
run_commands(module, cmds)
def main():
argument_spec = {}
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
results = dict(changed=False, warnings=warnings)
if not module.check_mode:
reboot(module)
results['changed'] = True
module.exit_json(**results)
if __name__ == '__main__':
main()
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_reboot
extends_documentation_fragment: nxos
version_added: 2.2
short_description: Reboot a network device.
description:
- Reboot a network device.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- The module will fail due to timeout issues, but the reboot will be
performed anyway.
options:
confirm:
description:
- Safeguard boolean. Set to true if you're sure you want to reboot.
required: false
default: false
'''
EXAMPLES = '''
- nxos_reboot:
confirm: true
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
rebooted:
description: Whether the device was instructed to reboot.
returned: success
type: boolean
sample: true
'''
from ansible.module_utils.nxos import run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def reboot(module):
cmds = [
{'command': 'terminal-dont-ask'},
{'command': 'reload', 'output': 'text'}
]
run_commands(module, cmds)
def main():
argument_spec = {}
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
results = dict(changed=False, warnings=warnings)
if not module.check_mode:
reboot(module)
results['changed'] = True
module.exit_json(**results)
if __name__ == '__main__':
main()
|
mit
|
Python
|
b8ed081ac4cc5953aaf5b1a2091fefa59d375bf1
|
Add logging for extension
|
JIghtuse/uno-image-manipulation-example
|
uno_image.py
|
uno_image.py
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import logging
import uno
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
logging.basicConfig(filename="opencl_uno_example.log",
level=logging.WARNING)
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
def show_warning(self, title, msg):
"""Shows warning message box"""
frame = self.desktop.ActiveFrame
window = frame.ContainerWindow
window.Toolkit.createMessageBox(
window,
uno.Enum('com.sun.star.awt.MessageBoxType', 'WARNINGBOX'),
uno.getConstantByName("com.sun.star.awt.MessageBoxButtons.BUTTONS_OK"),
title,
msg).execute()
def trigger(self, args):
"""This method provides options for ImageExample service"""
if args == 'show_warning':
self.show_warning("Warning", "Warning description here")
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
"""
Example usage of UNO, graphic objects and networking in LO extension
"""
import uno
import unohelper
from com.sun.star.task import XJobExecutor
class ImageExample(unohelper.Base, XJobExecutor):
'''Class that implements the service registered in LibreOffice'''
def __init__(self, context):
self.context = context
self.desktop = self.createUnoService("com.sun.star.frame.Desktop")
self.graphics = self.createUnoService("com.sun.star.graphic.GraphicProvider")
def createUnoService(self, name):
return self.context.ServiceManager.createInstanceWithContext(name, self.context)
def show_warning(self, title, msg):
"""Shows warning message box"""
frame = self.desktop.ActiveFrame
window = frame.ContainerWindow
window.Toolkit.createMessageBox(
window,
uno.Enum('com.sun.star.awt.MessageBoxType', 'WARNINGBOX'),
uno.getConstantByName("com.sun.star.awt.MessageBoxButtons.BUTTONS_OK"),
title,
msg).execute()
def trigger(self, args):
"""This method provides options for ImageExample service"""
if args == 'show_warning':
self.show_warning("Warning", "Warning description here")
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(
ImageExample,
'org.libreoffice.imageexample.ImageExample',
('com.sun.star.task.JobExecutor',))
|
mpl-2.0
|
Python
|
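One subtlety in the extension above: basicConfig is set to level=logging.WARNING, so any logging.debug() or logging.info() calls added later would be silently filtered out. A small sketch of that filtering (the file name is invented):

import logging

logging.basicConfig(filename="example.log", level=logging.WARNING)
logging.info("dropped: below the WARNING threshold")
logging.warning("written: at or above the threshold")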
77e09f2f085bc894c1f45e94662e32a981e9b0db
|
Convert Chinese quotation marks
|
fan-jiang/Dujing
|
PythonScript/Helper/Helper.py
|
PythonScript/Helper/Helper.py
|
# This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
with open(filePath, 'r') as file:
for line in file:
print line
except IOError:
print ("The file (" + filePath + ") does not exist.")
if __name__ == '__main__':
main()
|
mit
|
Python
|
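The conversion above is a straight character-for-character mapping. A Python 3 sketch of the same mapping using str.translate on an invented sample string (no explicit decode/encode needed, since str is already Unicode):

mapping = str.maketrans({"「": "“", "」": "”", "『": "‘", "』": "’"})
sample = "「天時不如地利」"
print(sample.translate(mapping))  # “天時不如地利”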
35825bbf06d7eb98c9e06cbd98e610659627c3d4
|
adjust account check digit (conta dv)
|
thiagosm/pyboleto,thiagosm/pyboleto
|
pyboleto/bank/safra.py
|
pyboleto/bank/safra.py
|
# -*- coding: utf-8 -*-
from ..data import BoletoData, CustomProperty
class BoletoSafra(BoletoData):
"""
Boleto Safra
"""
agencia_cedente = CustomProperty('agencia_cedente', 5)
conta_cedente = CustomProperty('conta_cedente', 8)
conta_cedente_dv = CustomProperty('conta_cedente_dv',1)
nosso_numero = CustomProperty('nosso_numero', 8)
def __init__(self):
BoletoData.__init__(self)
self.codigo_banco = "422"
self.logo_image = "logo_safra.jpg"
self.modalidade_cobranca = '2'
@property
def agencia_conta_cedente(self):
return "%s/%s-%s" % (self.agencia_cedente, self.conta_cedente,self.conta_cedente_dv)
@property
def dv_nosso_numero(self):
_c = '98765432'
_d = '%8s' %(self.nosso_numero.zfill(8))
t = 0
for i in range(len(_c)):
t+= int(_d[i]) * int(_c[i])
r = t % 11
if r == 0:
return 1
elif r == 1:
return 0
else:
return 11-r
def format_nosso_numero(self):
return "%s-%s" % (self.nosso_numero,self.dv_nosso_numero)
@property
def campo_livre(self):
content = "%1s%5s%8s%1s%8s%1s%1s" % ('7',
self.agencia_cedente,
self.conta_cedente,
self.conta_cedente_dv,
self.nosso_numero,
self.dv_nosso_numero,
self.modalidade_cobranca)
return str(content)
|
# -*- coding: utf-8 -*-
from ..data import BoletoData, CustomProperty
class BoletoSafra(BoletoData):
"""
Boleto Safra
"""
agencia_cedente = CustomProperty('agencia_cedente', 5)
conta_cedente = CustomProperty('conta_cedente', 8)
conta_cedente_dv = CustomProperty('conta_cedente_dv',1)
nosso_numero = CustomProperty('nosso_numero', 8)
def __init__(self):
BoletoData.__init__(self)
self.codigo_banco = "422"
self.logo_image = "logo_safra.jpg"
self.modalidade_cobranca = '2'
@property
def agencia_conta_cedente(self):
return "%s/%s" % (self.agencia_cedente, self.conta_cedente)
@property
def dv_nosso_numero(self):
_c = '98765432'
_d = '%8s' %(self.nosso_numero.zfill(8))
t = 0
for i in range(len(_c)):
t+= int(_d[i]) * int(_c[i])
r = t % 11
if r == 0:
return 1
elif r == 1:
return 0
else:
return 11-r
def format_nosso_numero(self):
return "%s-%s" % (self.nosso_numero,self.dv_nosso_numero)
@property
def campo_livre(self):
content = "%1s%5s%8s%1s%8s%1s%1s" % ('7',
self.agencia_cedente,
self.conta_cedente,
self.conta_cedente_dv,
self.nosso_numero,
self.dv_nosso_numero,
self.modalidade_cobranca)
return str(content)
|
bsd-3-clause
|
Python
|
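The dv_nosso_numero property above is a standard modulo-11 check digit with weights 9 down to 2. A standalone sketch of the same arithmetic, worked through for an invented number:

def dv_modulo11(nosso_numero):
    # Same weights and remainder rules as dv_nosso_numero above.
    weights = '98765432'
    digits = nosso_numero.zfill(8)
    t = sum(int(d) * int(w) for d, w in zip(digits, weights))
    r = t % 11
    if r == 0:
        return 1
    if r == 1:
        return 0
    return 11 - r

# '00000001': t = 1*2 = 2, r = 2, dv = 11 - 2 = 9
print(dv_modulo11('1'))  # 9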
670bc221b7af6398c90dbbde64feb22003c97690
|
Revert "Violate architecture (on purpose)"
|
terceiro/squad,terceiro/squad,terceiro/squad,terceiro/squad
|
squad/api/views.py
|
squad/api/views.py
|
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from django.http import HttpResponse
import logging
from squad.http import read_file_upload
from squad.core.models import Group
from squad.core.models import Project
from squad.core.models import Build
from squad.core.models import Environment
from squad.core.models import TestRun
from squad.core.models import Token
from squad.core.tasks import ReceiveTestRun
from squad.core.tasks import exceptions
logger = logging.getLogger()
def valid_token(token, project):
return project.tokens.filter(key=token).exists() or Token.objects.filter(project=None).exists()
@csrf_exempt
@require_http_methods(["POST"])
def add_test_run(request, group_slug, project_slug, version, environment_slug):
group = get_object_or_404(Group, slug=group_slug)
project = get_object_or_404(group.projects, slug=project_slug)
# authenticate token X project
token = request.META.get('HTTP_AUTH_TOKEN', None)
if token:
if valid_token(token, project):
pass
else:
return HttpResponseForbidden()
else:
return HttpResponse('Authentication needed', status=401)
test_run_data = {
'version': version,
'environment_slug': environment_slug,
}
uploads = {
'tests_file': 'tests',
'metrics_file': 'metrics',
'log_file': 'log',
'metadata': 'metadata',
}
for key, field in uploads.items():
if field in request.FILES:
f = request.FILES[field]
test_run_data[key] = read_file_upload(f).decode('utf-8')
if 'attachment' in request.FILES:
attachments = {}
for f in request.FILES.getlist('attachment'):
attachments[f.name] = read_file_upload(f)
test_run_data['attachments'] = attachments
receive = ReceiveTestRun(project)
try:
receive(**test_run_data)
except exceptions.invalid_input as e:
logger.warning(request.get_full_path() + ": " + str(e))
return HttpResponse(str(e), status=400)
return HttpResponse('', status=201)
|
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseForbidden
from django.http import HttpResponse
import logging
# an architecture violation
from squad.frontend import views
from squad.http import read_file_upload
from squad.core.models import Group
from squad.core.models import Project
from squad.core.models import Build
from squad.core.models import Environment
from squad.core.models import TestRun
from squad.core.models import Token
from squad.core.tasks import ReceiveTestRun
from squad.core.tasks import exceptions
logger = logging.getLogger()
def valid_token(token, project):
return project.tokens.filter(key=token).exists() or Token.objects.filter(project=None).exists()
@csrf_exempt
@require_http_methods(["POST"])
def add_test_run(request, group_slug, project_slug, version, environment_slug):
group = get_object_or_404(Group, slug=group_slug)
project = get_object_or_404(group.projects, slug=project_slug)
# authenticate token X project
token = request.META.get('HTTP_AUTH_TOKEN', None)
if token:
if valid_token(token, project):
pass
else:
return HttpResponseForbidden()
else:
return HttpResponse('Authentication needed', status=401)
test_run_data = {
'version': version,
'environment_slug': environment_slug,
}
uploads = {
'tests_file': 'tests',
'metrics_file': 'metrics',
'log_file': 'log',
'metadata': 'metadata',
}
for key, field in uploads.items():
if field in request.FILES:
f = request.FILES[field]
test_run_data[key] = read_file_upload(f).decode('utf-8')
if 'attachment' in request.FILES:
attachments = {}
for f in request.FILES.getlist('attachment'):
attachments[f.name] = read_file_upload(f)
test_run_data['attachments'] = attachments
receive = ReceiveTestRun(project)
try:
receive(**test_run_data)
except exceptions.invalid_input as e:
logger.warning(request.get_full_path() + ": " + str(e))
return HttpResponse(str(e), status=400)
return HttpResponse('', status=201)
|
agpl-3.0
|
Python
|
d8241adb51dcb81b99013aa23744a7a4a45f7d84
|
fix self importer
|
kronenthaler/mod-pbxproj,dayongxie/mod-pbxproj
|
mod_pbxproj.py
|
mod_pbxproj.py
|
# MIT License
#
# Copyright (c) 2016 Ignacio Calderon aka kronenthaler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This is a backwards-compatibility file. For Unity developers this is the only file that needs to be added to the Unity
# project.
# This file will install the proper python package into the user's local Python environment, if it's not present at
# run-time of this script. Afterwards, it will import all the modules the developer needs to make his/her own script
# work as before.
from distutils.core import run_setup, setup
import site
__author__ = 'kronenthaler'
__version__ = '2.0.0'
__package_name__ = 'mod_pbxproj_installer'
try:
# check if file exists
from pbxproj import XcodeProject
except:
# install it if not present
print 'Installing package...'
setup(name=__package_name__,
license='MIT License',
install_requires=['mod-pbxproj'],
script_args=['install', '--user', '--force', '--record', '.uninstall_files'])
# force the refresh of the packages
reload(site)
# import publicly
from pbxproj import *
|
# MIT License
#
# Copyright (c) 2016 Ignacio Calderon aka kronenthaler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This is a backwards-compatibility file. For Unity developers this is the only file that needs to be added to the Unity
# project.
# This file will install the proper python package into the user's local Python environment, if it's not present at
# run-time of this script. Afterwards, it will import all the modules the developer needs to make his/her own script
# work as before.
from distutils.core import run_setup, setup
import site
__author__ = 'kronenthaler'
__version__ = '1.3.1'
__package_name__ = 'mod_pbxproj_installer'
try:
# check if file exists
from mod_pbxproj import XcodeProject
except:
# install it if not present
print 'Installing package...'
setup(name=__package_name__,
license='MIT License',
install_requires=['mod-pbxproj'],
script_args=['install', '--user', '--force', '--record', '.uninstall_files'])
# force the refresh of the packages
reload(site)
# import publicly
from mod_pbxproj import *
|
mit
|
Python
|
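The compatibility shim above is an install-on-import pattern: try the import, install the package if it fails, then re-import. A generic Python 3 sketch of the same idea using pip (the package name is a placeholder; the shim's distutils call is the Python 2 era equivalent):

import importlib
import subprocess
import sys

def ensure(package, module_name=None):
    module_name = module_name or package
    try:
        return importlib.import_module(module_name)
    except ImportError:
        # Install into the user's site-packages, then retry the import.
        subprocess.check_call([sys.executable, '-m', 'pip',
                               'install', '--user', package])
        importlib.invalidate_caches()
        return importlib.import_module(module_name)

# e.g. pbxproj = ensure('pbxproj')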
b221251b13882789c2ed95e4cd24b2327e068711
|
Bump @graknlabs_client_java and @graknlabs_benchmark
|
lolski/grakn,lolski/grakn,graknlabs/grakn,graknlabs/grakn,graknlabs/grakn,lolski/grakn,lolski/grakn,graknlabs/grakn
|
dependencies/graknlabs/dependencies.bzl
|
dependencies/graknlabs/dependencies.bzl
|
#
# GRAKN.AI - THE KNOWLEDGE GRAPH
# Copyright (C) 2018 Grakn Labs Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "b5b9f44fc074aa91cda43b4ca8468634bfb44482", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_graql():
git_repository(
name = "graknlabs_graql",
remote = "https://github.com/graknlabs/graql",
commit = "e28748cffcc74bdb8fdd754f90c8cf71e2e79d2b", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_graql
)
def graknlabs_protocol():
git_repository(
name = "graknlabs_protocol",
remote = "https://github.com/graknlabs/protocol",
commit = "e9cb7fa417822247e26cd571e838688f11b4ae35", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_protocol
)
def graknlabs_client_java():
git_repository(
name = "graknlabs_client_java",
remote = "https://github.com/graknlabs/client-java",
commit = "97197af2424e9c1f056a46633ee1c566e3f85111",
)
def graknlabs_benchmark():
git_repository(
name = "graknlabs_benchmark",
remote = "https://github.com/graknlabs/benchmark.git",
commit = "a2d31c7a0e7a478a909792e12aa1198d8a56c16c" # keep in sync with protocol changes
)
|
#
# GRAKN.AI - THE KNOWLEDGE GRAPH
# Copyright (C) 2018 Grakn Labs Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "b5b9f44fc074aa91cda43b4ca8468634bfb44482", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_graql():
git_repository(
name = "graknlabs_graql",
remote = "https://github.com/graknlabs/graql",
commit = "e28748cffcc74bdb8fdd754f90c8cf71e2e79d2b", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_graql
)
def graknlabs_protocol():
git_repository(
name = "graknlabs_protocol",
remote = "https://github.com/graknlabs/protocol",
commit = "e9cb7fa417822247e26cd571e838688f11b4ae35", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_protocol
)
def graknlabs_client_java():
git_repository(
name = "graknlabs_client_java",
remote = "https://github.com/graknlabs/client-java",
commit = "a19c068650e83a1b6aa388a63847e92164bc7795",
)
def graknlabs_benchmark():
git_repository(
name = "graknlabs_benchmark",
remote = "https://github.com/graknlabs/benchmark.git",
commit = "97ecea678665e155587a58ebcacb8d3226a76fc8" # keep in sync with protocol changes
)
|
agpl-3.0
|
Python
|
7b1d8bd1b2a8b1cb78ec9ab13b61acde977e5642
|
remove ability to create/delete volumes on v2
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
api/v2/views/volume.py
|
api/v2/views/volume.py
|
import django_filters
from rest_framework import viewsets
from core.models import Volume
from api.v2.serializers.details import VolumeSerializer
from core.query import only_current_source
class VolumeFilter(django_filters.FilterSet):
min_size = django_filters.NumberFilter(name="size", lookup_type='gte')
max_size = django_filters.NumberFilter(name="size", lookup_type='lte')
class Meta:
model = Volume
fields = ['min_size', 'max_size', 'projects']
class VolumeViewSet(viewsets.ModelViewSet):
"""
    API endpoint that allows volumes to be viewed or edited.
"""
queryset = Volume.objects.all()
serializer_class = VolumeSerializer
filter_class = VolumeFilter
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
def get_queryset(self):
"""
Filter projects by current user
"""
user = self.request.user
return Volume.objects.filter(only_current_source(), instance_source__created_by=user)
|
import django_filters
from rest_framework import viewsets
from core.models import Volume
from api.v2.serializers.details import VolumeSerializer
from core.query import only_current_source
class VolumeFilter(django_filters.FilterSet):
min_size = django_filters.NumberFilter(name="size", lookup_type='gte')
max_size = django_filters.NumberFilter(name="size", lookup_type='lte')
class Meta:
model = Volume
fields = ['min_size', 'max_size', 'projects']
class VolumeViewSet(viewsets.ModelViewSet):
"""
    API endpoint that allows volumes to be viewed or edited.
"""
queryset = Volume.objects.all()
serializer_class = VolumeSerializer
filter_class = VolumeFilter
def get_queryset(self):
"""
Filter projects by current user
"""
user = self.request.user
return Volume.objects.filter(only_current_source(), instance_source__created_by=user)
|
apache-2.0
|
Python
|
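The fix above disables create/delete purely by omitting 'post' and 'delete' from http_method_names, so DRF answers those verbs with 405 instead of routing them to create()/destroy(). A minimal sketch of the same idea (model and serializer are left as placeholders):

from rest_framework import viewsets

class ReadUpdateOnlyViewSet(viewsets.ModelViewSet):
    # queryset / serializer_class as usual for a ModelViewSet.
    # No 'post' or 'delete': create() and destroy() become unreachable,
    # while list/retrieve (get) and update (put/patch) still work.
    http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']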
daee45e358f61d2e9cfef109efd9f474f7e91a4d
|
Add viz import to top level __init__
|
pycroscopy/pycroscopy
|
pycroscopy/__init__.py
|
pycroscopy/__init__.py
|
"""
The Pycroscopy package.
Submodules
----------
.. autosummary::
:toctree: _autosummary
core
"""
from . import core
from .core import *
from .io import translators
from . import analysis
from . import processing
from . import viz
from .__version__ import version as __version__
from .__version__ import time as __time__
__all__ = core.__all__
|
"""
The Pycroscopy package.
Submodules
----------
.. autosummary::
:toctree: _autosummary
core
"""
from . import core
from .core import *
from .io import translators
from . import analysis
from . import processing
from .__version__ import version as __version__
from .__version__ import time as __time__
__all__ = core.__all__
|
mit
|
Python
|
efa4aede4b9faa9f0fc8639e4495ca8e98127d15
|
Bump @graknlabs_verification
|
lolski/grakn,lolski/grakn,graknlabs/grakn,graknlabs/grakn,graknlabs/grakn,lolski/grakn,graknlabs/grakn,lolski/grakn
|
dependencies/graknlabs/dependencies.bzl
|
dependencies/graknlabs/dependencies.bzl
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_common():
git_repository(
name = "graknlabs_common",
remote = "https://github.com/graknlabs/common",
tag = "0.2.2" # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_common
)
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "04f9678403cdbde889b8e25cc74d16bf1751fd81", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_verification():
git_repository(
name = "graknlabs_verification",
remote = "https://github.com/graknlabs/verification",
commit = "0f831029010385cf584cfd6787f574e9f210dfe2"
)
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_common():
git_repository(
name = "graknlabs_common",
remote = "https://github.com/graknlabs/common",
tag = "0.2.2" # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_common
)
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "04f9678403cdbde889b8e25cc74d16bf1751fd81", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_verification():
git_repository(
name = "graknlabs_verification",
remote = "https://github.com/graknlabs/verification",
commit = "7221428a2315b2ec26438b7c99363fd36052e380"
)
|
agpl-3.0
|
Python
|
dd83da792fbe1c90da855fe7d298f446a839f8ca
|
change for localhost in door.py
|
cyplp/botanik,cyplp/botanik,cyplp/botanik,cyplp/botanik
|
paulla.ircbot/src/paulla/ircbot/plugins/door.py
|
paulla.ircbot/src/paulla/ircbot/plugins/door.py
|
import irc3
from irc3.plugins.cron import cron
import requests
from datetime import datetime
@irc3.plugin
class Door:
"""
Door state plugin
"""
def __init__(self, bot):
self.bot = bot
self.log = self.bot.log
@irc3.event(irc3.rfc.MY_PRIVMSG)
def question(self, mask, event, target, nick, data):
#TODO
pass
@irc3.event(irc3.rfc.PRIVMSG)
def question(self, mask, event, target, data):
#TODO
pass
@cron('*/1 * * * *')
def anoncement(self):
r = requests.get('http://localhost:2222').json()
last_change = datetime.strptime(r['lastchange'], "%d/%m/%Y %H:%M:%S")
if (datetime.now() - last_change).seconds < 60:
if "0" in r['state']:
self.bot.privmsg('#test-mika','Le lab est ouvert')
elif "1" in r['state']:
            self.bot.privmsg('#test-mika','Le lab vient de fermer')
|
import irc3
from irc3.plugins.cron import cron
import requests
from datetime import datetime
@irc3.plugin
class Door:
"""
Door state plugin
"""
def __init__(self, bot):
self.bot = bot
self.log = self.bot.log
@irc3.event(irc3.rfc.MY_PRIVMSG)
def question(self, mask, event, target, nick, data):
#TODO
pass
@irc3.event(irc3.rfc.PRIVMSG)
def question(self, mask, event, target, data):
#TODO
pass
@cron('*/1 * * * *')
def anoncement(self):
r = requests.get('http://sd-36895.dedibox.fr:2222').json()
last_change = datetime.strptime(r['lastchange'], "%d/%m/%Y %H:%M:%S")
if (datetime.now() - last_change).seconds < 60:
if "0" in r['state']:
self.bot.privmsg('#test-mika','Le lab est ouvert')
elif "1" in r['state']:
            self.bot.privmsg('#test-mika','Le lab vient de fermer')
|
bsd-3-clause
|
Python
|
1916f45ed5d6a77a585153a4daacc8a6ab48b3a3
|
fix conftest.py
|
dit/dit,Autoplectic/dit,dit/dit,Autoplectic/dit,dit/dit,dit/dit,Autoplectic/dit,dit/dit,Autoplectic/dit,Autoplectic/dit
|
dit/conftest.py
|
dit/conftest.py
|
"""
Configuration for tests.
"""
from hypothesis import settings
settings.register_profile("dit", deadline=None)
settings.load_profile("dit")
|
"""
Configuration for tests.
"""
from hypothesis import settings
settings.default.deadline = None
|
bsd-3-clause
|
Python
|
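The fix above switches from mutating settings.default, which newer Hypothesis versions reject, to the supported register/load profile API. A sketch of the same pattern with an extra invented profile to show switching:

from hypothesis import settings

# Register named profiles once, then opt into one of them.
settings.register_profile("ci", deadline=None, max_examples=50)
settings.register_profile("dev", max_examples=10)
settings.load_profile("ci")

assert settings.default.deadline is None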
7d43e6fd794fa1ef942a39937a653d5b18e867de
|
reorganize automatic dashboard
|
probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml
|
.github/scripts/create_dashboard.py
|
.github/scripts/create_dashboard.py
|
import os
from glob import glob
statuses = glob("workflow_testing_indicator/notebooks/*/*/*.png")
user = "probml"
base_url = f"https://github.com/{user}/pyprobml/tree/"
get_url = lambda x: f'<img width="20" alt="image" src=https://raw.githubusercontent.com/{user}/pyprobml/{x}>'
get_nb_url = lambda x: os.path.join(base_url, "master", x.split("/", 1)[-1].replace(".png", ".ipynb"))
# sort statuses
def sort_key(x):
parts = x.split("/")
return (parts[-3], parts[-2])
statuses = sorted(statuses, key=sort_key)
# write an md file
log_counter = 0
file_counter = 0
with open("workflow_testing_indicator/README.md", "w") as f:
f.write(f"# PyProbML status\n")
f.write(f"\n")
f.write(f"## Status\n")
f.write(f"\n")
f.write(f"| Job | Status | Log |\n")
f.write(f"| --- | --- | --- |\n")
passing = []
failing = []
for status in statuses:
job = status.split("/", 2)[-1].split(".")[0]
url = get_url(status)
url_to_nb = get_nb_url(status)
if os.path.exists(status.replace(".png", ".log")):
log = os.path.join(base_url, status.replace(".png", ".log"))
failing.append(f"| [{job}]({url_to_nb}) | {url} | [log]({log}) |\n")
log_counter += 1
else:
log = "-"
passing.append(f"| [{job}]({url_to_nb}) | {url} | [log]({log}) |\n")
file_counter += 1
for entry in passing+failing:
f.write(entry)
f.write(f"\n")
f.write(f"## Summary\n")
f.write(f"\n")
final_log = f"In total, {file_counter} jobs were tested.\n{log_counter} jobs failed.\n"
f.write(final_log)
print(final_log)
|
import os
from glob import glob
statuses = glob("workflow_testing_indicator/notebooks/*/*/*.png")
user = "probml"
base_url = f"https://github.com/{user}/pyprobml/tree/"
get_url = lambda x: f'<img width="20" alt="image" src=https://raw.githubusercontent.com/{user}/pyprobml/{x}>'
get_nb_url = lambda x: os.path.join(base_url, "master", x.split("/", 1)[-1].replace(".png", ".ipynb"))
# sort statuses
def sort_key(x):
parts = x.split("/")
return (parts[-3], parts[-2])
statuses = sorted(statuses, key=sort_key)
# write an md file
log_counter = 0
file_counter = 0
with open("workflow_testing_indicator/README.md", "w") as f:
f.write(f"# PyProbML status\n")
f.write(f"\n")
f.write(f"## Status\n")
f.write(f"\n")
f.write(f"| Job | Status | Log |\n")
f.write(f"| --- | --- | --- |\n")
for status in statuses:
job = status.split("/", 2)[-1].split(".")[0]
url = get_url(status)
url_to_nb = get_nb_url(status)
if os.path.exists(status.replace(".png", ".log")):
log = os.path.join(base_url, status.replace(".png", ".log"))
log_counter += 1
else:
log = "-"
f.write(f"| [{job}]({url_to_nb}) | {url} | [log]({log}) |\n")
file_counter += 1
f.write(f"\n")
f.write(f"## Summary\n")
f.write(f"\n")
final_log = f"In total, {file_counter} jobs were tested.\n{log_counter} jobs failed.\n"
f.write(final_log)
print(final_log)
|
mit
|
Python
|
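The reorganized dashboard above buffers rows into passing/failing lists so passing jobs are written first. A tiny sketch of the same partition-then-concatenate idea on invented rows:

rows = [("job-a", True), ("job-b", False), ("job-c", True)]

passing = [name for name, ok in rows if ok]
failing = [name for name, ok in rows if not ok]

for name in passing + failing:  # mirrors `for entry in passing+failing`
    print(name)                 # job-a, job-c, job-b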
2bac8c8df7a6f99fdc8a4efbdf2a094d3c6a7bae
|
fix link type data
|
OCA/e-commerce,OCA/e-commerce,OCA/e-commerce
|
product_template_multi_link/__manifest__.py
|
product_template_multi_link/__manifest__.py
|
# Copyright 2017-Today GRAP (http://www.grap.coop).
# @author Sylvain LE GAL <https://twitter.com/legalsylvain>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Product Multi Links (Template)",
"version": "13.0.1.1.0",
"category": "Generic Modules",
"author": "GRAP, ACSONE SA/NV, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/e-commerce",
"license": "AGPL-3",
"depends": ["sale"],
"data": [
"data/product_template_link_type.xml",
"security/product_template_link_type.xml",
"views/product_template_link_type.xml",
"security/ir.model.access.csv",
"views/action.xml",
"views/product_template_view.xml",
"views/product_template_link_view.xml",
"views/menu.xml",
],
"demo": ["demo/product_template_link_type.xml", "demo/product_template_link.xml"],
"installable": True,
}
|
# Copyright 2017-Today GRAP (http://www.grap.coop).
# @author Sylvain LE GAL <https://twitter.com/legalsylvain>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Product Multi Links (Template)",
"version": "13.0.1.1.0",
"category": "Generic Modules",
"author": "GRAP, ACSONE SA/NV, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/e-commerce",
"license": "AGPL-3",
"depends": ["sale"],
"data": [
"security/product_template_link_type.xml",
"views/product_template_link_type.xml",
"security/ir.model.access.csv",
"views/action.xml",
"views/product_template_view.xml",
"views/product_template_link_view.xml",
"views/menu.xml",
],
"demo": [
"data/product_template_link_type.xml",
"demo/product_template_link_type.xml",
"demo/product_template_link.xml",
],
"installable": True,
}
|
agpl-3.0
|
Python
|
e19a99a555cd39cd380b7ede12da2190eb164eec
|
Make CSV errors into warnings
|
alephdata/ingestors
|
ingestors/tabular/csv.py
|
ingestors/tabular/csv.py
|
import io
import csv
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.encoding import EncodingSupport
from ingestors.support.table import TableSupport
from ingestors.exc import ProcessingException
log = logging.getLogger(__name__)
class CSVIngestor(Ingestor, EncodingSupport, TableSupport):
"""Decode and ingest a CSV file.
This expects a properly formatted CSV file with a header in the first row.
"""
MIME_TYPES = [
'text/csv',
'text/tsv',
'text/tab-separated-values'
]
EXTENSIONS = ['csv', 'tsv']
SCORE = 7
def ingest(self, file_path, entity):
entity.schema = model.get('Table')
with io.open(file_path, 'rb') as fh:
encoding = self.detect_stream_encoding(fh)
log.debug("Detected encoding [%r]: %s", entity, encoding)
fh = io.open(file_path, 'r', encoding=encoding, errors='replace')
try:
sample = fh.read(4096 * 10)
fh.seek(0)
dialect = csv.Sniffer().sniff(sample)
reader = csv.reader(fh, dialect=dialect)
self.emit_row_tuples(entity, reader)
except (Exception, UnicodeDecodeError, csv.Error) as err:
log.warning("CSV error: %s", err)
raise ProcessingException("Invalid CSV: %s" % err) from err
finally:
fh.close()
|
import io
import csv
import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.encoding import EncodingSupport
from ingestors.support.table import TableSupport
from ingestors.exc import ProcessingException
log = logging.getLogger(__name__)
class CSVIngestor(Ingestor, EncodingSupport, TableSupport):
"""Decode and ingest a CSV file.
This expects a properly formatted CSV file with a header in the first row.
"""
MIME_TYPES = [
'text/csv',
'text/tsv',
'text/tab-separated-values'
]
EXTENSIONS = ['csv', 'tsv']
SCORE = 7
def ingest(self, file_path, entity):
entity.schema = model.get('Table')
with io.open(file_path, 'rb') as fh:
encoding = self.detect_stream_encoding(fh)
log.debug("Detected encoding [%r]: %s", entity, encoding)
fh = io.open(file_path, 'r', encoding=encoding, errors='replace')
try:
sample = fh.read(4096 * 10)
fh.seek(0)
dialect = csv.Sniffer().sniff(sample)
reader = csv.reader(fh, dialect=dialect)
self.emit_row_tuples(entity, reader)
except UnicodeDecodeError as ude:
log.warning("Encoding error: %r", entity)
raise ProcessingException("Could not decode CSV (%s)" % encoding) from ude # noqa
except (Exception, csv.Error) as err:
log.exception("CSV error: %s", err)
raise ProcessingException("Invalid CSV: %s" % err) from err
finally:
fh.close()
|
mit
|
Python
|
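A self-contained sketch of the sniff-then-read flow used above, on an invented in-memory sample instead of a file:

import csv
import io

sample = "name;age\nalice;30\nbob;25\n"   # invented semicolon-separated data
dialect = csv.Sniffer().sniff(sample)     # detects ';' as the delimiter
reader = csv.reader(io.StringIO(sample), dialect=dialect)
for row in reader:
    print(row)  # ['name', 'age'], ['alice', '30'], ['bob', '25']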
3c0e18944c7ff712288ccb16e439e07d4db0b3c1
|
Fix init migration dependency
|
kelvan/cmsplugin-date,kelvan/cmsplugin-date
|
cmsplugin_date/migrations/0001_initial.py
|
cmsplugin_date/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms', '0003_auto_20140926_2347'),
]
operations = [
migrations.CreateModel(
name='Date',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('date', models.DateField(verbose_name='Date')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Date',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('date', models.DateField(verbose_name='Date')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
|
mit
|
Python
|
43b992c09b092391e95b5a1893b6c19855482ff7
|
fix autoconf header
|
authmillenon/RIOT,RIOT-OS/RIOT,kYc0o/RIOT,miri64/RIOT,kYc0o/RIOT,authmillenon/RIOT,jasonatran/RIOT,kaspar030/RIOT,ant9000/RIOT,RIOT-OS/RIOT,authmillenon/RIOT,kaspar030/RIOT,RIOT-OS/RIOT,OTAkeys/RIOT,ant9000/RIOT,jasonatran/RIOT,kYc0o/RIOT,ant9000/RIOT,kYc0o/RIOT,kYc0o/RIOT,jasonatran/RIOT,ant9000/RIOT,OTAkeys/RIOT,OlegHahm/RIOT,miri64/RIOT,OlegHahm/RIOT,authmillenon/RIOT,miri64/RIOT,kaspar030/RIOT,OlegHahm/RIOT,OlegHahm/RIOT,authmillenon/RIOT,jasonatran/RIOT,OlegHahm/RIOT,OTAkeys/RIOT,kaspar030/RIOT,jasonatran/RIOT,miri64/RIOT,authmillenon/RIOT,RIOT-OS/RIOT,RIOT-OS/RIOT,ant9000/RIOT,OTAkeys/RIOT,miri64/RIOT,kaspar030/RIOT,OTAkeys/RIOT
|
dist/tools/kconfiglib/riot_kconfig.py
|
dist/tools/kconfiglib/riot_kconfig.py
|
""" RIOT customization of Kconfig """
import argparse
import sys
from kconfiglib import Kconfig, KconfigError
class RiotKconfig(Kconfig):
""" RIOT adaption of Kconfig class """
def _parse_help(self, node):
""" Parses the help section of a node, removing Doxygen markers """
doxygen_markers = ["@ref ", "@see "]
# call default parsing
super(RiotKconfig, self)._parse_help(node)
# remove Doxygen markers
for marker in doxygen_markers:
node.help = node.help.replace(marker, "")
def write_autoconf(self, filename=None, header="/* RIOT Configuration File */\n"):
""" Override to convert - to _ when writing autoconf.h """
tmp_unique_defined_syms = self.unique_defined_syms.copy()
for sym in self.unique_defined_syms:
if not sym._write_to_conf:
continue
sym.name = sym.name.replace('-', '_')
super(RiotKconfig, self).write_autoconf(filename, header)
self.unique_defined_syms = tmp_unique_defined_syms
def standard_riot_kconfig(description=None):
"""
Argument parsing helper for tools that take a single optional Kconfig file
argument (default: Kconfig). Returns the RiotKconfig instance for the parsed
configuration. Uses argparse internally.
Exits with sys.exit() (which raises SystemExit) on errors.
description (default: None):
The 'description' passed to argparse.ArgumentParser().
argparse.RawDescriptionHelpFormatter is used, so formatting is preserved.
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
parser.add_argument(
"kconfig",
metavar="KCONFIG",
default="Kconfig",
nargs="?",
help="Kconfig file (default: Kconfig)")
args = parser.parse_args()
# Suppress backtraces for expected exceptions
try:
return RiotKconfig(args.kconfig)
except (EnvironmentError, KconfigError) as e:
# Some long exception messages have extra newlines for better
# formatting when reported as an unhandled exception. Strip them here.
sys.exit(str(e).strip())
|
""" RIOT customization of Kconfig """
import argparse
import sys
from kconfiglib import Kconfig, KconfigError
class RiotKconfig(Kconfig):
""" RIOT adaption of Kconfig class """
def _parse_help(self, node):
""" Parses the help section of a node, removing Doxygen markers """
doxygen_markers = ["@ref ", "@see "]
# call default parsing
super(RiotKconfig, self)._parse_help(node)
# remove Doxygen markers
for marker in doxygen_markers:
node.help = node.help.replace(marker, "")
def write_autoconf(self, filename=None, header=None):
""" Override to convert - to _ when writing autoconf.h """
tmp_unique_defined_syms = self.unique_defined_syms.copy()
for sym in self.unique_defined_syms:
if not sym._write_to_conf:
continue
sym.name = sym.name.replace('-', '_')
super(RiotKconfig, self).write_autoconf(filename, header)
self.unique_defined_syms = tmp_unique_defined_syms
def standard_riot_kconfig(description=None):
"""
Argument parsing helper for tools that take a single optional Kconfig file
argument (default: Kconfig). Returns the RiotKconfig instance for the parsed
configuration. Uses argparse internally.
Exits with sys.exit() (which raises SystemExit) on errors.
description (default: None):
The 'description' passed to argparse.ArgumentParser().
argparse.RawDescriptionHelpFormatter is used, so formatting is preserved.
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
parser.add_argument(
"kconfig",
metavar="KCONFIG",
default="Kconfig",
nargs="?",
help="Kconfig file (default: Kconfig)")
args = parser.parse_args()
# Suppress backtraces for expected exceptions
try:
return RiotKconfig(args.kconfig)
except (EnvironmentError, KconfigError) as e:
# Some long exception messages have extra newlines for better
# formatting when reported as an unhandled exception. Strip them here.
sys.exit(str(e).strip())
|
lgpl-2.1
|
Python
|
6d7c21979a741e60053faf6d4e444ad4bf01dcde
|
Fix unittests
|
thoas/django-backward
|
backward/backends/session.py
|
backward/backends/session.py
|
try:
import cPickle as pickle
except ImportError:
import pickle
from .base import Backend
from backward import settings
class SessionBackend(Backend):
def get_url_redirect(self, request):
return request.session.get(settings.URL_REDIRECT_NAME, None)
def save_url_redirect(self, request, response, url_redirect):
request.session[settings.URL_REDIRECT_NAME] = url_redirect
def get_next_action(self, request):
if settings.NEXT_ACTION_NAME in request.session:
return pickle.loads(request.session[settings.NEXT_ACTION_NAME])
return {}
def save_next_action(self, request, response, data):
request.session[settings.NEXT_ACTION_NAME] = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
def delete_next_action(self, request, response):
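        # 'response' is unused here; presumably kept for signature consistency with the other backend hooks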
try:
del request.session[settings.NEXT_ACTION_NAME]
except KeyError:
return False
return True
|
try:
import cPickle as pickle
except ImportError:
import pickle
from .base import Backend
from backward import settings
class SessionBackend(Backend):
def get_url_redirect(self, request):
return request.session.get(settings.URL_REDIRECT_NAME, None)
def save_url_redirect(self, request, response, url_redirect):
request.session[settings.URL_REDIRECT_NAME] = url_redirect
def get_next_action(self, request):
if settings.NEXT_ACTION_NAME in request.session:
return pickle.loads(request.session[settings.NEXT_ACTION_NAME])
return {}
def save_next_action(self, request, response, data):
request.session[settings.NEXT_ACTION_NAME] = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
def delete_next_action(self, request):
try:
del request.session[settings.NEXT_ACTION_NAME]
except KeyError:
return False
return True
|
mit
|
Python
|
5c87c2bba8a95db865c11545df6d0405abd8fbfd
|
Update demo for prediction. (#6789)
|
dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost
|
demo/guide-python/predict_first_ntree.py
|
demo/guide-python/predict_first_ntree.py
|
import os
import numpy as np
import xgboost as xgb
from sklearn.datasets import load_svmlight_file
CURRENT_DIR = os.path.dirname(__file__)
train = os.path.join(CURRENT_DIR, "../data/agaricus.txt.train")
test = os.path.join(CURRENT_DIR, "../data/agaricus.txt.test")
def native_interface():
    # load data and do training
dtrain = xgb.DMatrix(train)
dtest = xgb.DMatrix(test)
param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"}
watchlist = [(dtest, "eval"), (dtrain, "train")]
num_round = 3
bst = xgb.train(param, dtrain, num_round, watchlist)
print("start testing prediction from first n trees")
# predict using first 1 tree
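    # iteration_range selects the half-open range [0, 1) of boosting rounds; it replaces the deprecated ntree_limit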
label = dtest.get_label()
ypred1 = bst.predict(dtest, iteration_range=(0, 1))
# by default, we predict using all the trees
ypred2 = bst.predict(dtest)
print("error of ypred1=%f" % (np.sum((ypred1 > 0.5) != label) / float(len(label))))
print("error of ypred2=%f" % (np.sum((ypred2 > 0.5) != label) / float(len(label))))
def sklearn_interface():
X_train, y_train = load_svmlight_file(train)
X_test, y_test = load_svmlight_file(test)
clf = xgb.XGBClassifier(n_estimators=3, max_depth=2, eta=1, use_label_encoder=False)
clf.fit(X_train, y_train, eval_set=[(X_test, y_test)])
assert clf.n_classes_ == 2
print("start testing prediction from first n trees")
# predict using first 1 tree
ypred1 = clf.predict(X_test, iteration_range=(0, 1))
# by default, we predict using all the trees
ypred2 = clf.predict(X_test)
print(
"error of ypred1=%f" % (np.sum((ypred1 > 0.5) != y_test) / float(len(y_test)))
)
print(
"error of ypred2=%f" % (np.sum((ypred2 > 0.5) != y_test) / float(len(y_test)))
)
if __name__ == "__main__":
native_interface()
sklearn_interface()
|
import os
import numpy as np
import xgboost as xgb
# load data and do training
CURRENT_DIR = os.path.dirname(__file__)
dtrain = xgb.DMatrix(os.path.join(CURRENT_DIR, '../data/agaricus.txt.train'))
dtest = xgb.DMatrix(os.path.join(CURRENT_DIR, '../data/agaricus.txt.test'))
param = {'max_depth': 2, 'eta': 1, 'objective': 'binary:logistic'}
watchlist = [(dtest, 'eval'), (dtrain, 'train')]
num_round = 3
bst = xgb.train(param, dtrain, num_round, watchlist)
print('start testing prediction from first n trees')
# predict using first 1 tree
label = dtest.get_label()
ypred1 = bst.predict(dtest, ntree_limit=1)
# by default, we predict using all the trees
ypred2 = bst.predict(dtest)
print('error of ypred1=%f' % (np.sum((ypred1 > 0.5) != label) / float(len(label))))
print('error of ypred2=%f' % (np.sum((ypred2 > 0.5) != label) / float(len(label))))
|
apache-2.0
|
Python
|
fdbaaa6c1f20a48d0891106455c91d600c8236f7
|
Change client.skia.fyi ports
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
masters/master.client.skia.fyi/master_site_config.py
|
masters/master.client.skia.fyi/master_site_config.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class SkiaFYI(Master.Master3):
project_name = 'SkiaFYI'
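  # ports moved from the 8x94 block (8094/8194/8294) to the 8x98 block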
master_port = 8098
slave_port = 8198
master_port_alt = 8298
repo_url = 'https://skia.googlesource.com/skia.git'
buildbot_url = 'http://build.chromium.org/p/client.skia.fyi/'
code_review_site = 'https://codereview.chromium.org'
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class SkiaFYI(Master.Master3):
project_name = 'SkiaFYI'
master_port = 8094
slave_port = 8194
master_port_alt = 8294
repo_url = 'https://skia.googlesource.com/skia.git'
buildbot_url = 'http://build.chromium.org/p/client.skia.fyi/'
code_review_site = 'https://codereview.chromium.org'
|
bsd-3-clause
|
Python
|
9a688a0311cbf802bc541e267afac968bcf7ae2c
|
break down the sample with a little extra info and show each step along the way
|
gilwo/ocv-dev-tests
|
ocv_image_artihmetic.py
|
ocv_image_artihmetic.py
|
import numpy as np
import cv2 as cv
def ims(img, key, windows_name='dummy'):
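    # display img in a window and block until the given key is pressed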
cv.destroyAllWindows()
cv.imshow(windows_name, img)
k = 0
while k != ord(key):
k = cv.waitKey(10)
cv.destroyAllWindows()
# blue = np.zeros((300,512,3), np.uint8)
# img = cv.imread('messi5.jpg')
# main image
img = cv.imread('messi5.jpg')
ims(img, 'q', 'img')
# overlay image
ovl = cv.imread('logo.png')
ims(ovl, 'q', 'ovl')
# create an ROI (region of interest) in the top-left corner at the size of the overlay
rows, cols,channels = ovl.shape
roi = img[0:rows, 0:cols]
ims(roi, 'q', 'roi of img')
# Now create a mask of the logo and also create its inverse mask
# get overlay grayscale
ovlgray = cv.cvtColor(ovl,cv.COLOR_BGR2GRAY)
ims(ovlgray, 'q', 'ovl in grayscale')
# get bi-level image out of the grayscale
ret, ovlmask = cv.threshold(ovlgray, 10, 255, cv.THRESH_BINARY)
ims(ovlmask, 'q', 'mask after thresholding')
# invert and get the actual overlay mask
ovlmask_inv = cv.bitwise_not(ovlmask)
ims(ovlmask_inv, 'q', 'inverted mask')
# Now black-out the area of overlay in ROI
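# bitwise_and with a mask keeps src pixels only where the mask is nonzero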
roi_bg = cv.bitwise_and(roi, roi, mask=ovlmask_inv)
ims(roi_bg, 'q', 'roi bitwise with inverted mask')
# Take only the region of the logo from the logo image.
ovl_fg = cv.bitwise_and(ovl, ovl, mask=ovlmask)
ims(ovl_fg, 'q', 'overlay bitwise with mask')
# Put overlay in ROI and modify the main image
dst_roi = cv.add(roi_bg, ovl_fg)
ims(dst_roi, 'q', 'roi bg with ovl fg')
# and put it back on the img
img[0:rows, 0:cols] = dst_roi
ims(img, 'q', 'final result')
|
import numpy as np
import cv2 as cv
blue = np.zeros((300,512,3), np.uint8)
img = cv.imread('messi5.jpg')
cv.imshow('b', blue)
k = 0
while k != ord('q'):
k = cv.waitKey(100)
cv.destroyAllWindows()
|
mit
|
Python
|
4a12f00012b1a49d5a3b6876c563a58ab4583b26
|
Add comments and MSVS settings
|
stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib
|
lib/node_modules/@stdlib/math/base/blas/dasum/binding.gyp
|
lib/node_modules/@stdlib/math/base/blas/dasum/binding.gyp
|
{
'targets': [
{
# The target name should match the add-on export name:
'target_name': 'addon',
# Allow developer to choose whether to build a static or shared library:
'type': '<(library)',
# Settings that should be applied when a target's object files are used as linker input:
'link_settings': {
# List libraries (object files, etc) which should be linked during linking:
'libraries': [
        # Note: `module_root_dir` is provided by `node-gyp` based on the current working directory.
'<(module_root_dir)/src/c_dasum.o',
'<(module_root_dir)/src/dasum.o',
'<(module_root_dir)/src/dasumsub.o'
]
},
# Define directories which contain relevant include headers:
'include_dirs': [
'<!(node -e \'require("nan")\')',
'include'
],
# List the source files to compile:
'sources': [
'./src/addon.cpp'
],
# C compiler flags:
'cflags': [
# Generate platform-independent code:
'-fPIC'
],
# Apply conditions based on the runtime environment:
'conditions': [
[
'OS=="win"',
{
'msvs_settings': {
'VCCLCompilerTool': {
'WholeProgramOptimization': 'true', # /GL, whole program optimization, needed for LTCG
'OmitFramePointers': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'RuntimeTypeInfo': 'false',
'ExceptionHandling': '1',
},
'VCLibrarianTool': {
'AdditionalOptions': [
'/LTCG', # link time code generation
],
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': 1, # link-time code generation
'OptimizeReferences': 2, # /OPT:REF
'EnableCOMDATFolding': 2, # /OPT:ICF
'LinkIncremental': 1, # disable incremental linking
},
}
}
] # end OS=="win"
] # end conditions
}
] # end targets
}
|
{
"targets": [
{
"target_name": "addon",
"link_settings": {
"libraries": [
"<(module_root_dir)/src/c_dasum.o",
"<(module_root_dir)/src/dasum.o",
"<(module_root_dir)/src/dasumsub.o"
]
},
"include_dirs": [
"<!(node -e \"require('nan')\")",
"include"
],
"sources": [
"./src/addon.cpp"
]
}
]
}
|
apache-2.0
|
Python
|