commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
130663a47fe3c497aedd39acd12de70bab230dec
|
make things login free
|
datahuborg/datahub,dnsserver/datahub,RogerTangos/datahub-stub,anantb/datahub,zjsxzy/datahub,datahuborg/datahub,RogerTangos/datahub-stub,dnsserver/datahub,zjsxzy/datahub,anantb/datahub,RogerTangos/datahub-stub,zjsxzy/datahub,dnsserver/datahub,zjsxzy/datahub,dnsserver/datahub,zjsxzy/datahub,anantb/datahub,datahuborg/datahub,dnsserver/datahub,dnsserver/datahub,anantb/datahub,RogerTangos/datahub-stub,zjsxzy/datahub,anantb/datahub,datahuborg/datahub,datahuborg/datahub,RogerTangos/datahub-stub,dnsserver/datahub,anantb/datahub,datahuborg/datahub,RogerTangos/datahub-stub,datahuborg/datahub,anantb/datahub,zjsxzy/datahub,RogerTangos/datahub-stub
|
src/datahub/browser/views.py
|
src/datahub/browser/views.py
|
import json, sys, re, hashlib, smtplib, base64, urllib, os
from auth import *
from django.http import *
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.core.context_processors import csrf
from django.core.validators import email_re
from django.db.utils import IntegrityError
from django.utils.http import urlquote_plus
'''
@author: Anant Bhardwaj
@date: Mar 21, 2013
Datahub Web Handler
'''
@login_required
def home(request):
try:
user = request.session[kUsername]
return HttpResponseRedirect('/%s' %(user))
except KeyError:
return HttpResponseRedirect('/login')
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def user(request, username):
try:
if(username):
res = manager.list_repos(username)
repos = [t[0] for t in res['tuples']]
return render_to_response("user.html", {
'username': username,
'repos': repos})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def repo(request, username, repo):
try:
res = manager.list_tables(username, repo)
tables = [t[0] for t in res['tuples']]
return render_to_response("repo.html", {
'username': username,
'repo': repo,
'tables': tables})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def table(request, username, repo, table):
try:
res = manager.execute_sql(
username=username,
query='SELECT * from %s.%s.%s' %(username, repo, table))
column_names = res['column_names']
tuples = res['tuples']
return render_to_response("table.html", {
'username': username,
'repo': repo,
'table': table,
'column_names': column_names,
'tuples': tuples})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
|
import json, sys, re, hashlib, smtplib, base64, urllib, os
from auth import *
from django.http import *
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.core.context_processors import csrf
from django.core.validators import email_re
from django.db.utils import IntegrityError
from django.utils.http import urlquote_plus
'''
@author: Anant Bhardwaj
@date: Mar 21, 2013
Datahub Web Handler
'''
@login_required
def home(request):
try:
user = request.session[kUsername]
return HttpResponseRedirect(user)
except KeyError:
return HttpResponseRedirect('/login')
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def user(request, username):
try:
if(username):
res = manager.list_repos(username)
repos = [t[0] for t in res['tuples']]
return render_to_response("user.html", {
'username': username,
'repos': repos})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def repo(request, username, repo):
try:
res = manager.list_tables(username, repo)
tables = [t[0] for t in res['tuples']]
return render_to_response("repo.html", {
'username': username,
'repo': repo,
'tables': tables})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
def table(request, username, repo, table):
try:
res = manager.execute_sql(
username=username,
query='SELECT * from %s.%s.%s' %(username, repo, table))
column_names = res['column_names']
tuples = res['tuples']
return render_to_response("table.html", {
'username': username,
'repo': repo,
'table': table,
'column_names': column_names,
'tuples': tuples})
except Exception, e:
return HttpResponse(
{'error': str(e)},
mimetype="application/json")
|
mit
|
Python
|
7219ace435351fc20e4b7fd95d0befea24e545be
|
advance version to push to pypi
|
evernym/ledger
|
ledger/__metadata__.py
|
ledger/__metadata__.py
|
"""
Ledger package metadata
"""
__version_info__ = (0, 0, 30)
__version__ = '{}.{}.{}'.format(*__version_info__)
__author__ = "Evernym, Inc."
__license__ = "Apache 2.0"
__all__ = ['__version_info__', '__version__', '__author__', '__license__']
|
"""
Ledger package metadata
"""
__version_info__ = (0, 0, 29)
__version__ = '{}.{}.{}'.format(*__version_info__)
__author__ = "Evernym, Inc."
__license__ = "Apache 2.0"
__all__ = ['__version_info__', '__version__', '__author__', '__license__']
|
apache-2.0
|
Python
|
c9187cecbdb196343586378ca637d76079ff058f
|
Improve sub-package imports
|
hendrikx-itc/minerva,hendrikx-itc/minerva
|
src/minerva/storage/notification/__init__.py
|
src/minerva/storage/notification/__init__.py
|
# -*- coding: utf-8 -*-
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2011-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
from minerva.storage.notification.notificationstore import NotificationStore, \
NotificationStoreDescriptor
from minerva.storage.notification.attribute import Attribute, \
AttributeDescriptor
from minerva.storage.notification.package import Package
from minerva.storage.notification.record import Record
|
# -*- coding: utf-8 -*-
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2011-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
from engine import NotificationEngine
|
agpl-3.0
|
Python
|
334334c95a543de3e92c96ef807b2cad684f4362
|
Update URL construction from FPLX db_refs
|
bgyori/indra,pvtodorov/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,pvtodorov/indra
|
indra/databases/__init__.py
|
indra/databases/__init__.py
|
import logging
logger = logging.getLogger('databases')
def get_identifiers_url(db_name, db_id):
"""Return an identifiers.org URL for a given database name and ID.
Parameters
----------
db_name : str
An internal database name: HGNC, UP, CHEBI, etc.
db_id : str
An identifier in the given database.
Returns
-------
url : str
An identifiers.org URL corresponding to the given database name and ID.
"""
identifiers_url = 'http://identifiers.org/'
if db_name == 'UP':
url = identifiers_url + 'uniprot/%s' % db_id
elif db_name == 'HGNC':
url = identifiers_url + 'hgnc/HGNC:%s' % db_id
elif db_name == 'IP':
url = identifiers_url + 'interpro/%s' % db_id
elif db_name == 'CHEBI':
url = identifiers_url + 'chebi/%s' % db_id
elif db_name == 'NCIT':
url = identifiers_url + 'ncit/%s' % db_id
elif db_name == 'GO':
url = identifiers_url + 'go/%s' % db_id
elif db_name == 'PUBCHEM':
if db_id.startswith('PUBCHEM:'):
db_id = db_id[8:]
url = identifiers_url + 'pubchem.compound/%s' % db_id
elif db_name == 'PF':
url = identifiers_url + 'pfam/%s' % db_id
elif db_name == 'MIRBASEM':
url = identifiers_url + 'mirbase.mature/%s' % db_id
elif db_name == 'MIRBASE':
url = identifiers_url + 'mirbase/%s' % db_id
elif db_name == 'MESH':
url = identifiers_url + 'mesh/%s' % db_id
elif db_name == 'HMDB':
url = identifiers_url + 'hmdb/%s' % db_id
# Special cases with no identifiers entry
elif db_name == 'FPLX':
url = 'http://identifiers.org/fplx/%s' % db_id
elif db_name == 'NXPFA':
url = 'https://www.nextprot.org/term/FA-%s' % db_id
elif db_name == 'TEXT':
return None
else:
logger.warning('Unhandled name space %s' % db_name)
url = None
return url
|
import logging
logger = logging.getLogger('databases')
def get_identifiers_url(db_name, db_id):
"""Return an identifiers.org URL for a given database name and ID.
Parameters
----------
db_name : str
An internal database name: HGNC, UP, CHEBI, etc.
db_id : str
An identifier in the given database.
Returns
-------
url : str
An identifiers.org URL corresponding to the given database name and ID.
"""
identifiers_url = 'http://identifiers.org/'
if db_name == 'UP':
url = identifiers_url + 'uniprot/%s' % db_id
elif db_name == 'HGNC':
url = identifiers_url + 'hgnc/HGNC:%s' % db_id
elif db_name == 'IP':
url = identifiers_url + 'interpro/%s' % db_id
elif db_name == 'CHEBI':
url = identifiers_url + 'chebi/%s' % db_id
elif db_name == 'NCIT':
url = identifiers_url + 'ncit/%s' % db_id
elif db_name == 'GO':
url = identifiers_url + 'go/%s' % db_id
elif db_name == 'PUBCHEM':
if db_id.startswith('PUBCHEM:'):
db_id = db_id[8:]
url = identifiers_url + 'pubchem.compound/%s' % db_id
elif db_name == 'PF':
url = identifiers_url + 'pfam/%s' % db_id
elif db_name == 'MIRBASEM':
url = identifiers_url + 'mirbase.mature/%s' % db_id
elif db_name == 'MIRBASE':
url = identifiers_url + 'mirbase/%s' % db_id
elif db_name == 'MESH':
url = identifiers_url + 'mesh/%s' % db_id
elif db_name == 'HMDB':
url = identifiers_url + 'hmdb/%s' % db_id
# Special cases with no identifiers entry
elif db_name == 'FPLX':
url = 'http://sorger.med.harvard.edu/indra/entities/%s' % db_id
elif db_name == 'NXPFA':
url = 'https://www.nextprot.org/term/FA-%s' % db_id
elif db_name == 'TEXT':
return None
else:
logger.warning('Unhandled name space %s' % db_name)
url = None
return url
|
bsd-2-clause
|
Python
|
d60dea7b7b1fb073eef2c350177b3920f32de748
|
Add comments indicating source of formulae..
|
cveazey/ProjectEuler,cveazey/ProjectEuler
|
6/e6.py
|
6/e6.py
|
#!/usr/bin/env python
# http://www.proofwiki.org/wiki/Sum_of_Sequence_of_Squares
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
# http://www.regentsprep.org/regents/math/algtrig/ATP2/ArithSeq.htm
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
def sum_seq_squares(n):
return (n * (n+1) * ((2*n)+1)) / 6
def sum_seq(n):
return (n * (n + 1)) / 2
def main():
sum_seq_sq_100 = sum_seq_squares(100)
sum_seq_100 = sum_seq(100)
sq_sum_seq_100 = sum_seq_100**2
diff = sq_sum_seq_100 - sum_seq_sq_100
print('diff is {0}'.format(diff))
if __name__ == '__main__':
main()
|
mit
|
Python
|
36e8549053d28f51cc1e846e86bbdc8b32527cbe
|
Make app.py localhost only
|
pcchou/plant-rank,pcchou/plant-rank,pcchou/plant-rank
|
app.py
|
app.py
|
#!/usr/bin/python3
from json import dumps
from datetime import datetime
import os
from bottle import app as bottleapp
from bottle import route, run, static_file, template
from pymongo import MongoClient
import sprout
os.chdir(os.path.dirname(os.path.abspath(__file__)))
mongo = MongoClient('localhost', 27017)
col = mongo['plant-rank']['users']
def readable(obj):
obj['class_name'] = {0: '陌生人',
1: '算法班',
2: 'C語法',
3: 'Py語法'}[obj['category']]
obj['class'] = {0: '',
1: 'label-primary',
2: 'label-warning',
3: 'label-success'}[obj['category']]
obj['algopoints'] = len(obj['algoprobs'])
obj['points'] = len(obj['problems'])
obj['problems'] = ', '.join(map(str, sorted(obj['problems'])))
obj['updated_at'] = (datetime.fromtimestamp(obj['updated_at'])
.strftime('%Y/%m/%d %H:%M'))
return obj
@route('/assets/<filename:path>')
def assets(filename):
return static_file(filename, root='./assets/')
@route('/')
def index():
board = list(map(readable, col.find({})))
countboard = sorted(board, key=lambda x: (x['points'], x['rate']), reverse=True)
algocountboard = sorted(board, key=lambda x: (x['algopoints'], x['points']),
reverse=True)
algoboard = sorted(board, key=lambda x: (x['rate'] if x['category'] == 1 else 0,
x['points']),
reverse=True)
return template('index.html', locals())
@route('/users/<uid>')
def user(uid):
board = map(readable, col.find({'uid': int(uid)}).limit(1))
return template('user.html', locals())
@route('/users/<uid>', method="POST")
def refresh(uid):
try:
sprout.refresh(int(uid))
except:
return dumps({'status': False})
else:
return dumps({'status': True})
run(app=bottleapp(), port=8787, host="127.0.0.1", debug=False, server='meinheld')
|
#!/usr/bin/python3
from json import dumps
from datetime import datetime
import os
from bottle import app as bottleapp
from bottle import route, run, static_file, template
from pymongo import MongoClient
import sprout
os.chdir(os.path.dirname(os.path.abspath(__file__)))
mongo = MongoClient('localhost', 27017)
col = mongo['plant-rank']['users']
def readable(obj):
obj['class_name'] = {0: '陌生人',
1: '算法班',
2: 'C語法',
3: 'Py語法'}[obj['category']]
obj['class'] = {0: '',
1: 'label-primary',
2: 'label-warning',
3: 'label-success'}[obj['category']]
obj['algopoints'] = len(obj['algoprobs'])
obj['points'] = len(obj['problems'])
obj['problems'] = ', '.join(map(str, sorted(obj['problems'])))
obj['updated_at'] = (datetime.fromtimestamp(obj['updated_at'])
.strftime('%Y/%m/%d %H:%M'))
return obj
@route('/assets/<filename:path>')
def assets(filename):
return static_file(filename, root='./assets/')
@route('/')
def index():
board = list(map(readable, col.find({})))
countboard = sorted(board, key=lambda x: (x['points'], x['rate']), reverse=True)
algocountboard = sorted(board, key=lambda x: (x['algopoints'], x['points']),
reverse=True)
algoboard = sorted(board, key=lambda x: (x['rate'] if x['category'] == 1 else 0,
x['points']),
reverse=True)
return template('index.html', locals())
@route('/users/<uid>')
def user(uid):
board = map(readable, col.find({'uid': int(uid)}).limit(1))
return template('user.html', locals())
@route('/users/<uid>', method="POST")
def refresh(uid):
try:
sprout.refresh(int(uid))
except:
return dumps({'status': False})
else:
return dumps({'status': True})
run(app=bottleapp(), port=8787, host="0.0.0.0", debug=False, server='meinheld')
|
mit
|
Python
|
91ff11cde50ce2485c0a6725651931f88a085ca7
|
Update get_time to handle timeout errors.
|
danriti/short-circuit,danriti/short-circuit
|
app.py
|
app.py
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time')
except requests.exceptions.ConnectionError:
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
mit
|
Python
|
b2a1dcd25ecc9d50a975a41330a1620b52312857
|
add docstring
|
francois-berder/PyLetMeCreate
|
letmecreate/click/motion.py
|
letmecreate/click/motion.py
|
#!/usr/bin/env python3
"""Python binding of Motion Click wrapper of LetMeCreate library."""
import ctypes
_lib = ctypes.CDLL('libletmecreate_click.so')
callback_type = ctypes.CFUNCTYPE(None, ctypes.c_uint8)
callbacks = [None, None]
def enable(mikrobus_index):
"""Enable the motion click.
Configures the EN pin as an output and set it to high.
mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
Note: An exception is thrown if it fails to enable the Motion Click.
"""
ret = _lib.motion_click_enable(mikrobus_index)
if ret < 0:
raise Exception("motion click enable failed")
def attach_callback(mikrobus_index, callback):
"""Attach a callback triggered if an event is detected.
Returns the callback ID. The callback must be removed by calling
letmecreate.core.gpio_monitor.remove_callback().
mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
callback: function must have one argument which can be safely ignored. This
argument indicates if the GPIO is on a falling or raising edge. In this
case, it triggers an event only if the INT pin is raising so this argument
will always be equal to 1.
Note: An exception is thrown if it fails to attach a callback.
"""
ptr = callback_type(callback)
ret = _lib.motion_click_attach_callback(mikrobus_index, ptr)
if ret < 0:
raise Exception("motion click attach callback failed")
callbacks[mikrobus_index] = ptr;
def disable(mikrobus_index):
"""Disable the Motion Click.
Note: An exception is thrown if it fails to disable the Motion Click.
"""
ret = _lib.motion_click_disable(mikrobus_index)
if ret < 0:
raise Exception("motion click disable failed")
|
#!/usr/bin/env python3
import ctypes
_lib = ctypes.CDLL('libletmecreate_click.so')
callback_type = ctypes.CFUNCTYPE(None, ctypes.c_uint8)
callbacks = [None, None]
def enable(mikrobus_index):
ret = _lib.motion_click_enable(mikrobus_index)
if ret < 0:
raise Exception("motion click enable failed")
def attach_callback(mikrobus_index, callback):
ptr = callback_type(callback)
ret = _lib.motion_click_attach_callback(mikrobus_index, ptr)
if ret < 0:
raise Exception("motion click attach callback failed")
callbacks[mikrobus_index] = ptr;
def disable(mikrobus_index):
ret = _lib.motion_click_disable(mikrobus_index)
if ret < 0:
raise Exception("motion click disable failed")
|
bsd-3-clause
|
Python
|
460b48c10461df264a30ac26630d7299370988cd
|
Support alternative URLs
|
gregvonkuster/cargo-port,gregvonkuster/cargo-port,galaxyproject/cargo-port,galaxyproject/cargo-port,gregvonkuster/cargo-port,erasche/community-package-cache,erasche/community-package-cache,erasche/community-package-cache
|
gsl.py
|
gsl.py
|
#!/usr/bin/python
from urlparse import urlparse
import urllib
import urllib2
import click
import os
import hashlib
PACKAGE_SERVER = 'https://server-to-be-determined/'
@click.command()
@click.option('--package_id', help='Package ID', required=True)
@click.option('--download_location', default='./',
help='Location for the downloaded file')
def get(package_id, download_location):
package_found = False
for line in urllib2.urlopen(PACKAGE_SERVER + 'urls.tsv'):
if line.strip() and not line.startswith('#'):
iid, upstream_url, checksum, alternate_url = line.split('\t')
if iid == package_id.strip():
package_found = True
# I worry about this being unreliable. TODO: add target filename column?
pkg_name = urlparse(upstream_url).path.split('/')[-1]
storage_path = os.path.join(download_location, pkg_name)
if alternate_url.strip():
url = alternate_url
else:
url = PACKAGE_SERVER + checksum
urllib.urlretrieve(url, storage_path)
download_checksum = hashlib.sha256(open(storage_path, 'rb').read()).hexdigest()
if checksum != download_checksum:
print ('Checksum does not match, something seems to be wrong.\n'
'{expected}\t(expected)\n{actual}\t(downloaded)').format(
expected=checksum,
actual=download_checksum)
else:
print 'Download successful for %s.' % (pkg_name)
if not package_found:
print 'Package (%s) could not be found in this server.' % (package_id)
if __name__ == '__main__':
get()
|
#!/usr/bin/python
from urlparse import urlparse
import urllib
import urllib2
import click
import os
import hashlib
PACKAGE_SERVER = 'https://server-to-be-determined/'
@click.command()
@click.option('--package_id', help='Package ID', required=True)
@click.option('--download_location', default='./',
help='Location for the downloaded file')
def get(package_id, download_location):
package_found = False
for line in urllib2.urlopen(PACKAGE_SERVER + 'urls.tsv'):
if line.strip() and not line.startswith('#'):
iid, upstream_url, checksum = line.split('\t')
if iid.strip() == package_id.strip():
package_found = True
# I worry about this being unreliable. TODO: add target filename column?
pkg_name = urlparse(upstream_url).path.split('/')[-1]
storage_path = os.path.join(download_location, pkg_name)
url = PACKAGE_SERVER + checksum
urllib.urlretrieve(url, storage_path)
download_checksum = hashlib.sha256(open(storage_path, 'rb').read()).hexdigest()
if checksum.strip() != download_checksum:
print 'Checksum does not match, something seems to be wrong.\n'
print checksum, '\t(expected)'
print download_checksum, '\t(downloaded)'
else:
print 'Download sucessfull for %s.' % (pkg_name)
if not package_found:
print 'Package (%s) could not be found in this servive.' % (package_id)
if __name__ == '__main__':
get()
|
mit
|
Python
|
9c012f3b5609b557b9d14059f2b2a6412283e0ed
|
support option ax='new'
|
sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper
|
src/pyquickhelper/helpgen/graphviz_helper.py
|
src/pyquickhelper/helpgen/graphviz_helper.py
|
"""
@file
@brief Helper about graphviz.
"""
import os
from ..loghelper import run_cmd
from .conf_path_tools import find_graphviz_dot
def plot_graphviz(dot, ax=None, temp_dot=None, temp_img=None, dpi=300):
"""
Plots a dot graph into a :epkg:`matplotlib` plot.
@param dot dot language
@param ax existing ax
@param temp_dot temporary file, if None,
a file is created and removed
@param temp_img temporary image, if None,
a file is created and removed
@param dpi dpi
@return ax
"""
if temp_dot is None:
temp_dot = "temp_%d.dot" % id(dot)
clean_dot = True
else:
clean_dot = False
if temp_img is None:
temp_img = "temp_%d.png" % id(dot)
clean_img = True
else:
clean_img = False
with open(temp_dot, "w", encoding="utf-8") as f:
f.write(dot)
dot_path = find_graphviz_dot()
cmd = '"%s" -Gdpi=%d -Tpng -o "%s" "%s"' % (
dot_path, dpi, temp_img, temp_dot)
out, err = run_cmd(cmd, wait=True)
if err is not None:
err = err.strip("\r\n\t ")
if len(err) > 0:
if clean_dot:
os.remove(temp_dot)
if clean_img and os.path.exists(temp_img):
os.remove(temp_img)
raise RuntimeError(
"Unable to run command line"
"\n---CMD---\n{}\n---OUT---\n{}"
"\n---ERR---\n{}".format(
cmd, out, err))
if ax is None:
import matplotlib.pyplot as plt
ax = plt.gca()
elif isinstance(ax, str) and ax == 'new':
import matplotlib.pyplot as plt
_, ax = plt.subplots(1, 1)
image = plt.imread(temp_img)
else:
import matplotlib.pyplot as plt
image = plt.imread(temp_img)
ax.imshow(image)
if clean_dot:
os.remove(temp_dot)
if clean_img and os.path.exists(temp_img):
os.remove(temp_img)
return ax
|
"""
@file
@brief Helper about graphviz.
"""
import os
from ..loghelper import run_cmd
from .conf_path_tools import find_graphviz_dot
def plot_graphviz(dot, ax=None, temp_dot=None, temp_img=None, dpi=300):
"""
Plots a dot graph into a :epkg:`matplotlib` plot.
@param dot dot language
@param ax existing ax
@param temp_dot temporary file, if None,
a file is created and removed
@param temp_img temporary image, if None,
a file is created and removed
@param dpi dpi
@return ax
"""
if temp_dot is None:
temp_dot = "temp_%d.dot" % id(dot)
clean_dot = True
else:
clean_dot = False
if temp_img is None:
temp_img = "temp_%d.png" % id(dot)
clean_img = True
else:
clean_img = False
with open(temp_dot, "w", encoding="utf-8") as f:
f.write(dot)
dot_path = find_graphviz_dot()
cmd = '"%s" -Gdpi=%d -Tpng -o "%s" "%s"' % (
dot_path, dpi, temp_img, temp_dot)
out, err = run_cmd(cmd, wait=True)
if err is not None:
err = err.strip("\r\n\t ")
if len(err) > 0:
if clean_dot:
os.remove(temp_dot)
if clean_img and os.path.exists(temp_img):
os.remove(temp_img)
raise RuntimeError(
"Unable to run command line"
"\n---CMD---\n{}\n---OUT---\n{}"
"\n---ERR---\n{}".format(
cmd, out, err))
if ax is None:
import matplotlib.pyplot as plt
ax = plt.gca()
image = plt.imread(temp_img)
else:
import matplotlib.pyplot as plt
image = plt.imread(temp_img)
ax.imshow(image)
if clean_dot:
os.remove(temp_dot)
if clean_img and os.path.exists(temp_img):
os.remove(temp_img)
return ax
|
mit
|
Python
|
cbe379efeb7592e9c918fc4d092098b74a3b8c1a
|
Update Deck.py - Add shuffle method to shuffle the deck and then return the shuffled cards.
|
VictorLoren/card-deck-python
|
Deck.py
|
Deck.py
|
#Deck
class Deck:
'''Definition of a card deck.'''
from random import shuffle as rShuffle
def __init__(self,hasJoker=False):
self.suits = ['H','D','S','C']
self.values = [str(x) for x in range(2,10)] #2-9 cards
self.values.extend(['T','J','Q','K','A']) #Face cards (including the 10s)
#Assemble deck
self.cards = [(v,s) for v in self.values for s in self.suits]
#Add Joker cards (2) as 'WW' if needed
if(hasJoker):
self.cards.extend([('W','W'),('W','W')])
#Draw a card from the deck and return a card
def draw(self,fromTop=True):
#Remove from the front/top of deck
if fromTop:
return self.cards.pop(0)
#Remove from the back/bottom of deck
else:
return self.cards.pop()
#Return how many cards are in deck
def sizeOf(self):
return len(self.cards)
#Shuffle deck and return the newly shuffled deck
def shuffle(self):
#Use random.shuffle() method
rShuffle(self.cards)
return self.cards
|
#Deck
class Deck:
'''Definition of a card deck.'''
def __init__(self,hasJoker=False):
self.suits = ['H','D','S','C']
self.values = [str(x) for x in range(2,10)] #2-9 cards
self.values.extend(['T','J','Q','K','A']) #Face cards (including the 10s)
#Assemble deck
self.cards = [(v,s) for v in self.values for s in self.suits]
#Add Joker cards (2) as 'WW' if needed
if(hasJoker):
self.cards.extend([('W','W'),('W','W')])
#Draw a card from the deck and return a card
def draw(self,fromTop=True):
#Remove from the front/top of deck
if fromTop:
return self.cards.pop(0)
#Remove from the back/bottom of deck
else:
return self.cards.pop()
#Return how many cards are in deck
def sizeOf(self):
return len(self.cards)
|
mit
|
Python
|
3bd37ff8b91787da22f925ab858157bffa5698d7
|
Remove unnecessary import
|
Laserbear/Python-Scripts
|
Fibo.py
|
Fibo.py
|
import sys
def Fibo(num):
if num <= 2:
return 1
else:
return Fibo(num-1)+Fibo(num-2)
print(Fibo(int(sys.argv[1])))
|
import math
import sys
def Fibo(num):
if num <= 2:
return 1
else:
return Fibo(num-1)+Fibo(num-2)
print(Fibo(int(sys.argv[1])))
|
apache-2.0
|
Python
|
6855564716827546a5b68c154b0d95daba969119
|
add more user tests
|
RogerTangos/datahub-stub,anantb/datahub,anantb/datahub,RogerTangos/datahub-stub,datahuborg/datahub,datahuborg/datahub,datahuborg/datahub,datahuborg/datahub,anantb/datahub,datahuborg/datahub,RogerTangos/datahub-stub,anantb/datahub,RogerTangos/datahub-stub,datahuborg/datahub,RogerTangos/datahub-stub,RogerTangos/datahub-stub,datahuborg/datahub,anantb/datahub,RogerTangos/datahub-stub,anantb/datahub,anantb/datahub
|
src/inventory/tests/tests.py
|
src/inventory/tests/tests.py
|
from django.test import TestCase
from inventory.models import *
class UserTests(TestCase):
def test_for_fields(self):
""" saving and loading users"""
initial_user = User(id=10, username="user", password="pass", email="email",
f_name="f_name", l_name="l_name", active=True).save()
loaded_user = User.objects.get(id=10)
self.assertEqual(loaded_user.id, 10)
self.assertEqual(loaded_user.username, "user")
self.assertEqual(loaded_user.password, "pass")
self.assertEqual(loaded_user.email, "email")
self.assertEqual(loaded_user.f_name, "f_name")
self.assertEqual(loaded_user.l_name, "l_name")
self.assertEqual(loaded_user.active, True)
self.assertEqual(unicode(loaded_user), "user")
# class CardTests(TestCase):
# """test saving and loading cards"""
# initial_card = Card(id=1, repo_base="repo_base", repo_name="repo_name",
# card_name="card_name", query="query").save()
# loaded_card=Card.objects.get(id=1)
# self.assertEqual(loaded_card.card_name, "card_name")
|
from django.test import TestCase
from inventory.models import *
class UserTests(TestCase):
def test_for_fields(self):
""" saving and loading users"""
initial_user = User(username="user", password="pass", email="email",
f_name="fname", l_name="lname", active=True).save()
loaded_user = User.objects.get(username="user")
self.assertEqual(loaded_user.username, "user")
self.assertEqual(loaded_user.password, "pass")
self.assertEqual(loaded_user.email, "email")
self.assertEqual(loaded_user.f_name, "fname")
self.assertEqual(loaded_user.l_name, "lname")
self.assertEqual(loaded_user.active, True)
self.assertEqual(unicode(loaded_user), "user")
class CardTests(TestCase):
"""test cards"""
|
mit
|
Python
|
237b9d4577f004401c2385163b060c785692c8b6
|
add when_over and when_over_guessed fields to Event (db change)
|
Shrulik/Open-Knesset,ofri/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset,ofri/Open-Knesset,DanaOshri/Open-Knesset,Shrulik/Open-Knesset,OriHoch/Open-Knesset,jspan/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,otadmor/Open-Knesset,jspan/Open-Knesset,alonisser/Open-Knesset,daonb/Open-Knesset,noamelf/Open-Knesset,noamelf/Open-Knesset,Shrulik/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,DanaOshri/Open-Knesset,otadmor/Open-Knesset,habeanf/Open-Knesset,navotsil/Open-Knesset,daonb/Open-Knesset,jspan/Open-Knesset,alonisser/Open-Knesset,OriHoch/Open-Knesset,daonb/Open-Knesset,MeirKriheli/Open-Knesset,habeanf/Open-Knesset,OriHoch/Open-Knesset,habeanf/Open-Knesset,OriHoch/Open-Knesset,habeanf/Open-Knesset,MeirKriheli/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset,DanaOshri/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,daonb/Open-Knesset,ofri/Open-Knesset,DanaOshri/Open-Knesset
|
src/knesset/events/models.py
|
src/knesset/events/models.py
|
from datetime import datetime
from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from knesset.persons.models import Person
class Event(models.Model):
''' hold the when, who, what, where and which fields of events
and allows the users to contribute resources (through links)
and discuss upcoming events.
'''
when = models.DateTimeField()
when_over = models.DateTimeField(null=True)
# KNESSET_TODO the end time of a committee meeting is not recorded anywhere,
# so we are left to guess
when_over_guessed = models.BooleanField(default=True)
who = models.ManyToManyField(Person)
what = models.TextField()
where = models.TextField()
which_type = models.ForeignKey(ContentType,
verbose_name=_('content type'),
related_name="event_for_%(class)s", null=True)
which_pk = models.TextField(_('object ID'), null=True)
which_object = generic.GenericForeignKey(ct_field="which_type", fk_field="which_pk")
@property
def is_future(self):
return self.when > datetime.now()
@property
def which(self):
return self.which_objects and unicode(self.which_object) or self.what
|
from datetime import datetime
from django.db import models
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from knesset.persons.models import Person
class Event(models.Model):
''' hold the when, who, what, where and which fields of events
and allows the users to contribute resources (through links)
and discuss upcoming events.
'''
when = models.DateTimeField()
who = models.ManyToManyField(Person)
what = models.TextField()
where = models.TextField()
which_type = models.ForeignKey(ContentType,
verbose_name=_('content type'),
related_name="event_for_%(class)s", null=True)
which_pk = models.TextField(_('object ID'), null=True)
which_object = generic.GenericForeignKey(ct_field="which_type", fk_field="which_pk")
@property
def is_future(self):
return self.when > datetime.now()
@property
def which(self):
return self.which_objects and unicode(self.which_object) or self.what
|
bsd-3-clause
|
Python
|
d53dc67fc002448c7b94758843223a17d4623483
|
Allow IP to be blank
|
Ecotrust/madrona_addons,Ecotrust/madrona_addons
|
lingcod/bookmarks/models.py
|
lingcod/bookmarks/models.py
|
from django.contrib.gis.db import models
from lingcod.features import register
from lingcod.features.models import Feature
from django.utils.html import escape
from django.conf import settings
class Bookmark(Feature):
description = models.TextField(default="", null=True, blank=True)
latitude = models.FloatField()
longitude = models.FloatField()
altitude = models.FloatField()
heading = models.FloatField(default=0)
tilt = models.FloatField(default=0)
roll = models.FloatField(default=0)
altitudeMode = models.FloatField(default=1)
ip = models.IPAddressField(default="0.0.0.0", null=True, blank=True)
publicstate = models.TextField(default="{}")
@property
def kml(self):
camera = "<Camera>\n"
camera_params = ["latitude", "longitude", "altitude", "heading", "tilt", "roll", "altitudeMode"]
for p in camera_params:
val = self.__dict__[p]
if val is not None:
camera += " <%s>%s</%s>\n" % (p, val, p)
camera += " </Camera>\n"
return """
<Placemark id="%s">
<visibility>1</visibility>
<name>%s</name>
<description>%s</description>
<styleUrl>#%s-default</styleUrl>
%s
</Placemark>
""" % (self.uid, escape(self.name), escape(self.description), self.model_uid(),
camera)
@property
def kml_style(self):
return """
<Style id="%s-default">
<!-- invisible -->
<IconStyle>
<scale>0.0</scale>
</IconStyle>
<LabelStyle>
<scale>0.0</scale>
</LabelStyle>
</Style>
""" % (self.model_uid())
class Options:
manipulators = []
optional_manipulators = [ ]
verbose_name = 'Bookmark'
form = 'lingcod.bookmarks.forms.BookmarkForm'
icon_url = 'bookmarks/images/bookmark.png'
form_template = 'bookmarks/form.html'
show_template = 'bookmarks/show.html'
if settings.BOOKMARK_FEATURE:
Bookmark = register(Bookmark)
|
from django.contrib.gis.db import models
from lingcod.features import register
from lingcod.features.models import Feature
from django.utils.html import escape
from django.conf import settings
class Bookmark(Feature):
description = models.TextField(default="", null=True, blank=True)
latitude = models.FloatField()
longitude = models.FloatField()
altitude = models.FloatField()
heading = models.FloatField(default=0)
tilt = models.FloatField(default=0)
roll = models.FloatField(default=0)
altitudeMode = models.FloatField(default=1)
ip = models.IPAddressField(default="0.0.0.0")
publicstate = models.TextField(default="{}")
@property
def kml(self):
camera = "<Camera>\n"
camera_params = ["latitude", "longitude", "altitude", "heading", "tilt", "roll", "altitudeMode"]
for p in camera_params:
val = self.__dict__[p]
if val is not None:
camera += " <%s>%s</%s>\n" % (p, val, p)
camera += " </Camera>\n"
return """
<Placemark id="%s">
<visibility>1</visibility>
<name>%s</name>
<description>%s</description>
<styleUrl>#%s-default</styleUrl>
%s
</Placemark>
""" % (self.uid, escape(self.name), escape(self.description), self.model_uid(),
camera)
@property
def kml_style(self):
return """
<Style id="%s-default">
<!-- invisible -->
<IconStyle>
<scale>0.0</scale>
</IconStyle>
<LabelStyle>
<scale>0.0</scale>
</LabelStyle>
</Style>
""" % (self.model_uid())
class Options:
manipulators = []
optional_manipulators = [ ]
verbose_name = 'Bookmark'
form = 'lingcod.bookmarks.forms.BookmarkForm'
icon_url = 'bookmarks/images/bookmark.png'
form_template = 'bookmarks/form.html'
show_template = 'bookmarks/show.html'
if settings.BOOKMARK_FEATURE:
Bookmark = register(Bookmark)
|
bsd-3-clause
|
Python
|
d137005229e180b509f0a2f83f5d2472b40d8890
|
Set up Sentry if we're configured for it (so I don't lose this code again)
|
markpasc/makerbase,markpasc/makerbase
|
run.py
|
run.py
|
import os
from os.path import abspath, dirname, join
from makerbase import app
if 'MAKERBASE_SETTINGS' not in os.environ:
os.environ['MAKERBASE_SETTINGS'] = join(dirname(abspath(__file__)), 'settings.py')
app.config.from_envvar('MAKERBASE_SETTINGS')
if 'SENTRY_DSN' in app.config:
from raven.contrib.flask import Sentry
sentry = Sentry(app, dsn=app.config['SENTRY_DSN'])
if __name__ == '__main__':
app.run(debug=True)
|
import os
from os.path import abspath, dirname, join
from makerbase import app
if 'MAKERBASE_SETTINGS' not in os.environ:
os.environ['MAKERBASE_SETTINGS'] = join(dirname(abspath(__file__)), 'settings.py')
app.config.from_envvar('MAKERBASE_SETTINGS')
if __name__ == '__main__':
app.run(debug=True)
|
mit
|
Python
|
a4656021f6a97bf5ffccb3d6e522515769ba0d21
|
Remove unnecessary calls to disable_continuous_mode
|
illumenati/duwamish-sensor,tipsqueal/duwamish-sensor
|
run.py
|
run.py
|
import argparse
import serial
import threading
from io import BufferedRWPair, TextIOWrapper
from time import sleep
temp_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
parser = argparse.ArgumentParser()
parser.add_argument('oxygen', help='The USB port of the oxygen sensor.')
parser.add_argument('salinity', help='The USB port of the salinity sensor.')
parser.add_argument('server_ip', help='The IP address of the lighthouse node.')
parser.add_argument('port', help='The port of the lighthouse node.')
def init_db():
# TODO: initialize the sqlite database.
pass
def create_connection(usb_port):
print('Creating connection on {}'.format(usb_port))
ser = serial.Serial(usb_port, BAUD_RATE)
# disable_continuous_mode(ser)
return TextIOWrapper(BufferedRWPair(ser, ser), newline='\r', encoding='ascii', line_buffering=True)
def disable_continuous_mode(conn: serial.Serial):
# TODO: research if we need to send this command every time we connect to the sensors, or if it only
# needs to be sent once to disable continuous mode. If only once we should move this code into a
# separate python file.
print('Disabling continuous mode...')
conn.write(bytes('E\r', 'ascii'))
if conn.inWaiting() > 0:
# clear the buffer if there is anything waiting.
print('Clearing buffer...')
conn.read(conn.inWaiting())
def save_data(temperature, salinity, oxygen):
# TODO save data to database (sqlite)
pass
def push_data(temperature, salinity, oxygen, server_ip, server_port):
payload = {'temperature': temperature, 'salinity': salinity, 'oxygen': oxygen}
# TODO push data to lighthouse node.
def initialize_serial_connections(oxy_usb, sal_usb):
temp_conn = create_connection(temp_usb)
sal_conn = create_connection(sal_usb)
oxy_conn = create_connection(oxy_usb)
return temp_conn, sal_conn, oxy_conn
def run_loop(oxy_usb, sal_usb, server_ip, server_port):
temp_conn, sal_conn, oxy_conn = initialize_serial_connections()
# TODO: Catch serial.serialutil.SerialException on read?
while True:
temp.write('R\r')
temp = temp_conn.readline()
sal.write('R\r')
sal = sal_conn.readline()
# TODO: send temp and sal to oxy sensor first, then retrieve oxy value.
# oxy.write(<salinity command here>)
# oxy.write(<temp command here>)
oxy.write('R\r')
oxy = oxy_conn.readline()
print('Temperature: {}, Dissolved Oxygen: {}, Salinity: {}'.format(temp, oxy, sal))
save_data(temp, oxy, sal)
push_data(temp, oxy, sal, server_ip, server_port)
# TODO: Determine how often we should be grabbing data from sensors and pushing to other pi node.
time.sleep(5)
if __name__ == '__main__':
# TODO: Create supervisord script to keep run.py running.
# TODO: Parse command line args for database connection info.
args = parser.parse_args()
run_loop(args.oxygen, args.salinity, args.server_ip, args.port)
|
import argparse
import serial
import threading
from io import BufferedRWPair, TextIOWrapper
from time import sleep
temp_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
parser = argparse.ArgumentParser()
parser.add_argument('oxygen', help='The USB port of the oxygen sensor.')
parser.add_argument('salinity', help='The USB port of the salinity sensor.')
parser.add_argument('server_ip', help='The IP address of the lighthouse node.')
parser.add_argument('port', help='The port of the lighthouse node.')
def init_db():
# TODO: initialize the sqlite database.
pass
def create_connection(usb_port):
print('Creating connection on {}'.format(usb_port))
ser = serial.Serial(usb_port, BAUD_RATE)
# disable_continuous_mode(ser)
return TextIOWrapper(BufferedRWPair(ser, ser), newline='\r', encoding='ascii', line_buffering=True)
def disable_continuous_mode(conn: serial.Serial):
# TODO: research if we need to send this command every time we connect to the sensors, or if it only
# needs to be sent once to disable continuous mode. If only once we should move this code into a
# separate python file.
print('Disabling continuous mode...')
conn.write(bytes('E\r', 'ascii'))
if conn.inWaiting() > 0:
# clear the buffer if there is anything waiting.
print('Clearing buffer...')
conn.read(conn.inWaiting())
def save_data(temperature, salinity, oxygen):
# TODO save data to database (sqlite)
pass
def push_data(temperature, salinity, oxygen, server_ip, server_port):
payload = {'temperature': temperature, 'salinity': salinity, 'oxygen': oxygen}
# TODO push data to lighthouse node.
def initialize_serial_connections(oxy_usb, sal_usb):
temp_conn = create_connection(temp_usb)
sal_conn = create_connection(sal_usb)
oxy_conn = create_connection(oxy_usb)
disable_continuous_mode(temp_conn)
disable_continuous_mode(sal_conn)
disable_continuous_mode(oxy_conn)
return temp_conn, sal_conn, oxy_conn
def run_loop(oxy_usb, sal_usb, server_ip, server_port):
temp_conn, sal_conn, oxy_conn = initialize_serial_connections()
# TODO: Catch serial.serialutil.SerialException on read?
while True:
temp.write('R\r')
temp = temp_conn.readline()
sal.write('R\r')
sal = sal_conn.readline()
# TODO: send temp and sal to oxy sensor first, then retrieve oxy value.
# oxy.write(<salinity command here>)
# oxy.write(<temp command here>)
oxy.write('R\r')
oxy = oxy_conn.readline()
print('Temperature: {}, Dissolved Oxygen: {}, Salinity: {}'.format(temp, oxy, sal))
save_data(temp, oxy, sal)
push_data(temp, oxy, sal, server_ip, server_port)
# TODO: Determine how often we should be grabbing data from sensors and pushing to other pi node.
time.sleep(5)
if __name__ == '__main__':
# TODO: Create supervisord script to keep run.py running.
# TODO: Parse command line args for database connection info.
args = parser.parse_args()
run_loop(args.oxygen, args.salinity, args.server_ip, args.port)
|
mit
|
Python
|
1c8a1bfeef8206267a45562d4932cece1cbea1b4
|
Fix some pylint issues
|
lubomir/libtrie,lubomir/libtrie,lubomir/libtrie
|
Trie.py
|
Trie.py
|
#! /usr/bin/env python
# vim: set encoding=utf-8
from ctypes import cdll, c_char_p, c_void_p, create_string_buffer
libtrie = cdll.LoadLibrary("./libtrie.so")
libtrie.trie_load.argtypes = [c_char_p]
libtrie.trie_load.restype = c_void_p
libtrie.trie_lookup.argtypes = [c_void_p, c_char_p, c_char_p]
libtrie.trie_lookup.restype = c_void_p
libtrie.trie_get_last_error.restype = c_char_p
class Trie(object):
def __init__(self, filename):
self.free_func = libtrie.trie_free
self.ptr = libtrie.trie_load(filename)
if self.ptr == 0:
err = libtrie.trie_get_last_error()
raise IOError(str(err))
def __del__(self):
if self:
self.free_func(self.ptr)
def lookup(self, key):
s = create_string_buffer('\000' * 256)
res = libtrie.trie_lookup(self.ptr, key, s)
if res:
return [s.decode('utf8') for s in s.value.split('\n')]
else:
return []
def test_main():
"""
This function creates a storage backed by a file and tests it by retrieving
a couple of records.
"""
import sys
t = Trie('prijmeni5.trie')
for name in sys.stdin.readlines():
name = name.strip()
for s in t.lookup(name):
print s
if __name__ == '__main__':
test_main()
|
#! /usr/bin/env python
# vim: set encoding=utf-8
from ctypes import *
libtrie = cdll.LoadLibrary("./libtrie.so")
libtrie.trie_load.argtypes = [c_char_p]
libtrie.trie_load.restype = c_void_p
libtrie.trie_lookup.argtypes = [ c_void_p, c_char_p, c_char_p ]
libtrie.trie_lookup.restype = c_void_p
libtrie.trie_get_last_error.restype = c_char_p
class Trie(object):
def __init__(self, filename):
self.free_func = libtrie.trie_free
self.ptr = libtrie.trie_load(filename)
if self.ptr == 0:
err = libtrie.trie_get_last_error()
raise IOError(str(err))
def __del__(self):
if self:
self.free_func(self.ptr)
def lookup(self, key):
s = create_string_buffer('\000' * 256)
res = libtrie.trie_lookup(self.ptr, key, s)
if res:
return [s.decode('utf8') for s in s.value.split('\n')]
else:
return []
def test_main():
"""
This function creates a storage backed by a file and tests it by retrieving
a couple of records.
"""
import sys
t = Trie('prijmeni5.trie')
for name in sys.stdin.readlines():
name = name.strip()
for s in t.lookup(name):
print s
if __name__ == '__main__':
test_main()
|
bsd-3-clause
|
Python
|
e62db9661295ff3912dbaaaff0d9f267f0b7ffe1
|
Add url callback on custom login
|
AndrzejR/mining,mining/mining,mlgruby/mining,mining/mining,mlgruby/mining,mlgruby/mining,jgabriellima/mining,avelino/mining,jgabriellima/mining,seagoat/mining,chrisdamba/mining,seagoat/mining,avelino/mining,AndrzejR/mining,chrisdamba/mining
|
auth.py
|
auth.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bottle.ext import auth
from utils import conf
try:
auth_import = conf('auth')['engine'].split('.')[-1]
auth_from = u".".join(conf('auth')['engine'].split('.')[:-1])
auth_engine = getattr(__import__(auth_from, fromlist=[auth_import]),
auth_import)
except:
print 'Set valid auth engine'
exit(0)
callback = u"{}://{}".format(
conf('openmining')['protocol'],
conf('openmining')['domain'])
if conf('openmining')['domain_port'] not in ['80', '443']:
callback = "{}:{}".format(callback, conf('openmining')['domain_port'])
if auth_import == 'Google':
engine = auth_engine(
conf('auth')['key'], conf('auth')['secret'], callback)
elif auth_import == 'Facebook':
# Not working requered parans
engine = auth_engine()
elif auth_import == 'Twitter':
# Not working requered parans
engine = auth_engine()
else:
engine = auth_engine(callback_url=callback)
auth = auth.AuthPlugin(engine)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bottle.ext import auth
from utils import conf
try:
auth_import = conf('auth')['engine'].split('.')[-1]
auth_from = u".".join(conf('auth')['engine'].split('.')[:-1])
auth_engine = getattr(__import__(auth_from, fromlist=[auth_import]),
auth_import)
except:
print 'Set valid auth engine'
exit(0)
callback = u"{}://{}".format(
conf('openmining')['protocol'],
conf('openmining')['domain'])
if conf('openmining')['domain_port'] not in ['80', '443']:
callback = "{}:{}".format(callback, conf('openmining')['domain_port'])
if auth_import == 'Google':
engine = auth_engine(
conf('auth')['key'], conf('auth')['secret'], callback)
elif auth_import == 'Facebook':
# Not working requered parans
engine = auth_engine()
elif auth_import == 'Twitter':
# Not working requered parans
engine = auth_engine()
else:
engine = auth_engine()
auth = auth.AuthPlugin(engine)
|
mit
|
Python
|
95723719050aa08119ed2478c0bb40253a2b0b3e
|
Remove methods with unnecessary super delegation.
|
ramnes/qtile,ramnes/qtile,qtile/qtile,qtile/qtile
|
libqtile/layout/max.py
|
libqtile/layout/max.py
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
# Copyright (c) 2017, Dirk Hartmann.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile.layout.base import _SimpleLayoutBase
class Max(_SimpleLayoutBase):
"""Maximized layout
A simple layout that only displays one window at a time, filling the
screen_rect. This is suitable for use on laptops and other devices with
small screens. Conceptually, the windows are managed as a stack, with
commands to switch to next and previous windows in the stack.
"""
defaults = [("name", "max", "Name of this layout.")]
def __init__(self, **config):
super().__init__(**config)
self.add_defaults(Max.defaults)
def add(self, client):
return super().add(client, 1)
def configure(self, client, screen_rect):
if self.clients and client is self.clients.current_client:
client.place(
screen_rect.x,
screen_rect.y,
screen_rect.width,
screen_rect.height,
0,
None
)
client.unhide()
else:
client.hide()
cmd_previous = _SimpleLayoutBase.previous
cmd_next = _SimpleLayoutBase.next
cmd_up = cmd_previous
cmd_down = cmd_next
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
# Copyright (c) 2017, Dirk Hartmann.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile.layout.base import _SimpleLayoutBase
class Max(_SimpleLayoutBase):
"""Maximized layout
A simple layout that only displays one window at a time, filling the
screen_rect. This is suitable for use on laptops and other devices with
small screens. Conceptually, the windows are managed as a stack, with
commands to switch to next and previous windows in the stack.
"""
defaults = [("name", "max", "Name of this layout.")]
def __init__(self, **config):
super().__init__(**config)
self.add_defaults(Max.defaults)
def clone(self, group):
return super().clone(group)
def add(self, client):
return super().add(client, 1)
def configure(self, client, screen_rect):
if self.clients and client is self.clients.current_client:
client.place(
screen_rect.x,
screen_rect.y,
screen_rect.width,
screen_rect.height,
0,
None
)
client.unhide()
else:
client.hide()
cmd_previous = _SimpleLayoutBase.previous
cmd_next = _SimpleLayoutBase.next
cmd_up = cmd_previous
cmd_down = cmd_next
|
mit
|
Python
|
5ebf34e1c572e5db9012af4228eaca2a8461b8d9
|
add some extra debug logging to smr-reduce
|
codebynumbers/smr,50onRed/smr
|
smr/reduce.py
|
smr/reduce.py
|
#!/usr/bin/env python
import sys
from .shared import get_config, configure_logging
def main():
if len(sys.argv) < 2:
sys.stderr.write("usage: smr-reduce config.py\n")
sys.exit(1)
config = get_config(sys.argv[1])
configure_logging(config)
try:
for result in iter(sys.stdin.readline, ""):
result = result.rstrip() # remove trailing linebreak
logging.debug("smr-reduce got %s", result)
config.REDUCE_FUNC(result)
except (KeyboardInterrupt, SystemExit):
# we want to output results even if user aborted
config.OUTPUT_RESULTS_FUNC()
else:
config.OUTPUT_RESULTS_FUNC()
|
#!/usr/bin/env python
import sys
from .shared import get_config, configure_logging
def main():
if len(sys.argv) < 2:
sys.stderr.write("usage: smr-reduce config.py\n")
sys.exit(1)
config = get_config(sys.argv[1])
configure_logging(config)
try:
for result in iter(sys.stdin.readline, ""):
config.REDUCE_FUNC(result.rstrip()) # remove trailing linebreak
except (KeyboardInterrupt, SystemExit):
# we want to output results even if user aborted
config.OUTPUT_RESULTS_FUNC()
else:
config.OUTPUT_RESULTS_FUNC()
|
mit
|
Python
|
dd020b279f011ff78a6a41571a839e4c57333e93
|
Rename username field to userspec (#196).
|
devilry/devilry-django,vegarang/devilry-django,devilry/devilry-django,vegarang/devilry-django,devilry/devilry-django,devilry/devilry-django
|
devilry/apps/core/models/relateduser.py
|
devilry/apps/core/models/relateduser.py
|
import re
from django.db import models
from django.db.models import Q
from django.core.exceptions import ValidationError
from period import Period
from node import Node
from abstract_is_admin import AbstractIsAdmin
class RelatedUserBase(models.Model, AbstractIsAdmin):
"""
Base class for :cls:`RelatedExaminer` and cls:`RelatedStudent`.
This is used to generate AssignmentGroups and
.. attribute:: userspec
One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN).
For RelatedExaminer, only a single username is allowed.
"""
usersandtags_patt = r'((?:\w+\s*,\s*)*\w+)\s*\(((?:\w+\s*,\s*)*\w+)\)$'
userspec = models.CharField(max_length=200,
help_text='One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN). For RelatedExaminer, only a single username is allowed.')
class Meta:
abstract = True # This model will then not be used to create any database table. Instead, when it is used as a base class for other models, its fields will be added to those of the child class.
unique_together = ('period', 'userspec')
app_label = 'core'
@classmethod
def q_is_admin(cls, user_obj):
return Q(period__admins=user_obj) | \
Q(period__parentnode__admins=user_obj) | \
Q(period__parentnode__parentnode__pk__in=Node._get_nodepks_where_isadmin(user_obj))
def clean(self, *args, **kwargs):
super(RelatedUserBase, self).clean(*args, **kwargs)
if not self.patt.match(self.userspec):
raise ValidationError('Invaid related user.')
def __unicode__(self):
return '{0}:{1}'.format(self.period, self.userspec)
class RelatedExaminer(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile('^' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedexaminers',
help_text='The related period.')
class RelatedStudent(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile(r'^(?:(.+?)\s*::\s*)?' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedstudents',
help_text='The related period.')
|
import re
from django.db import models
from django.db.models import Q
from django.core.exceptions import ValidationError
from period import Period
from node import Node
from abstract_is_admin import AbstractIsAdmin
class RelatedUserBase(models.Model, AbstractIsAdmin):
"""
Base class for :cls:`RelatedExaminer` and cls:`RelatedStudent`.
This is used to generate AssignmentGroups and
.. attribute:: username
One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN).
For RelatedExaminer, only a single username is allowed.
"""
usersandtags_patt = r'((?:\w+\s*,\s*)*\w+)\s*\(((?:\w+\s*,\s*)*\w+)\)$'
username = models.CharField(max_length=200,
help_text='One or more usernames followed by optional tags. Format: usernameA, ...., usernameN (tag1, tag2, ..., tagN). For RelatedExaminer, only a single username is allowed.')
class Meta:
abstract = True # This model will then not be used to create any database table. Instead, when it is used as a base class for other models, its fields will be added to those of the child class.
unique_together = ('period', 'username')
app_label = 'core'
@classmethod
def q_is_admin(cls, user_obj):
return Q(period__admins=user_obj) | \
Q(period__parentnode__admins=user_obj) | \
Q(period__parentnode__parentnode__pk__in=Node._get_nodepks_where_isadmin(user_obj))
def clean(self, *args, **kwargs):
super(RelatedUserBase, self).clean(*args, **kwargs)
if not self.patt.match(self.username):
raise ValidationError('Invaid related user.')
def __unicode__(self):
return '{0}:{1}'.format(self.period, self.username)
class RelatedExaminer(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile('^' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedexaminers',
help_text='The related period.')
class RelatedStudent(RelatedUserBase):
"""
.. attribute:: period
A django.db.models.ForeignKey_ that points to the `Period`_.
"""
patt = re.compile(r'^(?:(.+?)\s*::\s*)?' + RelatedUserBase.usersandtags_patt)
period = models.ForeignKey(Period, related_name='relatedstudents',
help_text='The related period.')
|
bsd-3-clause
|
Python
|
4b330755edab7a57de6d39a7e365c5f79df81065
|
Update config.py
|
nicholas-moreles/blaspy
|
blaspy/config.py
|
blaspy/config.py
|
"""
Copyright (c) 2014, The University of Texas at Austin.
All rights reserved.
This file is part of BLASpy and is available under the 3-Clause
BSD License, which can be found in the LICENSE file at the top-level
directory or at http://opensource.org/licenses/BSD-3-Clause
"""
from .errors import raise_blas_os_error
from ctypes import cdll
from os import chdir, path
from platform import system
from struct import calcsize
# The name of the BLAS .so or .dll file. By default this is the OpenBLAS reference
# implementation bundled with BLASpy. Only modify if you wish to use a different version of BLAS
# or if your operating system is not supported by BLASpy out of the box.
BLAS_NAME = "" # default is ""
# True if the BLAS .so or .dll file is in the blaspy/lib subdirectory,
# False if Python should search for it.
IN_BLASPY_SUBDIRECTORY = True # default is True
###############################
# DO NOT EDIT BELOW THIS LINE #
###############################
# find the appropriate BLAS to use
if BLAS_NAME == "": # try to use included OpenBLAS
PREPEND = str(path.dirname(__file__))[:-6] + "lib/"
if system() == "Windows":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-win64-int32.dll"
chdir(PREPEND + "win64")
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-win32.dll"
chdir(PREPEND + "win32")
PREPEND = ""
elif system() == "Linux":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-linux64.so"
PREPEND += "linux64/"
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-linux32.so"
PREPEND += "linux32/"
else: # no appropriate OpenBLAS included, BLAS_NAME_OVERRIDE must be used
raise_blas_os_error()
else:
PREPEND = ""
# Change the directory and load the library
_libblas = cdll.LoadLibrary(PREPEND + BLAS_NAME)
|
"""
Copyright (c) 2014, The University of Texas at Austin.
All rights reserved.
This file is part of BLASpy and is available under the 3-Clause
BSD License, which can be found in the LICENSE file at the top-level
directory or at http://opensource.org/licenses/BSD-3-Clause
"""
from .errors import raise_blas_os_error
from ctypes import cdll
from os import chdir, path
from platform import system
from struct import calcsize
# The name of the BLAS .so or .dll file. By default this is the OpenBLAS reference
# implementation bundled with BLASpy. Only modify if you wish to use a different version of BLAS
# or if your operating system is not supported by BLASpy out of the box.
BLAS_NAME = "" # default is ""
# True if the BLAS .so or .dll file is in the blaspy/lib subdirectory,
# False if Python should search for it.
IN_BLASPY_SUBDIRECTORY = True # default is True
###############################
# DO NOT EDIT BELOW THIS LINE #
###############################
# find the appropriate BLAS to use
if BLAS_NAME == "": # try to use included OpenBLAS
if system() == "Windows":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-win64-int32.dll"
SUB_DIRECTORY = "win64"
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-win32.dll"
SUB_DIRECTORY = "win32"
elif system() == "Linux":
if calcsize("P") == 8: # 64-bit
BLAS_NAME = "libopenblas-0.2.13-linux64.so"
SUB_DIRECTORY = "linux64"
else: # 32-bit
BLAS_NAME = "libopenblas-0.2.13-linux32.so"
SUB_DIRECTORY = "linux32"
else: # no appropriate OpenBLAS included, BLAS_NAME_OVERRIDE must be used
raise_blas_os_error()
else:
SUB_DIRECTORY = ""
# Change the directory and load the library
if IN_BLASPY_SUBDIRECTORY:
chdir(str(path.dirname(__file__))[:-6] + "lib/" + SUB_DIRECTORY)
_libblas = cdll.LoadLibrary(BLAS_NAME)
|
bsd-3-clause
|
Python
|
1809df6d5886ac6c0c35c8e879d9eda334606f4e
|
Simplify handling from_db_value across django versions
|
Niklas9/django-unixdatetimefield,Niklas9/django-unixdatetimefield
|
django_unixdatetimefield/fields.py
|
django_unixdatetimefield/fields.py
|
import datetime
import time
import django.db.models as models
class UnixDateTimeField(models.DateTimeField):
# TODO(niklas9):
# * should we take care of transforming between time zones in any way here ?
# * get default datetime format from settings ?
DEFAULT_DATETIME_FMT = '%Y-%m-%d %H:%M:%S'
TZ_CONST = '+'
# TODO(niklas9):
# * metaclass below just for Django < 1.9, fix a if stmt for it?
#__metaclass__ = models.SubfieldBase
description = "Unix timestamp integer to datetime object"
def get_internal_type(self):
return 'PositiveIntegerField'
def to_python(self, val):
if val is None or isinstance(val, datetime.datetime):
return val
if isinstance(val, datetime.date):
return datetime.datetime(val.year, val.month, val.day)
elif self._is_string(val):
# TODO(niklas9):
# * not addressing time zone support as todo above for now
if self.TZ_CONST in val:
val = val.split(self.TZ_CONST)[0]
return datetime.datetime.strptime(val, self.DEFAULT_DATETIME_FMT)
else:
return datetime.datetime.fromtimestamp(float(val))
def _is_string(value, val):
try:
return isinstance(val, unicode)
except NameError:
return isinstance(val, str)
def get_db_prep_value(self, val, *args, **kwargs):
if val is None:
if self.default == models.fields.NOT_PROVIDED: return None
return self.default
return int(time.mktime(val.timetuple()))
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return self.to_python(val).strftime(self.DEFAULT_DATETIME_FMT)
def from_db_value(self, val, *args, **kwargs):
return self.to_python(val)
|
import datetime
import time
import django
import django.db.models as models
class UnixDateTimeField(models.DateTimeField):
# TODO(niklas9):
# * should we take care of transforming between time zones in any way here ?
# * get default datetime format from settings ?
DEFAULT_DATETIME_FMT = '%Y-%m-%d %H:%M:%S'
TZ_CONST = '+'
# TODO(niklas9):
# * metaclass below just for Django < 1.9, fix a if stmt for it?
#__metaclass__ = models.SubfieldBase
description = "Unix timestamp integer to datetime object"
def get_internal_type(self):
return 'PositiveIntegerField'
def to_python(self, val):
if val is None or isinstance(val, datetime.datetime):
return val
if isinstance(val, datetime.date):
return datetime.datetime(val.year, val.month, val.day)
elif self._is_string(val):
# TODO(niklas9):
# * not addressing time zone support as todo above for now
if self.TZ_CONST in val:
val = val.split(self.TZ_CONST)[0]
return datetime.datetime.strptime(val, self.DEFAULT_DATETIME_FMT)
else:
return datetime.datetime.fromtimestamp(float(val))
def _is_string(value, val):
try:
return isinstance(val, unicode)
except NameError:
return isinstance(val, str)
def get_db_prep_value(self, val, *args, **kwargs):
if val is None:
if self.default == models.fields.NOT_PROVIDED: return None
return self.default
return int(time.mktime(val.timetuple()))
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return self.to_python(val).strftime(self.DEFAULT_DATETIME_FMT)
# Django 2.0 updates the signature of from_db_value.
# https://docs.djangoproject.com/en/2.0/releases/2.0/#context-argument-of-field-from-db-value-and-expression-convert-value
if django.VERSION < (2,):
def from_db_value(self, val, expression, connection, context):
return self.to_python(val)
else:
def from_db_value(self, val, expression, connection):
return self.to_python(val)
|
bsd-3-clause
|
Python
|
20053951b3036d0ae49f7f1ae25d600848872c82
|
Bump version
|
markstory/lint-review,markstory/lint-review,markstory/lint-review
|
lintreview/__init__.py
|
lintreview/__init__.py
|
__version__ = '2.36.2'
|
__version__ = '2.36.1'
|
mit
|
Python
|
f426d44f82a4f1855cb180b5aff98221c14537f1
|
Update version.py
|
elvandy/nltools,ljchang/neurolearn,ljchang/nltools
|
nltools/version.py
|
nltools/version.py
|
"""Specifies current version of nltools to be used by setup.py and __init__.py
"""
__version__ = '0.3.7'
|
"""Specifies current version of nltools to be used by setup.py and __init__.py
"""
__version__ = '0.3.6'
|
mit
|
Python
|
fcf5d1f33026069d69690c67f7ddcc8c77f15626
|
add exception handingling for debug
|
XertroV/opreturn-ninja,XertroV/opreturn-ninja,XertroV/opreturn-ninja
|
opreturnninja/views.py
|
opreturnninja/views.py
|
import json
import random
from pyramid.view import view_config
from .constants import ELECTRUM_SERVERS
from bitcoin.rpc import RawProxy, DEFAULT_USER_AGENT
import socket
@view_config(route_name='api', renderer='json')
def api_view(request):
global rpc
assert hasattr(request, 'json_body')
assert 'method' in request.json_body and 'params' in request.json_body
method = request.json_body['method']
params = request.json_body['params']
assert type(params) == list
if method == 'sendrawtransaction':
assert len(params) == 1
sent = False
while not sent:
try:
server = random.choice(list(ELECTRUM_SERVERS.items()))
s = socket.create_connection(server)
s.send(b'{"id":"0", "method":"blockchain.transaction.broadcast", "params":["' + params[0].encode() + b'"]}\n')
r = {'result': s.recv(1024)[:-1].decode(), 'error': None, 'id': request.json_body['id']} # the slice is to remove the trailing new line
print(r)
return r
except ConnectionRefusedError as e:
print(e, server)
except socket.gaierror as e:
print(e, server)
except Exception as e:
print(e, server)
return {
'result': None,
'error': 'RPC Request Unknown',
'id': request.json_body['id'],
}
@view_config(route_name='index', renderer='templates/index.pt')
def index_view(request):
return {}
|
import json
import random
from pyramid.view import view_config
from .constants import ELECTRUM_SERVERS
from bitcoin.rpc import RawProxy, DEFAULT_USER_AGENT
import socket
@view_config(route_name='api', renderer='json')
def api_view(request):
global rpc
assert hasattr(request, 'json_body')
assert 'method' in request.json_body and 'params' in request.json_body
method = request.json_body['method']
params = request.json_body['params']
assert type(params) == list
if method == 'sendrawtransaction':
assert len(params) == 1
sent = False
while not sent:
try:
s = socket.create_connection(random.choice(list(ELECTRUM_SERVERS.items())))
s.send(b'{"id":"0", "method":"blockchain.transaction.broadcast", "params":["' + params[0].encode() + b'"]}\n')
r = {'result': s.recv(1024)[:-1].decode(), 'error': None, 'id': request.json_body['id']} # the slice is to remove the trailing new line
print(r)
return r
except ConnectionRefusedError as e:
print(e)
except socket.gaierror as e:
print(e)
return {
'result': None,
'error': 'RPC Request Unknown',
'id': request.json_body['id'],
}
@view_config(route_name='index', renderer='templates/index.pt')
def index_view(request):
return {}
|
mit
|
Python
|
43d7850403e1e98951909bcb0c441098c3221bde
|
Update ipc_lista1.4.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.4.py
|
lista1/ipc_lista1.4.py
|
#ipc_lista1.4
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça as 4 notas bimestrais e mostre a media
nota1 = int(input("Digite a primeira nota do bimestre: "))
nota2 = int(input("Digite a segunda nota do bimestre: "))
nota3 = int(input("Digite a terceira nota do bismestre: "))
nota4 - int(input("Digite a quarta note do bismestre: "))
print
media = (nota1+nota2+nota3+nota4)/ 4.0
print" A sua média é: %s" %media
|
#ipc_lista1.4
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça as 4 notas bimestrais e mostre a media
nota1 = int(input("Digite a primeira nota do bimestre: "))
nota2 = int(input("Digite a segunda nota do bimestre: "))
nota3 = int(input("Digite a terceira nota do bismestre: "))
nota4 - int(input("Digite a quarta note do bismestre: "))
print
media = (nota1+nota2+nota3+nota4)/4.0
print" A sua média é: %s" %media
|
apache-2.0
|
Python
|
fb772e5e597082a119348efa68f70e60c11506cd
|
clean up
|
timotheus/python-patterns
|
lists/gift_exchange.py
|
lists/gift_exchange.py
|
import random
import itertools
givers = [('tim', 'shirt'), ('jim', 'shoe'), ('john', 'ball'), ('joe', 'fruit')]
if len(givers) < 2:
print "must have more than 1 givers"
else:
a = list(givers)
b = list(givers)
while a == b:
random.shuffle(a)
random.shuffle(b)
for i, j in itertools.izip(a, b):
print '%s gives %s to %s.' % (i[0], i[1], j[0])
|
import random
import itertools
givers = [('tim', 'shirt'), ('jim', 'shoe'), ('joe', 'fruit'), ('john', 'ball')]
def valid(a, b):
if a == b:
return False
else:
return True
if len(givers) < 2:
print "must have more than 1 givers"
else:
a = list(givers)
b = list(givers)
while not valid(a, b):
random.shuffle(a)
random.shuffle(b)
for i, j in itertools.izip(a, b):
print '%s gives %s to %s.' % (i[0], i[1], j[0])
|
unlicense
|
Python
|
b76e1697b92565ca3fc8a7ee2961adf894095e04
|
Add User as foreign key in Bill
|
ioO/billjobs
|
billing/models.py
|
billing/models.py
|
from django.db import models
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, pre_init
import datetime
class Bill(models.Model):
user = models.ForeignKey(User)
number = models.CharField(max_length=10, unique=True, blank=True)
isPaid = models.BooleanField(default=False)
billing_date = models.DateField()
class Service(models.Model):
reference = models.CharField(max_length=5)
name = models.CharField(max_length=128)
description = models.CharField(max_length=1024)
price = models.FloatField()
def __unicode__(self):
""" Return name as object representation """
return self.name
class BillLine(models.Model):
bill = models.ForeignKey(Bill)
service = models.ForeignKey(Service)
quantity = models.SmallIntegerField(default=1)
total = models.FloatField(blank=True)
class UserProfile(models.Model):
""" extend User class """
user = models.OneToOneField(User)
billing_address = models.CharField(max_length=1024)
@receiver(pre_save, sender=BillLine)
def compute_total(sender, instance, **kwargs):
""" set total of line automatically """
if not instance.total:
instance.total = instance.service.price * instance.quantity
@receiver(pre_save, sender=Bill)
def define_number(sender, instance, **kwargs):
""" set bill number incrementally """
# only when we create record for the first time
if not instance.number:
today = datetime.date.today()
# get last id in base, we assume it's the last record
try:
last_record = sender.objects.latest('id')
#get last bill number and increment it
last_num = '%03d' % (int(last_record.number[-3:])+1)
# no Bill in db
except sender.DoesNotExist:
last_num = '001'
instance.number = 'F%s%s' % (today.strftime('%Y%m'), last_num)
|
from django.db import models
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, pre_init
import datetime
class Bill(models.Model):
number = models.CharField(max_length=10, unique=True, blank=True)
isPaid = models.BooleanField(default=False)
billing_date = models.DateField()
class Service(models.Model):
reference = models.CharField(max_length=5)
name = models.CharField(max_length=128)
description = models.CharField(max_length=1024)
price = models.FloatField()
def __unicode__(self):
""" Return name as object representation """
return self.name
class BillLine(models.Model):
bill = models.ForeignKey(Bill)
service = models.ForeignKey(Service)
quantity = models.SmallIntegerField(default=1)
total = models.FloatField(blank=True)
class UserProfile(models.Model):
""" extend User class """
user = models.OneToOneField(User)
billing_address = models.CharField(max_length=1024)
@receiver(pre_save, sender=BillLine)
def compute_total(sender, instance, **kwargs):
""" set total of line automatically """
if not instance.total:
instance.total = instance.service.price * instance.quantity
@receiver(pre_save, sender=Bill)
def define_number(sender, instance, **kwargs):
""" set bill number incrementally """
# only when we create record for the first time
if not instance.number:
today = datetime.date.today()
# get last id in base, we assume it's the last record
try:
last_record = sender.objects.latest('id')
#get last bill number and increment it
last_num = '%03d' % (int(last_record.number[-3:])+1)
# no Bill in db
except sender.DoesNotExist:
last_num = '001'
instance.number = 'F%s%s' % (today.strftime('%Y%m'), last_num)
|
mit
|
Python
|
17d3d63564798cd03788ce579227d5425cd866c0
|
Make fake uploader use zlib compression
|
orlissenberg/eve-market-data-relay,gtaylor/EVE-Market-Data-Relay
|
bin/fake_order.py
|
bin/fake_order.py
|
#!/usr/bin/env python
"""
A fake order upload script, used to manually test the whole stack.
"""
import simplejson
import requests
import zlib
data = """
{
"resultType" : "orders",
"version" : "0.1alpha",
"uploadKeys" : [
{ "name" : "emk", "key" : "abc" },
{ "name" : "ec" , "key" : "def" }
],
"generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
"currentTime" : "2011-10-22T15:46:00+00:00",
"columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
"rowsets" : [
{
"generatedAt" : "2011-10-22T15:43:00+00:00",
"regionID" : 10000065,
"typeID" : 11134,
"rows" : [
[8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
[11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
[11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
]
},
{
"generatedAt" : "2011-10-22T15:42:00+00:00",
"regionID" : null,
"typeID" : 11135,
"rows" : [
[8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
[11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
[11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
]
}
]
}
"""
data = simplejson.loads(data)
data = zlib.compress(simplejson.dumps(data))
headers = {
'Content-Encoding': 'gzip',
}
r = requests.post(
'http://eve-emdr.local/upload/unified/',
#'http://localhost:8080/upload/unified/',
data=data,
headers=headers,
)
print "Sent fake order."
|
#!/usr/bin/env python
"""
A fake order upload script, used to manually test the whole stack.
"""
import simplejson
import requests
data = """
{
"resultType" : "orders",
"version" : "0.1alpha",
"uploadKeys" : [
{ "name" : "emk", "key" : "abc" },
{ "name" : "ec" , "key" : "def" }
],
"generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
"currentTime" : "2011-10-22T15:46:00+00:00",
"columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
"rowsets" : [
{
"generatedAt" : "2011-10-22T15:43:00+00:00",
"regionID" : 10000065,
"typeID" : 11134,
"rows" : [
[8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
[11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
[11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
]
},
{
"generatedAt" : "2011-10-22T15:42:00+00:00",
"regionID" : null,
"typeID" : 11135,
"rows" : [
[8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
[11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
[11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
]
}
]
}
"""
data = simplejson.loads(data)
data = simplejson.dumps(data)
r = requests.post(
'http://localhost:8080/upload/unified/',
data=data,
)
print "RESPONSE"
print r.text
|
mit
|
Python
|
348896e6f9318755d9bbefdf94de18ed32b17d1d
|
Update item.py
|
Lincoln-Cybernetics/Explore-
|
item.py
|
item.py
|
import pygame
class Item(pygame.sprite.Sprite):
def __init__(self, level, *groups):
super(Item, self).__init__(*groups)
#the game level
self.level = level
#base image
self.level.animator.set_Img(0,5)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
self.level.animator.set_Img(6,0)
self.secretimage = self.level.animator.get_Img().convert()
self.secretimage.set_colorkey((255,0,0))
#type
self.flavor_saver = ['gem', 'axe', 'sammich', 'telescope']
self.flavor = 'gem'
#location
self.firstflag = True
self.scrnx = 0
self.scrny = 0
self.mapx = 0
self.mapy = 0
def spawn(self,x,y):
self.scrnx = x
self.scrny = y
if self.firstflag:
self.mapx = x
self.mapy = y
self.firstflag = False
self.rect = pygame.rect.Rect((x * self.level.tilex, y * self.level.tiley), self.image.get_size())
def set_type(self, itype):
self.flavor = self.flavor_saver[itype]
if itype == 0:
xind = 6
yind = 0
if itype == 1:
xind = 6
yind = 5
if itype == 2:
xind = 6
yind = 4
if itype == 3:
xind = 6
yind = 3
self.level.animator.set_Img(xind,yind)
self.secretimage = self.level.animator.get_Img().convert()
self.secretimage.set_colorkey((255,0,0))
def reveal(self):
self.image = self.secretimage
def set_Index(self, x, y):
self.scrnx = x
self.rect.x = x*self.level.tilex
self.scrny = y
self.rect.y = y*self.level.tiley
def get_Index(self, axis):
if axis == 'X':
return self.scrnx
if axis == 'Y':
return self.scrny
return -1
|
import pygame
class Item(pygame.sprite.Sprite):
def __init__(self, level, *groups):
super(Item, self).__init__(*groups)
#the game level
self.level = level
#base image
self.level.animator.set_Img(6,0)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
#type
self.flavor_saver = ['gem', 'axe', 'sammich']
self.flavor = 'gem'
#location
self.firstflag = True
self.scrnx = 0
self.scrny = 0
self.mapx = 0
self.mapy = 0
def spawn(self,x,y):
self.scrnx = x
self.scrny = y
if self.firstflag:
self.mapx = x
self.mapy = y
self.firstflag = False
self.rect = pygame.rect.Rect((x * self.level.tilex, y * self.level.tiley), self.image.get_size())
def set_type(self, itype):
self.flavor = self.flavor_saver[itype]
if itype == 0:
xind = 6
yind = 0
if itype == 1:
xind = 6
yind = 5
if itype == 2:
xind = 6
yind = 4
self.level.animator.set_Img(xind,yind)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
def set_Index(self, x, y):
self.scrnx = x
self.rect.x = x*self.level.tilex
self.scrny = y
self.rect.y = y*self.level.tiley
def get_Index(self, axis):
if axis == 'X':
return self.scrnx
if axis == 'Y':
return self.scrny
return -1
|
unlicense
|
Python
|
5fc8258c4d3819b6a4b23819fd3c4578510dd633
|
Allow www.lunahealing.ca as a domain
|
jessamynsmith/lunahealing,jessamynsmith/lunahealing,jessamynsmith/lunahealing
|
lunahealing/site_settings/prod.py
|
lunahealing/site_settings/prod.py
|
# Django settings for quotations project.
import os
from lunahealing.site_settings.common import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {
'default': dj_database_url.config()
}
DEFAULT_FILE_STORAGE = 's3_folder_storage.s3.DefaultStorage'
DEFAULT_S3_PATH = 'media'
STATICFILES_STORAGE = 's3_folder_storage.s3.StaticStorage'
STATIC_S3_PATH = 'static'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_QUERYSTRING_AUTH = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '//s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '//s3.amazonaws.com/%s/static/' % AWS_STORAGE_BUCKET_NAME
INSTALLED_APPS.extend([
's3_folder_storage',
'storages',
])
ALLOWED_HOSTS = ['lunahealing.herokuapp.com', 'www.lunahealing.ca']
|
# Django settings for quotations project.
import os
from lunahealing.site_settings.common import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {
'default': dj_database_url.config()
}
DEFAULT_FILE_STORAGE = 's3_folder_storage.s3.DefaultStorage'
DEFAULT_S3_PATH = 'media'
STATICFILES_STORAGE = 's3_folder_storage.s3.StaticStorage'
STATIC_S3_PATH = 'static'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_QUERYSTRING_AUTH = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '//s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '//s3.amazonaws.com/%s/static/' % AWS_STORAGE_BUCKET_NAME
INSTALLED_APPS.extend([
's3_folder_storage',
'storages',
])
ALLOWED_HOSTS = ['lunahealing.herokuapp.com', '*.lunahealing.ca']
|
mit
|
Python
|
ed472902f71f39cf09eca5ee9193bcf99283b566
|
Remove unused code
|
QuiteQuiet/PokemonShowdownBot
|
room.py
|
room.py
|
# Each PS room joined creates an object here.
# Objects control settings on a room-per-room basis, meaning every room can
# be treated differently.
from plugins.tournaments import Tournament
class Room:
def __init__(self, room, data):
if not data:
# This is a hack to support both strings and dicts as input to the class
data = {'moderate':False, 'allow games':False}
self.users = {}
self.loading = True
self.title = room
self.moderate = data['moderate']
self.allowGames = data['allow games']
self.tour = None
self.game = None
def doneLoading(self):
self.loading = False
def addUserlist(self, users):
self.users = {u[1:]:u[0] for u in users.split(',')}
def addUser(self, user, auth):
if user not in self.users:
self.users[user] = auth
def removeUser(self, user):
if user in self.users:
self.users.pop(user)
def renamedUser(self, old, new):
self.removeUser(old)
self.addUser(new[1:], new[0])
def createTour(self, ws):
self.tour = Tournament(ws, self.title)
def endTour(self):
self.tour = None
|
# Each PS room joined creates an object here.
# Objects control settings on a room-per-room basis, meaning every room can
# be treated differently.
from plugins.tournaments import Tournament
class Room:
def __init__(self, room, data):
if not data:
# This is a hack to support both strings and dicts as input to the class
data = {'moderate':False, 'allow games':False}
self.users = {}
self.loading = True
self.title = room
self.moderate = data['moderate']
self.allowGames = data['allow games']
self.tour = None
self.game = None
def doneLoading(self):
self.loading = False
def addUserlist(self, users):
self.users = {u[1:]:u[0] for u in users.split(',')}
def addUser(self, user, auth):
if user not in self.users:
self.users[user] = auth
def removeUser(self, user):
if user in self.users:
self.users.pop(user)
def renamedUser(self, old, new):
self.removeUser(old)
self.addUser(new[1:], new[0])
def allowGames(self, yesNo):
self.allowGames = yesNo
def createTour(self, ws):
self.tour = Tournament(ws, self.title)
def endTour(self):
self.tour = None
|
mit
|
Python
|
bd313ff4ce69e7b9a9765672442ef6cf9fa00dba
|
Fix parameter validation tests
|
openfisca/openfisca-core,openfisca/openfisca-core
|
tests/core/parameter_validation/test_parameter_clone.py
|
tests/core/parameter_validation/test_parameter_clone.py
|
import os
from openfisca_core.parameters import ParameterNode
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
year = 2016
def test_clone():
path = os.path.join(BASE_DIR, 'filesystem_hierarchy')
parameters = ParameterNode('', directory_path = path)
parameters_at_instant = parameters('2016-01-01')
assert parameters_at_instant.node1.param == 1.0
clone = parameters.clone()
clone_at_instant = clone('2016-01-01')
assert clone_at_instant.node1.param == 1.0
assert id(clone) != id(parameters)
assert id(clone.node1) != id(parameters.node1)
assert id(clone.node1.param) != id(parameters.node1.param)
def test_clone_parameter(tax_benefit_system):
param = tax_benefit_system.parameters.taxes.income_tax_rate
clone = param.clone()
assert clone is not param
assert clone.values_list is not param.values_list
assert clone.values_list[0] is not param.values_list[0]
assert clone.values_list == param.values_list
def test_clone_parameter_node(tax_benefit_system):
node = tax_benefit_system.parameters.taxes
clone = node.clone()
assert clone is not node
assert clone.income_tax_rate is not node.income_tax_rate
assert clone.children['income_tax_rate'] is not node.children['income_tax_rate']
def test_clone_scale(tax_benefit_system):
scale = tax_benefit_system.parameters.taxes.social_security_contribution
clone = scale.clone()
assert clone.brackets[0] is not scale.brackets[0]
assert clone.brackets[0].rate is not scale.brackets[0].rate
def test_deep_edit(tax_benefit_system):
parameters = tax_benefit_system.parameters
clone = parameters.clone()
param = parameters.taxes.income_tax_rate
clone_param = clone.taxes.income_tax_rate
original_value = param.values_list[0].value
clone_param.values_list[0].value = 100
assert param.values_list[0].value == original_value
scale = parameters.taxes.social_security_contribution
clone_scale = clone.taxes.social_security_contribution
original_scale_value = scale.brackets[0].rate.values_list[0].value
clone_scale.brackets[0].rate.values_list[0].value = 10
assert scale.brackets[0].rate.values_list[0].value == original_scale_value
|
# -*- coding: utf-8 -*-
from ..test_countries import tax_benefit_system
import os
from openfisca_core.parameters import ParameterNode
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
year = 2016
def test_clone():
path = os.path.join(BASE_DIR, 'filesystem_hierarchy')
parameters = ParameterNode('', directory_path = path)
parameters_at_instant = parameters('2016-01-01')
assert parameters_at_instant.node1.param == 1.0
clone = parameters.clone()
clone_at_instant = clone('2016-01-01')
assert clone_at_instant.node1.param == 1.0
assert id(clone) != id(parameters)
assert id(clone.node1) != id(parameters.node1)
assert id(clone.node1.param) != id(parameters.node1.param)
def test_clone_parameter():
param = tax_benefit_system.parameters.taxes.income_tax_rate
clone = param.clone()
assert clone is not param
assert clone.values_list is not param.values_list
assert clone.values_list[0] is not param.values_list[0]
assert clone.values_list == param.values_list
def test_clone_parameter_node():
node = tax_benefit_system.parameters.taxes
clone = node.clone()
assert clone is not node
assert clone.income_tax_rate is not node.income_tax_rate
assert clone.children['income_tax_rate'] is not node.children['income_tax_rate']
def test_clone_scale():
scale = tax_benefit_system.parameters.taxes.social_security_contribution
clone = scale.clone()
assert clone.brackets[0] is not scale.brackets[0]
assert clone.brackets[0].rate is not scale.brackets[0].rate
def test_deep_edit():
parameters = tax_benefit_system.parameters
clone = parameters.clone()
param = parameters.taxes.income_tax_rate
clone_param = clone.taxes.income_tax_rate
original_value = param.values_list[0].value
clone_param.values_list[0].value = 100
assert param.values_list[0].value == original_value
scale = parameters.taxes.social_security_contribution
clone_scale = clone.taxes.social_security_contribution
original_scale_value = scale.brackets[0].rate.values_list[0].value
clone_scale.brackets[0].rate.values_list[0].value = 10
assert scale.brackets[0].rate.values_list[0].value == original_scale_value
|
agpl-3.0
|
Python
|
1f752237d83c486b94ddcc7f5e3b42eb5951a60b
|
remove unused imports
|
mkorpela/pabot,mkorpela/pabot
|
pabot/SharedLibrary.py
|
pabot/SharedLibrary.py
|
from robot.libraries.BuiltIn import BuiltIn
from robot.libraries.Remote import Remote
from robot.api import logger
from robot.running.testlibraries import TestLibrary
from robotremoteserver import RemoteLibraryFactory
from .pabotlib import PABOT_QUEUE_INDEX
class SharedLibrary(object):
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
def __init__(self, name):
"""
Import a library so that the library instance is shared between executions.
[https://pabot.org/PabotLib.html?ref=log#import-shared-library|Open online docs.]
"""
# FIXME: RELATIVE IMPORTS WITH FILE NAME
self._remote = None
if BuiltIn().get_variable_value('${%s}' % PABOT_QUEUE_INDEX) is None:
logger.debug("Not currently running pabot. Importing library for this process.")
self._lib = RemoteLibraryFactory(TestLibrary(name).get_instance())
return
uri = BuiltIn().get_variable_value('${PABOTLIBURI}')
logger.debug('PabotLib URI %r' % uri)
remotelib = Remote(uri) if uri else None
if remotelib:
try:
port = remotelib.run_keyword("import_shared_library", [name], {})
except RuntimeError:
logger.error('No connection - is pabot called with --pabotlib option?')
raise
self._remote = Remote("http://127.0.0.1:%s" % port)
logger.debug("Lib imported with name %s from http://127.0.0.1:%s" % (name, port))
else:
logger.error('No connection - is pabot called with --pabotlib option?')
raise AssertionError('No connection to pabotlib')
def get_keyword_names(self):
if self._remote:
return self._remote.get_keyword_names()
return self._lib.get_keyword_names()
def run_keyword(self, name, args, kwargs):
if self._remote:
return self._remote.run_keyword(name, args, kwargs)
result = self._lib.run_keyword(name, args, kwargs)
if result['status'] == 'FAIL':
raise AssertionError(result['error'])
return result['return']
|
from robot.libraries.BuiltIn import BuiltIn
from robot.libraries.Remote import Remote
from robot.api import logger
from robot.running.testlibraries import TestLibrary
from robot.running.context import EXECUTION_CONTEXTS
from robot.running.model import Keyword
from robotremoteserver import RemoteLibraryFactory
from .pabotlib import PABOT_QUEUE_INDEX
class SharedLibrary(object):
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
def __init__(self, name):
"""
Import a library so that the library instance is shared between executions.
[https://pabot.org/PabotLib.html?ref=log#import-shared-library|Open online docs.]
"""
# FIXME: RELATIVE IMPORTS WITH FILE NAME
self._remote = None
if BuiltIn().get_variable_value('${%s}' % PABOT_QUEUE_INDEX) is None:
logger.debug("Not currently running pabot. Importing library for this process.")
self._lib = RemoteLibraryFactory(TestLibrary(name).get_instance())
return
uri = BuiltIn().get_variable_value('${PABOTLIBURI}')
logger.debug('PabotLib URI %r' % uri)
remotelib = Remote(uri) if uri else None
if remotelib:
try:
port = remotelib.run_keyword("import_shared_library", [name], {})
except RuntimeError:
logger.error('No connection - is pabot called with --pabotlib option?')
raise
self._remote = Remote("http://127.0.0.1:%s" % port)
logger.debug("Lib imported with name %s from http://127.0.0.1:%s" % (name, port))
else:
logger.error('No connection - is pabot called with --pabotlib option?')
raise AssertionError('No connection to pabotlib')
def get_keyword_names(self):
if self._remote:
return self._remote.get_keyword_names()
return self._lib.get_keyword_names()
def run_keyword(self, name, args, kwargs):
if self._remote:
return self._remote.run_keyword(name, args, kwargs)
result = self._lib.run_keyword(name, args, kwargs)
if result['status'] == 'FAIL':
raise AssertionError(result['error'])
return result['return']
|
apache-2.0
|
Python
|
1f4ef496f932ec2a12d348b0c90b1f57d6ef9e20
|
update version number
|
nutils/nutils,CVerhoosel/nutils,joostvanzwieten/nutils,timovanopstal/nutils,wijnandhoitinga/nutils
|
nutils/__init__.py
|
nutils/__init__.py
|
import numpy
from distutils.version import LooseVersion
assert LooseVersion(numpy.version.version) >= LooseVersion('1.8'), 'nutils requires numpy 1.8 or higher, got %s' % numpy.version.version
version = '2.0beta'
_ = numpy.newaxis
__all__ = [ '_', 'numpy', 'core', 'numeric', 'element', 'function',
'mesh', 'plot', 'library', 'topology', 'util', 'matrix', 'parallel', 'log',
'debug', 'cache', 'transform', 'rational' ]
|
import numpy
from distutils.version import LooseVersion
assert LooseVersion(numpy.version.version) >= LooseVersion('1.8'), 'nutils requires numpy 1.8 or higher, got %s' % numpy.version.version
version = '1.dev'
_ = numpy.newaxis
__all__ = [ '_', 'numpy', 'core', 'numeric', 'element', 'function',
'mesh', 'plot', 'library', 'topology', 'util', 'matrix', 'parallel', 'log',
'debug', 'cache', 'transform', 'rational' ]
|
mit
|
Python
|
577697301f8682293a00a793807687df9d0ce679
|
Fix fetch_ceph_keys to run in python3
|
openstack/kolla,rahulunair/kolla,stackforge/kolla,stackforge/kolla,stackforge/kolla,openstack/kolla,rahulunair/kolla
|
docker/ceph/ceph-mon/fetch_ceph_keys.py
|
docker/ceph/ceph-mon/fetch_ceph_keys.py
|
#!/usr/bin/python
# Copyright 2015 Sam Yaple
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a stripped down version of an ansible module I wrote in Yaodu to
# achieve the same goals we have for Kolla. I have relicensed it for Kolla
# https://github.com/SamYaple/yaodu/blob/master/ansible/library/bslurp
# Basically this module will fetch the admin and mon keyrings as well as the
# monmap file. It then hashes the content, compresses them, and finally it
# converts them to base64 to be safely transported around with ansible
import base64
import hashlib
import json
import os
import sys
import zlib
def json_exit(msg=None, failed=False, changed=False):
if type(msg) is not dict:
msg = {'msg': str(msg)}
msg.update({'failed': failed, 'changed': changed})
print(json.dumps(msg))
sys.exit()
def read_file(filename):
filename_path = os.path.join('/etc/ceph', filename)
if not os.path.exists(filename_path):
json_exit("file not found: {}".format(filename_path), failed=True)
if not os.access(filename_path, os.R_OK):
json_exit("file not readable: {}".format(filename_path), failed=True)
with open(filename_path, 'rb') as f:
raw_data = f.read()
# TODO(mnasiadka): Remove sha1 in U
return {'content': (base64.b64encode(zlib.compress(raw_data))).decode(),
'sha1': hashlib.sha1(raw_data).hexdigest(),
'sha256': hashlib.sha256(raw_data).hexdigest(),
'filename': filename}
def main():
admin_keyring = 'ceph.client.admin.keyring'
mon_keyring = 'ceph.client.mon.keyring'
rgw_keyring = 'ceph.client.radosgw.keyring'
monmap = 'ceph.monmap'
files = [admin_keyring, mon_keyring, rgw_keyring, monmap]
json_exit({filename: read_file(filename) for filename in files})
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# Copyright 2015 Sam Yaple
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a stripped down version of an ansible module I wrote in Yaodu to
# achieve the same goals we have for Kolla. I have relicensed it for Kolla
# https://github.com/SamYaple/yaodu/blob/master/ansible/library/bslurp
# Basically this module will fetch the admin and mon keyrings as well as the
# monmap file. It then hashes the content, compresses them, and finally it
# converts them to base64 to be safely transported around with ansible
import base64
import hashlib
import json
import os
import sys
import zlib
def json_exit(msg=None, failed=False, changed=False):
if type(msg) is not dict:
msg = {'msg': str(msg)}
msg.update({'failed': failed, 'changed': changed})
print(json.dumps(msg))
sys.exit()
def read_file(filename):
filename_path = os.path.join('/etc/ceph', filename)
if not os.path.exists(filename_path):
json_exit("file not found: {}".format(filename_path), failed=True)
if not os.access(filename_path, os.R_OK):
json_exit("file not readable: {}".format(filename_path), failed=True)
with open(filename_path, 'rb') as f:
raw_data = f.read()
# TODO(mnasiadka): Remove sha1 in U
return {'content': base64.b64encode(zlib.compress(raw_data)),
'sha1': hashlib.sha1(raw_data).hexdigest(),
'sha256': hashlib.sha256(raw_data).hexdigest(),
'filename': filename}
def main():
admin_keyring = 'ceph.client.admin.keyring'
mon_keyring = 'ceph.client.mon.keyring'
rgw_keyring = 'ceph.client.radosgw.keyring'
monmap = 'ceph.monmap'
files = [admin_keyring, mon_keyring, rgw_keyring, monmap]
json_exit({filename: read_file(filename) for filename in files})
if __name__ == '__main__':
main()
|
apache-2.0
|
Python
|
b4acd028b613a721ffbe5a3136700f190635f7c9
|
Fix import.
|
supergis/micropython,xuxiaoxin/micropython,feilongfl/micropython,Vogtinator/micropython,oopy/micropython,pfalcon/micropython,rubencabrera/micropython,mpalomer/micropython,noahchense/micropython,pozetroninc/micropython,paul-xxx/micropython,Timmenem/micropython,dmazzella/micropython,cwyark/micropython,micropython/micropython-esp32,stonegithubs/micropython,tdautc19841202/micropython,tdautc19841202/micropython,ceramos/micropython,feilongfl/micropython,alex-march/micropython,jimkmc/micropython,hiway/micropython,rubencabrera/micropython,dxxb/micropython,puuu/micropython,Vogtinator/micropython,dinau/micropython,emfcamp/micropython,warner83/micropython,mgyenik/micropython,infinnovation/micropython,kostyll/micropython,omtinez/micropython,pozetroninc/micropython,selste/micropython,swegener/micropython,ericsnowcurrently/micropython,pfalcon/micropython,lbattraw/micropython,tdautc19841202/micropython,dxxb/micropython,danicampora/micropython,turbinenreiter/micropython,dhylands/micropython,TDAbboud/micropython,omtinez/micropython,trezor/micropython,martinribelotta/micropython,AriZuu/micropython,micropython/micropython-esp32,cnoviello/micropython,skybird6672/micropython,galenhz/micropython,adafruit/micropython,jlillest/micropython,galenhz/micropython,mgyenik/micropython,ceramos/micropython,emfcamp/micropython,jlillest/micropython,warner83/micropython,ganshun666/micropython,infinnovation/micropython,xyb/micropython,ernesto-g/micropython,blmorris/micropython,noahchense/micropython,pfalcon/micropython,mpalomer/micropython,Peetz0r/micropython-esp32,hosaka/micropython,lowRISC/micropython,mianos/micropython,mianos/micropython,selste/micropython,praemdonck/micropython,MrSurly/micropython-esp32,suda/micropython,emfcamp/micropython,suda/micropython,henriknelson/micropython,methoxid/micropystat,mianos/micropython,noahwilliamsson/micropython,supergis/micropython,vriera/micropython,noahwilliamsson/micropython,ericsnowcurrently/micropython,lowRISC/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,adafruit/micropython,puuu/micropython,kerneltask/micropython,tralamazza/micropython,utopiaprince/micropython,ernesto-g/micropython,EcmaXp/micropython,warner83/micropython,mpalomer/micropython,dhylands/micropython,ahotam/micropython,ChuckM/micropython,xhat/micropython,ruffy91/micropython,alex-robbins/micropython,PappaPeppar/micropython,vriera/micropython,dmazzella/micropython,MrSurly/micropython,turbinenreiter/micropython,alex-robbins/micropython,mhoffma/micropython,mpalomer/micropython,ganshun666/micropython,hiway/micropython,orionrobots/micropython,turbinenreiter/micropython,aethaniel/micropython,PappaPeppar/micropython,toolmacher/micropython,SungEun-Steve-Kim/test-mp,aethaniel/micropython,trezor/micropython,dmazzella/micropython,mianos/micropython,ruffy91/micropython,cloudformdesign/micropython,hiway/micropython,utopiaprince/micropython,matthewelse/micropython,praemdonck/micropython,slzatz/micropython,MrSurly/micropython-esp32,tuc-osg/micropython,torwag/micropython,MrSurly/micropython,ahotam/micropython,noahwilliamsson/micropython,AriZuu/micropython,Vogtinator/micropython,adafruit/circuitpython,SungEun-Steve-Kim/test-mp,jmarcelino/pycom-micropython,utopiaprince/micropython,blmorris/micropython,cnoviello/micropython,slzatz/micropython,heisewangluo/micropython,ruffy91/micropython,xuxiaoxin/micropython,jmarcelino/pycom-micropython,ernesto-g/micropython,selste/micropython,ceramos/micropython,MrSurly/micropython-esp32,pfalcon/micropython,misterdanb/micropython,stonegithubs/micropython,hiway/micropython,cloudformdesign/micropython,mgyenik/micropython,Peetz0r/micropython-esp32,martinribelotta/micropython,micropython/micropython-esp32,methoxid/micropystat,xyb/micropython,misterdanb/micropython,HenrikSolver/micropython,deshipu/micropython,adamkh/micropython,dxxb/micropython,feilongfl/micropython,micropython/micropython-esp32,matthewelse/micropython,hosaka/micropython,puuu/micropython,ChuckM/micropython,deshipu/micropython,adafruit/micropython,aethaniel/micropython,SHA2017-badge/micropython-esp32,puuu/micropython,puuu/micropython,SungEun-Steve-Kim/test-mp,xhat/micropython,tralamazza/micropython,MrSurly/micropython-esp32,vitiral/micropython,Timmenem/micropython,adafruit/micropython,stonegithubs/micropython,orionrobots/micropython,redbear/micropython,adamkh/micropython,alex-march/micropython,aethaniel/micropython,Vogtinator/micropython,methoxid/micropystat,vriera/micropython,ceramos/micropython,mhoffma/micropython,noahchense/micropython,Peetz0r/micropython-esp32,oopy/micropython,alex-march/micropython,lbattraw/micropython,paul-xxx/micropython,cnoviello/micropython,firstval/micropython,dhylands/micropython,chrisdearman/micropython,cloudformdesign/micropython,neilh10/micropython,paul-xxx/micropython,stonegithubs/micropython,lbattraw/micropython,torwag/micropython,ernesto-g/micropython,tuc-osg/micropython,mhoffma/micropython,henriknelson/micropython,dxxb/micropython,noahwilliamsson/micropython,kostyll/micropython,dinau/micropython,pfalcon/micropython,swegener/micropython,ericsnowcurrently/micropython,methoxid/micropystat,cwyark/micropython,turbinenreiter/micropython,mgyenik/micropython,ChuckM/micropython,pramasoul/micropython,skybird6672/micropython,feilongfl/micropython,HenrikSolver/micropython,utopiaprince/micropython,HenrikSolver/micropython,AriZuu/micropython,chrisdearman/micropython,suda/micropython,MrSurly/micropython,deshipu/micropython,trezor/micropython,AriZuu/micropython,xyb/micropython,ceramos/micropython,Vogtinator/micropython,KISSMonX/micropython,toolmacher/micropython,drrk/micropython,xyb/micropython,TDAbboud/micropython,blmorris/micropython,rubencabrera/micropython,oopy/micropython,Timmenem/micropython,jlillest/micropython,tobbad/micropython,omtinez/micropython,mpalomer/micropython,blazewicz/micropython,ahotam/micropython,mgyenik/micropython,vriera/micropython,Timmenem/micropython,neilh10/micropython,alex-robbins/micropython,paul-xxx/micropython,KISSMonX/micropython,kostyll/micropython,MrSurly/micropython,lowRISC/micropython,heisewangluo/micropython,mhoffma/micropython,swegener/micropython,Peetz0r/micropython-esp32,utopiaprince/micropython,bvernoux/micropython,adafruit/circuitpython,ryannathans/micropython,skybird6672/micropython,vriera/micropython,toolmacher/micropython,praemdonck/micropython,dinau/micropython,swegener/micropython,pozetroninc/micropython,adafruit/circuitpython,pozetroninc/micropython,mianos/micropython,xyb/micropython,tdautc19841202/micropython,blmorris/micropython,blazewicz/micropython,lowRISC/micropython,tuc-osg/micropython,alex-robbins/micropython,micropython/micropython-esp32,vitiral/micropython,PappaPeppar/micropython,ericsnowcurrently/micropython,aethaniel/micropython,ahotam/micropython,xuxiaoxin/micropython,lbattraw/micropython,danicampora/micropython,jmarcelino/pycom-micropython,ahotam/micropython,infinnovation/micropython,misterdanb/micropython,heisewangluo/micropython,noahchense/micropython,bvernoux/micropython,supergis/micropython,ryannathans/micropython,warner83/micropython,cwyark/micropython,neilh10/micropython,hosaka/micropython,torwag/micropython,jmarcelino/pycom-micropython,noahwilliamsson/micropython,slzatz/micropython,omtinez/micropython,galenhz/micropython,selste/micropython,praemdonck/micropython,tralamazza/micropython,ChuckM/micropython,cwyark/micropython,infinnovation/micropython,rubencabrera/micropython,matthewelse/micropython,deshipu/micropython,bvernoux/micropython,ryannathans/micropython,alex-robbins/micropython,pramasoul/micropython,turbinenreiter/micropython,jlillest/micropython,tuc-osg/micropython,redbear/micropython,ryannathans/micropython,adafruit/micropython,infinnovation/micropython,neilh10/micropython,slzatz/micropython,martinribelotta/micropython,EcmaXp/micropython,orionrobots/micropython,SHA2017-badge/micropython-esp32,TDAbboud/micropython,KISSMonX/micropython,ericsnowcurrently/micropython,skybird6672/micropython,oopy/micropython,xhat/micropython,redbear/micropython,dxxb/micropython,dinau/micropython,drrk/micropython,TDAbboud/micropython,slzatz/micropython,EcmaXp/micropython,ruffy91/micropython,MrSurly/micropython-esp32,EcmaXp/micropython,matthewelse/micropython,methoxid/micropystat,blazewicz/micropython,emfcamp/micropython,KISSMonX/micropython,AriZuu/micropython,tobbad/micropython,danicampora/micropython,swegener/micropython,blmorris/micropython,lowRISC/micropython,misterdanb/micropython,pramasoul/micropython,jimkmc/micropython,stonegithubs/micropython,PappaPeppar/micropython,dhylands/micropython,kerneltask/micropython,chrisdearman/micropython,ruffy91/micropython,hosaka/micropython,adamkh/micropython,jmarcelino/pycom-micropython,pramasoul/micropython,TDAbboud/micropython,bvernoux/micropython,hiway/micropython,drrk/micropython,SungEun-Steve-Kim/test-mp,jimkmc/micropython,cloudformdesign/micropython,kostyll/micropython,alex-march/micropython,SHA2017-badge/micropython-esp32,firstval/micropython,EcmaXp/micropython,dinau/micropython,adafruit/circuitpython,Timmenem/micropython,henriknelson/micropython,chrisdearman/micropython,toolmacher/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,cloudformdesign/micropython,MrSurly/micropython,henriknelson/micropython,cnoviello/micropython,toolmacher/micropython,noahchense/micropython,lbattraw/micropython,heisewangluo/micropython,mhoffma/micropython,selste/micropython,kerneltask/micropython,redbear/micropython,galenhz/micropython,martinribelotta/micropython,emfcamp/micropython,HenrikSolver/micropython,galenhz/micropython,adamkh/micropython,xuxiaoxin/micropython,tobbad/micropython,ganshun666/micropython,ernesto-g/micropython,neilh10/micropython,firstval/micropython,kostyll/micropython,ryannathans/micropython,oopy/micropython,kerneltask/micropython,danicampora/micropython,skybird6672/micropython,hosaka/micropython,jimkmc/micropython,suda/micropython,torwag/micropython,HenrikSolver/micropython,henriknelson/micropython,vitiral/micropython,cnoviello/micropython,praemdonck/micropython,blazewicz/micropython,adamkh/micropython,torwag/micropython,supergis/micropython,martinribelotta/micropython,firstval/micropython,matthewelse/micropython,jlillest/micropython,supergis/micropython,matthewelse/micropython,pramasoul/micropython,warner83/micropython,adafruit/circuitpython,trezor/micropython,ganshun666/micropython,danicampora/micropython,suda/micropython,SungEun-Steve-Kim/test-mp,chrisdearman/micropython,firstval/micropython,xhat/micropython,feilongfl/micropython,vitiral/micropython,ChuckM/micropython,blazewicz/micropython,trezor/micropython,redbear/micropython,cwyark/micropython,KISSMonX/micropython,tobbad/micropython,tralamazza/micropython,vitiral/micropython,adafruit/circuitpython,tuc-osg/micropython,PappaPeppar/micropython,ganshun666/micropython,drrk/micropython,tobbad/micropython,jimkmc/micropython,orionrobots/micropython,paul-xxx/micropython,alex-march/micropython,xuxiaoxin/micropython,tdautc19841202/micropython,bvernoux/micropython,drrk/micropython,orionrobots/micropython,xhat/micropython,rubencabrera/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,misterdanb/micropython,pozetroninc/micropython,heisewangluo/micropython,omtinez/micropython,dhylands/micropython
|
tests/basics/class_store_class.py
|
tests/basics/class_store_class.py
|
# Inspired by urlparse.py from CPython 3.3 stdlib
# There was a bug in MicroPython that under some conditions class stored
# in instance attribute later was returned "bound" as if it was a method,
# which caused class constructor to receive extra argument.
from _collections import namedtuple
_DefragResultBase = namedtuple('DefragResult', 'foo bar')
class _ResultMixinStr(object):
def encode(self):
return self._encoded_counterpart(*(x.encode() for x in self))
class _ResultMixinBytes(object):
def decode(self):
return self._decoded_counterpart(*(x.decode() for x in self))
class DefragResult(_DefragResultBase, _ResultMixinStr):
pass
class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
pass
DefragResult._encoded_counterpart = DefragResultBytes
DefragResultBytes._decoded_counterpart = DefragResult
# Due to differences in type and native subclass printing,
# the best thing we can do here is to just test that no exceptions
# happen
#print(DefragResult, DefragResult._encoded_counterpart)
#print(DefragResultBytes, DefragResultBytes._decoded_counterpart)
o1 = DefragResult("a", "b")
#print(o1, type(o1))
o2 = DefragResultBytes("a", "b")
#print(o2, type(o2))
#print(o1._encoded_counterpart)
_o1 = o1.encode()
print(_o1[0], _o1[1])
#print(_o1, type(_o1))
print("All's ok")
|
# Inspired by urlparse.py from CPython 3.3 stdlib
# There was a bug in MicroPython that under some conditions class stored
# in instance attribute later was returned "bound" as if it was a method,
# which caused class constructor to receive extra argument.
from collections import namedtuple
_DefragResultBase = namedtuple('DefragResult', 'foo bar')
class _ResultMixinStr(object):
def encode(self):
return self._encoded_counterpart(*(x.encode() for x in self))
class _ResultMixinBytes(object):
def decode(self):
return self._decoded_counterpart(*(x.decode() for x in self))
class DefragResult(_DefragResultBase, _ResultMixinStr):
pass
class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
pass
DefragResult._encoded_counterpart = DefragResultBytes
DefragResultBytes._decoded_counterpart = DefragResult
# Due to differences in type and native subclass printing,
# the best thing we can do here is to just test that no exceptions
# happen
#print(DefragResult, DefragResult._encoded_counterpart)
#print(DefragResultBytes, DefragResultBytes._decoded_counterpart)
o1 = DefragResult("a", "b")
#print(o1, type(o1))
o2 = DefragResultBytes("a", "b")
#print(o2, type(o2))
#print(o1._encoded_counterpart)
_o1 = o1.encode()
print(_o1[0], _o1[1])
#print(_o1, type(_o1))
print("All's ok")
|
mit
|
Python
|
7a331edf955d914c82751eb7ec1dd20896e25f83
|
Use SequenceEqual because we care about maintaining order.
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
tests/cases/stats/tests/kmeans.py
|
tests/cases/stats/tests/kmeans.py
|
import os
from django.test import TestCase
from avocado.stats import cluster, kmeans
from scipy.cluster import vq
import numpy
from itertools import chain
__all__ = ('KmeansTestCase',)
random_points_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points.txt'))
random_points_3d_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points_3d.txt'))
random_points = [float(x.strip()) for x in random_points_file.xreadlines()]
random_points_3d = [[float(x) for x in l.strip().split(",")] for l in random_points_3d_file.xreadlines()]
class KmeansTestCase(TestCase):
def test_std_dev(self):
numpy_std_dev = numpy.std(numpy.array(random_points))
our_std_dev = kmeans.std_dev(random_points)
self.assertEqual(numpy_std_dev, our_std_dev)
def test_whiten(self):
scipy_whiten = vq.whiten(numpy.array(random_points))
our_whiten = kmeans.whiten(random_points)
self.assertEqual(len(scipy_whiten), len(our_whiten))
comp_whiten = zip(scipy_whiten, our_whiten)
[self.assertEqual(*comp) for comp in comp_whiten]
scipy_whiten = vq.whiten(numpy.array(random_points_3d))
our_whiten = kmeans.whiten(random_points_3d)
self.assertEqual(len(scipy_whiten), len(our_whiten))
comp_whiten = zip(scipy_whiten, our_whiten)
[self.assertSequenceEqual(scipy_list.tolist(), our_list) for scipy_list, our_list in comp_whiten]
|
import os
from django.test import TestCase
from avocado.stats import cluster, kmeans
from scipy.cluster import vq
import numpy
from itertools import chain
__all__ = ('KmeansTestCase',)
random_points_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points.txt'))
random_points_3d_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/random_points_3d.txt'))
random_points = [float(x.strip()) for x in random_points_file.xreadlines()]
random_points_3d = [[float(x) for x in l.strip().split(",")] for l in random_points_3d_file.xreadlines()]
class KmeansTestCase(TestCase):
def test_std_dev(self):
numpy_std_dev = numpy.std(numpy.array(random_points))
our_std_dev = kmeans.std_dev(random_points)
self.assertEqual(numpy_std_dev, our_std_dev)
def test_whiten(self):
scipy_whiten = vq.whiten(numpy.array(random_points))
our_whiten = kmeans.whiten(random_points)
self.assertEqual(len(scipy_whiten), len(our_whiten))
comp_whiten = zip(scipy_whiten, our_whiten)
[self.assertEqual(*comp) for comp in comp_whiten]
scipy_whiten = vq.whiten(numpy.array(random_points_3d))
our_whiten = kmeans.whiten(random_points_3d)
self.assertEqual(len(scipy_whiten), len(our_whiten))
comp_whiten = zip(scipy_whiten, our_whiten)
[self.assertListEqual(scipy_list.tolist(), our_list) for scipy_list, our_list in comp_whiten]
|
bsd-2-clause
|
Python
|
268914e7a29231da882457a6e4744c9661526a73
|
Add latest version of py-tabulate (#14138)
|
iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack
|
var/spack/repos/builtin/packages/py-tabulate/package.py
|
var/spack/repos/builtin/packages/py-tabulate/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTabulate(PythonPackage):
"""Pretty-print tabular data"""
homepage = "https://bitbucket.org/astanin/python-tabulate"
url = "https://pypi.io/packages/source/t/tabulate/tabulate-0.8.6.tar.gz"
version('0.8.6', sha256='5470cc6687a091c7042cee89b2946d9235fe9f6d49c193a4ae2ac7bf386737c8')
version('0.8.3', sha256='8af07a39377cee1103a5c8b3330a421c2d99b9141e9cc5ddd2e3263fea416943')
version('0.7.7', sha256='83a0b8e17c09f012090a50e1e97ae897300a72b35e0c86c0b53d3bd2ae86d8c6')
depends_on('py-setuptools', type='build')
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTabulate(PythonPackage):
"""Pretty-print tabular data"""
homepage = "https://bitbucket.org/astanin/python-tabulate"
url = "https://pypi.io/packages/source/t/tabulate/tabulate-0.7.7.tar.gz"
version('0.7.7', sha256='83a0b8e17c09f012090a50e1e97ae897300a72b35e0c86c0b53d3bd2ae86d8c6')
depends_on('py-setuptools', type='build')
|
lgpl-2.1
|
Python
|
b286e10d7d7c43ceea80cd4025105851ebb9bd8f
|
Comment out save statement
|
alexmilesyounger/ds_basics
|
s4v3.py
|
s4v3.py
|
from s4v2 import *
import openpyxl
from openpyxl import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
def save_spreadsheet(filename, data_sample):
wb = Workbook() # shortcut for typing Workbook function
ws = wb.active # shortcut for typing active workbook function and also, for finding the sheet in the workbook that we're working on, the active one.
row_index = 1 # set the row index to 1, the starting point for excel, i.e. the upper left-hand corner
for rows in data_sample: # iterate through the rows in the spreadsheet
col_index = 1 # set the col index to 1 (starting point for excel, i.e. the upper left-hand corner)
for field in rows:
col_letter = get_column_letter(col_index) # use the imported get column letter function to get the letter of the column that we're working in.
ws.cell('{}{}'.format(col_letter, row_index)).value = field # I'm not entirely sure what we're doing here because I haven't worked with these function before, but my guess is that we're writing the values in the field of the data sample into the current cell of the new workbook
col_index += 1 # increase column index
row_index += 1 # increase row index
wb.save(filename)
kiton_ties = filter_col_by_string(data_from_csv, "brandName", "Kiton")
# save_spreadsheet("_data/s4-kiton.xlsx", kiton_ties)
|
from s4v2 import *
import openpyxl
from openpyxl import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
def save_spreadsheet(filename, data_sample):
wb = Workbook() # shortcut for typing Workbook function
ws = wb.active # shortcut for typing active workbook function and also, for finding the sheet in the workbook that we're working on, the active one.
row_index = 1 # set the row index to 1, the starting point for excel, i.e. the upper left-hand corner
for rows in data_sample: # iterate through the rows in the spreadsheet
col_index = 1 # set the col index to 1 (starting point for excel, i.e. the upper left-hand corner)
for field in rows:
col_letter = get_column_letter(col_index) # use the imported get column letter function to get the letter of the column that we're working in.
ws.cell('{}{}'.format(col_letter, row_index)).value = field # I'm not entirely sure what we're doing here because I haven't worked with these function before, but my guess is that we're writing the values in the field of the data sample into the current cell of the new workbook
col_index += 1 # increase column index
row_index += 1 # increase row index
wb.save(filename)
kiton_ties = filter_col_by_string(data_from_csv, "brandName", "Kiton")
save_spreadsheet("_data/s4-kiton.xlsx", kiton_ties)
|
mit
|
Python
|
5dd61d20f14ecbe1bc20fe8db3fd73a78707485a
|
Refactor partition.
|
joshbohde/functional_python
|
lazy.py
|
lazy.py
|
import operator as op
import itertools as it
from functools import partial
from collections import deque
class Wrapper(object):
def __init__(self, data):
self.data = data
def __lt__(self, other):
print 'comparing', self.data, other.data
return self.data < other.data
def partition(predicate, iterable):
passing, failing = deque(), deque()
def gen(f, mine, other):
while True:
if mine:
yield mine.popleft()
else:
newval = next(iterable)
if f(newval):
yield newval
else:
other.append(newval)
return (
gen(predicate, passing, failing),
gen(lambda i: not(predicate(i)), failing, passing)
)
def isorted(xs):
xs = iter(xs)
pivot = next(xs)
below, above = partition(lambda y: y < pivot, xs)
for x in isorted(below):
yield x
yield pivot
for x in isorted(above):
yield x
def imin(xs):
return next(isorted(xs))
def insmallest(n, xs):
return it.islice(isorted(xs), 0, n)
|
import operator as op
import itertools as it
from functools import partial
class Wrapper(object):
def __init__(self, data):
self.data = data
def __lt__(self, other):
print 'comparing', self.data, other.data
return self.data < other.data
def partition(predicate, iterable):
pack = partial(it.imap, lambda i: (predicate(i), i))
new_pred = op.itemgetter(0)
unpack = partial(it.imap, op.itemgetter(1))
packed = pack(iterable)
first, second = it.tee(packed)
passing = it.ifilter(new_pred, first)
failing = it.ifilterfalse(new_pred, second)
return map(unpack, (passing, failing))
def isorted(xs):
xs = iter(xs)
pivot = next(xs)
below, above = partition(lambda y: y < pivot, i)
for x in isorted(below):
yield x
yield pivot
for x in isorted(above):
yield x
def imin(xs):
return next(isorted(xs))
def insmallest(n, xs):
return it.islice(isorted(xs), 0, n)
|
bsd-3-clause
|
Python
|
14f0afc20c9d6c200c6e9fa52a4121c98d349be7
|
Set version 0.2.5
|
pombredanne/django-page-cms-1,remik/django-page-cms,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,akaihola/django-page-cms,remik/django-page-cms,batiste/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,remik/django-page-cms,oliciv/django-page-cms,oliciv/django-page-cms,akaihola/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,batiste/django-page-cms
|
pages/__init__.py
|
pages/__init__.py
|
# -*- coding: utf-8 -*-
VERSION = (0, 2, 5)
__version__ = '.'.join(map(str, VERSION))
|
# -*- coding: utf-8 -*-
VERSION = (0, 2, 4)
__version__ = '.'.join(map(str, VERSION))
|
bsd-3-clause
|
Python
|
d282d5525c4d965dbe0a6ee4967a14f1f412f2b4
|
update version number from 1.4 to 1.5
|
hearsaycorp/python-oauth2,armersong/python-oauth2,dpedowitz/python-oauth2,simplegeo/python-oauth2,jasonrubenstein/python_oauth2,edevil/python-oauth2,hayd/python-oauth2,jackiekazil/python-oauth2,MiCHiLU/python-oauth2,InnovativeTravel/python-oauth2,mop/python-oauth2,fmondaini/python-oauth2,avenpace/python-oauth2,kylemcc/python-oauth2,webjunkie/python-oauth2,jace/python-oauth2,rickhanlonii/python-oauth2,CentricWebEstate/python-oauth2,jellonek/python-oauth2,gugu/python-oauth2,bensonk/python-oauth2,maxcountryman/python-oauth2,jefftriplett/python-oauth2,spotrh/python-oauth2,chb/python-oauth2,davidlehn/python-oauth2,brstrat/python-oauth2,hades/python-oauth2,strycore/python-oauth2,PrincessPolymath/python-oauth2,godaddy/python-oauth2,erikvanzijst/python-oauth2,felixleong/python-oauth2,ferrix/python-oauth2,jparise/python-oauth2,taozlab/python-oauth2,glenbot/python-oauth2,kgrandis/python-oauth2,zyjibmcn/python-oauth2,Jberlinsky/python-oauth2,CoolCloud/python-oauth2-1,sookasa/python-oauth2,mitchellrj/python-oauth2,prudnikov/python-oauth2,jhutch11/python-oauth2,meizon/python-oauth2,bossiernesto/python-oauth2,mkaziz/python-oauth2,dirn/python-oauth2,jeffl/python-oauth2,tseaver/python-oauth2,brstrat/python-oauth2,spattersongt/python-oauth2,philipforget/python-oauth2,rancavil/python-oauth2,bossiernesto/python-oauth2,CestusMagnus/python-oauth2,i-kiwamu/python3-oauth2,tylerwilliams/python-oauth2,jgaul/python-oauth2,squirro/python-oauth2,iserko/python-oauth2,jefftriplett/python-oauth2,spoqa/python-oauth2,prudnikov/python-oauth2,lann/python-oauth2,joestump/python-oauth2,fugu13/python-oauth2,arthurian/python-oauth2,strycore/python-oauth2,mitchellrj/python-oauth2,alekskorolev/python-oauth2,inean/python-oauth2,predatell/python-oauth2,ActionLuzifer/python-oauth2,AdamJacobMuller/python-oauth2,terrasea/python-oauth2,ActionLuzifer/python-oauth2,fission6/python-oauth2,jgsogo/python-oauth2,robkinyon/python-oauth2,ustudio/python-oauth2,terrasea/python-oauth2,DaGoodBoy/python-oauth2,frattallone/python-oauth2,jgaul/python-oauth2,Pyha/python-oauth2-py3.3,thisfred/python-oauth2,benthor/python-oauth2,KLab/python-oauth2,Proteus-tech/python3-oauth2,andelf/python-oauth2,AdamJacobMuller/python-oauth2,bjourne/python-oauth2,lukegb/python-oauth2,tygpatton/python-oauth2,Instapaper/python-oauth2,optixx/python-oauth2,vlinhart/python-oauth2,xen0n/python-oauth2,ebrelsford/python-oauth2,amitm/python-oauth2,npetrell/python-oauth2,zyegfryed/python-oauth2,benadida/python-oauth2,edworboys/python-oauth2,artemrizhov/python-oauth2,KLab/python-oauth2
|
oauth2/_version.py
|
oauth2/_version.py
|
# This is the version of this source code.
manual_verstr = "1.5"
auto_build_num = "143"
verstr = manual_verstr + "." + auto_build_num
try:
from pyutil.version_class import Version as pyutil_Version
__version__ = pyutil_Version(verstr)
except (ImportError, ValueError):
# Maybe there is no pyutil installed.
from distutils.version import LooseVersion as distutils_Version
__version__ = distutils_Version(verstr)
|
# This is the version of this source code.
manual_verstr = "1.4"
auto_build_num = "143"
verstr = manual_verstr + "." + auto_build_num
try:
from pyutil.version_class import Version as pyutil_Version
__version__ = pyutil_Version(verstr)
except (ImportError, ValueError):
# Maybe there is no pyutil installed.
from distutils.version import LooseVersion as distutils_Version
__version__ = distutils_Version(verstr)
|
mit
|
Python
|
7bfc2287d15198d9e37b4def4632481c8446a932
|
bump version
|
caktus/django_bread,caktus/django_bread,caktus/django_bread,caktus/django_bread
|
bread/__init__.py
|
bread/__init__.py
|
VERSION = '0.6.0'
|
VERSION = '0.5.1'
|
apache-2.0
|
Python
|
928c3bb38f4fa24d082ea18db09ff4542b78466c
|
remove units from x gt 1 example
|
galbramc/gpkit,galbramc/gpkit,convexopt/gpkit,hoburg/gpkit,hoburg/gpkit,convexopt/gpkit
|
docs/source/examples/x_greaterthan_1.py
|
docs/source/examples/x_greaterthan_1.py
|
from gpkit import Variable, GP
# Decision variable
x = Variable('x')
# Constraint
constraints = [x >= 1]
# Objective (to minimize)
objective = x
# Formulate the GP
gp = GP(objective, constraints)
# Solve the GP
sol = gp.solve()
# Print results table
print sol.table()
|
from gpkit import Variable, GP
# Decision variable
x = Variable("x", "m", "A really useful variable called x with units of meters")
# Constraint
constraint = [1/x <= 1]
# Objective (to minimize)
objective = x
# Formulate the GP
gp = GP(objective, constraint)
# Solve the GP
sol = gp.solve()
# Print results table
print sol.table()
|
mit
|
Python
|
5d30c02f9adb7de3ce9eebef5178466711d96c64
|
Remove unused import: `RelatedField`
|
kevin-brown/drf-json-api
|
rest_framework_json_api/utils.py
|
rest_framework_json_api/utils.py
|
from django.utils.encoding import force_text
from django.utils.text import slugify
try:
from rest_framework.serializers import ManyRelatedField
except ImportError:
ManyRelatedField = type(None)
try:
from rest_framework.serializers import ListSerializer
except ImportError:
ListSerializer = type(None)
def get_related_field(field):
if isinstance(field, ManyRelatedField):
return field.child_relation
if isinstance(field, ListSerializer):
return field.child
return field
def is_related_many(field):
if hasattr(field, "many"):
return field.many
if isinstance(field, ManyRelatedField):
return True
if isinstance(field, ListSerializer):
return True
return False
def model_from_obj(obj):
model = getattr(obj, "model", None)
if model is not None:
return model
queryset = getattr(obj, "queryset", None)
if queryset is not None:
return queryset.model
return None
def model_to_resource_type(model):
'''Return the verbose plural form of a model name, with underscores
Examples:
Person -> "people"
ProfileImage -> "profile_image"
'''
if model is None:
return "data"
return force_text(model._meta.verbose_name_plural)
#
# String conversion
#
def camelcase(string):
'''Return a string in lowerCamelCase
Examples:
"people" -> "people"
"profile images" -> "profileImages"
'''
out = slug(string).replace('-', ' ').title().replace(' ', '')
return out[0].lower() + out[1:]
def slug(string):
'''Return a string where words are connected with hyphens'''
return slugify(force_text(string))
def snakecase(string):
'''Return a string where words are connected with underscores
Examples:
"people" -> "people"
"profile images" -> "profile_images"
'''
return slug(string).replace('-', '_')
|
from django.utils.encoding import force_text
from django.utils.text import slugify
from rest_framework.serializers import RelatedField
try:
from rest_framework.serializers import ManyRelatedField
except ImportError:
ManyRelatedField = type(None)
try:
from rest_framework.serializers import ListSerializer
except ImportError:
ListSerializer = type(None)
def get_related_field(field):
if isinstance(field, ManyRelatedField):
return field.child_relation
if isinstance(field, ListSerializer):
return field.child
return field
def is_related_many(field):
if hasattr(field, "many"):
return field.many
if isinstance(field, ManyRelatedField):
return True
if isinstance(field, ListSerializer):
return True
return False
def model_from_obj(obj):
model = getattr(obj, "model", None)
if model is not None:
return model
queryset = getattr(obj, "queryset", None)
if queryset is not None:
return queryset.model
return None
def model_to_resource_type(model):
'''Return the verbose plural form of a model name, with underscores
Examples:
Person -> "people"
ProfileImage -> "profile_image"
'''
if model is None:
return "data"
return force_text(model._meta.verbose_name_plural)
#
# String conversion
#
def camelcase(string):
'''Return a string in lowerCamelCase
Examples:
"people" -> "people"
"profile images" -> "profileImages"
'''
out = slug(string).replace('-', ' ').title().replace(' ', '')
return out[0].lower() + out[1:]
def slug(string):
'''Return a string where words are connected with hyphens'''
return slugify(force_text(string))
def snakecase(string):
'''Return a string where words are connected with underscores
Examples:
"people" -> "people"
"profile images" -> "profile_images"
'''
return slug(string).replace('-', '_')
|
mit
|
Python
|
8157af3da0e535074b18c76f0e5391d8cac806e8
|
Add error field to expected JSON
|
osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api
|
whats_fresh/whats_fresh_api/tests/views/test_stories.py
|
whats_fresh/whats_fresh_api/tests/views/test_stories.py
|
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class StoriesTestCase(TestCase):
fixtures = ['whats_fresh_api/tests/testdata/test_fixtures.json']
def setUp(self):
self.expected_json = """"
{
error": {
"error_status": false,
"error_name": null,
"error_text": null,
"error_level": null
},
{
"story": "These are the voyages of the Starfish Enterblub; her five year mission -- to seek out new fish and new fishilizations..."
}
}"""
def test_url_endpoint(self):
url = reverse('story-details', kwargs={'id': '1'})
self.assertEqual(url, '/stories/1')
def test_json_equals(self):
c = Client()
response = c.get(reverse('story-details', kwargs={'id': '1'})).content
parsed_answer = json.loads(response)
expected_answer = json.loads(self.expected_json)
self.assertTrue(parsed_answer == expected_answer)
|
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class StoriesTestCase(TestCase):
fixtures = ['whats_fresh_api/tests/testdata/test_fixtures.json']
def setUp(self):
self.expected_json = """
{
"story": "These are the voyages of the Starfish Enterblub; her five year mission -- to seek out new fish and new fishilizations..."
}"""
def test_url_endpoint(self):
url = reverse('story-details', kwargs={'id': '1'})
self.assertEqual(url, '/stories/1')
def test_json_equals(self):
c = Client()
response = c.get(reverse('story-details', kwargs={'id': '1'})).content
parsed_answer = json.loads(response)
expected_answer = json.loads(self.expected_json)
self.assertTrue(parsed_answer == expected_answer)
|
apache-2.0
|
Python
|
feab9b1067a42a6d5d8586361ab1d02f1844aa7e
|
Remove unused imports
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
tests/integration/api/conftest.py
|
tests/integration/api/conftest.py
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
API-specific fixtures
"""
import pytest
from .helpers import assemble_authorization_header
API_TOKEN = 'just-say-PLEASE!'
@pytest.fixture(scope='package')
# `admin_app` fixture is required because it sets up the database.
def api_app(admin_app, make_admin_app):
config_overrides = {
'API_TOKEN': API_TOKEN,
'SERVER_NAME': 'api.acmecon.test',
}
app = make_admin_app(**config_overrides)
with app.app_context():
yield app
@pytest.fixture(scope='package')
def api_client(api_app):
"""Provide a test HTTP client against the API."""
return api_app.test_client()
@pytest.fixture(scope='package')
def api_client_authz_header():
"""Provide a test HTTP client against the API."""
return assemble_authorization_header(API_TOKEN)
|
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
API-specific fixtures
"""
import pytest
from tests.conftest import CONFIG_PATH_DATA_KEY
from tests.helpers import create_admin_app
from .helpers import assemble_authorization_header
API_TOKEN = 'just-say-PLEASE!'
@pytest.fixture(scope='package')
# `admin_app` fixture is required because it sets up the database.
def api_app(admin_app, make_admin_app):
config_overrides = {
'API_TOKEN': API_TOKEN,
'SERVER_NAME': 'api.acmecon.test',
}
app = make_admin_app(**config_overrides)
with app.app_context():
yield app
@pytest.fixture(scope='package')
def api_client(api_app):
"""Provide a test HTTP client against the API."""
return api_app.test_client()
@pytest.fixture(scope='package')
def api_client_authz_header():
"""Provide a test HTTP client against the API."""
return assemble_authorization_header(API_TOKEN)
|
bsd-3-clause
|
Python
|
f2139cad673ee50f027164bda80d86979d5ce7a0
|
Add more imports for further functionality
|
GregBrimble/boilerplate-web-service,GregBrimble/boilerplate-web-service
|
passenger_wsgi.py
|
passenger_wsgi.py
|
import os
import sys
try:
from flask import Flask
import flask_login
from flask_restless import APIManager
from flask_sqlalchemy import SQLAlchemy
import requests
except ImportError:
INTERP = "venv/bin/python"
if os.path.relpath(sys.executable, os.getcwd()) != INTERP:
try:
os.execl(INTERP, INTERP, *sys.argv)
except OSError:
sys.exit("Could not find virtual environment. Run `:~$ ./setup.sh`")
else:
sys.exit("Could not find requirements. Are they all included in requirements.txt? Run `:~$ ./setup.sh`")
application = Flask(__name__)
@application.route("/")
def index():
return "Hello, world!"
|
import os
import sys
try:
from flask import Flask, render_template, send_file, Response
import requests
except ImportError:
INTERP = "venv/bin/python"
if os.path.relpath(sys.executable, os.getcwd()) != INTERP:
try:
os.execl(INTERP, INTERP, *sys.argv)
except OSError:
sys.exit("Could not find virtual environment. Run `:~$ ./setup.sh`")
else:
sys.exit("Could not find requirements. Are they all included in requirements.txt? Run `:~$ ./setup.sh`")
application = Flask(__name__)
@application.route("/")
def index():
return "Hello, world!"
|
mit
|
Python
|
f4e6f2c6eb77876b646da14805ee496b0b25f0bc
|
Support PortOpt from oslo.cfg
|
FrankDuan/df_code,openstack/dragonflow,FrankDuan/df_code,FrankDuan/df_code,openstack/dragonflow,openstack/dragonflow
|
dragonflow/common/common_params.py
|
dragonflow/common/common_params.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from neutron.i18n import _
df_opts = [
cfg.StrOpt('remote_db_ip',
default='127.0.0.1',
help=_('The remote db server ip address')),
cfg.PortOpt('remote_db_port',
default=4001,
help=_('The remote db server port')),
cfg.StrOpt('nb_db_class',
default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver',
help=_('The driver class for the NB DB driver')),
cfg.StrOpt('local_ip',
default='127.0.0.1',
help=_('Local host IP')),
cfg.StrOpt('tunnel_type',
default='geneve',
help=_('The encapsulation type for the tunnel')),
cfg.StrOpt('apps_list',
default='l2_app.L2App,l3_app.L3App',
help=_('List of openflow applications classes to load')),
cfg.BoolOpt('use_centralized_ipv6_DHCP',
default=False,
help=_("Enable IPv6 DHCP by using DHCP agent"))
]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from neutron.i18n import _
df_opts = [
cfg.StrOpt('remote_db_ip',
default='127.0.0.1',
help=_('The remote db server ip address')),
cfg.IntOpt('remote_db_port',
default=4001,
help=_('The remote db server port')),
cfg.StrOpt('nb_db_class',
default='dragonflow.db.drivers.etcd_db_driver.EtcdDbDriver',
help=_('The driver class for the NB DB driver')),
cfg.StrOpt('local_ip',
default='127.0.0.1',
help=_('Local host IP')),
cfg.StrOpt('tunnel_type',
default='geneve',
help=_('The encapsulation type for the tunnel')),
cfg.StrOpt('apps_list',
default='l2_app.L2App,l3_app.L3App',
help=_('List of openflow applications classes to load')),
cfg.BoolOpt('use_centralized_ipv6_DHCP',
default=False,
help=_("Enable IPv6 DHCP by using DHCP agent"))
]
|
apache-2.0
|
Python
|
4ec09eb10aa352175769cc00f189ece719802ea6
|
remove temperature for now
|
mookfist/mookfist-lled-controller
|
lled.py
|
lled.py
|
#!/usr/bin/env python
"""Mookfist LimitlessLED Control
This tool can be used to control your LimitlessLED based lights.
Usage:
lled.py fade <start> <end> (--group=<GROUP>)... [options]
lled.py fadec <start> <end> (--group=<GROUP>)... [options]
lled.py fadeb <startb> <endb> <startc> <endc> (--group=<GROUP>)... [options]
lled.py on (--group=<group>)... [options]
lled.py off (--group=<group>)... [options]
lled.py color <color> (--group=<GROUP>)... [options]
lled.py colorcycle (--group=<GROUP>)... [options]
lled.py rgb <r> <g> <b> (--group=<GROUP>)... [options]
lled.py white (--group=<GROUP>)... [options]
lled.py brightness <brightness> (--group=<GROUP>)... [options]
lled.py scan [options]
Options:
-h --bridge-ip=HOST IP / Hostname of the bridge
-p --bridge-port=PORT Port number of the bridge (defaults to 8899 or 5987)
--bridge-version=VERSION Bridge version (defaults to 4)
-g GROUP --group=GROUP Group number (defaults to 1)
--bulb=BULB Bulb type
-r RC --repeat=RC Number of times to repeat a command
--pause=PAUSE Number of milliseconds to wait between commands
--debug Enable debugging output
-h --help Show this help
--help-bulbtypes Display possible bulb type values
"""
import logging
from docopt import docopt
from mookfist_lled_controller.cli import configure_logger
from mookfist_lled_controller.cli import Main
def main():
"""Main function!"""
arguments = docopt(__doc__, version='Mookfist LimitlessLED Control 0.0.1')
configure_logger(arguments['--debug'])
log = logging.getLogger('lled')
log.info('Welcome to the Mookfist LimitlessLED Controller')
try:
m = Main(arguments)
m.run()
except KeyboardInterrupt:
log.warning('Stopping')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""Mookfist LimitlessLED Control
This tool can be used to control your LimitlessLED based lights.
Usage:
lled.py fade <start> <end> (--group=<GROUP>)... [options]
lled.py fadec <start> <end> (--group=<GROUP>)... [options]
lled.py fadeb <startb> <endb> <startc> <endc> (--group=<GROUP>)... [options]
lled.py on (--group=<group>)... [options]
lled.py off (--group=<group>)... [options]
lled.py color <color> (--group=<GROUP>)... [options]
lled.py colorcycle (--group=<GROUP>)... [options]
lled.py rgb <r> <g> <b> (--group=<GROUP>)... [options]
lled.py white (--group=<GROUP>)... [options]
lled.py brightness <brightness> (--group=<GROUP>)... [options]
lled.py temperature <temp> (--group=<GROUP>)... [options]
lled.py scan [options]
Options:
-h --bridge-ip=HOST IP / Hostname of the bridge
-p --bridge-port=PORT Port number of the bridge (defaults to 8899 or 5987)
--bridge-version=VERSION Bridge version (defaults to 4)
-g GROUP --group=GROUP Group number (defaults to 1)
--bulb=BULB Bulb type
-r RC --repeat=RC Number of times to repeat a command
--pause=PAUSE Number of milliseconds to wait between commands
--debug Enable debugging output
-h --help Show this help
--help-bulbtypes Display possible bulb type values
"""
import logging
from docopt import docopt
from mookfist_lled_controller.cli import configure_logger
from mookfist_lled_controller.cli import Main
def main():
"""Main function!"""
arguments = docopt(__doc__, version='Mookfist LimitlessLED Control 0.0.1')
configure_logger(arguments['--debug'])
log = logging.getLogger('lled')
log.info('Welcome to the Mookfist LimitlessLED Controller')
try:
m = Main(arguments)
m.run()
except KeyboardInterrupt:
log.warning('Stopping')
if __name__ == '__main__':
main()
|
mit
|
Python
|
a324e8de7dc0bcb1676a8ae506d139f05751b233
|
fix lint for tests
|
catmaid/catpy
|
tests/test_relation_identifier.py
|
tests/test_relation_identifier.py
|
from __future__ import absolute_import
import pytest
from catpy.client import ConnectorRelation, CatmaidClient
from catpy.applications import RelationIdentifier
from tests.common import relation_identifier, connectors_types # noqa
def test_from_id(relation_identifier): # noqa
assert relation_identifier.from_id(0) == ConnectorRelation.presynaptic_to
def test_to_id(relation_identifier): # noqa
assert relation_identifier.to_id(ConnectorRelation.presynaptic_to) == 0
@pytest.fixture
def real_relation_identifier(credentials):
return RelationIdentifier(CatmaidClient(**credentials))
def populate_relid(relid):
relid._get_dict(False, None)
relid._get_dict(True, None)
def test_from_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.id_to_relation
def test_to_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.relation_to_id
|
from __future__ import absolute_import
import pytest
from catpy.client import ConnectorRelation, CatmaidClient
from catpy.applications import RelationIdentifier
from tests.common import relation_identifier, connectors_types # noqa
def test_from_id(relation_identifier): # noqa
assert relation_identifier.from_id(0) == ConnectorRelation.presynaptic_to
def test_to_id(relation_identifier): # noqa
assert relation_identifier.to_id(ConnectorRelation.presynaptic_to) == 0
@pytest.fixture
def real_relation_identifier(credentials):
return RelationIdentifier(CatmaidClient(**credentials))
def populate_relid(relation_identifier):
relation_identifier._get_dict(False, None)
relation_identifier._get_dict(True, None)
def test_from_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.id_to_relation
def test_to_id_real(real_relation_identifier):
populate_relid(real_relation_identifier)
assert real_relation_identifier.relation_to_id
|
mit
|
Python
|
ad4b9ffb7292a5b810df033088008cd503bc1169
|
Add pre-fabricated fake PyPI envs at the top.
|
suutari/prequ,suutari/prequ,suutari-ai/prequ
|
tests/unit/test_spec_resolving.py
|
tests/unit/test_spec_resolving.py
|
import unittest
from piptools.datastructures import SpecSet
from piptools.package_manager import FakePackageManager
def print_specset(specset, round):
print('After round #%s:' % (round,))
for spec in specset:
print(' - %s' % (spec.description(),))
simple = {
'foo-0.1': ['bar'],
'bar-1.2': ['qux', 'simplejson'],
'qux-0.1': ['simplejson<2.6'],
'simplejson-2.4.0': [],
'simplejson-2.6.2': [],
}
class TestDependencyResolving(unittest.TestCase):
def test_find_dependencies_simple(self):
"""A simple scenario for finding dependencies."""
pkgmgr = FakePackageManager(simple)
spec_set = SpecSet()
spec_set.add_spec('foo')
round = 1
print_specset(spec_set, round)
while True:
round += 1
new_deps = []
for spec in spec_set.normalize():
name, version = pkgmgr.find_best_match(spec)
new_deps += pkgmgr.get_dependencies(name, version)
if not new_deps:
break
# TODO: We should detect whether adding the new_deps really
# "changes anything" to the spec set. In order words: if no
# significant new constraints are added, we're done
# XXX: FIXME: Current, we "just stop" after X rounds (to prevent
# endless loops), but obviously this is not the correct impl!
if round > 4:
break
spec_set.add_specs(new_deps)
print_specset(spec_set, round)
# Print the final result:
print_specset(spec_set.normalize(), 'final')
spec_set = spec_set.normalize()
self.assertItemsEqual(['foo', 'qux', 'bar', 'simplejson<2.6'], map(str, spec_set))
|
import unittest
from piptools.datastructures import SpecSet
from piptools.package_manager import FakePackageManager
def print_specset(specset, round):
print('After round #%s:' % (round,))
for spec in specset:
print(' - %s' % (spec.description(),))
class TestDependencyResolving(unittest.TestCase):
def test_find_dependencies_simple(self):
"""A simple scenario for finding dependencies."""
content = {
'foo-0.1': ['bar'],
'bar-1.2': ['qux', 'simplejson'],
'qux-0.1': ['simplejson<2.6'],
'simplejson-2.4.0': [],
'simplejson-2.6.2': [],
}
pkgmgr = FakePackageManager(content)
spec_set = SpecSet()
spec_set.add_spec('foo')
round = 1
print_specset(spec_set, round)
while True:
round += 1
new_deps = []
for spec in spec_set.normalize():
name, version = pkgmgr.find_best_match(spec)
new_deps += pkgmgr.get_dependencies(name, version)
if not new_deps:
break
# TODO: We should detect whether adding the new_deps really
# "changes anything" to the spec set. In order words: if no
# significant new constraints are added, we're done
# XXX: FIXME: Current, we "just stop" after X rounds (to prevent
# endless loops), but obviously this is not the correct impl!
if round > 4:
break
spec_set.add_specs(new_deps)
print_specset(spec_set, round)
# Print the final result:
print_specset(spec_set.normalize(), 'final')
spec_set = spec_set.normalize()
self.assertItemsEqual(['foo', 'qux', 'bar', 'simplejson<2.6'], map(str, spec_set))
|
bsd-2-clause
|
Python
|
bbfa9c3135ebdc5a99257d62556b691f8c87a26c
|
Update irrigate.py
|
Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System
|
device/src/irrigate.py
|
device/src/irrigate.py
|
#!/usr/bin/env python
#In this project, I use a servo to simulate the water tap.
#Roating to 90 angle suggest that the water tap is open, and 0 angle means close.
#Pin connection:
#deep red <--> GND
#red <--> VCC
#yellow <--> signal(X1)
#Update!!!!!
#Use real water pump(RS360) to irrigate the plants, need to use relay to drive the pump which is powered by 5V power.
#
from pyb import Servo
servo=Servo(1) # X1
def irrigate_start():
servo.angle(90)
def irrigate_stop():
servo.angle(0)
|
#!/usr/bin/env python
#In this project, I use a servo to simulate the water tap.
#Roating to 90 angle suggest that the water tap is open, and 0 angle means close.
#Pin connection:
#deep red <--> GND
#red <--> VCC
#yellow <--> signal(X1)
from pyb import Servo
servo=Servo(1) # X1
def irrigate_start():
servo.angle(90)
def irrigate_stop():
servo.angle(0)
|
mit
|
Python
|
173d7ffefe10e8896055bd5b41272c2d0a1f8889
|
Update version to 0.1.6 for upcoming release
|
MatthewGilbert/pdblp
|
pdblp/_version.py
|
pdblp/_version.py
|
__version__ = "0.1.6"
|
__version__ = "0.1.5"
|
mit
|
Python
|
b87ebc9dbbc33928345a83ac8ea0ce71806ac024
|
simplify play down to wall and standard defense
|
RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software
|
soccer/gameplay/plays/Defend_Restart_Defensive/BasicDefendRestartDefensive.py
|
soccer/gameplay/plays/Defend_Restart_Defensive/BasicDefendRestartDefensive.py
|
import main
import robocup
import behavior
import constants
import enum
import standard_play
import tactics.positions.submissive_goalie as submissive_goalie
import tactics.positions.submissive_defender as submissive_defender
import evaluation.opponent as eval_opp
import tactics.positions.wing_defender as wing_defender
import skills.mark as mark
import tactics.wall as wall
import situational_play_selection
## Restart that uses standard defense and uses the remaining
# robots to form a wall
#
class BasicDefendRestartDefensive(standard_play.StandardPlay):
_situationList = [
situational_play_selection.SituationalPlaySelector.Situation.DEFEND_RESTART_DEFENSIVE
] # yapf: disable
def __init__(self, num_defenders=2):
super().__init__(continuous=True)
self.num_defenders = num_defenders
self.add_transition(behavior.Behavior.State.start,
behavior.Behavior.State.running, lambda: True,
'Immediately')
self.add_subbehavior(wall.Wall(), 'wall', required=False)
|
import main
import robocup
import behavior
import constants
import enum
import standard_play
import tactics.positions.submissive_goalie as submissive_goalie
import tactics.positions.submissive_defender as submissive_defender
import evaluation.opponent as eval_opp
import tactics.positions.wing_defender as wing_defender
import skills.mark as mark
import tactics.defense
import situational_play_selection
## Play that uses submissive defenders to defend
# an attack close to our goal.
#
# By default, we will use standard defense (two submissive
# defenders, one goalie) and additional marking robots.
#
class BasicDefendRestartDefensive(standard_play.StandardPlay):
_situationList = [
situational_play_selection.SituationalPlaySelector.Situation.DEFEND_RESTART_DEFENSIVE
] # yapf: disable
def __init__(self, num_defenders=2):
super().__init__(continuous=True)
self.num_defenders = num_defenders
self.add_transition(behavior.Behavior.State.start,
behavior.Behavior.State.running, lambda: True,
'Immediately')
for i in range(num_defenders):
self.add_subbehavior(mark.Mark(), 'mark' + str(i), required=False)
# Keep track of which robots are currently being defended
self.defended = {}
for i in range(len(main.their_robots())):
self.defended[i] = False
def execute_running(self):
for i in range(len(main.their_robots())):
if not eval_opp.is_marked(main.their_robots()[i].pos):
self.defended[i] = False
# mark highest threat robot
for i in range(self.num_defenders):
mark_bhvr = self.subbehavior_with_name('mark' + str(i))
threat_found = False
for threat_pt, _, _ in eval_opp.get_threat_list([mark_bhvr]):
print(threat_pt)
closest_opp = eval_opp.get_closest_opponent(threat_pt)
if not threat_found and (closest_opp.pos - main.ball().pos).mag() > constants.Field.CenterRadius + constants.Robot.Radius * 2:
print((closest_opp.pos - main.ball().pos).mag())
# print(constants.Field.CenterRadius)
mark_bhvr.mark_robot = closest_opp
threat_found = True
|
apache-2.0
|
Python
|
abae242bbcdc3eefcd0ab1ff29f660f89d47db1a
|
Add absolute URL for Surprises
|
mirigata/mirigata,mirigata/mirigata,mirigata/mirigata,mirigata/mirigata
|
mirigata/surprise/models.py
|
mirigata/surprise/models.py
|
from django.core.urlresolvers import reverse
from django.db import models
class Surprise(models.Model):
link = models.URLField(max_length=500)
description = models.TextField(max_length=1000)
def get_absolute_url(self):
return reverse('surprise-detail', kwargs={"pk": self.id})
|
from django.db import models
class Surprise(models.Model):
link = models.URLField(max_length=500)
description = models.TextField(max_length=1000)
|
agpl-3.0
|
Python
|
c0fdbf78fcc6b74086cc40e8e0deb273dee6d03c
|
Update BUILD_OSS to 4666.
|
google/mozc,google/mozc,fcitx/mozc,google/mozc,fcitx/mozc,fcitx/mozc,google/mozc,fcitx/mozc,fcitx/mozc,google/mozc
|
src/data/version/mozc_version_template.bzl
|
src/data/version/mozc_version_template.bzl
|
# Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4666
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
|
# Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4660
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
|
bsd-3-clause
|
Python
|
c5225c00191595b6d1a824ee808465e0c488769b
|
Add missing arg which didn't make it because of the bad merge conflict resolution.
|
nzlosh/st2,nzlosh/st2,Plexxi/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2
|
st2stream/st2stream/controllers/v1/stream.py
|
st2stream/st2stream/controllers/v1/stream.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from st2common import log as logging
from st2common.router import Response
from st2common.util.jsonify import json_encode
from st2stream.listener import get_listener
LOG = logging.getLogger(__name__)
def format(gen):
message = '''event: %s\ndata: %s\n\n'''
for pack in gen:
if not pack:
# Note: gunicorn wsgi handler expect bytes, not unicode
yield six.binary_type('\n')
else:
(event, body) = pack
# Note: gunicorn wsgi handler expect bytes, not unicode
yield six.binary_type(message % (event, json_encode(body, indent=None)))
class StreamController(object):
def get_all(self, requester_user):
def make_response():
res = Response(content_type='text/event-stream',
app_iter=format(get_listener().generator()))
return res
stream = make_response()
return stream
stream_controller = StreamController()
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from st2common import log as logging
from st2common.router import Response
from st2common.util.jsonify import json_encode
from st2stream.listener import get_listener
LOG = logging.getLogger(__name__)
def format(gen):
message = '''event: %s\ndata: %s\n\n'''
for pack in gen:
if not pack:
# Note: gunicorn wsgi handler expect bytes, not unicode
yield six.binary_type('\n')
else:
(event, body) = pack
# Note: gunicorn wsgi handler expect bytes, not unicode
yield six.binary_type(message % (event, json_encode(body, indent=None)))
class StreamController(object):
def get_all(self):
def make_response():
res = Response(content_type='text/event-stream',
app_iter=format(get_listener().generator()))
return res
stream = make_response()
return stream
stream_controller = StreamController()
|
apache-2.0
|
Python
|
1df3dc91f71bf2a02b059d414ea5b041a382f1ad
|
change CSS selectors
|
jrafa/hotshot,jrafa/hotshot,jrafa/hotshot
|
shot.py
|
shot.py
|
# -*- coding: utf-8 -*-
import redis
import urllib2
from bs4 import BeautifulSoup
from datetime import datetime
url = 'http://www.x-kom.pl'
FORMAT_DATETIME = '%Y-%m-%d %H:%M:%S.%f'
redis_server = redis.Redis(host='localhost', port=6379)
def get_number(number):
return float(number.strip().split()[0].replace(',', '.'))
def get_element(soup, tag, class_name):
return soup.find(tag, {'class': class_name}).get_text()
def get_data(url):
html = urllib2.urlopen(url).read()
soup = BeautifulSoup(html, 'html.parser')
title = get_element(soup, 'p', 'product-name')
price = get_element(soup, 'div', 'new-price')
price_first = get_element(soup, 'div', 'old-price')
return { 'title': title.encode('utf-8'), 'price': get_number(price), 'price_first': get_number(price_first), 'date': datetime.now()}
def save_to_db():
item = get_data(url)
date = item['date'].strftime(FORMAT_DATETIME)
redis_server.hmset(date, item)
def show_all():
keys = redis_server.keys()
for i, key in enumerate(keys):
print '{}: {}'.format(i, redis_server.hgetall(key))
if __name__ == '__main__':
save_to_db()
# show_all()
|
# -*- coding: utf-8 -*-
import redis
import urllib2
from bs4 import BeautifulSoup
from datetime import datetime
url = 'http://www.x-kom.pl'
FORMAT_DATETIME = '%Y-%m-%d %H:%M:%S.%f'
redis_server = redis.Redis(host='localhost', port=6379)
def get_number(number):
return float(number.strip().split()[0].replace(',', '.'))
def get_element(soup, tag, class_name):
return soup.find(tag, {'class': class_name}).get_text()
def get_data(url):
html = urllib2.urlopen(url).read()
soup = BeautifulSoup(html, 'html.parser')
title = get_element(soup, 'div', 'killer-product-title')
price = get_element(soup, 'div', 'killer-price')
price_first = get_element(soup, 'div', 'discount-price')
return { 'title': title.encode('utf-8'), 'price': get_number(price), 'price_first': get_number(price_first), 'date': datetime.now()}
def save_to_db():
item = get_data(url)
date = item['date'].strftime(FORMAT_DATETIME)
redis_server.hmset(date, item)
def show_all():
keys = redis_server.keys()
for i, key in enumerate(keys):
print '{}: {}'.format(i, redis_server.hgetall(key))
if __name__ == '__main__':
save_to_db()
# show_all()
|
mit
|
Python
|
8b944f04ebf9b635029182a3137e9368edafe9d2
|
Handle exception for bad search strings
|
groundupnews/gu,groundupnews/gu,groundupnews/gu,groundupnews/gu,groundupnews/gu
|
pgsearch/utils.py
|
pgsearch/utils.py
|
from django.contrib.postgres.search import SearchVector, SearchRank, SearchQuery
import shlex
import string
def parseSearchString(search_string):
try:
search_strings = shlex.split(search_string)
translator = str.maketrans({key: None for key in string.punctuation})
search_strings = [s.translate(translator) for s in search_strings]
except:
search_strings = []
return search_strings
def createSearchQuery(list_of_terms):
if len(list_of_terms) > 0:
q = SearchQuery(list_of_terms[0])
for term in list_of_terms[1:]:
q = q & SearchQuery(term)
return q
else:
return None
def searchPostgresDB(search_string, Table, config, rank, *fields):
list_of_terms = parseSearchString(search_string)
search_query = createSearchQuery(list_of_terms)
if rank == True:
vector = SearchVector(*fields, config=config)
objs = Table.objects.annotate(rank=SearchRank(vector, search_query)).\
order_by('-rank')
else:
objs = Table.objects.annotate(search=SearchVector(*fields,
config=config),).\
filter(search=search_query)
return objs
|
from django.contrib.postgres.search import SearchVector, SearchRank, SearchQuery
import shlex
import string
def parseSearchString(search_string):
search_strings = shlex.split(search_string)
translator = str.maketrans({key: None for key in string.punctuation})
search_strings = [s.translate(translator) for s in search_strings]
return search_strings
def createSearchQuery(list_of_terms):
if len(list_of_terms) > 0:
q = SearchQuery(list_of_terms[0])
for term in list_of_terms[1:]:
q = q & SearchQuery(term)
return q
else:
return None
def searchPostgresDB(search_string, Table, config, rank, *fields):
list_of_terms = parseSearchString(search_string)
search_query = createSearchQuery(list_of_terms)
if rank == True:
vector = SearchVector(*fields, config=config)
objs = Table.objects.annotate(rank=SearchRank(vector, search_query)).\
order_by('-rank')
else:
objs = Table.objects.annotate(search=SearchVector(*fields,
config=config),).\
filter(search=search_query)
return objs
|
bsd-3-clause
|
Python
|
6df0e3efd239f7be073057ede44033dc95064a23
|
Fix StringIO import
|
ktdreyer/teuthology,ceph/teuthology,ktdreyer/teuthology,ceph/teuthology
|
teuthology/task/tests/test_run.py
|
teuthology/task/tests/test_run.py
|
import logging
import pytest
from io import StringIO
from teuthology.exceptions import CommandFailedError
log = logging.getLogger(__name__)
class TestRun(object):
"""
Tests to see if we can make remote procedure calls to the current cluster
"""
def test_command_failed_label(self, ctx, config):
result = ""
try:
ctx.cluster.run(
args=["python", "-c", "assert False"],
label="working as expected, nothing to see here"
)
except CommandFailedError as e:
result = str(e)
assert "working as expected" in result
def test_command_failed_no_label(self, ctx, config):
with pytest.raises(CommandFailedError):
ctx.cluster.run(
args=["python", "-c", "assert False"],
)
def test_command_success(self, ctx, config):
result = StringIO()
ctx.cluster.run(
args=["python", "-c", "print('hi')"],
stdout=result
)
assert result.getvalue().strip() == "hi"
|
import logging
import pytest
from StringIO import StringIO
from teuthology.exceptions import CommandFailedError
log = logging.getLogger(__name__)
class TestRun(object):
"""
Tests to see if we can make remote procedure calls to the current cluster
"""
def test_command_failed_label(self, ctx, config):
result = ""
try:
ctx.cluster.run(
args=["python", "-c", "assert False"],
label="working as expected, nothing to see here"
)
except CommandFailedError as e:
result = str(e)
assert "working as expected" in result
def test_command_failed_no_label(self, ctx, config):
with pytest.raises(CommandFailedError):
ctx.cluster.run(
args=["python", "-c", "assert False"],
)
def test_command_success(self, ctx, config):
result = StringIO()
ctx.cluster.run(
args=["python", "-c", "print('hi')"],
stdout=result
)
assert result.getvalue().strip() == "hi"
|
mit
|
Python
|
8e24d3139c11428cda1e07da62ff007be9c77424
|
Add convenience method.
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
abilian/testing/__init__.py
|
abilian/testing/__init__.py
|
"""Base stuff for testing.
"""
import os
import subprocess
import requests
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
CSRF_ENABLED = False
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
def assert_302(self, response):
self.assert_status(response, 302)
#
# Validates HTML if asked by the config or the Unix environment
#
def get(self, url, validate=True):
response = self.client.get(url)
if not validate or response != 200:
return response
validator_url = self.app.config.get('VALIDATOR_URL') \
or os.environ.get('VALIDATOR_URL')
if not validator_url:
return response
content_type = response.headers['Content-Type']
if content_type.split(';')[0].strip() != 'text/html':
return response
return self.validate(url, response.data, content_type, validator_url)
# TODO: post(), put(), etc.
def assert_valid(self, response):
validator_url = self.app.config.get('VALIDATOR_URL') \
or os.environ.get('VALIDATOR_URL')
if validator_url:
self.validate(None, response.data,
response.headers['Content-Type'], validator_url)
def validate(self, url, content, content_type, validator_url):
response = requests.post(validator_url + '?out=json', content,
headers={'Content-Type': content_type})
body = response.json()
for message in body['messages']:
if message['type'] == 'error':
detail = u'on line %s [%s]\n%s' % (
message['lastLine'],
message['extract'],
message['message'])
self.fail((u'Got a validation error for %r:\n%s' %
(url, detail)).encode('utf-8'))
|
"""Base stuff for testing.
"""
import os
import subprocess
import requests
assert not 'twill' in subprocess.__file__
from flask.ext.testing import TestCase
from abilian.application import Application
__all__ = ['TestConfig', 'BaseTestCase']
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = "sqlite://"
SQLALCHEMY_ECHO = False
TESTING = True
SECRET_KEY = "SECRET"
CSRF_ENABLED = False
class BaseTestCase(TestCase):
config_class = TestConfig
application_class = Application
def create_app(self):
config = self.config_class()
self.app = self.application_class(config)
return self.app
def setUp(self):
self.app.create_db()
self.session = self.db.session
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
self.db.engine.dispose()
@property
def db(self):
return self.app.extensions['sqlalchemy'].db
# Useful for debugging
def dump_routes(self):
rules = list(self.app.url_map.iter_rules())
rules.sort(key=lambda x: x.rule)
for rule in rules:
print rule, rule.methods, rule.endpoint
#
# Validates HTML if asked by the config or the Unix environment
#
def get(self, url, validate=True):
response = self.client.get(url)
if not validate or response != 200:
return response
validator_url = self.app.config.get('VALIDATOR_URL') \
or os.environ.get('VALIDATOR_URL')
if not validator_url:
return response
content_type = response.headers['Content-Type']
if content_type.split(';')[0].strip() != 'text/html':
return response
return self.validate(url, response.data, content_type, validator_url)
# TODO: post(), put(), etc.
def assert_valid(self, response):
validator_url = self.app.config.get('VALIDATOR_URL') \
or os.environ.get('VALIDATOR_URL')
if validator_url:
self.validate(None, response.data,
response.headers['Content-Type'], validator_url)
def validate(self, url, content, content_type, validator_url):
response = requests.post(validator_url + '?out=json', content,
headers={'Content-Type': content_type})
body = response.json()
for message in body['messages']:
if message['type'] == 'error':
detail = u'on line %s [%s]\n%s' % (
message['lastLine'],
message['extract'],
message['message'])
self.fail((u'Got a validation error for %r:\n%s' %
(url, detail)).encode('utf-8'))
|
lgpl-2.1
|
Python
|
7292b2d276db056870993a108466fccc18debcae
|
Update count-different-palindromic-subsequences.py
|
kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode
|
Python/count-different-palindromic-subsequences.py
|
Python/count-different-palindromic-subsequences.py
|
# Time: O(n^2)
# Space: O(n^2)
# Given a string S, find the number of different non-empty palindromic subsequences in S,
# and return that number modulo 10^9 + 7.
#
# A subsequence of a string S is obtained by deleting 0 or more characters from S.
#
# A sequence is palindromic if it is equal to the sequence reversed.
#
# Two sequences A_1, A_2, ... and B_1, B_2, ... are different if there is some i for which A_i != B_i.
#
# Example 1:
# Input:
# S = 'bccb'
# Output: 6
# Explanation:
# The 6 different non-empty palindromic subsequences are 'b', 'c', 'bb', 'cc', 'bcb', 'bccb'.
# Note that 'bcb' is counted only once, even though it occurs twice.
#
# Example 2:
# Input:
# S = 'abcdabcdabcdabcdabcdabcdabcdabcddcbadcbadcbadcbadcbadcbadcbadcba'
# Output: 104860361
#
# Explanation:
# There are 3104860382 different non-empty palindromic subsequences, which is 104860361 modulo 10^9 + 7.
# Note:
# - The length of S will be in the range [1, 1000].
# - Each character S[i] will be in the set {'a', 'b', 'c', 'd'}.
class Solution(object):
def countPalindromicSubsequences(self, S):
"""
:type S: str
:rtype: int
"""
def dp(i, j, prv, nxt, lookup):
if lookup[i][j] is not None:
return lookup[i][j]
result = 1
if i <= j:
for x in xrange(4):
i0 = nxt[i][x]
j0 = prv[j][x]
if i <= i0 <= j:
result = (result + 1) % P
if None < i0 < j0:
result = (result + dp(i0+1, j0-1, prv, nxt, lookup)) % P
result %= P
lookup[i][j] = result
return result
prv = [None] * len(S)
nxt = [None] * len(S)
last = [None] * 4
for i in xrange(len(S)):
last[ord(S[i])-ord('a')] = i
prv[i] = tuple(last)
last = [None] * 4
for i in reversed(xrange(len(S))):
last[ord(S[i])-ord('a')] = i
nxt[i] = tuple(last)
P = 10**9 + 7
lookup = [[None] * len(S) for _ in xrange(len(S))]
return dp(0, len(S)-1, prv, nxt, lookup) - 1
|
# Time: O(n^2)
# Space: O(n^2)
class Solution(object):
def countPalindromicSubsequences(self, S):
"""
:type S: str
:rtype: int
"""
def dp(i, j, prv, nxt, lookup):
if lookup[i][j] is not None:
return lookup[i][j]
result = 1
if i <= j:
for x in xrange(4):
i0 = nxt[i][x]
j0 = prv[j][x]
if i <= i0 <= j:
result = (result + 1) % P
if None < i0 < j0:
result = (result + dp(i0+1, j0-1, prv, nxt, lookup)) % P
result %= P
lookup[i][j] = result
return result
prv = [None] * len(S)
nxt = [None] * len(S)
last = [None] * 4
for i in xrange(len(S)):
last[ord(S[i])-ord('a')] = i
prv[i] = tuple(last)
last = [None] * 4
for i in reversed(xrange(len(S))):
last[ord(S[i])-ord('a')] = i
nxt[i] = tuple(last)
P = 10**9 + 7
lookup = [[None] * len(S) for _ in xrange(len(S))]
return dp(0, len(S)-1, prv, nxt, lookup) - 1
|
mit
|
Python
|
358de4c3ce20569e217b1caf5c25ce826b536bbc
|
Reformat datastructuretools
|
Pulgama/supriya,Pulgama/supriya,josiah-wolf-oberholtzer/supriya,Pulgama/supriya,Pulgama/supriya
|
supriya/tools/datastructuretools/__init__.py
|
supriya/tools/datastructuretools/__init__.py
|
# -*- encoding: utf-8 -*-
r"""
Tools for working with generic datastructures.
"""
from abjad.tools import systemtools
systemtools.ImportManager.import_structured_package(
__path__[0],
globals(),
)
|
# -*- encoding: utf-8 -*-
r'''
Tools for working with generic datastructures.
'''
from abjad.tools import systemtools
systemtools.ImportManager.import_structured_package(
__path__[0],
globals(),
)
|
mit
|
Python
|
9098692bf431b4947da96dc054fe8e1559e27aa5
|
Update hexagon_nn_headers to v1.10.3.1.3 Changes Includes: * Support soc_id:371 * New method exposed that returns the version of hexagon_nn used in libhexagon_interface.so
|
tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,annarev/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aldian/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,aam-at/tensorflow,karllessard/tensorflow,petewarden/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,cxxgtxy/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,paolodedios/tensorflow,annarev/tensorflow,xzturn/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,aam-at/tensorflow,annarev/tensorflow,sarvex/tensorflow,annarev/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,cxxgtxy/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gunan/tensorflow,gunan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,renyi533/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,davidzchen/tensorflow,gunan/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,gunan/tensorflow,sarvex/tensorflow,xzturn/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,aldian/tensorflow,renyi533/tensorflow,annarev/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,gunan/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,petewarden/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,petewarden/tensorflow,xzturn/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,aam-at/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,freedomtan/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,annarev/tensorflow,tensorflow/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,annarev/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,aldian/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,renyi533/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,Intel-Corporation/tensorflow,xzturn/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,aldian/tensorflow,sarvex/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gunan/tensorflow,renyi533/tensorflow,karllessard/tensorflow,davidzchen/tensorflow,yongtang/tensorflow,karllessard/tensorflow,xzturn/tensorflow,Intel-Corporation/tensorflow,gunan/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,davidzchen/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,xzturn/tensorflow,aldian/tensorflow,gunan/tensorflow,aldian/tensorflow,yongtang/tensorflow,renyi533/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,renyi533/tensorflow,frreiss/tensorflow-fred,cxxgtxy/tensorflow,xzturn/tensorflow,annarev/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,gunan/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gunan/tensorflow,yongtang/tensorflow,aldian/tensorflow,xzturn/tensorflow,renyi533/tensorflow,annarev/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,cxxgtxy/tensorflow,aam-at/tensorflow
|
third_party/hexagon/workspace.bzl
|
third_party/hexagon/workspace.bzl
|
"""Loads the Hexagon NN Header files library, used by TF Lite."""
load("//third_party:repo.bzl", "third_party_http_archive")
def repo():
third_party_http_archive(
name = "hexagon_nn",
sha256 = "281d46b47f7191f03a8a4071c4c8d2af9409bb9d59573dc2e42f04c4fd61f1fd",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/storage.cloud.google.com/download.tensorflow.org/tflite/hexagon_nn_headers_v1.10.3.1.3.tgz",
],
build_file = "//third_party/hexagon:BUILD",
)
|
"""Loads the Hexagon NN Header files library, used by TF Lite."""
load("//third_party:repo.bzl", "third_party_http_archive")
def repo():
third_party_http_archive(
name = "hexagon_nn",
sha256 = "4cbf3c18834e24b1f64cc507f9c2f22b4fe576c6ff938d55faced5d8f1bddf62",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/storage.cloud.google.com/download.tensorflow.org/tflite/hexagon_nn_headers_v1.10.3.1.2.tgz",
],
build_file = "//third_party/hexagon:BUILD",
)
|
apache-2.0
|
Python
|
86391ed76c49578321c026187f159c53c2cf4ed1
|
Fix slack welcome message display bug and add user handle
|
b12io/orchestra,Sonblind/orchestra,b12io/orchestra,Sonblind/orchestra,unlimitedlabs/orchestra,b12io/orchestra,b12io/orchestra,b12io/orchestra,Sonblind/orchestra,unlimitedlabs/orchestra,unlimitedlabs/orchestra
|
orchestra/slack.py
|
orchestra/slack.py
|
import base64
from uuid import uuid1
from django.conf import settings
import slacker
from orchestra.utils.settings import run_if
class SlackService(object):
"""
Wrapper slack service to allow easy swapping and mocking out of API.
"""
def __init__(self, api_key):
self._service = slacker.Slacker(api_key)
for attr_name in ('chat', 'groups', 'users'):
setattr(self, attr_name, getattr(self._service, attr_name))
@run_if('SLACK_EXPERTS')
def add_worker_to_project_team(worker, project):
slack = SlackService(settings.SLACK_EXPERTS_API_KEY)
try:
user_id = slack.users.get_user_id(worker.slack_username)
response = slack.groups.invite(project.slack_group_id, user_id)
if not response.body.get('already_in_group'):
welcome_message = (
'<@{}|{}> has been added to the team. '
'Welcome aboard!').format(user_id, worker.slack_username)
slack.chat.post_message(project.slack_group_id, welcome_message)
except:
# TODO(jrbotros): for now, using slack on a per-worker basis is
# optional; we'll want to rethink this in the future
pass
@run_if('SLACK_EXPERTS')
def create_project_slack_group(project):
"""
Create slack channel for project team communication
"""
slack = SlackService(settings.SLACK_EXPERTS_API_KEY)
response = slack.groups.create(_project_slack_group_name(project))
project.slack_group_id = response.body['group']['id']
slack.groups.set_topic(project.slack_group_id, project.short_description)
slack.groups.set_purpose(project.slack_group_id,
'Discussing work on `{}`'.format(
project.short_description))
project.save()
return project.slack_group_id
def _project_slack_group_name(project):
"""
Return a unique identifier for project slack groups; must fit into slack's
21 char limit for group names.
"""
return base64.b64encode(uuid1().bytes)
|
import base64
from uuid import uuid1
from django.conf import settings
import slacker
from orchestra.utils.settings import run_if
class SlackService(object):
"""
Wrapper slack service to allow easy swapping and mocking out of API.
"""
def __init__(self, api_key):
self._service = slacker.Slacker(api_key)
for attr_name in ('chat', 'groups', 'users'):
setattr(self, attr_name, getattr(self._service, attr_name))
@run_if('SLACK_EXPERTS')
def add_worker_to_project_team(worker, project):
slack = SlackService(settings.SLACK_EXPERTS_API_KEY)
try:
response = slack.groups.invite(project.slack_group_id,
slack.users.get_user_id(
worker.slack_username))
if not response.body['already_in_group']:
welcome_message = ('{} has been added to the team. '
'Welcome aboard!').format(worker.user.username)
slack.chat.post_message(project.slack_group_id, welcome_message)
except:
# TODO(jrbotros): for now, using slack on a per-worker basis is
# optional; we'll want to rethink this in the future
pass
@run_if('SLACK_EXPERTS')
def create_project_slack_group(project):
"""
Create slack channel for project team communication
"""
slack = SlackService(settings.SLACK_EXPERTS_API_KEY)
response = slack.groups.create(_project_slack_group_name(project))
project.slack_group_id = response.body['group']['id']
slack.groups.set_topic(project.slack_group_id, project.short_description)
slack.groups.set_purpose(project.slack_group_id,
'Discussing work on `{}`'.format(
project.short_description))
project.save()
return project.slack_group_id
def _project_slack_group_name(project):
"""
Return a unique identifier for project slack groups; must fit into slack's
21 char limit for group names.
"""
return base64.b64encode(uuid1().bytes)
|
apache-2.0
|
Python
|
e7b50269a6d83234b283f769265bf474666b6cd2
|
Update project model with property has_description
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
polyaxon/projects/models.py
|
polyaxon/projects/models.py
|
import uuid
from django.conf import settings
from django.core.validators import validate_slug
from django.db import models
from libs.blacklist import validate_blacklist_name
from libs.models import DescribableModel, DiffModel
class Project(DiffModel, DescribableModel):
"""A model that represents a set of experiments to solve a specific problem."""
uuid = models.UUIDField(
default=uuid.uuid4,
editable=False,
unique=True,
null=False)
name = models.CharField(
max_length=256,
validators=[validate_slug, validate_blacklist_name])
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='projects')
is_public = models.BooleanField(
default=True,
help_text='If project is public or private.')
def __str__(self):
return self.unique_name
class Meta:
unique_together = (('user', 'name'),)
@property
def unique_name(self):
return '{}.{}'.format(self.user.username, self.name)
@property
def has_code(self):
return hasattr(self, 'repo')
@property
def has_description(self):
return bool(self.description)
@property
def tensorboard(self):
if settings.DEPLOY_RUNNER:
return self.tensorboard_jobs.last()
return None
@property
def notebook(self):
if settings.DEPLOY_RUNNER:
return self.notebook_jobs.last()
return None
@property
def has_tensorboard(self):
tensorboard = self.tensorboard
return tensorboard and tensorboard.is_running
@property
def has_notebook(self):
notebook = self.notebook
return notebook and notebook.is_running
|
import uuid
from django.conf import settings
from django.core.validators import validate_slug
from django.db import models
from libs.blacklist import validate_blacklist_name
from libs.models import DescribableModel, DiffModel
class Project(DiffModel, DescribableModel):
"""A model that represents a set of experiments to solve a specific problem."""
uuid = models.UUIDField(
default=uuid.uuid4,
editable=False,
unique=True,
null=False)
name = models.CharField(
max_length=256,
validators=[validate_slug, validate_blacklist_name])
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='projects')
is_public = models.BooleanField(
default=True,
help_text='If project is public or private.')
def __str__(self):
return self.unique_name
class Meta:
unique_together = (('user', 'name'),)
@property
def unique_name(self):
return '{}.{}'.format(self.user.username, self.name)
@property
def has_code(self):
return hasattr(self, 'repo')
@property
def tensorboard(self):
if settings.DEPLOY_RUNNER:
return self.tensorboard_jobs.last()
return None
@property
def notebook(self):
if settings.DEPLOY_RUNNER:
return self.notebook_jobs.last()
return None
@property
def has_tensorboard(self):
tensorboard = self.tensorboard
return tensorboard and tensorboard.is_running
@property
def has_notebook(self):
notebook = self.notebook
return notebook and notebook.is_running
|
apache-2.0
|
Python
|
76bf774f3af2fb4fc2518945944b9f64c413712a
|
Simplify "cursor" function in "misc" module
|
idanarye/breeze.vim
|
autoload/breeze/utils/misc.py
|
autoload/breeze/utils/misc.py
|
# -*- coding: utf-8 -*-
"""
breeze.utils.misc
~~~~~~~~~~~~~~~~~
This module defines various utility functions and some tiny wrappers
around vim functions.
"""
import vim
import breeze.utils.settings
def echom(msg):
"""Gives a simple feedback to the user via the command line."""
vim.command('echom "[breeze] {0}"'.format(msg.replace('"', '\"')))
def echov(msg):
"""Gives a feedback only if g:breeze_verbosity = 1."""
if breeze.utils.settings.get("verbosity", bool):
echom(msg)
def cursor(target=None):
"""Moves the cursor or returs the current cursor position."""
if not target:
return vim.current.window.cursor
else:
vim.current.window.cursor = target
def window_bundaries():
"""Returns the top and bottom lines number for the current window."""
curr_pos = cursor()
scrolloff = vim.eval("&scrolloff")
vim.command("setlocal scrolloff=0")
# :help keepjumps -> Moving around in {command} does not change the '',
# '. and '^ marks, the jumplist or the changelist.
vim.command("keepjumps normal! H")
top = cursor()[0]
vim.command("keepjumps normal! L")
bot = cursor()[0]
# restore position and changed options
cursor(curr_pos)
vim.command("setlocal scrolloff={0}".format(scrolloff))
return top, bot
def highlight(group, patt, priority=10):
"""Wraps the matchadd() vim function."""
vim.eval("matchadd('{0}', '{1}', {2})".format(
group, patt, priority))
def subst_char(buffer, v, row, col):
"""Substitutes a character in the buffer with the given character at the
given position. Return the substituted character."""
if row >= len(buffer):
raise ValueError("row index out of bound")
new_line = list(buffer[row])
if col >= len(new_line):
raise ValueError("column index out of bound")
old = buffer[row][col]
new_line[col] = v
buffer[row] = "".join(new_line)
return old
def clear_highlighting():
"""Clears Breeze highlightings."""
for match in vim.eval("getmatches()"):
if match['group'] in ('BreezeJumpMark', 'BreezeShade', 'BreezeHl'):
vim.command("call matchdelete({0})".format(match['id']))
|
# -*- coding: utf-8 -*-
"""
breeze.utils.misc
~~~~~~~~~~~~~~~~~
This module defines various utility functions and some tiny wrappers
around vim functions.
"""
import vim
import breeze.utils.settings
def echom(msg):
"""Gives a simple feedback to the user via the command line."""
vim.command('echom "[breeze] {0}"'.format(msg.replace('"', '\"')))
def echov(msg):
"""Gives a feedback only if g:breeze_verbosity = 1."""
if breeze.utils.settings.get("verbosity", bool):
echom(msg)
def cursor(target=None, kj=False):
"""Moves the cursor.
If the kj parameter is set to True, then the command behaves as following:
:help keepjumps -> Moving around in {command} does not change the '', '.
and '^ marks, the jumplist or the changelist...
"""
if not target:
return vim.current.window.cursor
vim.command("{0}call cursor({1}, {2})".format(
"keepjumps " if kj else "", target[0], target[1]))
def window_bundaries():
"""Returns the top and bottom lines number for the current window."""
curr_pos = cursor()
scrolloff = vim.eval("&scrolloff")
vim.command("setlocal scrolloff=0")
# :help keepjumps -> Moving around in {command} does not change the '',
# '. and '^ marks, the jumplist or the changelist.
vim.command("keepjumps normal! H")
top = cursor()[0]
vim.command("keepjumps normal! L")
bot = cursor()[0]
# restore position and changed options
cursor(curr_pos)
vim.command("setlocal scrolloff={0}".format(scrolloff))
return top, bot
def highlight(group, patt, priority=10):
"""Wraps the matchadd() vim function."""
vim.eval("matchadd('{0}', '{1}', {2})".format(
group, patt, priority))
def subst_char(buffer, v, row, col):
"""Substitutes a character in the buffer with the given character at the
given position. Return the substituted character."""
if row >= len(buffer):
raise ValueError("row index out of bound")
new_line = list(buffer[row])
if col >= len(new_line):
raise ValueError("column index out of bound")
old = buffer[row][col]
new_line[col] = v
buffer[row] = "".join(new_line)
return old
def clear_highlighting():
"""Clears Breeze highlightings."""
for match in vim.eval("getmatches()"):
if match['group'] in ('BreezeJumpMark', 'BreezeShade', 'BreezeHl'):
vim.command("call matchdelete({0})".format(match['id']))
|
mit
|
Python
|
5cc1cc719958178519e10c06f7337b8b48ce02fc
|
sort house numbers
|
sambandi/eMonitor,sambandi/eMonitor,digifant/eMonitor,digifant/eMonitor,digifant/eMonitor,sambandi/eMonitor
|
emonitor/modules/streets/street.py
|
emonitor/modules/streets/street.py
|
import yaml
from sqlalchemy.orm import relationship
from emonitor.extensions import db
from emonitor.modules.streets.housenumber import Housenumber
class Street(db.Model):
__tablename__ = 'streets'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))
navigation = db.Column(db.Text)
cityid = db.Column(db.Integer, db.ForeignKey('cities.id'))
subcity = db.Column(db.String(40))
lat = db.Column(db.Float)
lng = db.Column(db.Float)
zoom = db.Column(db.Integer)
active = db.Column(db.Integer, default=0)
osmid = db.Column(db.Integer, default=0)
city = relationship("City", backref="cities", lazy='joined')
housenumbers = relationship(Housenumber.__name__, backref="streets", lazy='joined', order_by=Housenumber.number)
def __init__(self, name, navigation, cityid, subcity, lat, lng, zoom, active, osmid):
self.name = name
self.navigation = navigation
self.cityid = cityid
self.subcity = subcity
self.lat = lat
self.lng = lng
self.zoom = zoom
self.active = active
self.osmid = osmid
def __repr__(self):
return '<Street %r - %r>' % (self.id, self.name)
def addHouseNumber(self, number, points):
if number not in [hn.number for hn in self.housenumbers]:
db.session.add(Housenumber(self.id, number, yaml.dump(points)))
db.session.commit()
@staticmethod
def getStreet(id=0):
try:
if int(id):
street = db.session.query(Street).filter_by(id=int(id))
if street:
return street.first()
except ValueError:
return None
return None
@staticmethod
#@cache.memoize()
def getAllStreets():
return db.session.query(Street).all()
@staticmethod
def getStreetsDict():
ret = {}
for street in db.session.query(Street).filter_by(active=1).order_by('name'):
ret[street.id] = street
ret[0] = Street('', '', 0, '', 0, 0, 0, 1, 0)
return ret
|
import yaml
from sqlalchemy.orm import relationship
from emonitor.extensions import db
from emonitor.modules.streets.housenumber import Housenumber
class Street(db.Model):
__tablename__ = 'streets'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50))
navigation = db.Column(db.Text)
cityid = db.Column(db.Integer, db.ForeignKey('cities.id'))
subcity = db.Column(db.String(40))
lat = db.Column(db.Float)
lng = db.Column(db.Float)
zoom = db.Column(db.Integer)
active = db.Column(db.Integer, default=0)
osmid = db.Column(db.Integer, default=0)
city = relationship("City", backref="cities", lazy='joined')
housenumbers = relationship(Housenumber.__name__, backref="streets", lazy='joined')
def __init__(self, name, navigation, cityid, subcity, lat, lng, zoom, active, osmid):
self.name = name
self.navigation = navigation
self.cityid = cityid
self.subcity = subcity
self.lat = lat
self.lng = lng
self.zoom = zoom
self.active = active
self.osmid = osmid
def __repr__(self):
return '<Street %r - %r>' % (self.id, self.name)
def addHouseNumber(self, number, points):
if number not in [hn.number for hn in self.housenumbers]:
db.session.add(Housenumber(self.id, number, yaml.dump(points)))
db.session.commit()
@staticmethod
def getStreet(id=0):
try:
if int(id):
street = db.session.query(Street).filter_by(id=int(id))
if street:
return street.first()
except ValueError:
return None
return None
@staticmethod
#@cache.memoize()
def getAllStreets():
return db.session.query(Street).all()
@staticmethod
def getStreetsDict():
ret = {}
for street in db.session.query(Street).filter_by(active=1).order_by('name'):
ret[street.id] = street
ret[0] = Street('', '', 0, '', 0, 0, 0, 1, 0)
return ret
|
bsd-3-clause
|
Python
|
8da02c7c4ad382f4e7a2f7a017b32c0cff51547e
|
set limit of tw id over 5 letters
|
amimoto-ami/amimoto-amazon-alexa,amimoto-ami/amimoto-amazon-alexa
|
build_attendee.py
|
build_attendee.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from pyquery import PyQuery as pq
import json
if __name__ == "__main__":
## ref: pyquery
# https://media.readthedocs.org/pdf/pyquery/latest/pyquery.pdf
data = dict()
file = open('data/attendees.json', "w")
dom = pq(url='https://2016.europe.wordcamp.org/attendees/')
entries = dom.find('ul.tix-attendee-list')
for x in entries('li'):
twitter_name = pq(x).find('a.tix-attendee-twitter').text()
full_name = pq(x).find('div.tix-attendee-name').text()
if twitter_name != None:
# have more than 3 characters ?
if len(twitter_name) > 4:
data[full_name.lower()] = twitter_name
json.dump(data, file, indent=2)
file.close()
file = open('data/list_of_attendees', "w")
for x in data.keys():
file.write(x.encode('utf8'))
file.write("\n")
file.close()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from pyquery import PyQuery as pq
import json
if __name__ == "__main__":
## ref: pyquery
# https://media.readthedocs.org/pdf/pyquery/latest/pyquery.pdf
data = dict()
file = open('data/attendees.json', "w")
dom = pq(url='https://2016.europe.wordcamp.org/attendees/')
entries = dom.find('ul.tix-attendee-list')
for x in entries('li'):
twitter_name = pq(x).find('a.tix-attendee-twitter').text()
full_name = pq(x).find('div.tix-attendee-name').text()
if twitter_name != None:
# have more than 3 characters ?
if len(twitter_name) > 3:
data[full_name.lower()] = twitter_name
json.dump(data, file, indent=2)
file.close()
file = open('data/list_of_attendees', "w")
for x in data.keys():
file.write(x.encode('utf8'))
file.write("\n")
file.close()
|
mit
|
Python
|
dc54a12bfd2124e7203270940928e47198ed914e
|
bump version
|
theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs
|
bulbs/__init__.py
|
bulbs/__init__.py
|
__version__ = "0.6.24"
|
__version__ = "0.6.23"
|
mit
|
Python
|
64383b6d8095f27af775d3c6030b22ee36055b29
|
Change summoner example function name, add params
|
robrua/cassiopeia,10se1ucgo/cassiopeia,meraki-analytics/cassiopeia
|
examples/summoner.py
|
examples/summoner.py
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def print_summoner(name: str, region: str):
summoner = Summoner(name=name, region=region)
print("Name:", summoner.name)
print("ID:", summoner.id)
print("Account ID:", summoner.account.id)
print("Level:", summoner.level)
print("Revision date:", summoner.revision_date)
print("Profile icon ID:", summoner.profile_icon.id)
print("Profile icon name:", summoner.profile_icon.name)
print("Profile icon URL:", summoner.profile_icon.url)
print("Profile icon image:", summoner.profile_icon.image)
# These are equivalent ways of obtaining a Summoner.
# Note that the region defaults to NA.
# summoner = cass.get_summoner(name)
# summoner = cass.get_summoner(name=summoner.name)
# summoner = cass.get_summoner(id=summoner.id)
# summoner = cass.get_summoner(account_id=summoner.account.id)
if __name__ == "__main__":
print_summoner("Kalturi", "NA")
|
import cassiopeia as cass
from cassiopeia.core import Summoner
def test_cass():
name = "Kalturi"
me = Summoner(name=name)
print("Name:", me.name)
print("Id:", me.id)
print("Account id:", me.account.id)
print("Level:", me.level)
print("Revision date:", me.revision_date)
print("Profile icon id:", me.profile_icon.id)
print("Profile icon name:", me.profile_icon.name)
print("Profile icon url:", me.profile_icon.url)
print("Profile icon image:", me.profile_icon.image)
name = me.name
id = me.id
account_id = me.account.id
me = cass.get_summoner(name)
me = cass.get_summoner(name=name)
me = cass.get_summoner(id=id)
me = cass.get_summoner(account_id=account_id)
if __name__ == "__main__":
test_cass()
|
mit
|
Python
|
b077df615eb4354f416877cc2857fb9848e158eb
|
Fix get_sort_by_toggle to work with QueryDicts with multiple values
|
UITools/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,maferelo/saleor,UITools/saleor
|
saleor/core/templatetags/shop.py
|
saleor/core/templatetags/shop.py
|
from __future__ import unicode_literals
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
from django.template import Library
from django.utils.http import urlencode
register = Library()
@register.filter
def slice(items, group_size=1):
args = [iter(items)] * group_size
return (filter(None, group)
for group in zip_longest(*args, fillvalue=None))
@register.simple_tag(takes_context=True)
def get_sort_by_url(context, field, descending=False):
request = context['request']
request_get = request.GET.dict()
if descending:
request_get['sort_by'] = '-' + field
else:
request_get['sort_by'] = field
return '%s?%s' % (request.path, urlencode(request_get))
@register.simple_tag(takes_context=True)
def get_sort_by_url_toggle(context, field):
request = context['request']
request_get = request.GET.copy()
if field == request_get.get('sort_by'):
new_sort_by = u'-%s' % field # descending sort
else:
new_sort_by = field # ascending sort
request_get['sort_by'] = new_sort_by
return '%s?%s' % (request.path, request_get.urlencode())
|
from __future__ import unicode_literals
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
from django.template import Library
from django.utils.http import urlencode
register = Library()
@register.filter
def slice(items, group_size=1):
args = [iter(items)] * group_size
return (filter(None, group)
for group in zip_longest(*args, fillvalue=None))
@register.simple_tag(takes_context=True)
def get_sort_by_url(context, field, descending=False):
request = context['request']
request_get = request.GET.dict()
if descending:
request_get['sort_by'] = '-' + field
else:
request_get['sort_by'] = field
return '%s?%s' % (request.path, urlencode(request_get))
@register.simple_tag(takes_context=True)
def get_sort_by_url_toggle(context, field):
request = context['request']
request_get = request.GET.dict()
if field == request_get.get('sort_by'):
new_sort_by = '-%s' % field # descending sort
else:
new_sort_by = field # ascending sort
request_get['sort_by'] = new_sort_by
return '%s?%s' % (request.path, urlencode(request_get))
|
bsd-3-clause
|
Python
|
3e62a39892c231419ac09310808d95cb42b4f69f
|
add python solution for valid_parentheses
|
hsadler/programming-language-examples,hsadler/programming-language-examples,hsadler/programming-language-examples,hsadler/programming-language-examples,hsadler/programming-language-examples
|
python/valid_parentheses.py
|
python/valid_parentheses.py
|
# validate parentheses of string
import sys
inputChars = [ x for x in sys.argv[1] ]
openParens = ('(', '[', '{')
closeParens = (')', ']', '}')
parenPairs = {
')': '(',
']': '[',
'}': '{'
}
parenHistory = []
for c in inputChars:
if c in openParens:
parenHistory.append(c)
elif c in closeParens:
if len(parenHistory) == 0 or parenHistory.pop() != parenPairs[c]:
print False
sys.exit(1)
print True if len(parenHistory) == 0 else False
|
mit
|
Python
|
|
60f753e736827f61607e10d160b7e7bab75b77cc
|
update pyasn version for workers
|
Jigsaw-Code/censoredplanet-analysis,Jigsaw-Code/censoredplanet-analysis
|
pipeline/setup.py
|
pipeline/setup.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dependency setup for beam remote workers."""
import setuptools
setuptools.setup(
name='censoredplanet-analysis',
version='0.0.1',
install_requires=['pyasn==1.6.1'],
packages=setuptools.find_packages(),
url='https://github.com/Jigsaw-Code/censoredplanet-analysis',
author='Sarah Laplante',
author_email='[email protected]')
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dependency setup for beam remote workers."""
import setuptools
setuptools.setup(
name='censoredplanet-analysis',
version='0.0.1',
install_requires=['pyasn==1.6.0b1'],
packages=setuptools.find_packages(),
url='https://github.com/Jigsaw-Code/censoredplanet-analysis',
author='Sarah Laplante',
author_email='[email protected]')
|
apache-2.0
|
Python
|
7842919b2af368c640363b4e4e05144049b111ba
|
Remove BaseMail dependency on User object
|
OpenVolunteeringPlatform/django-ovp-core,OpenVolunteeringPlatform/django-ovp-core
|
ovp_core/emails.py
|
ovp_core/emails.py
|
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from django.conf import settings
import threading
class EmailThread(threading.Thread):
def __init__(self, msg):
self.msg = msg
threading.Thread.__init__(self)
def run (self):
return self.msg.send() > 0
class BaseMail:
"""
This class is responsible for firing emails
"""
from_email = ''
def __init__(self, email_address, async_mail=None):
self.email_address = email_address
self.async_mail = async_mail
def sendEmail(self, template_name, subject, context):
ctx = Context(context)
text_content = get_template('email/{}.txt'.format(template_name)).render(ctx)
html_content = get_template('email/{}.html'.format(template_name)).render(ctx)
msg = EmailMultiAlternatives(subject, text_content, self.from_email, [self.email_address])
msg.attach_alternative(text_content, "text/plain")
msg.attach_alternative(html_content, "text/html")
if self.async_mail:
async_flag="async"
else:
async_flag=getattr(settings, "DEFAULT_SEND_EMAIL", "async")
if async_flag == "async":
t = EmailThread(msg)
t.start()
return t
else:
return msg.send() > 0
|
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from django.conf import settings
import threading
class EmailThread(threading.Thread):
def __init__(self, msg):
self.msg = msg
threading.Thread.__init__(self)
def run (self):
return self.msg.send() > 0
class BaseMail:
"""
This class is responsible for firing emails
"""
from_email = ''
def __init__(self, user, async_mail=None):
self.user = user
self.async_mail = async_mail
def sendEmail(self, template_name, subject, context):
ctx = Context(context)
text_content = get_template('email/{}.txt'.format(template_name)).render(ctx)
html_content = get_template('email/{}.html'.format(template_name)).render(ctx)
msg = EmailMultiAlternatives(subject, text_content, self.from_email, [self.user.email])
msg.attach_alternative(text_content, "text/plain")
msg.attach_alternative(html_content, "text/html")
if self.async_mail:
async_flag="async"
else:
async_flag=getattr(settings, "DEFAULT_SEND_EMAIL", "async")
if async_flag == "async":
t = EmailThread(msg)
t.start()
return t
else:
return msg.send() > 0
|
agpl-3.0
|
Python
|
3c9de69112c8158877e4b0060ef0ab89c083f376
|
Build 1.14.0.1 package for Windows
|
GStreamer/cerbero,atsushieno/cerbero,centricular/cerbero,fluendo/cerbero,atsushieno/cerbero,nirbheek/cerbero,fluendo/cerbero,centricular/cerbero,centricular/cerbero,GStreamer/cerbero,fluendo/cerbero,atsushieno/cerbero,fluendo/cerbero,nirbheek/cerbero,centricular/cerbero,GStreamer/cerbero,atsushieno/cerbero,atsushieno/cerbero,GStreamer/cerbero,GStreamer/cerbero,nirbheek/cerbero,fluendo/cerbero,nirbheek/cerbero,centricular/cerbero
|
packages/custom.py
|
packages/custom.py
|
# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
from cerbero.packages import package
from cerbero.enums import License
class GStreamer:
url = "http://gstreamer.freedesktop.org"
version = '1.14.0.1'
vendor = 'GStreamer Project'
licenses = [License.LGPL]
org = 'org.freedesktop.gstreamer'
|
# -*- Mode: Python -*- vi:si:et:sw=4:sts=4:ts=4:syntax=python
from cerbero.packages import package
from cerbero.enums import License
class GStreamer:
url = "http://gstreamer.freedesktop.org"
version = '1.14.0'
vendor = 'GStreamer Project'
licenses = [License.LGPL]
org = 'org.freedesktop.gstreamer'
|
lgpl-2.1
|
Python
|
2250fdef5528bb59ca2c3218110d637484737659
|
fix pilutil.imresize test. Patch by Mark Wiebe.
|
mhogg/scipy,giorgiop/scipy,richardotis/scipy,pbrod/scipy,befelix/scipy,zaxliu/scipy,pnedunuri/scipy,niknow/scipy,pschella/scipy,nmayorov/scipy,Stefan-Endres/scipy,sauliusl/scipy,chatcannon/scipy,vhaasteren/scipy,tylerjereddy/scipy,Shaswat27/scipy,gfyoung/scipy,jonycgn/scipy,jsilter/scipy,ogrisel/scipy,newemailjdm/scipy,Gillu13/scipy,perimosocordiae/scipy,person142/scipy,josephcslater/scipy,trankmichael/scipy,sriki18/scipy,juliantaylor/scipy,nvoron23/scipy,efiring/scipy,lukauskas/scipy,niknow/scipy,WarrenWeckesser/scipy,jonycgn/scipy,njwilson23/scipy,haudren/scipy,andim/scipy,sauliusl/scipy,rgommers/scipy,futurulus/scipy,mhogg/scipy,nonhermitian/scipy,aman-iitj/scipy,nvoron23/scipy,Stefan-Endres/scipy,jakevdp/scipy,mortada/scipy,zerothi/scipy,futurulus/scipy,lhilt/scipy,witcxc/scipy,befelix/scipy,gfyoung/scipy,gertingold/scipy,haudren/scipy,maniteja123/scipy,sargas/scipy,surhudm/scipy,zxsted/scipy,gdooper/scipy,Dapid/scipy,Dapid/scipy,zerothi/scipy,gfyoung/scipy,Dapid/scipy,lhilt/scipy,apbard/scipy,jor-/scipy,anielsen001/scipy,jsilter/scipy,vigna/scipy,Newman101/scipy,FRidh/scipy,mortonjt/scipy,e-q/scipy,niknow/scipy,pyramania/scipy,e-q/scipy,trankmichael/scipy,ChanderG/scipy,jamestwebber/scipy,trankmichael/scipy,grlee77/scipy,larsmans/scipy,aarchiba/scipy,argriffing/scipy,njwilson23/scipy,vigna/scipy,FRidh/scipy,nonhermitian/scipy,surhudm/scipy,dch312/scipy,cpaulik/scipy,scipy/scipy,dominicelse/scipy,grlee77/scipy,mikebenfield/scipy,maniteja123/scipy,newemailjdm/scipy,felipebetancur/scipy,scipy/scipy,giorgiop/scipy,Newman101/scipy,WarrenWeckesser/scipy,Kamp9/scipy,Kamp9/scipy,ndchorley/scipy,vhaasteren/scipy,mortonjt/scipy,felipebetancur/scipy,mortonjt/scipy,lukauskas/scipy,aarchiba/scipy,ales-erjavec/scipy,cpaulik/scipy,nvoron23/scipy,petebachant/scipy,vigna/scipy,mtrbean/scipy,mgaitan/scipy,jsilter/scipy,ales-erjavec/scipy,grlee77/scipy,pizzathief/scipy,minhlongdo/scipy,nonhermitian/scipy,hainm/scipy,argriffing/scipy,maciejkula/scipy,vanpact/scipy,mdhaber/scipy,ndchorley/scipy,sriki18/scipy,jor-/scipy,dominicelse/scipy,kleskjr/scipy,sauliusl/scipy,jseabold/scipy,futurulus/scipy,hainm/scipy,perimosocordiae/scipy,behzadnouri/scipy,pnedunuri/scipy,fernand/scipy,mingwpy/scipy,mgaitan/scipy,andim/scipy,Gillu13/scipy,fernand/scipy,giorgiop/scipy,Gillu13/scipy,Shaswat27/scipy,newemailjdm/scipy,vhaasteren/scipy,zaxliu/scipy,hainm/scipy,bkendzior/scipy,Shaswat27/scipy,argriffing/scipy,rgommers/scipy,mdhaber/scipy,kleskjr/scipy,efiring/scipy,andyfaff/scipy,anntzer/scipy,e-q/scipy,perimosocordiae/scipy,niknow/scipy,petebachant/scipy,dominicelse/scipy,Srisai85/scipy,scipy/scipy,ales-erjavec/scipy,haudren/scipy,ndchorley/scipy,pnedunuri/scipy,raoulbq/scipy,woodscn/scipy,sonnyhu/scipy,gef756/scipy,giorgiop/scipy,gef756/scipy,anielsen001/scipy,aeklant/scipy,richardotis/scipy,efiring/scipy,matthew-brett/scipy,lhilt/scipy,vanpact/scipy,woodscn/scipy,josephcslater/scipy,andyfaff/scipy,dch312/scipy,Kamp9/scipy,matthewalbani/scipy,Stefan-Endres/scipy,andyfaff/scipy,njwilson23/scipy,Stefan-Endres/scipy,pbrod/scipy,zerothi/scipy,ChanderG/scipy,behzadnouri/scipy,ortylp/scipy,hainm/scipy,sauliusl/scipy,Gillu13/scipy,endolith/scipy,minhlongdo/scipy,maciejkula/scipy,vigna/scipy,richardotis/scipy,gertingold/scipy,mgaitan/scipy,teoliphant/scipy,matthewalbani/scipy,fernand/scipy,arokem/scipy,efiring/scipy,Srisai85/scipy,fredrikw/scipy,piyush0609/scipy,vanpact/scipy,pnedunuri/scipy,mortonjt/scipy,mortonjt/scipy,bkendzior/scipy,surhudm/scipy,mikebenfield/scipy,trankmichael/scipy,mingwpy/scipy,piyush0609/scipy,rmcgibbo/scipy,jjhelmus/scipy,haudren/scipy,tylerjereddy/scipy,anntzer/scipy,ilayn/scipy,matthew-brett/scipy,mdhaber/scipy,jseabold/scipy,ilayn/scipy,mdhaber/scipy,rgommers/scipy,apbard/scipy,Kamp9/scipy,dominicelse/scipy,rmcgibbo/scipy,ales-erjavec/scipy,aeklant/scipy,mortada/scipy,cpaulik/scipy,Eric89GXL/scipy,lukauskas/scipy,mikebenfield/scipy,dch312/scipy,maniteja123/scipy,arokem/scipy,mhogg/scipy,jseabold/scipy,ilayn/scipy,pnedunuri/scipy,futurulus/scipy,trankmichael/scipy,apbard/scipy,nvoron23/scipy,chatcannon/scipy,anntzer/scipy,josephcslater/scipy,aeklant/scipy,anntzer/scipy,argriffing/scipy,lukauskas/scipy,jjhelmus/scipy,jor-/scipy,nonhermitian/scipy,sonnyhu/scipy,kalvdans/scipy,pbrod/scipy,nvoron23/scipy,jamestwebber/scipy,befelix/scipy,Kamp9/scipy,mgaitan/scipy,gdooper/scipy,juliantaylor/scipy,chatcannon/scipy,minhlongdo/scipy,befelix/scipy,vhaasteren/scipy,chatcannon/scipy,WillieMaddox/scipy,zerothi/scipy,lhilt/scipy,lukauskas/scipy,felipebetancur/scipy,woodscn/scipy,sriki18/scipy,sriki18/scipy,WillieMaddox/scipy,WillieMaddox/scipy,argriffing/scipy,ilayn/scipy,raoulbq/scipy,zaxliu/scipy,zaxliu/scipy,rgommers/scipy,kleskjr/scipy,hainm/scipy,anielsen001/scipy,fernand/scipy,pyramania/scipy,sonnyhu/scipy,behzadnouri/scipy,person142/scipy,richardotis/scipy,kleskjr/scipy,mhogg/scipy,endolith/scipy,Dapid/scipy,aarchiba/scipy,Gillu13/scipy,witcxc/scipy,teoliphant/scipy,fernand/scipy,raoulbq/scipy,sonnyhu/scipy,matthew-brett/scipy,sargas/scipy,mortada/scipy,WillieMaddox/scipy,kalvdans/scipy,WarrenWeckesser/scipy,mortonjt/scipy,chatcannon/scipy,andim/scipy,vhaasteren/scipy,pbrod/scipy,woodscn/scipy,Newman101/scipy,fredrikw/scipy,dominicelse/scipy,ortylp/scipy,zerothi/scipy,fredrikw/scipy,andim/scipy,minhlongdo/scipy,matthew-brett/scipy,WarrenWeckesser/scipy,gef756/scipy,jsilter/scipy,Stefan-Endres/scipy,fredrikw/scipy,ndchorley/scipy,niknow/scipy,mikebenfield/scipy,raoulbq/scipy,felipebetancur/scipy,zxsted/scipy,gef756/scipy,aarchiba/scipy,dch312/scipy,rmcgibbo/scipy,petebachant/scipy,mingwpy/scipy,efiring/scipy,nmayorov/scipy,ortylp/scipy,jamestwebber/scipy,Shaswat27/scipy,minhlongdo/scipy,jsilter/scipy,befelix/scipy,kalvdans/scipy,gfyoung/scipy,mortada/scipy,bkendzior/scipy,mgaitan/scipy,rmcgibbo/scipy,aman-iitj/scipy,Srisai85/scipy,hainm/scipy,matthewalbani/scipy,perimosocordiae/scipy,aman-iitj/scipy,jor-/scipy,niknow/scipy,pschella/scipy,WillieMaddox/scipy,andim/scipy,ales-erjavec/scipy,WarrenWeckesser/scipy,piyush0609/scipy,dch312/scipy,apbard/scipy,perimosocordiae/scipy,mtrbean/scipy,futurulus/scipy,mingwpy/scipy,vigna/scipy,nmayorov/scipy,Eric89GXL/scipy,mikebenfield/scipy,njwilson23/scipy,WillieMaddox/scipy,pyramania/scipy,kalvdans/scipy,sonnyhu/scipy,witcxc/scipy,felipebetancur/scipy,mingwpy/scipy,gef756/scipy,juliantaylor/scipy,endolith/scipy,Newman101/scipy,pschella/scipy,vberaudi/scipy,vberaudi/scipy,lhilt/scipy,ogrisel/scipy,Shaswat27/scipy,raoulbq/scipy,Gillu13/scipy,njwilson23/scipy,tylerjereddy/scipy,grlee77/scipy,mdhaber/scipy,josephcslater/scipy,sargas/scipy,witcxc/scipy,kleskjr/scipy,gfyoung/scipy,juliantaylor/scipy,aeklant/scipy,pyramania/scipy,arokem/scipy,woodscn/scipy,tylerjereddy/scipy,rmcgibbo/scipy,maciejkula/scipy,mingwpy/scipy,Shaswat27/scipy,teoliphant/scipy,Kamp9/scipy,FRidh/scipy,piyush0609/scipy,mortada/scipy,person142/scipy,jakevdp/scipy,perimosocordiae/scipy,matthewalbani/scipy,ChanderG/scipy,petebachant/scipy,zxsted/scipy,jseabold/scipy,larsmans/scipy,maciejkula/scipy,pizzathief/scipy,vberaudi/scipy,witcxc/scipy,anielsen001/scipy,matthew-brett/scipy,nmayorov/scipy,jakevdp/scipy,njwilson23/scipy,ilayn/scipy,jjhelmus/scipy,aman-iitj/scipy,WarrenWeckesser/scipy,chatcannon/scipy,anielsen001/scipy,FRidh/scipy,sargas/scipy,ndchorley/scipy,jonycgn/scipy,pbrod/scipy,grlee77/scipy,giorgiop/scipy,FRidh/scipy,Srisai85/scipy,Eric89GXL/scipy,endolith/scipy,e-q/scipy,mdhaber/scipy,jonycgn/scipy,Eric89GXL/scipy,Eric89GXL/scipy,mortada/scipy,nmayorov/scipy,mhogg/scipy,aman-iitj/scipy,FRidh/scipy,zxsted/scipy,jjhelmus/scipy,sriki18/scipy,pizzathief/scipy,rmcgibbo/scipy,fernand/scipy,piyush0609/scipy,Eric89GXL/scipy,scipy/scipy,ogrisel/scipy,zxsted/scipy,mtrbean/scipy,zxsted/scipy,vanpact/scipy,zerothi/scipy,pyramania/scipy,arokem/scipy,vanpact/scipy,anielsen001/scipy,jamestwebber/scipy,andyfaff/scipy,jjhelmus/scipy,efiring/scipy,jakevdp/scipy,mhogg/scipy,minhlongdo/scipy,bkendzior/scipy,ChanderG/scipy,pizzathief/scipy,larsmans/scipy,Srisai85/scipy,jseabold/scipy,petebachant/scipy,pschella/scipy,anntzer/scipy,ChanderG/scipy,newemailjdm/scipy,sriki18/scipy,sauliusl/scipy,sargas/scipy,bkendzior/scipy,gdooper/scipy,maciejkula/scipy,kalvdans/scipy,gdooper/scipy,zaxliu/scipy,gertingold/scipy,andyfaff/scipy,ogrisel/scipy,larsmans/scipy,ChanderG/scipy,giorgiop/scipy,larsmans/scipy,vberaudi/scipy,sauliusl/scipy,behzadnouri/scipy,Dapid/scipy,josephcslater/scipy,mgaitan/scipy,felipebetancur/scipy,ales-erjavec/scipy,jamestwebber/scipy,andim/scipy,apbard/scipy,person142/scipy,pnedunuri/scipy,Newman101/scipy,jonycgn/scipy,maniteja123/scipy,endolith/scipy,mtrbean/scipy,rgommers/scipy,gdooper/scipy,aman-iitj/scipy,Stefan-Endres/scipy,teoliphant/scipy,behzadnouri/scipy,teoliphant/scipy,vberaudi/scipy,andyfaff/scipy,futurulus/scipy,pschella/scipy,raoulbq/scipy,newemailjdm/scipy,ortylp/scipy,anntzer/scipy,trankmichael/scipy,lukauskas/scipy,behzadnouri/scipy,vanpact/scipy,jseabold/scipy,haudren/scipy,vhaasteren/scipy,endolith/scipy,cpaulik/scipy,kleskjr/scipy,gertingold/scipy,Dapid/scipy,richardotis/scipy,Srisai85/scipy,larsmans/scipy,petebachant/scipy,ortylp/scipy,richardotis/scipy,surhudm/scipy,mtrbean/scipy,ogrisel/scipy,Newman101/scipy,pizzathief/scipy,nonhermitian/scipy,zaxliu/scipy,matthewalbani/scipy,argriffing/scipy,sonnyhu/scipy,newemailjdm/scipy,maniteja123/scipy,fredrikw/scipy,juliantaylor/scipy,piyush0609/scipy,woodscn/scipy,pbrod/scipy,ndchorley/scipy,vberaudi/scipy,tylerjereddy/scipy,person142/scipy,jakevdp/scipy,e-q/scipy,mtrbean/scipy,surhudm/scipy,gef756/scipy,arokem/scipy,ilayn/scipy,cpaulik/scipy,cpaulik/scipy,ortylp/scipy,aeklant/scipy,scipy/scipy,nvoron23/scipy,haudren/scipy,scipy/scipy,gertingold/scipy,jor-/scipy,fredrikw/scipy,aarchiba/scipy,surhudm/scipy,maniteja123/scipy,jonycgn/scipy
|
scipy/misc/tests/test_pilutil.py
|
scipy/misc/tests/test_pilutil.py
|
import os.path
import numpy as np
from numpy.testing import assert_, assert_equal, \
dec, decorate_methods, TestCase, run_module_suite
try:
import PIL.Image
except ImportError:
_have_PIL = False
else:
_have_PIL = True
import scipy.misc.pilutil as pilutil
# Function / method decorator for skipping PIL tests on import failure
_pilskip = dec.skipif(not _have_PIL, 'Need to import PIL for this test')
datapath = os.path.dirname(__file__)
class TestPILUtil(TestCase):
def test_imresize(self):
im = np.random.random((10,20))
for T in np.sctypes['float'] + [float]:
# 1.1 rounds to below 1.1 for float16, 1.101 works
im1 = pilutil.imresize(im,T(1.101))
assert_equal(im1.shape,(11,22))
def test_imresize2(self):
im = np.random.random((20,30))
im2 = pilutil.imresize(im, (30,40), interp='bicubic')
assert_equal(im2.shape, (30,40))
def test_imresize3(self):
im = np.random.random((15,30))
im2 = pilutil.imresize(im, (30,60), interp='nearest')
assert_equal(im2.shape, (30,60))
def test_bytescale(self):
x = np.array([0,1,2],np.uint8)
y = np.array([0,1,2])
assert_equal(pilutil.bytescale(x),x)
assert_equal(pilutil.bytescale(y),[0,127,255])
def tst_fromimage(filename, irange):
img = pilutil.fromimage(PIL.Image.open(filename))
imin,imax = irange
assert_(img.min() >= imin)
assert_(img.max() <= imax)
@_pilskip
def test_fromimage():
''' Test generator for parametric tests '''
data = {'icon.png':(0,255),
'icon_mono.png':(0,2),
'icon_mono_flat.png':(0,1)}
for fn, irange in data.iteritems():
yield tst_fromimage, os.path.join(datapath,'data',fn), irange
decorate_methods(TestPILUtil, _pilskip)
if __name__ == "__main__":
run_module_suite()
|
import os.path
import numpy as np
from numpy.testing import assert_, assert_equal, \
dec, decorate_methods, TestCase, run_module_suite
try:
import PIL.Image
except ImportError:
_have_PIL = False
else:
_have_PIL = True
import scipy.misc.pilutil as pilutil
# Function / method decorator for skipping PIL tests on import failure
_pilskip = dec.skipif(not _have_PIL, 'Need to import PIL for this test')
datapath = os.path.dirname(__file__)
class TestPILUtil(TestCase):
def test_imresize(self):
im = np.random.random((10,20))
for T in np.sctypes['float'] + [float]:
im1 = pilutil.imresize(im,T(1.1))
assert_equal(im1.shape,(11,22))
def test_imresize2(self):
im = np.random.random((20,30))
im2 = pilutil.imresize(im, (30,40), interp='bicubic')
assert_equal(im2.shape, (30,40))
def test_imresize3(self):
im = np.random.random((15,30))
im2 = pilutil.imresize(im, (30,60), interp='nearest')
assert_equal(im2.shape, (30,60))
def test_bytescale(self):
x = np.array([0,1,2],np.uint8)
y = np.array([0,1,2])
assert_equal(pilutil.bytescale(x),x)
assert_equal(pilutil.bytescale(y),[0,127,255])
def tst_fromimage(filename, irange):
img = pilutil.fromimage(PIL.Image.open(filename))
imin,imax = irange
assert_(img.min() >= imin)
assert_(img.max() <= imax)
@_pilskip
def test_fromimage():
''' Test generator for parametric tests '''
data = {'icon.png':(0,255),
'icon_mono.png':(0,2),
'icon_mono_flat.png':(0,1)}
for fn, irange in data.iteritems():
yield tst_fromimage, os.path.join(datapath,'data',fn), irange
decorate_methods(TestPILUtil, _pilskip)
if __name__ == "__main__":
run_module_suite()
|
bsd-3-clause
|
Python
|
b3573faeff22f220990ea2c97a7c9eae26429258
|
add parse for application/json
|
hugoxia/Python,hugoxia/Python,hugoxia/Python,hugoxia/Python
|
tornado-sqlalchemy-example/app.py
|
tornado-sqlalchemy-example/app.py
|
# -*- coding: utf-8 -*-
import os
import tornado.web
import tornado.options
import tornado.ioloop
from db import db
from model import User
from tornado.escape import json_decode, to_unicode
class BaseHandler(tornado.web.RequestHandler):
@property
def db(self):
return self.application.db
def get_json_argument(self, name, default=None):
"""当Content-Type的值为application/json, 解析请求参数"""
args = json_decode(self.request.body)
name = to_unicode(name)
if name in args:
return args[name]
elif default is not None:
return default
else:
raise tornado.web.MissingArgumentError(name)
class IndexHandler(BaseHandler):
def get(self):
data = self.db.query(User).all()
a = User(username="test", password="test")
self.db.add(a)
data1 = self.db.query(User).all()
for d in data:
self.write("user: %s\n" % d.username)
self.write("==================")
for d in data1:
self.write("second %s" % d.username)
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", IndexHandler),
]
settings = dict(
debug=True,
static_path=os.path.join(os.path.dirname(__file__), "static"),
template_path=os.path.join(os.path.dirname(__file__), "templates")
)
tornado.web.Application.__init__(self, handlers, **settings)
self.db = db
if __name__ == '__main__':
tornado.options.parse_command_line()
Application().listen(8000)
tornado.ioloop.IOLoop.instance().start()
|
import os
import tornado.web
import tornado.options
import tornado.ioloop
from db import db
from model import User
class BaseHandler(tornado.web.RequestHandler):
@property
def db(self):
return self.application.db
class IndexHandler(BaseHandler):
def get(self):
data = self.db.query(User).all()
a = User(username="test", password="test")
self.db.add(a)
data1 = self.db.query(User).all()
for d in data:
self.write("user: %s\n" % d.username)
self.write("==================")
for d in data1:
self.write("second %s" % d.username)
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", IndexHandler),
]
settings = dict(
debug=True,
static_path=os.path.join(os.path.dirname(__file__), "static"),
template_path=os.path.join(os.path.dirname(__file__), "templates")
)
tornado.web.Application.__init__(self, handlers, **settings)
self.db = db
if __name__ == '__main__':
tornado.options.parse_command_line()
Application().listen(8000)
tornado.ioloop.IOLoop.instance().start()
|
mit
|
Python
|
bf81484b7fd55e6383ae8e0f103e5e69ddea430e
|
Update utils.py
|
AcademicTorrents/python-r-api
|
academictorrents/utils.py
|
academictorrents/utils.py
|
import hashlib
import os
import json
import datetime
import calendar
import time
def convert_bytes_to_decimal(headerBytes):
size = 0
power = len(headerBytes) - 1
for ch in headerBytes:
if isinstance(ch, int):
size += ch * 256 ** power
else:
size += int(ord(ch)) * 256 ** power
power -= 1
return size
def sha1_hash(string):
"""Return 20-byte sha1 hash of string."""
return hashlib.sha1(string).digest()
def get_timestamp_filename():
return clean_path("~/.academictorrents_timestamps.json")
def get_datastore(datastore="", path_to_config_file="~/.academictorrents.config"):
if datastore:
datastore = clean_path(datastore)
else:
datastore = json.loads(open(clean_path(path_to_config_file)).read()).get("datastore", os.getcwd() + "/datastore/")
if datastore[-1] != "/":
datastore = datastore + "/"
return datastore
def clean_path(path=None):
if path.startswith("~"):
return os.path.expanduser(path)
else:
return os.path.abspath(path)
def write_timestamp(at_hash):
filename = get_timestamp_filename()
try:
f = open(filename, 'r')
timestamps = json.load(f)
f.close()
except Exception:
timestamps = {}
timestamps[at_hash] = int(datetime.datetime.timestamp(datetime.datetime.now()))
f = open(filename, 'w')
json.dump(timestamps, f)
def read_timestamp(at_hash):
filename = get_timestamp_filename()
try:
f = open(filename, 'r')
timestamp = json.load(f).get(at_hash, 0)
f.close()
except Exception:
timestamp = 0
return timestamp
def timestamp_is_within_30_days(timestamp):
seconds_in_a_month = 86400 * 30
if timestamp > int(calendar.timegm(time.gmtime())) - seconds_in_a_month:
return True
return False
def timestamp_is_within_10_seconds(timestamp):
ten_seconds = 10
if timestamp > int(calendar.timegm(time.gmtime())) - ten_seconds:
return True
return False
def filenames_present(torrent):
return torrent.contents['info']['name'] in os.listdir(torrent.datastore)
|
import hashlib
import os
import json
import datetime
import calendar
import time
def convert_bytes_to_decimal(headerBytes):
size = 0
power = len(headerBytes) - 1
for ch in headerBytes:
if isinstance(ch, int):
size += ch * 256 ** power
else:
size += int(ord(ch)) * 256 ** power
power -= 1
return size
def sha1_hash(string):
"""Return 20-byte sha1 hash of string."""
return hashlib.sha1(string).digest()
def get_timestamp_filename():
return clean_path("~/.academictorrents_timestamps.json")
def get_datastore(datastore="", path_to_config_file="~/.academictorrents.config"):
if datastore:
datastore = clean_path(datastore)
else:
datastore = json.loads(open(clean_path(path_to_config_file)).read()).get("datastore", os.getcwd() + "/datastore/")
if datastore[-1] != "/":
datastore = datastore + "/"
return datastore
def clean_path(path=None):
if path.startswith("~"):
return os.path.expanduser(path)
else:
return os.path.abspath(path)
def write_timestamp(at_hash):
filename = get_timestamp_filename()
try:
f = open(filename, 'r')
timestamps = json.load(f)
f.close()
except Exception:
timestamps = {}
timestamps[at_hash] = int(datetime.datetime.now().strftime("%s"))
f = open(filename, 'w')
json.dump(timestamps, f)
def read_timestamp(at_hash):
filename = get_timestamp_filename()
try:
f = open(filename, 'r')
timestamp = json.load(f).get(at_hash, 0)
f.close()
except Exception:
timestamp = 0
return timestamp
def timestamp_is_within_30_days(timestamp):
seconds_in_a_month = 86400 * 30
if timestamp > int(calendar.timegm(time.gmtime())) - seconds_in_a_month:
return True
return False
def timestamp_is_within_10_seconds(timestamp):
ten_seconds = 10
if timestamp > int(calendar.timegm(time.gmtime())) - ten_seconds:
return True
return False
def filenames_present(torrent):
return torrent.contents['info']['name'] in os.listdir(torrent.datastore)
|
mit
|
Python
|
2b5ac57fd02e5e20f738f9060456542f69eeff95
|
Bump version to 4.0.0a12
|
platformio/platformio-core,platformio/platformio,platformio/platformio-core
|
platformio/__init__.py
|
platformio/__init__.py
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 0, "0a12")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 0, "0a11")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
|
apache-2.0
|
Python
|
82a1bcd4bd104ca2b45cb5dc93a44e4a16d1cbe3
|
add more QC options and colorized output for quicker review
|
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
|
scripts/asos/archive_quantity.py
|
scripts/asos/archive_quantity.py
|
""" Create a simple prinout of observation quanity in the database """
import datetime
now = datetime.datetime.utcnow()
import numpy
counts = numpy.zeros((120,12))
mslp = numpy.zeros((120,12))
metar = numpy.zeros((120,12))
import iemdb
ASOS = iemdb.connect('asos', bypass=True)
acursor = ASOS.cursor()
import sys
stid = sys.argv[1]
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
acursor.execute("""SELECT extract(year from valid) as yr,
extract(month from valid) as mo, count(*),
sum(case when mslp is null or mslp < 1 then 1 else 0 end),
sum(case when metar is null or metar = '' then 1 else 0 end)
from alldata WHERE
station = %s GROUP by yr, mo ORDER by yr ASC, mo ASC""", (stid,))
for row in acursor:
counts[int(row[0]-1900),int(row[1]-1)] = row[2]
mslp[int(row[0]-1900),int(row[1]-1)] = row[3]
metar[int(row[0]-1900),int(row[1]-1)] = row[4]
def d(hits, total):
if total == 0:
return " N/A"
val = hits / float(total)
c1 = bcolors.ENDC
if val > 0.5:
c1 = bcolors.FAIL
return "%s%.2f%s" % (c1, val, bcolors.ENDC)
print 'Observation Count For %s' % (stid,)
print 'YEAR JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC'
output = False
for i in range(120):
year = 1900 + i
if year > now.year:
continue
if not output and numpy.max(counts[i,:]) == 0:
continue
output = True
if len(sys.argv) < 3:
print "%s %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i" % (year,
counts[i,0],counts[i,1],counts[i,2],counts[i,3],
counts[i,4],counts[i,5],counts[i,6],counts[i,7],
counts[i,8],counts[i,9],counts[i,10],counts[i,11])
else:
if sys.argv[2] == 'metar':
data = metar
else:
data = mslp
print "%s %4s %4s %4s %4s %4s %4s %4s %4s %4s %4s %4s %4s" % (year,
d(data[i,0], counts[i,0]),
d(data[i,1], counts[i,1]),
d(data[i,2], counts[i,2]),
d(data[i,3], counts[i,3]),
d(data[i,4], counts[i,4]),
d(data[i,5], counts[i,5]),
d(data[i,6], counts[i,6]),
d(data[i,7], counts[i,7]),
d(data[i,8], counts[i,8]),
d(data[i,9], counts[i,9]),
d(data[i,10], counts[i,10]),
d(data[i,11], counts[i,11]))
|
""" Create a simple prinout of observation quanity in the database """
import datetime
now = datetime.datetime.utcnow()
import numpy
counts = numpy.zeros((120,12))
import iemdb
ASOS = iemdb.connect('asos', bypass=True)
acursor = ASOS.cursor()
import sys
stid = sys.argv[1]
acursor.execute("""SELECT extract(year from valid) as yr,
extract(month from valid) as mo, count(*) from alldata WHERE
station = %s GROUP by yr, mo ORDER by yr ASC, mo ASC""", (stid,))
for row in acursor:
counts[int(row[0]-1900),int(row[1]-1)] = row[2]
print 'Observation Count For %s' % (stid,)
print 'YEAR JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC'
output = False
for i in range(120):
year = 1900 + i
if year > now.year:
continue
if not output and numpy.max(counts[i,:]) == 0:
continue
output = True
print "%s %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i %4i" % (year,
counts[i,0],counts[i,1],counts[i,2],counts[i,3],
counts[i,4],counts[i,5],counts[i,6],counts[i,7],
counts[i,8],counts[i,9],counts[i,10],counts[i,11])
|
mit
|
Python
|
546d8fc8b41de424a76beb03c6530a7cf505a6a3
|
add orca EarthLocation
|
tamasgal/km3pipe,tamasgal/km3pipe
|
km3pipe/constants.py
|
km3pipe/constants.py
|
# coding=utf-8
# Filename: constants.py
# pylint: disable=C0103
# pragma: no cover
"""
The constants used in KM3Pipe.
"""
from __future__ import division, absolute_import, print_function
# TODO: this module should be refactored soon!
import math
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "[email protected]"
__status__ = "Development"
# Detector related parameters
arca_frame_duration = 0.1 # s
orca_frame_duration = 0.1 # s
c = 2.99792458e8 # m/s
n_water_antares_phase = 1.3499
n_water_antares_group = 1.3797
n_water_km3net_group = 1.3787
n_water_antares = n_water_antares_group
theta_cherenkov_water_antares = math.acos(1 / n_water_antares_phase)
theta_cherenkov_water_km3net = math.acos(1 / n_water_km3net_group)
c_water_antares = c / n_water_antares_group
c_water_km3net = c / n_water_km3net_group
# Math
pi = math.pi
e = math.e
# Default values for time residuals
dt_window_l = -15 # ns
dt_window_h = +25 # ns
orca_coords = (42 + (48/60), 6 + (2/60)) # (n, e) / degree
orca_height = -2450 # m
|
# coding=utf-8
# Filename: constants.py
# pylint: disable=C0103
# pragma: no cover
"""
The constants used in KM3Pipe.
"""
from __future__ import division, absolute_import, print_function
# TODO: this module should be refactored soon!
import math
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "[email protected]"
__status__ = "Development"
# Detector related parameters
arca_frame_duration = 0.1 # s
orca_frame_duration = 0.1 # s
c = 2.99792458e8 # m/s
n_water_antares_phase = 1.3499
n_water_antares_group = 1.3797
n_water_km3net_group = 1.3787
n_water_antares = n_water_antares_group
theta_cherenkov_water_antares = math.acos(1 / n_water_antares_phase)
theta_cherenkov_water_km3net = math.acos(1 / n_water_km3net_group)
c_water_antares = c / n_water_antares_group
c_water_km3net = c / n_water_km3net_group
# Math
pi = math.pi
e = math.e
# Default values for time residuals
dt_window_l = -15 # ns
dt_window_h = +25 # ns
|
mit
|
Python
|
ce47d219076dc2ff36c58db1d91ba349b9968d61
|
Update test_bandits.py
|
yngtodd/tanuki
|
bandits/tests/test_bandits.py
|
bandits/tests/test_bandits.py
|
from sklearn.utils.testing import assert_equal
import numpy as np
import pytest
@pytest.mark.fast_test
def dummy_test():
"""
Quick test to build with Circle CI.
"""
x = 2 + 2
assert_equal(x, 4)
|
from sklearn.utils.testing import assert_equal
import numpy as np
import pytest
print("Hello tests!")
|
mit
|
Python
|
2652919c8d2e6fad8f7b3d47f5e82528b4b5214e
|
Write the last point for plot completeness
|
ECP-CANDLE/Database,ECP-CANDLE/Database
|
plots/monotone.py
|
plots/monotone.py
|
# MONOTONE
# Produce a monotonically decreasing output plot from noisy data
# Input: columns: t x
# Output: columns: t_i x_i , sampled such that x_i <= x_j
# for j > i.
from string import *
import sys
# Set PYTHONPATH=$PWD
from plottools import *
if len(sys.argv) != 3:
abort("usage: <input file> <output file>")
input_file = sys.argv[1]
output_file = sys.argv[2]
val_loss_min = sys.float_info.max
with open(input_file, "r") as fp_i, \
open(output_file, "w") as fp_o:
for line in fp_i:
(t, val_loss_string) = split(line)
val_loss = float(val_loss_string)
if val_loss < val_loss_min:
val_loss_min = val_loss
fp_o.write("%s, %f\n" % (t, val_loss_min))
# Ensure the last data point is written for the plot:
if val_loss >= val_loss_min:
fp_o.write("%s, %f\n" % (t, val_loss_min))
|
# MONOTONE
# Produce a monotonically decreasing output plot from noisy data
# Input: columns: t x
# Output: columns: t_i x_i , sampled such that x_i <= x_j
# for j > i.
from string import *
import sys
# Set PYTHONPATH=$PWD
from plottools import *
if len(sys.argv) != 3:
abort("usage: <input file> <output file>")
input_file = sys.argv[1]
output_file = sys.argv[2]
val_loss_min = sys.float_info.max
with open(input_file, "r") as fp_i, \
open(output_file, "w") as fp_o:
for line in fp_i:
(t, val_loss_string) = split(line)
val_loss = float(val_loss_string)
if val_loss < val_loss_min:
val_loss_min = val_loss
fp_o.write("%s, %f\n" % (t, val_loss_min))
|
mit
|
Python
|
6482c485982fe5039574eab797b46d5f1b93bacc
|
Refactor populate script
|
trimailov/finance,trimailov/finance,trimailov/finance
|
finance/management/commands/populate.py
|
finance/management/commands/populate.py
|
import random
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
import factory
from accounts.factories import UserFactory
from books.factories import TransactionFactory
class Command(BaseCommand):
help = "Popoulates databse with dummy data"
def handle(self, *args, **options):
if not User.objects.filter(username='admin'):
self.create_admin()
else:
self.admin = User.objects.get(username='admin')
print("admin user already exists")
self.create_transactions()
def create_admin(self):
# Factory creates simple user, so ``is_staff`` is set later
self.admin = UserFactory(username='admin', password='asdasd')
self.admin.is_staff = True
self.admin.save()
print("admin user have been created successfully")
def create_transactions(self):
TransactionFactory.create_batch(
10,
amount=factory.Sequence(lambda n: random.randrange(0, 10)),
category=random.randrange(0, 2), # random range from 0 to 1
user=self.admin,
)
print("Transactions for admin created")
|
import random
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db import IntegrityError
import factory
from accounts.factories import UserFactory
from books.factories import TransactionFactory
class Command(BaseCommand):
help = "Popoulates databse with dummy data"
def handle(self, *args, **options):
# Factory creates simple user, so ``is_staff`` is set later
try:
admin = UserFactory(username='admin', password='asdasd')
admin.is_staff = True
admin.save()
print("admin user have been created successfully")
except IntegrityError:
admin = User.objects.get(username='admin')
print("admin user already exists")
TransactionFactory.create_batch(
10,
price=factory.Sequence(lambda n: random.randrange(0, 10)),
user=admin,
)
print("Transactions for admin created")
|
mit
|
Python
|
4475cd927dda1d8ab685507895e0fc4bde6e3b4a
|
switch window index error
|
malongge/selenium-pom
|
pages/base_page.py
|
pages/base_page.py
|
from .page import Page
class BasePage(Page):
def get_cookie_index_page(self, url, cookie):
self.get_relative_path(url)
self.maximize_window()
self.selenium.add_cookie(cookie)
self.selenium.refresh()
def switch_to_second_window(self):
handles = self.selenium.window_handles
try:
handle = handles[1]
except IndexError:
handle = handles[0]
self.selenium.switch_to_window(handle)
|
from .page import Page
class BasePage(Page):
def get_cookie_index_page(self, url, cookie):
self.get_relative_path(url)
self.maximize_window()
self.selenium.add_cookie(cookie)
self.selenium.refresh()
def switch_to_second_window(self):
handles = self.selenium.window_handles
self.selenium.switch_to_window(handles[1])
|
apache-2.0
|
Python
|
e69efded329ebbcf5ccf74ef137dc1a80bd4b4a6
|
add 2.1.2, re-run cython if needed (#13102)
|
LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack
|
var/spack/repos/builtin/packages/py-line-profiler/package.py
|
var/spack/repos/builtin/packages/py-line-profiler/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack import *
class PyLineProfiler(PythonPackage):
"""Line-by-line profiler."""
homepage = "https://github.com/rkern/line_profiler"
url = "https://pypi.io/packages/source/l/line_profiler/line_profiler-2.0.tar.gz"
version('2.1.2', sha256='efa66e9e3045aa7cb1dd4bf0106e07dec9f80bc781a993fbaf8162a36c20af5c')
version('2.0', 'fc93c6bcfac3b7cb1912cb28836d7ee6')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-cython', type='build')
depends_on('[email protected]:', type=('build', 'run'))
# See https://github.com/rkern/line_profiler/issues/166
@run_before('build')
@when('^[email protected]:')
def fix_cython(self):
cython = self.spec['py-cython'].command
for root, _, files in os.walk('.'):
for fn in files:
if fn.endswith('.pyx'):
cython(os.path.join(root, fn))
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyLineProfiler(PythonPackage):
"""Line-by-line profiler."""
homepage = "https://github.com/rkern/line_profiler"
url = "https://pypi.io/packages/source/l/line_profiler/line_profiler-2.0.tar.gz"
version('2.0', 'fc93c6bcfac3b7cb1912cb28836d7ee6')
depends_on('[email protected]:')
depends_on('py-setuptools', type='build')
depends_on('py-cython', type='build')
depends_on('[email protected]:', type=('build', 'run'))
|
lgpl-2.1
|
Python
|
381adeeec0fd1d65372d7003183d4b1ec8f2cfbf
|
Increase V8JS Stack Limit (#584)
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
dmoj/executors/V8JS.py
|
dmoj/executors/V8JS.py
|
from dmoj.executors.script_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = 'js'
name = 'V8JS'
command = 'v8dmoj'
test_program = 'print(gets());'
address_grace = 786432
nproc = -1
@classmethod
def get_version_flags(cls, command):
return [('-e', 'print(version())')]
def get_cmdline(self):
return [self.get_command(), '--stack-size=131072', self._code] # 128MB Stack Limit
|
from dmoj.executors.script_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = 'js'
name = 'V8JS'
command = 'v8dmoj'
test_program = 'print(gets());'
address_grace = 786432
nproc = -1
@classmethod
def get_version_flags(cls, command):
return [('-e', 'print(version())')]
|
agpl-3.0
|
Python
|
7fba4a676622e93416f32ee69bfa295647979c7a
|
fix path on test file
|
mcdeaton13/Tax-Calculator,jlyons871/Tax-Calculator,mcdeaton13/Tax-Calculator,rkuchan/Tax-Calculator,rkuchan/Tax-Calculator,mmessick/Tax-Calculator,xiyuw123/Tax-Calculator,xiyuw123/Tax-Calculator,mmessick/Tax-Calculator,jlyons871/Tax-Calculator
|
taxcalc/tests/test_calculate.py
|
taxcalc/tests/test_calculate.py
|
import os
import sys
cur_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(cur_path, "../../"))
sys.path.append(os.path.join(cur_path, "../"))
import numpy as np
import pandas as pd
from numba import jit, vectorize, guvectorize
from taxcalc import *
def test_make_Calculator():
tax_dta = pd.read_csv(os.path.join(cur_path, "../../puf2.csv"))
calc = Calculator(tax_dta)
def test_make_Calculator_mods():
cur_path = os.path.abspath(os.path.dirname(__file__))
tax_dta = pd.read_csv(os.path.join(cur_path, "../../puf2.csv"))
calc1 = calculator(tax_dta)
calc2 = calculator(tax_dta, _amex=np.array([4000]))
update_calculator_from_module(calc2, constants)
update_globals_from_calculator(calc2)
assert all(calc2._amex == np.array([4000]))
|
import os
import sys
cur_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(cur_path, "../../"))
sys.path.append(os.path.join(cur_path, "../"))
import numpy as np
import pandas as pd
from numba import jit, vectorize, guvectorize
from taxcalc import *
def test_make_Calculator():
tax_dta = pd.read_csv("../../puf2.csv")
calc = Calculator(tax_dta)
def test_make_Calculator_mods():
cur_path = os.path.abspath(os.path.dirname(__file__))
tax_dta = pd.read_csv(os.path.join(cur_path, "../../puf2.csv"))
calc1 = calculator(tax_dta)
calc2 = calculator(tax_dta, _amex=np.array([4000]))
update_calculator_from_module(calc2, constants)
update_globals_from_calculator(calc2)
assert all(calc2._amex == np.array([4000]))
|
mit
|
Python
|
faf9638bc69dc79c7fdc9294cc309c40ca57d518
|
Fix process names in test_nailyd_alive
|
SmartInfrastructures/fuel-main-dev,SergK/fuel-main,huntxu/fuel-main,teselkin/fuel-main,dancn/fuel-main-dev,stackforge/fuel-web,zhaochao/fuel-main,AnselZhangGit/fuel-main,teselkin/fuel-main,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,stackforge/fuel-main,huntxu/fuel-web,AnselZhangGit/fuel-main,eayunstack/fuel-web,zhaochao/fuel-main,SmartInfrastructures/fuel-web-dev,stackforge/fuel-web,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,zhaochao/fuel-web,Fiware/ops.Fuel-main-dev,SmartInfrastructures/fuel-main-dev,zhaochao/fuel-main,prmtl/fuel-web,AnselZhangGit/fuel-main,huntxu/fuel-main,nebril/fuel-web,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,huntxu/fuel-main,prmtl/fuel-web,Fiware/ops.Fuel-main-dev,huntxu/fuel-web,huntxu/fuel-web,Fiware/ops.Fuel-main-dev,zhaochao/fuel-web,stackforge/fuel-main,teselkin/fuel-main,prmtl/fuel-web,dancn/fuel-main-dev,SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,eayunstack/fuel-web,dancn/fuel-main-dev,ddepaoli3/fuel-main-dev,eayunstack/fuel-main,zhaochao/fuel-main,AnselZhangGit/fuel-main,koder-ua/nailgun-fcert,eayunstack/fuel-main,huntxu/fuel-web,koder-ua/nailgun-fcert,nebril/fuel-web,zhaochao/fuel-web,prmtl/fuel-web,koder-ua/nailgun-fcert,nebril/fuel-web,stackforge/fuel-web,Fiware/ops.Fuel-main-dev,eayunstack/fuel-web,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SergK/fuel-main,prmtl/fuel-web,eayunstack/fuel-main,koder-ua/nailgun-fcert,SergK/fuel-main,stackforge/fuel-main,zhaochao/fuel-main,nebril/fuel-web,eayunstack/fuel-web,ddepaoli3/fuel-main-dev,huntxu/fuel-web,dancn/fuel-main-dev
|
fuelweb_test/integration/test_nailyd.py
|
fuelweb_test/integration/test_nailyd.py
|
import logging
import xmlrpclib
from fuelweb_test.integration.base import Base
from fuelweb_test.helpers import SSHClient
class TestNailyd(Base):
def __init__(self, *args, **kwargs):
super(TestNailyd, self).__init__(*args, **kwargs)
self.remote = SSHClient()
def setUp(self):
logging.info('Admin node ip: %s' % self.get_admin_node_ip())
self.ip = self.get_admin_node_ip()
def tearDown(self):
pass
def test_nailyd_alive(self):
self.remote.connect_ssh(self.ip, 'root', 'r00tme')
ps_output = self.remote.execute('ps ax')['stdout']
naily_processes = filter(lambda x: 'naily master' in x, ps_output)
logging.debug("Found %d naily master processes: %s" %
(len(naily_processes), naily_processes))
self.assertEqual(1, len(naily_processes))
naily_processes = filter(lambda x: 'naily worker' in x, ps_output)
logging.debug("Found %d naily worker processes: %s" %
(len(naily_processes), naily_processes))
self.assertEqual(True, len(naily_processes) > 1)
|
import logging
import xmlrpclib
from fuelweb_test.integration.base import Base
from fuelweb_test.helpers import SSHClient
class TestNailyd(Base):
def __init__(self, *args, **kwargs):
super(TestNailyd, self).__init__(*args, **kwargs)
self.remote = SSHClient()
def setUp(self):
logging.info('Admin node ip: %s' % self.get_admin_node_ip())
self.ip = self.get_admin_node_ip()
def tearDown(self):
pass
def test_nailyd_alive(self):
self.remote.connect_ssh(self.ip, 'root', 'r00tme')
ps_output = self.remote.execute('ps ax')['stdout']
naily_processes = filter(lambda x: '/usr/bin/nailyd' in x, ps_output)
logging.debug("Found naily processes: %s" % naily_processes)
self.assertEquals(len(naily_processes), 1)
|
apache-2.0
|
Python
|
13a64059b71fccb8315f552d8e96f130c513a540
|
Remove old code.
|
colmcoughlan/alchemy-server
|
charity_server.py
|
charity_server.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 30 01:14:12 2017
@author: colm
"""
from flask import Flask, jsonify
from parse_likecharity import refresh_charities
from datetime import datetime
app = Flask(__name__)
refresh_rate = 24 * 60 * 60 #Seconds
start_time = datetime.now()
initialized = False
# variables that are accessible from anywhere
payload = {}
@app.route("/gci")
def gci():
global payload
delta = datetime.now() - start_time
if delta.total_seconds() > refresh_rate or not(initialized):
categories, charity_dict = refresh_charities()
payload = {'categories':categories, 'charities':charity_dict}
return jsonify(payload)
if __name__ == "__main__":
categories, charity_dict = refresh_charities()
app.run(host='0.0.0.0')
print('test')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 30 01:14:12 2017
@author: colm
"""
from flask import Flask, jsonify
from parse_likecharity import refresh_charities
import threading
from datetime import datetime
app = Flask(__name__)
refresh_rate = 24 * 60 * 60 #Seconds
start_time = datetime.now()
# variables that are accessible from anywhere
payload = {}
# lock to control access to variable
dataLock = threading.Lock()
# thread handler
backgroundThread = threading.Thread()
def update_charities():
print('Updating charities in background thread')
global payload
global backgroundThread
with dataLock:
categories, charity_dict = refresh_charities()
payload = {'categories':categories, 'charities':charity_dict}
print('Running!')
# Set the next thread to happen
backgroundThread = threading.Timer(refresh_rate, update_charities, ())
backgroundThread.start()
@app.route("/gci")
def gci():
global payload
delta = datetime.now() - start_time
if delta.total_seconds() > refresh_rate:
categories, charity_dict = refresh_charities()
payload = {'categories':categories, 'charities':charity_dict}
return jsonify(payload)
if __name__ == "__main__":
update_charities()
app.run(host='0.0.0.0')
backgroundThread.cancel()
print('test')
|
mit
|
Python
|
d6de45a751034fc6721886b670219eb55c589886
|
reduce load size
|
leeopop/2015-CS570-Project
|
test.py
|
test.py
|
from loader import *
from create_feature import *
def main():
total_data = load_all(file_list=['Author', 'Paper', 'PaperAuthor'])
load_title_lda(total_data)
paper_topic(total_data)
author_topic(total_data)
paper_author_topic_sum(total_data)
save_topic_sum(total_data)
pass
if __name__ == '__main__':
main()
|
from loader import *
from create_feature import *
def main():
total_data = load_all()
load_title_lda(total_data)
paper_topic(total_data)
author_topic(total_data)
paper_author_topic_sum(total_data)
save_topic_sum(total_data)
pass
if __name__ == '__main__':
main()
|
mit
|
Python
|
21bbf9ec71c2d63f5c826dfdc3641927692cb202
|
test test
|
jantman/kvmdash,jantman/kvmdash,jantman/kvmdash
|
test.py
|
test.py
|
from flask import Flask
import pytest
def test_app():
app = Flask(__name__)
app.testing = True
@app.route("/")
def hello():
return "Hello World!"
# app.run() # this actually works here...
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
assert response.data == "Hello World!"
print response.headers
assert False
|
from flask import Flask
import pytest
def test_app():
app = Flask(__name__)
app.testing = True
@app.route("/")
def hello():
return "Hello World!"
# app.run() # this actually works here...
client = app.test_client()
response = client.get("/")
assert response.status_code == 200
assert response.data == "Hello World!"
print response.headers
assert False
|
agpl-3.0
|
Python
|
727078f0d7105138310f0870f8ab3a751e0f72da
|
Fix linting issues in test runner
|
cornell-cup/cs-minibot,cornell-cup/cs-minibot,cornell-cup/cs-minibot,cornell-cup/cs-minibot,cornell-cup/cs-minibot
|
test.py
|
test.py
|
"""
Run all tests in this project.
"""
import unittest
if __name__ == "__main__":
loader = unittest.TestLoader()
tests = loader.discover(".", pattern="test_*.py")
runner = unittest.TextTestRunner()
runner.run(tests)
|
# Run all tests in this project
import os
import sys
import unittest
if __name__=="__main__":
loader = unittest.TestLoader()
tests = loader.discover(".", pattern="test_*.py")
runner = unittest.TextTestRunner()
runner.run(tests)
|
apache-2.0
|
Python
|
26fc8789445c22f85467387bec7eeb6eccedc2c5
|
Stop before starting when restarting
|
matrix-org/synapse,howethomas/synapse,howethomas/synapse,matrix-org/synapse,iot-factory/synapse,iot-factory/synapse,matrix-org/synapse,rzr/synapse,illicitonion/synapse,rzr/synapse,illicitonion/synapse,TribeMedia/synapse,matrix-org/synapse,iot-factory/synapse,howethomas/synapse,TribeMedia/synapse,TribeMedia/synapse,illicitonion/synapse,matrix-org/synapse,rzr/synapse,illicitonion/synapse,howethomas/synapse,rzr/synapse,howethomas/synapse,matrix-org/synapse,illicitonion/synapse,TribeMedia/synapse,rzr/synapse,iot-factory/synapse,TribeMedia/synapse,iot-factory/synapse
|
synapse/app/synctl.py
|
synapse/app/synctl.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import subprocess
import signal
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
CONFIGFILE="homeserver.yaml"
PIDFILE="homeserver.pid"
GREEN="\x1b[1;32m"
NORMAL="\x1b[m"
def start():
if not os.path.exists(CONFIGFILE):
sys.stderr.write(
"No config file found\n"
"To generate a config file, run '%s -c %s --generate-config"
" --server-name=<server name>'\n" % (
" ".join(SYNAPSE), CONFIGFILE
)
)
sys.exit(1)
print "Starting ...",
args = SYNAPSE
args.extend(["--daemonize", "-c", CONFIGFILE, "--pid-file", PIDFILE])
subprocess.check_call(args)
print GREEN + "started" + NORMAL
def stop():
if os.path.exists(PIDFILE):
pid = int(open(PIDFILE).read())
os.kill(pid, signal.SIGTERM)
print GREEN + "stopped" + NORMAL
def main():
action = sys.argv[1] if sys.argv[1:] else "usage"
if action == "start":
start()
elif action == "stop":
stop()
elif action == "restart":
stop()
start()
else:
sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
sys.exit(1)
if __name__=='__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import subprocess
import signal
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
CONFIGFILE="homeserver.yaml"
PIDFILE="homeserver.pid"
GREEN="\x1b[1;32m"
NORMAL="\x1b[m"
def start():
if not os.path.exists(CONFIGFILE):
sys.stderr.write(
"No config file found\n"
"To generate a config file, run '%s -c %s --generate-config"
" --server-name=<server name>'\n" % (
" ".join(SYNAPSE), CONFIGFILE
)
)
sys.exit(1)
print "Starting ...",
args = SYNAPSE
args.extend(["--daemonize", "-c", CONFIGFILE, "--pid-file", PIDFILE])
subprocess.check_call(args)
print GREEN + "started" + NORMAL
def stop():
if os.path.exists(PIDFILE):
pid = int(open(PIDFILE).read())
os.kill(pid, signal.SIGTERM)
print GREEN + "stopped" + NORMAL
def main():
action = sys.argv[1] if sys.argv[1:] else "usage"
if action == "start":
start()
elif action == "stop":
stop()
elif action == "restart":
start()
stop()
else:
sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
sys.exit(1)
if __name__=='__main__':
main()
|
apache-2.0
|
Python
|
6c02b743ad3859e05eeb980298e54acf3fbd9788
|
Add __len__ to FlagField (#3981)
|
allenai/allennlp,allenai/allennlp,allenai/allennlp,allenai/allennlp
|
allennlp/data/fields/flag_field.py
|
allennlp/data/fields/flag_field.py
|
from typing import Any, Dict, List
from overrides import overrides
from allennlp.data.fields.field import Field
class FlagField(Field[Any]):
"""
A class representing a flag, which must be constant across all instances in a batch.
This will be passed to a `forward` method as a single value of whatever type you pass in.
"""
def __init__(self, flag_value: Any) -> None:
self.flag_value = flag_value
@overrides
def get_padding_lengths(self) -> Dict[str, int]:
return {}
@overrides
def as_tensor(self, padding_lengths: Dict[str, int]) -> Any:
return self.flag_value
@overrides
def empty_field(self):
# Because this has to be constant across all instances in a batch, we need to keep the same
# value.
return FlagField(self.flag_value)
def __str__(self) -> str:
return f"FlagField({self.flag_value})"
def __len__(self) -> int:
return 1
@overrides
def batch_tensors(self, tensor_list: List[Any]) -> Any:
if len(set(tensor_list)) != 1:
raise ValueError(
f"Got different values in a FlagField when trying to batch them: {tensor_list}"
)
return tensor_list[0]
|
from typing import Any, Dict, List
from overrides import overrides
from allennlp.data.fields.field import Field
class FlagField(Field[Any]):
"""
A class representing a flag, which must be constant across all instances in a batch.
This will be passed to a `forward` method as a single value of whatever type you pass in.
"""
def __init__(self, flag_value: Any) -> None:
self.flag_value = flag_value
@overrides
def get_padding_lengths(self) -> Dict[str, int]:
return {}
@overrides
def as_tensor(self, padding_lengths: Dict[str, int]) -> Any:
return self.flag_value
@overrides
def empty_field(self):
# Because this has to be constant across all instances in a batch, we need to keep the same
# value.
return FlagField(self.flag_value)
def __str__(self) -> str:
return f"FlagField({self.flag_value})"
@overrides
def batch_tensors(self, tensor_list: List[Any]) -> Any:
if len(set(tensor_list)) != 1:
raise ValueError(
f"Got different values in a FlagField when trying to batch them: {tensor_list}"
)
return tensor_list[0]
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.