Dataset schema (column name, feature type, observed min/max):

  commit        stringlengths   40 to 40
  subject       stringlengths   4 to 1.73k
  repos         stringlengths   5 to 127k
  old_file      stringlengths   2 to 751
  new_file      stringlengths   2 to 751
  new_contents  stringlengths   1 to 8.98k
  old_contents  stringlengths   0 to 6.59k
  license       stringclasses   13 values
  lang          stringclasses   23 values
0ec01e1c5770c87faa5300b80c3b9d6bcb0df41b
Make sure to return python values, not lxml objects
vkurup/python-tcxparser,vkurup/python-tcxparser,SimonArnu/python-tcxparser
tcxparser.py
tcxparser.py
"Simple parser for Garmin TCX files." from lxml import objectify __version__ = '0.4.0' class TcxParser: def __init__(self, tcx_file): tree = objectify.parse(tcx_file) self.root = tree.getroot() self.activity = self.root.Activities.Activity @property def latitude(self): return self.activity.Lap.Track.Trackpoint.Position.LatitudeDegrees.pyval @property def longitude(self): return self.activity.Lap.Track.Trackpoint.Position.LongitudeDegrees.pyval @property def activity_type(self): return self.activity.attrib['Sport'].lower() @property def completed_at(self): return self.activity.Lap[-1].Track.Trackpoint[-1].Time.pyval @property def distance(self): return self.activity.Lap[-1].Track.Trackpoint[-2].DistanceMeters.pyval @property def distance_units(self): return 'meters' @property def duration(self): """Returns duration of workout in seconds.""" return sum(lap.TotalTimeSeconds for lap in self.activity.Lap) @property def calories(self): return sum(lap.Calories for lap in self.activity.Lap)
"Simple parser for Garmin TCX files." from lxml import objectify __version__ = '0.3.0' class TcxParser: def __init__(self, tcx_file): tree = objectify.parse(tcx_file) self.root = tree.getroot() self.activity = self.root.Activities.Activity @property def latitude(self): return self.activity.Lap.Track.Trackpoint.Position.LatitudeDegrees @property def longitude(self): return self.activity.Lap.Track.Trackpoint.Position.LongitudeDegrees @property def activity_type(self): return self.activity.attrib['Sport'].lower() @property def completed_at(self): return self.activity.Lap[-1].Track.Trackpoint[-1].Time @property def distance(self): return self.activity.Lap[-1].Track.Trackpoint[-2].DistanceMeters @property def distance_units(self): return 'meters' @property def duration(self): """Returns duration of workout in seconds.""" return sum(lap.TotalTimeSeconds for lap in self.activity.Lap) @property def calories(self): return sum(lap.Calories for lap in self.activity.Lap)
bsd-2-clause
Python
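A quick usage sketch of the parser above, showing why this commit matters: with .pyval the properties return plain Python values instead of lxml objectify elements. The file name is hypothetical; this assumes a local example.tcx exists.

from tcxparser import TcxParser

parser = TcxParser('example.tcx')   # hypothetical input file
print(parser.latitude)              # a plain float with .pyval
print(type(parser.latitude))        # float, not an lxml objectify element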
3f18e4891b64c45fbda9ae88e9b508b5bc2cb03a
Add infinite loop; Add env vars
ps-jay/temp2dash
temp2dash.py
temp2dash.py
import json
import os
import requests
import sys
import time
import traceback

from temperusb import TemperHandler

URL = os.environ['DASHING_URL']
SCALE = float(os.environ['TEMP_SCALE'])
OFFSET = float(os.environ['TEMP_OFFSET'])
SENSOR = int(os.environ['TEMP_SENSOR'])
SLEEP = int(os.environ['SLEEP_TIME'])

th = TemperHandler()
devs = th.get_devices()
if len(devs) != 1:
    print "Expected exactly one TEMPer device, found %d" % len(devs)
    sys.exit(1)

dev = devs[0]
dev.set_calibration_data(scale=SCALE, offset=OFFSET)

while True:
    try:
        temperature = dev.get_temperature(sensor=SENSOR)
    except Exception, err:
        print "\nException on getting temperature\n"
        print traceback.format_exc()
    payload = {
        'auth_token': 'abcdefghijklmnopqrstuvwxyz',
        'temperature': '%0.0f%s' % (
            temperature,
            u'\N{DEGREE SIGN}',
        ),
    }
    sys.stdout.write(u'%0.1f%s, ' % (
        temperature,
        u'\N{DEGREE SIGN}',
    ))
    sys.stdout.flush()
    try:
        post = requests.post(URL, data=json.dumps(payload))
    except Exception, err:
        print "\nException on posting temperature to dashing\n"
        print traceback.format_exc()
    if post.status_code != 204:
        print "\nHTTP status from POST was %s (expected 204)\n" % post.status_code
    time.sleep(SLEEP)
import json
import requests
import sys

from temperusb import TemperHandler

URL="http://dashing:3030/widgets/inside"
SCALE=1.0
OFFSET=-3.0

th = TemperHandler()
devs = th.get_devices()
if len(devs) != 1:
    print "Expected exactly one TEMPer device, found %d" % len(devs)
    sys.exit(1)

dev = devs[0]
dev.set_calibration_data(scale=SCALE, offset=OFFSET)

temperature = dev.get_temperature(sensor=1)

payload = {
    'auth_token': 'abcdefghijklmnopqrstuvwxyz',
    'temperature': '%0.0f%s' % (
        temperature,
        u'\N{DEGREE SIGN}',
    ),
}

post = requests.post(URL, data=json.dumps(payload))
if post.status_code != 204:
    sys.exit(255)
sys.exit(0)
mit
Python
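The rewritten script reads its configuration from five environment variables instead of hard-coded constants. A minimal sketch of providing them before the module-level reads run; the first four values come from the defaults hard-coded in the old version, while SLEEP_TIME is hypothetical.

import os

os.environ.update({
    'DASHING_URL': 'http://dashing:3030/widgets/inside',
    'TEMP_SCALE': '1.0',
    'TEMP_OFFSET': '-3.0',
    'TEMP_SENSOR': '1',
    'SLEEP_TIME': '60',  # hypothetical; any integer number of seconds
})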
285ca0f2a469d0d11baad1120a5b0b1d0074aef3
Update dbworker.py (#2)
Kondra007/telegram-xkcd-password-generator
dbworker.py
dbworker.py
# -*- coding: utf-8 -*-
from tinydb import TinyDB, Query
from tinydb.operations import increment, decrement
from texts import strings
from config import db_file
from utils import get_language

DEFAULT_WORD_COUNT = 3
DEFAULT_PREFIX_SUFFIX = True
DEFAULT_SEPARATOR = True

db = TinyDB(db_file)


def get_settings_text(user_id, lang_code):
    user = get_person(user_id)
    text = strings.get(get_language(lang_code)).get("settings").format(
        num_of_words=user["word_count"],
        prefixes=strings.get(get_language(lang_code)).get("yes") if user["prefixes"] else strings.get(get_language(lang_code)).get("no"),
        separators=strings.get(get_language(lang_code)).get("yes") if user["separators"] else strings.get(get_language(lang_code)).get("no"))
    return text


def user_exists(user_id):
    return bool(db.search(Query().user_id == user_id))


def get_person(user_id):
    # Check if user exists
    S = Query()
    person = db.search(S.user_id == user_id)
    if len(person) is 0:
        usr = {"user_id": user_id,
               "word_count": DEFAULT_WORD_COUNT,
               "prefixes": DEFAULT_PREFIX_SUFFIX,
               "separators": DEFAULT_SEPARATOR}
        db.insert(usr)
        return usr
    return person[0]


def change_word_count(user_id, increase):
    S = Query()
    if increase:
        db.update(increment("word_count"), S.user_id == user_id)
    else:
        db.update(decrement("word_count"), S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]


def change_prefixes(user_id, enable_prefixes):
    S = Query()
    if enable_prefixes:
        db.update({"prefixes": True}, S.user_id == user_id)
    else:
        db.update({"prefixes": False}, S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]


def change_separators(user_id, enable_separators):
    S = Query()
    if enable_separators:
        db.update({"separators": True}, S.user_id == user_id)
    else:
        db.update({"separators": False}, S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]
# -*- coding: utf-8 -*-
from tinydb import TinyDB, Query
from tinydb.operations import increment, decrement
from texts import strings
from config import db_file
from utils import get_language

DEFAULT_WORD_COUNT = 3
DEFAULT_PREFIX_SUFFIX = True
DEFAULT_SEPARATOR = True

db = TinyDB(db_file)


def get_settings_text(user_id, lang_code):
    user = get_person(user_id)
    text = strings.get(get_language(lang_code)).get("settings").format(
        num_of_words=user["word_count"],
        prefixes=strings.get(get_language(lang_code)).get("yes") if user["prefixes"] else strings.get(get_language(lang_code)).get("no"),
        separators=strings.get(get_language(lang_code)).get("yes") if user["separators"] else strings.get(get_language(lang_code)).get("no"))
    return text


def user_exists(user_id):
    return True if len(db.search(Query().user_id == user_id)) > 0 else False


def get_person(user_id):
    # Check if user exists
    S = Query()
    person = db.search(S.user_id == user_id)
    if len(person) is 0:
        usr = {"user_id": user_id,
               "word_count": DEFAULT_WORD_COUNT,
               "prefixes": DEFAULT_PREFIX_SUFFIX,
               "separators": DEFAULT_SEPARATOR}
        db.insert(usr)
        return usr
    return person[0]


def change_word_count(user_id, increase):
    S = Query()
    if increase:
        db.update(increment("word_count"), S.user_id == user_id)
    else:
        db.update(decrement("word_count"), S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]


def change_prefixes(user_id, enable_prefixes):
    S = Query()
    if enable_prefixes:
        db.update({"prefixes": True}, S.user_id == user_id)
    else:
        db.update({"prefixes": False}, S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]


def change_separators(user_id, enable_separators):
    S = Query()
    if enable_separators:
        db.update({"separators": True}, S.user_id == user_id)
    else:
        db.update({"separators": False}, S.user_id == user_id)
    return db.search(S.user_id == user_id)[0]
mit
Python
991c6bc16388e4470193462c4ce63468b22ca79a
Remove __author__
google/dpy
__init__.py
__init__.py
from ioc import *

__copyright__ = "Copyright 2013 Google Inc."
__license__ = "MIT, see LICENSE"
from ioc import *

__author__ = "Wes Alvaro"
__copyright__ = "Copyright 2013 Google Inc."
__license__ = "MIT, see LICENSE"
mit
Python
19e12f1e492272bf4a69e0bc99106e78788b9c14
Add PEP8 line terminator before EOF
thismachinechills/save_skype
__init__.py
__init__.py
from extract import *
from extract import *
agpl-3.0
Python
5768d1ebcfec46e564c8b420773d911c243327ff
Fix non-threadsafe failure in serializer - now using thread local serializer instance.
commoncode/django-ddp,commoncode/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,django-ddp/django-ddp
dddp/msg.py
dddp/msg.py
"""Django DDP utils for DDP messaging.""" from dddp import THREAD_LOCAL as this from django.core.serializers import get_serializer def serializer_factory(): """Make a new DDP serializer.""" return get_serializer('ddp')() def obj_change_as_msg(obj, msg): """Generate a DDP msg for obj with specified msg type.""" serializer = this.get('serializer', serializer_factory) data = serializer.serialize([obj])[0] name = data['model'] # cast ID as string if not isinstance(data['pk'], basestring): data['pk'] = '%d' % data['pk'] payload = { 'msg': msg, 'collection': name, 'id': data['pk'], } if msg != 'removed': payload['fields'] = data['fields'] return (name, payload)
"""Django DDP utils for DDP messaging.""" import collections from django.core.serializers import get_serializer _SERIALIZER = None def obj_change_as_msg(obj, msg): """Generate a DDP msg for obj with specified msg type.""" global _SERIALIZER if _SERIALIZER is None: _SERIALIZER = get_serializer('ddp')() data = _SERIALIZER.serialize([obj])[0] name = data['model'] # cast ID as string if not isinstance(data['pk'], basestring): data['pk'] = '%d' % data['pk'] payload = { 'msg': msg, 'collection': name, 'id': data['pk'], } if msg != 'removed': payload['fields'] = data['fields'] return (name, payload)
mit
Python
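The commit swaps a shared module-global serializer for a per-thread one. A minimal sketch of the same pattern using only the standard library; the names here are illustrative, not dddp's own API.

import threading

_local = threading.local()

def get_cached(factory):
    # Each thread lazily builds and caches its own instance, so no
    # instance is ever shared across threads.
    try:
        return _local.instance
    except AttributeError:
        _local.instance = factory()
        return _local.instance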
0a42ec9eeccc5969bf1eb8a92cd7d66ade4daf76
Make executable
Merlin04/ddgquery,Merlin04/ddgquery
ddgquery.py
ddgquery.py
#! /usr/bin/env python
import os, time
# use python3
while True:
    os.system("espeak -v en-us 'What would you like to know about?'")
    #time.sleep(4)
    query = input("What would you like to know about?\n")
    if query == "help":
        print("Add -u to get a helpful URL\nAdd -l to launch the URL in your browser\nAdd -s to get a DuckDuckGo search\nType 'about' to learn more.")
    elif query == "about":
        print("This uses the Duck Duck Go Zero-Click Info API. This program is written by Python and is written by Merlin04.")
    elif query == "quit":
        break
    else:
        os.system('ddg ' + query + ' | espeak -v en-us')
import os, time
# use python3
while True:
    os.system("espeak -v en-us 'What would you like to know about?'")
    #time.sleep(4)
    query = input("What would you like to know about?\n")
    if query == "help":
        print("Add -u to get a helpful URL\nAdd -l to launch the URL in your browser\nAdd -s to get a DuckDuckGo search\nType 'about' to learn more.")
    elif query == "about":
        print("This uses the Duck Duck Go Zero-Click Info API. This program is written by Python and is written by Merlin04.")
    elif query == "quit":
        break
    else:
        os.system('ddg ' + query + ' | espeak -v en-us')
mit
Python
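The diff adds the shebang line; the usual other half of "make executable" is setting the execute bit on the file. A sketch of doing that from Python, equivalent to chmod +x ddgquery.py:

import os
import stat

st = os.stat('ddgquery.py')
os.chmod('ddgquery.py', st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)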
338e2ba155df0759113c65ced6be6714092b9aaf
Use Alex's awesome new version of the GtkQuartz theme engine
bl8/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild
packages/gtk-quartz-engine.py
packages/gtk-quartz-engine.py
Package ('gtk-quartz-engine', 'master',
    sources = [
        'git://github.com/nirvanai/gtk-quartz-engine.git'
    ],
    override_properties = {
        'configure': 'libtoolize --force --copy && '
            'aclocal && '
            'autoheader && '
            'automake --add-missing && '
            'autoconf && '
            './configure --prefix=%{prefix}'
    }
)
Package ('gtk-quartz-engine', 'master',
    sources = [
        'git://github.com/jralls/gtk-quartz-engine.git'
    ],
    override_properties = {
        'configure': 'libtoolize --force --copy && '
            'aclocal && '
            'autoheader && '
            'automake --add-missing && '
            'autoconf && '
            './configure --prefix=%{prefix}'
    }
)
mit
Python
12130cef6c9b08e0928ed856972ace3c2000e6f8
Fix error accessing class variable
ueg1990/mooc_aggregator_restful_api
mooc_aggregator_restful_api/udacity.py
mooc_aggregator_restful_api/udacity.py
'''
This module retrieves the course catalog and overviews of the Udacity API

Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''

import json
import requests


class UdacityAPI(object):
    '''
    This class defines attributes and methods for Udaciy API
    '''
    UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'

    def __init__(self):
        self.response = requests.get(UdacityAPI.UDACITY_API_ENDPOINT)
        self.courses = self.response.json()['courses']
        self.tracks = self.response.json()['tracks']

    def status_code(self):
        '''
        Return status code of response object
        '''
        return self.response.status_code

    def get_courses(self):
        '''
        Return list of course objects for all courses offered by Udacity
        '''
        return self.courses

    def get_tracks(self):
        '''
        Return list of tracks offered by Udacity
        '''
        return self.tracks


if __name__ == '__main__':
    udacity_object = UdacityAPI()
    print len(udacity_object.get_courses())
    print udacity_object.get_courses()[0].keys()
'''
This module retrieves the course catalog and overviews of the Udacity API

Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''

import json
import requests


class UdacityAPI(object):
    '''
    This class defines attributes and methods for Udaciy API
    '''
    UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'

    def __init__(self):
        self.response = requests.get(UDACITY_API_ENDPOINT)
        self.courses = self.response.json()['courses']
        self.tracks = self.response.json()['tracks']

    def status_code(self):
        '''
        Return status code of response object
        '''
        return self.response.status_code

    def get_courses(self):
        '''
        Return list of course objects for all courses offered by Udacity
        '''
        return self.courses

    def get_tracks(self):
        '''
        Return list of tracks offered by Udacity
        '''
        return self.tracks


if __name__ == '__main__':
    udacity_object = UdacityAPI()
    print len(udacity_object.get_courses())
    print udacity_object.get_courses()[0].keys()
mit
Python
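A minimal illustration of the bug class fixed here (the class name is hypothetical): inside a method body, a bare name does not resolve against class attributes, so the constant must be qualified through the class or the instance.

class Example(object):
    ENDPOINT = 'https://udacity.com/public-api/v0/courses'

    def url(self):
        # A bare ENDPOINT here would raise NameError at call time;
        # qualify it as Example.ENDPOINT or self.ENDPOINT instead.
        return Example.ENDPOINT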
ac63fca5b1e688fb465431fd1760db6b1c766fea
Bump to version 0.14
pudo/spendb,johnjohndoe/spendb,pudo/spendb,johnjohndoe/spendb,USStateDept/FPA_Core,openspending/spendb,openspending/spendb,USStateDept/FPA_Core,openspending/spendb,spendb/spendb,spendb/spendb,nathanhilbert/FPA_Core,CivicVision/datahub,johnjohndoe/spendb,pudo/spendb,CivicVision/datahub,USStateDept/FPA_Core,CivicVision/datahub,nathanhilbert/FPA_Core,nathanhilbert/FPA_Core,spendb/spendb
openspending/_version.py
openspending/_version.py
__version__ = '0.14.0'
__version__ = '0.13.1'
agpl-3.0
Python
b7b23f9840af377f37617f3bbb79556342d74133
replace prints with calls to logger
ihadzic/jim,ihadzic/jim,ihadzic/jim
__main__.py
__main__.py
#/usr/bin/env python2

import web
import log
import ConfigParser

_log = log.TrivialLogger()

_config_file_list = ['./jim.cfg', '/etc/jim.cfg']
_config_file_parser = ConfigParser.RawConfigParser()
_config_ok = True
try:
    _config_file_list = _config_file_parser.read(_config_file_list)
except:
    _log.error("cannot parse configuration file(s)")
    _config_ok = False
if len(_config_file_list) == 0:
    _log.error("no configuration file found")
    _config_ok = False
else:
    _log.info("using configuration file {}".format(_config_file_list[0]))
if _config_ok:
    _log.info("starting server")
    web.run_server()
    _log.info("server exited")
#/usr/bin/env python2

import web
import ConfigParser

_config_file_list = ['./jim.cfg', '/etc/jim.cfg']
_config_file_parser = ConfigParser.RawConfigParser()
_config_ok = True
try:
    _config_file_list = _config_file_parser.read(_config_file_list)
except:
    print("cannot parse configuration file(s)")
    _config_ok = False
if len(_config_file_list) == 0:
    print("no configuration file found")
    _config_ok = False
else:
    print("using configuration file {}".format(_config_file_list[0]))
if _config_ok:
    print("starting server")
    web.run_server()
    print("server exited")
mit
Python
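A sketch of the same print-to-logger change using only the standard library, in case the project's own log module is unavailable; log.TrivialLogger is assumed to expose the same error/info methods used above.

import logging

logging.basicConfig(level=logging.INFO)
_log = logging.getLogger('jim')
_log.error("cannot parse configuration file(s)")
_log.info("using configuration file {}".format('/etc/jim.cfg'))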
6a5a7a1e1eafa91543d8e274e63d258332149a29
Update __version__.py
crcresearch/orcidfind
orcidfind/__version__.py
orcidfind/__version__.py
# Single source of metadata about the project that's used by setup.py and
# docs/conf.py

# Some segments of public version identifer (PEP 440)
VERSION_RELEASE = "0.1"
VERSION_PRE_RELEASE = "a5"  # e.g., "a4", "b1", "rc3" or "" (final release)
VERSION_POST_RELEASE = ""  # e.g., ".post1"

VERSION = VERSION_RELEASE + VERSION_PRE_RELEASE + VERSION_POST_RELEASE
# Single source of metadata about the project that's used by setup.py and
# docs/conf.py

# Some segments of public version identifer (PEP 440)
VERSION_RELEASE = "0.1"
VERSION_PRE_RELEASE = "a4"  # e.g., "a4", "b1", "rc3" or "" (final release)
VERSION_POST_RELEASE = ""  # e.g., ".post1"

VERSION = VERSION_RELEASE + VERSION_PRE_RELEASE + VERSION_POST_RELEASE
apache-2.0
Python
d032d2597525e02fd71a524c5a9619c09c640365
Bump version number.
akx/coffin
coffin/__init__.py
coffin/__init__.py
""" Coffin ~~~~~~ `Coffin <http://www.github.com/dcramer/coffin>` is a package that resolves the impedance mismatch between `Django <http://www.djangoproject.com/>` and `Jinja2 <http://jinja.pocoo.org/2/>` through various adapters. The aim is to use Coffin as a drop-in replacement for Django's template system to whatever extent is reasonable. :copyright: 2008 by Christopher D. Leary :license: BSD, see LICENSE for more details. """ __all__ = ('__version__', '__build__', '__docformat__', 'get_revision') __version__ = (0, 3, '6') __docformat__ = 'restructuredtext en' import os def _get_git_revision(path): revision_file = os.path.join(path, 'refs', 'heads', 'master') if not os.path.exists(revision_file): return None fh = open(revision_file, 'r') try: return fh.read() finally: fh.close() def get_revision(): """ :returns: Revision number of this branch/checkout, if available. None if no revision number can be determined. """ package_dir = os.path.dirname(__file__) checkout_dir = os.path.normpath(os.path.join(package_dir, '..')) path = os.path.join(checkout_dir, '.git') if os.path.exists(path): return _get_git_revision(path) return None __build__ = get_revision()
""" Coffin ~~~~~~ `Coffin <http://www.github.com/dcramer/coffin>` is a package that resolves the impedance mismatch between `Django <http://www.djangoproject.com/>` and `Jinja2 <http://jinja.pocoo.org/2/>` through various adapters. The aim is to use Coffin as a drop-in replacement for Django's template system to whatever extent is reasonable. :copyright: 2008 by Christopher D. Leary :license: BSD, see LICENSE for more details. """ __all__ = ('__version__', '__build__', '__docformat__', 'get_revision') __version__ = (0, 3, '6', 'dev') __docformat__ = 'restructuredtext en' import os def _get_git_revision(path): revision_file = os.path.join(path, 'refs', 'heads', 'master') if not os.path.exists(revision_file): return None fh = open(revision_file, 'r') try: return fh.read() finally: fh.close() def get_revision(): """ :returns: Revision number of this branch/checkout, if available. None if no revision number can be determined. """ package_dir = os.path.dirname(__file__) checkout_dir = os.path.normpath(os.path.join(package_dir, '..')) path = os.path.join(checkout_dir, '.git') if os.path.exists(path): return _get_git_revision(path) return None __build__ = get_revision()
bsd-3-clause
Python
5261e7b75718b866f95285bd03171c861175dccc
Move question into random_questions function
andrewlrogers/srvy
collection/srvy.py
collection/srvy.py
#!/usr/bin/python

import sys
import time
from time import sleep
from datetime import datetime
import random
import sqlite3
import csv
from configparser import ConfigParser

try:
    from gpiozero import Button
except ImportError:
    print("gpiozero is not installed.")
    pass

try:
    import pygame
except ImportError:
    print("pygame is not installed.")
    pass

# VARIABLES

question_csv_location = '../archive/questions.csv'
sqlite_file = '../archive/srvy.db'

# FUNCTIONS

def module_installed(module):
    if module in sys.modules:
        return True
    else:
        return False

def get_current_questions(file_location):
    """Add each question from a text file to a list. Questions should be separated by newlines."""
    with open(file_location, 'r') as csv_file:
        readCSV = csv.reader(csv_file, delimiter=',', quotechar='"')
        questions = []
        for row in readCSV:
            if row:
                question = row[0]
                questions.append(question)
    return questions

def random_questions():
    """pulls returns a random question into main loop."""
    question = get_current_questions(question_csv_location)
    return random.choice(question)

def add_response_to_database(question, opinion):
    """Add response to SQLite 3 database"""
    conn = sqlite3.connect(sqlite_file)
    c = conn.cursor()
    current_date = datetime.now()
    current_unix_time = time.time()
    try:
        c.execute('''INSERT INTO responses (pythonDateTime, unixTime, question, opinion) VALUES (?,?,?,?)''',
                  (current_date, current_unix_time, question, opinion))
        print("Successfully added response to database.")
        print("Thank you!")
    except Exception as e:
        print(e)
    conn.commit()
    conn.close()
    main()

def main():
    qs = random_questions()  # calls questions function that returns random question.
    print(qs)
    while True:
        opinion = input("Opinion [y/n]: ")
        if opinion == "y":
            sleep(.5)
            opinion = 1
            add_response_to_database(qs, opinion)
        elif opinion == "n":
            sleep(.5)
            opinion = -1
            add_response_to_database(qs, opinion)

main()
#!/usr/bin/python

import sys
import time
from time import sleep
from datetime import datetime
import random
import sqlite3
import csv
from configparser import ConfigParser

try:
    from gpiozero import Button
except ImportError:
    print("gpiozero is not installed.")
    pass

try:
    import pygame
except ImportError:
    print("pygame is not installed.")
    pass

# VARIABLES

question_csv_location = '../archive/questions.csv'
sqlite_file = '../archive/srvy.db'

# FUNCTIONS

def module_installed(module):
    if module in sys.modules:
        return True
    else:
        return False

def get_current_questions(file_location):
    """Add each question from a text file to a list. Questions should be separated by newlines."""
    with open(file_location, 'r') as csv_file:
        readCSV = csv.reader(csv_file, delimiter=',', quotechar='"')
        questions = []
        for row in readCSV:
            if row:
                question = row[0]
                questions.append(question)
    return questions

def random_questions():
    """pulls returns a random question into main loop."""
    return random.choice(question)

def add_response_to_database(question, opinion):
    """Add response to SQLite 3 database"""
    conn = sqlite3.connect(sqlite_file)
    c = conn.cursor()
    current_date = datetime.now()
    current_unix_time = time.time()
    try:
        c.execute('''INSERT INTO responses (pythonDateTime, unixTime, question, opinion) VALUES (?,?,?,?)''',
                  (current_date, current_unix_time, question, opinion))
        print("Successfully added response to database.")
        print("Thank you!")
    except Exception as e:
        print(e)
    conn.commit()
    conn.close()
    main()

def main():
    qs = random_questions()  # calls questions function that returns random question.
    print(qs)
    while True:
        opinion = input("Opinion [y/n]: ")
        if opinion == "y":
            sleep(.5)
            opinion = 1
            add_response_to_database(qs, opinion)
        elif opinion == "n":
            sleep(.5)
            opinion = -1
            add_response_to_database(qs, opinion)

question = get_current_questions(question_csv_location)
main()
mit
Python
93e8e63c3cf8d360af018b6ce3abe224b8ad374c
Add further testinfra tests
betacloud/ansible-docker,betacloud/ansible-docker
molecule/default/tests/test_default.py
molecule/default/tests/test_default.py
def test_apt_preferences_docker_compose_file(host):
    f = host.file("/etc/apt/preferences.d/docker-compose")
    assert f.exists
    assert f.is_file


def test_apt_preferences_docker_file(host):
    f = host.file("/etc/apt/preferences.d/docker")
    assert f.exists
    assert f.is_file


def test_systemd_overlay_file(host):
    f = host.file("/etc/systemd/system/docker.service.d/overlay.conf")
    assert f.exists
    assert f.is_file


def test_limits_file(host):
    f = host.file("/etc/security/limits.d/docker.conf")
    assert f.exists
    assert f.is_file
def test_apt_preferences_docker_compose_file(host):
    f = host.file("/etc/apt/preferences.d/docker-compose")
    assert f.exists
    assert f.is_file


def test_apt_preferences_docker_file(host):
    f = host.file("/etc/apt/preferences.d/docker")
    assert f.exists
    assert f.is_file
apache-2.0
Python
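One more hypothetical check in the same testinfra style, to show how the pattern extends; the path is illustrative and the role may not actually manage this file.

def test_docker_daemon_config_file(host):
    f = host.file("/etc/docker/daemon.json")
    assert f.exists
    assert f.is_file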
8dc853e90b587b9245b87c14f5cb2e93215d3283
Change test_output data structure to dict of dict
barbagroup/pygbe,barbagroup/pygbe,barbagroup/pygbe
tests/convergence_tests/sphere_lspr.py
tests/convergence_tests/sphere_lspr.py
from pygbe.util import an_solution
from convergence_lspr import (mesh_ratio, run_convergence, picklesave,
                              pickleload, report_results, mesh)


def main():
    print('{:-^60}'.format('Running sphere_lspr test'))
    try:
        test_outputs = pickleload()
    except FileNotFoundError:
        test_outputs = {}

    problem_folder = 'input_files'

    # dirichlet_surface
    param = 'sphere_complex.param'
    test_name = 'sphere_complex'
    if test_name not in test_outputs.keys():
        N, iterations, expected_rate, Cext_0, Time = run_convergence(
            mesh, test_name, problem_folder, param)
        test_outputs[test_name] = {'N': N,
                                   'iterations': iterations,
                                   'expected_rate': expected_rate,
                                   'Cext_0': Cext_0,
                                   'Time': Time}

    picklesave(test_outputs)

    # load data for analysis
    N = test_outputs['sphere_complex']['N']
    iterations = test_outputs['sphere_complex']['iterations']
    expected_rate = test_outputs['sphere_complex']['expected_rate']
    Cext_0 = test_outputs['sphere_complex']['Cext_0']
    Time = test_outputs['sphere_complex']['Time']

    total_time = Time

    #This test is for 10 nm radius silver sphere in water, at wavelength 380 nm
    radius = 10.
    wavelength = 380.
    diel_out = 1.7972083599999999 + 1j * 8.504766399999999e-09  #water value extrapolated
    diel_in = -3.3876520488233184 + 1j * 0.19220746083441781  #silver value extrapolated

    analytical = an_solution.Cext_analytical(radius, wavelength, diel_out, diel_in)

    error = abs(Cext_0 - analytical) / abs(analytical)

    report_results(error, N, expected_rate, iterations, Cext_0, analytical,
                   total_time, test_name='sphere_complex')


if __name__ == "__main__":
    main()
from pygbe.util import an_solution
from convergence_lspr import (mesh_ratio, run_convergence, picklesave,
                              pickleload, report_results, mesh)


def main():
    print('{:-^60}'.format('Running sphere_lspr test'))
    try:
        test_outputs = pickleload()
    except FileNotFoundError:
        test_outputs = {}

    problem_folder = 'input_files'

    # dirichlet_surface
    param = 'sphere_complex.param'
    test_name = 'sphere_complex'
    if test_name not in test_outputs.keys():
        N, iterations, expected_rate, Cext_0, Time = run_convergence(
            mesh, test_name, problem_folder, param)
        test_outputs[test_name] = [N, iterations, expected_rate, Cext_0, Time]

    picklesave(test_outputs)

    # load data for analysis
    N, iterations, expected_rate = test_outputs['sphere_complex'][:3]
    Cext_0 = test_outputs['sphere_complex'][3]
    Time = test_outputs['sphere_complex'][-1]

    total_time = Time

    #This test is for 10 nm radius silver sphere in water, at wavelength 380 nm
    radius = 10.
    wavelength = 380.
    diel_out = 1.7972083599999999 + 1j * 8.504766399999999e-09  #water value extrapolated
    diel_in = -3.3876520488233184 + 1j * 0.19220746083441781  #silver value extrapolated

    analytical = an_solution.Cext_analytical(radius, wavelength, diel_out, diel_in)

    error = abs(Cext_0 - analytical) / abs(analytical)

    report_results(error, N, expected_rate, iterations, Cext_0, analytical,
                   total_time, test_name='sphere_complex')


if __name__ == "__main__":
    main()
bsd-3-clause
Python
0ac869ce67017c9ffb8a8b32ff57346980144371
use global es in reindexers
dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
corehq/ex-submodules/pillowtop/reindexer/reindexer.py
corehq/ex-submodules/pillowtop/reindexer/reindexer.py
from corehq.elastic import get_es_new
from pillowtop.es_utils import set_index_reindex_settings, \
    set_index_normal_settings, get_index_info_from_pillow, initialize_mapping_if_necessary
from pillowtop.pillow.interface import PillowRuntimeContext


class PillowReindexer(object):

    def __init__(self, pillow, change_provider):
        self.pillow = pillow
        self.change_provider = change_provider

    def clean_index(self):
        """
        Cleans the index.

        This can be called prior to reindex to ensure starting from a clean slate.
        Should be overridden on a case-by-case basis by subclasses.
        """
        pass

    def reindex(self, start_from=None):
        reindexer_context = PillowRuntimeContext(do_set_checkpoint=False)
        for change in self.change_provider.iter_changes(start_from=start_from):
            self.pillow.processor(change, reindexer_context)


class ElasticPillowReindexer(PillowReindexer):

    def __init__(self, pillow, change_provider, elasticsearch, index_info):
        super(ElasticPillowReindexer, self).__init__(pillow, change_provider)
        self.es = elasticsearch
        self.index_info = index_info

    def clean_index(self):
        if self.es.indices.exists(self.index_info.index):
            self.es.indices.delete(index=self.index_info.index)

    def reindex(self, start_from=None):
        if not start_from:
            # when not resuming force delete and create the index
            self._prepare_index_for_reindex()

        super(ElasticPillowReindexer, self).reindex(start_from)

        self._prepare_index_for_usage()

    def _prepare_index_for_reindex(self):
        if not self.es.indices.exists(self.index_info.index):
            self.es.indices.create(index=self.index_info.index, body=self.index_info.meta)
        initialize_mapping_if_necessary(self.es, self.index_info)
        set_index_reindex_settings(self.es, self.index_info.index)

    def _prepare_index_for_usage(self):
        set_index_normal_settings(self.es, self.index_info.index)
        self.es.indices.refresh(self.index_info.index)


def get_default_reindexer_for_elastic_pillow(pillow, change_provider):
    return ElasticPillowReindexer(
        pillow=pillow,
        change_provider=change_provider,
        elasticsearch=get_es_new(),
        index_info=get_index_info_from_pillow(pillow),
    )
from pillowtop.es_utils import set_index_reindex_settings, \
    set_index_normal_settings, get_index_info_from_pillow, initialize_mapping_if_necessary
from pillowtop.pillow.interface import PillowRuntimeContext


class PillowReindexer(object):

    def __init__(self, pillow, change_provider):
        self.pillow = pillow
        self.change_provider = change_provider

    def clean_index(self):
        """
        Cleans the index.

        This can be called prior to reindex to ensure starting from a clean slate.
        Should be overridden on a case-by-case basis by subclasses.
        """
        pass

    def reindex(self, start_from=None):
        reindexer_context = PillowRuntimeContext(do_set_checkpoint=False)
        for change in self.change_provider.iter_changes(start_from=start_from):
            self.pillow.processor(change, reindexer_context)


class ElasticPillowReindexer(PillowReindexer):

    def __init__(self, pillow, change_provider, elasticsearch, index_info):
        super(ElasticPillowReindexer, self).__init__(pillow, change_provider)
        self.es = elasticsearch
        self.index_info = index_info

    def clean_index(self):
        if self.es.indices.exists(self.index_info.index):
            self.es.indices.delete(index=self.index_info.index)

    def reindex(self, start_from=None):
        if not start_from:
            # when not resuming force delete and create the index
            self._prepare_index_for_reindex()

        super(ElasticPillowReindexer, self).reindex(start_from)

        self._prepare_index_for_usage()

    def _prepare_index_for_reindex(self):
        if not self.es.indices.exists(self.index_info.index):
            self.es.indices.create(index=self.index_info.index, body=self.index_info.meta)
        initialize_mapping_if_necessary(self.es, self.index_info)
        set_index_reindex_settings(self.es, self.index_info.index)

    def _prepare_index_for_usage(self):
        set_index_normal_settings(self.es, self.index_info.index)
        self.es.indices.refresh(self.index_info.index)


def get_default_reindexer_for_elastic_pillow(pillow, change_provider):
    return ElasticPillowReindexer(
        pillow=pillow,
        change_provider=change_provider,
        elasticsearch=pillow.get_es_new(),
        index_info=get_index_info_from_pillow(pillow),
    )
bsd-3-clause
Python
f5600008defcd5fe4c9c397c0b7170f6f5e9a5e4
Add header info and submodule imports to init
jkitzes/macroeco
__init__.py
__init__.py
__author__ = "Justin Kitzes, Mark Wilber, Chloe Lewis" __copyright__ = "Copyright 2012, Regents of University of California" __credits__ = [] __license__ = "BSD 2-clause" __version__ = "0.1" __maintainer__ = "Justin Kitzes" __email__ = "[email protected]" __status__ = "Development" import compare import data import empirical import output import utils.workflow as workflow
bsd-2-clause
Python
5281d535f67dfa2cebd8f70ee1f342c213d11b29
change filename
benleb/PyGlow,bjornt/PyGlow
__init__.py
__init__.py
from .PyGlow import *
from .pyglow import *
mit
Python
67406893c1b9b727f313a374affe9868ec986fa6
Bump to 2.6.2c1.
pypa/setuptools,pypa/setuptools,pypa/setuptools
__init__.py
__init__.py
"""distutils The main package for the Python Module Distribution Utilities. Normally used from a setup script as from distutils.core import setup setup (...) """ # This module should be kept compatible with Python 2.1. __revision__ = "$Id$" # Distutils version # # Please coordinate with Marc-Andre Lemburg <[email protected]> when adding # new features to distutils that would warrant bumping the version number. # # In general, major and minor version should loosely follow the Python # version number the distutils code was shipped with. # #--start constants-- __version__ = "2.6.2c1" #--end constants--
"""distutils The main package for the Python Module Distribution Utilities. Normally used from a setup script as from distutils.core import setup setup (...) """ # This module should be kept compatible with Python 2.1. __revision__ = "$Id$" # Distutils version # # Please coordinate with Marc-Andre Lemburg <[email protected]> when adding # new features to distutils that would warrant bumping the version number. # # In general, major and minor version should loosely follow the Python # version number the distutils code was shipped with. # #--start constants-- __version__ = "2.6.1" #--end constants--
mit
Python
b913e6d1b4323dbc52fbe2697dc9bf7fa2b80c24
Add Python 2 deprecation warning, closes #1179
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
__init__.py
__init__.py
from __future__ import absolute_import, division, print_function

import logging
import os
import sys
import warnings

if sys.version_info.major == 2:
    warnings.warn(
        "Python 2 is no longer fully supported. Please consider using the DIALS 2.2 release branch. "
        "For more information on Python 2.7 support please go to https://github.com/dials/dials/issues/1175.",
        DeprecationWarning,
    )

logging.getLogger("dials").addHandler(logging.NullHandler())

# Invert FPE trap defaults, https://github.com/cctbx/cctbx_project/pull/324
if "boost.python" in sys.modules:
    import boost.python

    boost.python.ext.trap_exceptions(
        bool(os.getenv("BOOST_ADAPTBX_TRAP_FPE")),
        bool(os.getenv("BOOST_ADAPTBX_TRAP_INVALID")),
        bool(os.getenv("BOOST_ADAPTBX_TRAP_OVERFLOW")),
    )
elif not os.getenv("BOOST_ADAPTBX_TRAP_FPE") and not os.getenv(
    "BOOST_ADAPTBX_TRAP_OVERFLOW"
):
    os.environ["BOOST_ADAPTBX_FPE_DEFAULT"] = "1"

# Intercept easy_mp exceptions to extract stack traces before they are lost at
# the libtbx process boundary/the easy_mp API. In the case of a subprocess
# crash we print the subprocess stack trace, which will be most useful for
# debugging parallelized sections of DIALS code.
import libtbx.scheduling.stacktrace as _lss


def _stacktrace_tracer(error, trace, intercepted_call=_lss.set_last_exception):
    """Intercepts and prints ephemeral stacktraces."""
    if error and trace:
        print(
            "\n\neasy_mp crash detected; subprocess trace: ----\n%s%s\n%s\n\n"
            % ("".join(trace), error, "-" * 46)
        )
    return intercepted_call(error, trace)


if _lss.set_last_exception.__doc__ != _stacktrace_tracer.__doc__:
    # ensure function is only redirected once
    _lss.set_last_exception = _stacktrace_tracer
from __future__ import absolute_import, division, print_function

import logging
import os
import sys

logging.getLogger("dials").addHandler(logging.NullHandler())

# Invert FPE trap defaults, https://github.com/cctbx/cctbx_project/pull/324
if "boost.python" in sys.modules:
    import boost.python

    boost.python.ext.trap_exceptions(
        bool(os.getenv("BOOST_ADAPTBX_TRAP_FPE")),
        bool(os.getenv("BOOST_ADAPTBX_TRAP_INVALID")),
        bool(os.getenv("BOOST_ADAPTBX_TRAP_OVERFLOW")),
    )
elif not os.getenv("BOOST_ADAPTBX_TRAP_FPE") and not os.getenv(
    "BOOST_ADAPTBX_TRAP_OVERFLOW"
):
    os.environ["BOOST_ADAPTBX_FPE_DEFAULT"] = "1"

# Intercept easy_mp exceptions to extract stack traces before they are lost at
# the libtbx process boundary/the easy_mp API. In the case of a subprocess
# crash we print the subprocess stack trace, which will be most useful for
# debugging parallelized sections of DIALS code.
import libtbx.scheduling.stacktrace as _lss


def _stacktrace_tracer(error, trace, intercepted_call=_lss.set_last_exception):
    """Intercepts and prints ephemeral stacktraces."""
    if error and trace:
        print(
            "\n\neasy_mp crash detected; subprocess trace: ----\n%s%s\n%s\n\n"
            % ("".join(trace), error, "-" * 46)
        )
    return intercepted_call(error, trace)


if _lss.set_last_exception.__doc__ != _stacktrace_tracer.__doc__:
    # ensure function is only redirected once
    _lss.set_last_exception = _stacktrace_tracer
bsd-3-clause
Python
9d085f0478ca55b59390515c82ca3e367cef5522
Replace Bootstrap's html5shiv with es5-shim.
peterhil/ninhursag,peterhil/ninhursag,peterhil/skeleton,peterhil/skeleton,peterhil/skeleton,peterhil/ninhursag,peterhil/ninhursag
app/assets.py
app/assets.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from flask.ext.assets import Environment, Bundle

css_application = Bundle(
    'less/main.less',
    filters='less',
    debug=False,
    output='gen/app.css'
)

css_all = Bundle(
    # 'vendor/some/library.css',
    css_application,
    filters='cssmin',
    output='gen/app.min.css'
)

js_vendor = Bundle(
    'vendor/jquery/dist/jquery.js',
    'vendor/angular/angular.js',
    'vendor/angular-animate/angular-animate.js',
    'vendor/angular-aria/angular-aria.js',
    'vendor/angular-cookies/angular-cookies.js',
    'vendor/angular-messages/angular-messages.js',
    'vendor/angular-resource/angular-resource.js',
    'vendor/angular-route/angular-route.js',
    'vendor/angular-sanitize/angular-sanitize.js',
    'vendor/angular-touch/angular-touch.js',
    'vendor/bootstrap/dist/js/bootstrap.js',
    'vendor/lodash/dist/lodash.js',
    # 'vendor/modernizr/dist/modernizr-build.js', # TODO Customize this
    filters='uglifyjs',
    output='gen/vendor.min.js'
)

js_ie = Bundle(
    'vendor/es5-shim/es5-shim.js',
    filters='uglifyjs',
    output='gen/ie.min.js'
)

js_main = Bundle(
    'libs/ba-debug.js',
    Bundle(
        'coffee/app.coffee',
        'coffee/init.coffee',  # Must be loaded after app.coffee but before anything else.
        'coffee/notify.coffee',
        'scripts/app.coffee',
        'scripts/service/config.coffee',
        'scripts/controllers/listing.coffee',
        filters='coffeescript',
        output='gen/app.js'
    ),
    filters='uglifyjs',
    output='gen/app.min.js'
)


def init_app(app):
    webassets = Environment(app)
    webassets.register('css_all', css_all)
    webassets.register('js_vendor', js_vendor)
    webassets.register('js_ie', js_ie)
    webassets.register('js_main', js_main)
    webassets.manifest = 'cache' if not app.debug else False
    webassets.cache = not app.debug
    webassets.debug = app.debug
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from flask.ext.assets import Environment, Bundle

css_application = Bundle(
    'less/main.less',
    filters='less',
    debug=False,
    output='gen/app.css'
)

css_all = Bundle(
    # 'vendor/some/library.css',
    css_application,
    filters='cssmin',
    output='gen/app.min.css'
)

js_vendor = Bundle(
    'vendor/jquery/dist/jquery.js',
    'vendor/angular/angular.js',
    'vendor/angular-animate/angular-animate.js',
    'vendor/angular-aria/angular-aria.js',
    'vendor/angular-cookies/angular-cookies.js',
    'vendor/angular-messages/angular-messages.js',
    'vendor/angular-resource/angular-resource.js',
    'vendor/angular-route/angular-route.js',
    'vendor/angular-sanitize/angular-sanitize.js',
    'vendor/angular-touch/angular-touch.js',
    'vendor/bootstrap/dist/js/bootstrap.js',
    'vendor/lodash/dist/lodash.js',
    # 'vendor/modernizr/dist/modernizr-build.js', # TODO Customize this
    filters='uglifyjs',
    output='gen/vendor.min.js'
)

js_ie = Bundle(
    'vendor/bootstrap/assets/js/html5shiv.js',
    'vendor/bootstrap/assets/js/respond.min.js',
    filters='uglifyjs',
    output='gen/ie.min.js'
)

js_main = Bundle(
    'libs/ba-debug.js',
    Bundle(
        'coffee/app.coffee',
        'coffee/init.coffee',  # Must be loaded after app.coffee but before anything else.
        'coffee/notify.coffee',
        'scripts/app.coffee',
        'scripts/service/config.coffee',
        'scripts/controllers/listing.coffee',
        filters='coffeescript',
        output='gen/app.js'
    ),
    filters='uglifyjs',
    output='gen/app.min.js'
)


def init_app(app):
    webassets = Environment(app)
    webassets.register('css_all', css_all)
    webassets.register('js_vendor', js_vendor)
    webassets.register('js_ie', js_ie)
    webassets.register('js_main', js_main)
    webassets.manifest = 'cache' if not app.debug else False
    webassets.cache = not app.debug
    webassets.debug = app.debug
mit
Python
12c57f6b785167c4f9e6427520360ce64d845e96
Fix documentation links in Edward2 docstring.
tensorflow/probability,tensorflow/probability
tensorflow_probability/python/experimental/edward2/__init__.py
tensorflow_probability/python/experimental/edward2/__init__.py
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Edward2 probabilistic programming language.

For user guides, see:

+ [Overview](
  https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/experimental/edward2/README.md)
+ [Upgrading from Edward to Edward2](
  https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/experimental/edward2/Upgrading_From_Edward_To_Edward2.md)
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# pylint: disable=wildcard-import
from tensorflow_probability.python.experimental.edward2.generated_random_variables import *
from tensorflow_probability.python.experimental.edward2.generated_random_variables import as_random_variable
from tensorflow_probability.python.experimental.edward2.generated_random_variables import rv_dict
from tensorflow_probability.python.experimental.edward2.interceptor import get_next_interceptor
from tensorflow_probability.python.experimental.edward2.interceptor import interceptable
from tensorflow_probability.python.experimental.edward2.interceptor import interception
from tensorflow_probability.python.experimental.edward2.interceptor import tape
from tensorflow_probability.python.experimental.edward2.program_transformations import make_log_joint_fn
from tensorflow_probability.python.experimental.edward2.program_transformations import make_value_setter
from tensorflow_probability.python.experimental.edward2.random_variable import RandomVariable
# pylint: enable=wildcard-import

from tensorflow.python.util.all_util import remove_undocumented

_allowed_symbols = list(rv_dict.keys()) + [
    "RandomVariable",
    "as_random_variable",
    "interception",
    "get_next_interceptor",
    "interceptable",
    "make_log_joint_fn",
    "make_value_setter",
    "tape",
]

remove_undocumented(__name__, _allowed_symbols)
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Edward2 probabilistic programming language.

For user guides, see:

+ [Overview](
  https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/edward2/README.md)
+ [Upgrading from Edward to Edward2](
  https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/edward2/Upgrading_From_Edward_To_Edward2.md)
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# pylint: disable=wildcard-import
from tensorflow_probability.python.experimental.edward2.generated_random_variables import *
from tensorflow_probability.python.experimental.edward2.generated_random_variables import as_random_variable
from tensorflow_probability.python.experimental.edward2.generated_random_variables import rv_dict
from tensorflow_probability.python.experimental.edward2.interceptor import get_next_interceptor
from tensorflow_probability.python.experimental.edward2.interceptor import interceptable
from tensorflow_probability.python.experimental.edward2.interceptor import interception
from tensorflow_probability.python.experimental.edward2.interceptor import tape
from tensorflow_probability.python.experimental.edward2.program_transformations import make_log_joint_fn
from tensorflow_probability.python.experimental.edward2.program_transformations import make_value_setter
from tensorflow_probability.python.experimental.edward2.random_variable import RandomVariable
# pylint: enable=wildcard-import

from tensorflow.python.util.all_util import remove_undocumented

_allowed_symbols = list(rv_dict.keys()) + [
    "RandomVariable",
    "as_random_variable",
    "interception",
    "get_next_interceptor",
    "interceptable",
    "make_log_joint_fn",
    "make_value_setter",
    "tape",
]

remove_undocumented(__name__, _allowed_symbols)
apache-2.0
Python
5b8725caecd01ccb4d0d3e0c40b910cbdf19258b
Fix country form input
Dbastos1710/Spotify_Youtube
spotify_country_top_n.py
spotify_country_top_n.py
import requests
import webbrowser
import json
import urllib.request
import urllib.parse
import re

token = "Bearer " + input("OAuth Token: ")
#BQDWxOubOFzx8fjeDi9E3Npt_fd9GiGXVgdiC3tS9LWHgajM3dRe2w3DjVVtjv0ZgHZAKt6zw2cD9PEBcLf-TFxtpOnb89THvPNMH-gbAO9Ho_8eSchxzO7JdaQ1Rg6eLBmzGIPjUp-5NM9Umpk62uKuAwPw7kSB0fb_B1uYdR4YkztfMsW5_OwXJukHyN0Cp2ztHR5V4_-5oFlHuTfPmyDcKZK8yreVwFUZuYB_VMPe_4pNhmu3PwlcePsKel9irRRsw41ly0mk1FcL3XFFHHXMHBHblYEu7hSccB8sqecdVZD9-w7PdcYS

headers = {
    'Accept': 'application/json',
    'Authorization': token}

params = {
    'country': input("Country in ISO 3166-1 alpha-2 form: "),
    'limit': input("Maximum number of tracks: "),
    'offset' : input("Mininum number of tracks: ")
}

r = requests.get('https://api.spotify.com/v1/browse/new-releases', headers=headers, params = params)

print_json = r.json()

albums_name = []
for i in range(int(params['offset']), int(params['limit'])):
    a = print_json['albums']['items'][i]['name']
    albums_name.append(a)

def youtube(s):
    query_string = urllib.parse.urlencode({"search_query" : s})
    html_content = urllib.request.urlopen("http://www.youtube.com/results?" + query_string)
    search_results = re.findall(r'href=\"\/watch\?v=(.{11})', html_content.read().decode())
    return("http://www.youtube.com/watch?v=" + search_results[0])

for i in albums_name:
    webbrowser.open(youtube(i))
import requests
import webbrowser
import json
import urllib.request
import urllib.parse
import re

token = "Bearer " + input("OAuth Token: ")
#BQDWxOubOFzx8fjeDi9E3Npt_fd9GiGXVgdiC3tS9LWHgajM3dRe2w3DjVVtjv0ZgHZAKt6zw2cD9PEBcLf-TFxtpOnb89THvPNMH-gbAO9Ho_8eSchxzO7JdaQ1Rg6eLBmzGIPjUp-5NM9Umpk62uKuAwPw7kSB0fb_B1uYdR4YkztfMsW5_OwXJukHyN0Cp2ztHR5V4_-5oFlHuTfPmyDcKZK8yreVwFUZuYB_VMPe_4pNhmu3PwlcePsKel9irRRsw41ly0mk1FcL3XFFHHXMHBHblYEu7hSccB8sqecdVZD9-w7PdcYS

headers = {
    'Accept': 'application/json',
    'Authorization': token}

params = {
    'country': input("Country: "),
    'limit': input("Maximum number of tracks: "),
    'offset' : input("Mininum number of tracks: ")
}

r = requests.get('https://api.spotify.com/v1/browse/new-releases', headers=headers, params = params)

print_json = r.json()

albums_name = []
for i in range(int(params['offset']), int(params['limit'])):
    a = print_json['albums']['items'][i]['name']
    albums_name.append(a)

def youtube(s):
    query_string = urllib.parse.urlencode({"search_query" : s})
    html_content = urllib.request.urlopen("http://www.youtube.com/results?" + query_string)
    search_results = re.findall(r'href=\"\/watch\?v=(.{11})', html_content.read().decode())
    return("http://www.youtube.com/watch?v=" + search_results[0])

for i in albums_name:
    webbrowser.open(youtube(i))
mit
Python
a1052c02a11539d34a7c12c7a86d103c2b445b52
Fix and improve BRIEF example
vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,Britefury/scikit-image,rjeli/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,Midafi/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,juliusbierk/scikit-image,rjeli/scikit-image,SamHames/scikit-image,dpshelio/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,emon10005/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,blink1073/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,Britefury/scikit-image,bsipocz/scikit-image,pratapvardhan/scikit-image,bennlich/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,jwiggins/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,jwiggins/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,bennlich/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,robintw/scikit-image,chintak/scikit-image,dpshelio/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,SamHames/scikit-image,emon10005/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,warmspringwinds/scikit-image,Hiyorimi/scikit-image,youprofit/scikit-image,newville/scikit-image,ClinicalGraphics/scikit-image,youprofit/scikit-image,oew1v07/scikit-image,ajaybhat/scikit-image,newville/scikit-image
doc/examples/plot_brief.py
doc/examples/plot_brief.py
""" ======================= BRIEF binary descriptor ======================= This example demonstrates the BRIEF binary description algorithm. The descriptor consists of relatively few bits and can be computed using a set of intensity difference tests. The short binary descriptor results in low memory footprint and very efficient matching based on the Hamming distance metric. BRIEF does not provide rotation-invariance. Scale-invariance can be achieved by detecting and extracting features at different scales. """ from skimage import data from skimage import transform as tf from skimage.feature import (match_descriptors, corner_peaks, corner_harris, plot_matches, BRIEF) from skimage.color import rgb2gray import matplotlib.pyplot as plt img1 = rgb2gray(data.lena()) tform = tf.AffineTransform(scale=(1.2, 1.2), translation=(0, -100)) img2 = tf.warp(img1, tform) img3 = tf.rotate(img1, 25) keypoints1 = corner_peaks(corner_harris(img1), min_distance=5) keypoints2 = corner_peaks(corner_harris(img2), min_distance=5) keypoints3 = corner_peaks(corner_harris(img3), min_distance=5) extractor = BRIEF() extractor.extract(img1, keypoints1) keypoints1 = keypoints1[extractor.mask_] descriptors1 = extractor.descriptors_ extractor.extract(img2, keypoints2) keypoints2 = keypoints2[extractor.mask_] descriptors2 = extractor.descriptors_ extractor.extract(img3, keypoints3) keypoints3 = keypoints3[extractor.mask_] descriptors3 = extractor.descriptors_ matches12 = match_descriptors(descriptors1, descriptors2, cross_check=True) matches13 = match_descriptors(descriptors1, descriptors3, cross_check=True) fig, ax = plt.subplots(nrows=2, ncols=1) plt.gray() plot_matches(ax[0], img1, img2, keypoints1, keypoints2, matches12) ax[0].axis('off') plot_matches(ax[1], img1, img3, keypoints1, keypoints3, matches13) ax[1].axis('off') plt.show()
""" ======================= BRIEF binary descriptor ======================= This example demonstrates the BRIEF binary description algorithm. The descriptor consists of relatively few bits and can be computed using a set of intensity difference tests. The short binary descriptor results in low memory footprint and very efficient matching based on the Hamming distance metric. However, BRIEF does not provide rotation-invariance and scale-invariance can be achieved by detecting and extracting features at different scales. The ORB feature detection and binary description algorithm is an extension to the BRIEF method and provides rotation and scale-invariance, see `skimage.feature.ORB`. """ from skimage import data from skimage import transform as tf from skimage.feature import (match_descriptors, corner_peaks, corner_harris, plot_matches, BRIEF) from skimage.color import rgb2gray import matplotlib.pyplot as plt img1 = rgb2gray(data.lena()) tform = tf.AffineTransform(scale=(1.2, 1.2), translation=(0, -100)) img2 = tf.warp(img1, tform) img3 = tf.rotate(img1, 25) keypoints1 = corner_peaks(corner_harris(img1), min_distance=5) keypoints2 = corner_peaks(corner_harris(img2), min_distance=5) keypoints3 = corner_peaks(corner_harris(img3), min_distance=5) extractor = BRIEF() extractor.extract(img1, keypoints1) keypoints1 = keypoints1[extractor.mask_] descriptors1 = extractor.descriptors_ extractor.extract(img2, keypoints2) keypoints2 = keypoints2[extractor.mask_] descriptors2 = extractor.descriptors_ extractor.extract(img3, keypoints3) keypoints3 = keypoints3[extractor.mask_] descriptors3 = extractor.descriptors_ matches12 = match_descriptors(descriptors1, descriptors2, cross_check=True) matches13 = match_descriptors(descriptors1, descriptors3, cross_check=True) fig, ax = plt.subplots(nrows=2, ncols=1) plt.gray() plot_matches(ax[0], img1, img2, keypoints1, keypoints2, matches12) ax[0].axis('off') plot_matches(ax[1], img1, img3, keypoints1, keypoints3, matches13) ax[1].axis('off') plt.show()
bsd-3-clause
Python
284befadcfd3e4785067d827c67958d01b80d4a2
fix method name (underscore prefix)
PaintScratcher/perfrunner,vmx/perfrunner,EricACooper/perfrunner,vmx/perfrunner,pavel-paulau/perfrunner,dkao-cb/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,thomas-couchbase/perfrunner,dkao-cb/perfrunner,mikewied/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,mikewied/perfrunner,thomas-couchbase/perfrunner,hsharsha/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,hsharsha/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,PaintScratcher/perfrunner,EricACooper/perfrunner
perfrunner/tests/rebalance.py
perfrunner/tests/rebalance.py
import time

from perfrunner.tests import PerfTest

from multiprocessing import Event


def with_delay(method):
    def wrapper(self, *args, **kwargs):
        time.sleep(self.rebalance_settings.start_after)
        method(self, *args, **kwargs)
        time.sleep(self.rebalance_settings.stop_after)
        self.shutdown_event.set()
    return wrapper


class RebalanceTest(PerfTest):

    def __init__(self, *args, **kwargs):
        super(RebalanceTest, self).__init__(*args, **kwargs)
        self.shutdown_event = Event()
        self.rebalance_settings = self.test_config.get_rebalance_settings()

    @with_delay
    def rebalance_in(self):
        for cluster in self.cluster_spec.get_clusters():
            master = cluster[0]
            known_nodes = cluster[:self.rebalance_settings.nodes_after]
            ejected_nodes = []
            self.rest.rebalance(master, known_nodes, ejected_nodes)
            self.monitor.monitor_rebalance(master)


class StaticRebalanceTest(RebalanceTest):

    def run(self):
        self._run_load_phase()
        self._compact_bucket()

        self.reporter.start()
        self.rebalance_in()
        value = self.reporter.finish('Rebalance')

        self.reporter.post_to_sf(self, value)
        self._debug()
import time

from perfrunner.tests import PerfTest

from multiprocessing import Event


def with_delay(method):
    def wrapper(self, *args, **kwargs):
        time.sleep(self.rebalance_settings.start_after)
        method(self, *args, **kwargs)
        time.sleep(self.rebalance_settings.stop_after)
        self.shutdown_event.set()
    return wrapper


class RebalanceTest(PerfTest):

    def __init__(self, *args, **kwargs):
        super(RebalanceTest, self).__init__(*args, **kwargs)
        self.shutdown_event = Event()
        self.rebalance_settings = self.test_config.get_rebalance_settings()

    @with_delay
    def rebalance_in(self):
        for cluster in self.cluster_spec.get_clusters():
            master = cluster[0]
            known_nodes = cluster[:self.rebalance_settings.nodes_after]
            ejected_nodes = []
            self.rest.rebalance(master, known_nodes, ejected_nodes)
            self.monitor.monitor_rebalance(master)


class StaticRebalanceTest(RebalanceTest):

    def _run(self):
        self._run_load_phase()
        self._compact_bucket()

        self.reporter.start()
        self.rebalance_in()
        value = self.reporter.finish('Rebalance')

        self.reporter.post_to_sf(self, value)
        self._debug()
apache-2.0
Python
624745fbb311877f5b2251cf3eefe0cc15b5cca2
add some code for document
markshao/pagrant
pagrant/commands/test.py
pagrant/commands/test.py
#!/usr/bin/python
#coding:utf8
__author__ = ['markshao']

import os
from optparse import Option

from nose import main

from pagrant.basecommand import Command
from pagrant.commands.init import PAGRANT_CONFIG_FILE_NAME
from pagrant.environment import Environment
from pagrant.exceptions import PagrantConfigError, TestError


class TestCommand(Command):
    name = "test"
    usage = """%prog [options] [nose-options]"""
    summary = "execute the test suites|cases with the options"

    def __init__(self):
        super(TestCommand, self).__init__()
        self.parser.add_option(Option(
            # Writes the log levels explicitely to the log'
            '--newvm',
            dest='newvm',
            action='store_true',
            default=False,
            help="if set --newvm , the test will fisrt create the new vm against " \
                 "the Pagrantfile and destroy them after test"
        ))
        self.environment = None

    def run(self, args):
        if not os.path.exists(PAGRANT_CONFIG_FILE_NAME):
            raise PagrantConfigError(
                "The Pagrantfile should exist in the current folder , have to stop the test case execution")

        # validate the Pagrantfile config
        self.environment = Environment(os.path.abspath(PAGRANT_CONFIG_FILE_NAME), self.logger)

        # deal with the parameter
        options, nose_args = self.parser.parse_args(args)
        if options.newvm:
            self.logger.warn("start init the virtual environment for the test execution")
            self.environment.create_machines()
            self.environment.start_machines()
            self.logger.warn("finish init the virtual environment for the test execution")

        # the init is always needed
        self.environment.init_test_context()

        try:
            main(nose_args)
        except Exception, e:
            raise TestError(e.message)
        finally:
            if options.newvm:
                self.environment.stop_machines()
                self.environment.destroy_machines()
#!/usr/bin/python
#coding:utf8
__author__ = ['markshao']

import os
from optparse import Option

from nose import main

from pagrant.basecommand import Command
from pagrant.commands.init import PAGRANT_CONFIG_FILE_NAME
from pagrant.environment import Environment
from pagrant.exceptions import PagrantConfigError, TestError


class TestCommand(Command):
    name = "test"
    usage = """%prog [options] """
    summary = "execute the test suites|cases with the options"

    def __init__(self):
        super(TestCommand, self).__init__()
        self.parser.add_option(Option(
            # Writes the log levels explicitely to the log'
            '--newvm',
            dest='newvm',
            action='store_true',
            default=False,
        ))
        self.environment = None

    def run(self, args):
        if not os.path.exists(PAGRANT_CONFIG_FILE_NAME):
            raise PagrantConfigError(
                "The Pagrantfile should exist in the current folder , have to stop the test case execution")

        # validate the Pagrantfile config
        self.environment = Environment(os.path.abspath(PAGRANT_CONFIG_FILE_NAME), self.logger)

        # deal with the parameter
        options, nose_args = self.parser.parse_args(args)
        if options.newvm:
            self.logger.warn("start init the virtual environment for the test execution")
            self.environment.create_machines()
            self.environment.start_machines()
            self.logger.warn("finish init the virtual environment for the test execution")

        # the init is always needed
        self.environment.init_test_context()

        try:
            main(nose_args)
        except Exception, e:
            raise TestError(e.message)
        finally:
            if options.newvm:
                self.environment.stop_machines()
                self.environment.destroy_machines()
mit
Python
f3517847990f2007956c319a7784dbfc2d73b91a
Remove formatting
jmcs/ellipsis
ellipsis.py
ellipsis.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import subprocess
import sys

cwd = os.getcwd()
home = os.getenv('HOME')
devnull = open(os.devnull, 'w')


def find_svn_root(path):
    try:
        svn_cmd = ['/usr/bin/svn', 'info']
        svn_info = subprocess.check_output(svn_cmd, stderr=devnull).decode()
        info = dict()
        for line in svn_info.splitlines():
            if ':' in line:
                key, value = line.split(':', maxsplit=1)
                info[key]=value.strip()
        return info.get('Working Copy Root Path')
    except:
        return False


def find_git_root(path):
    try:
        git_cmd = ['/usr/bin/git', 'rev-parse', '--show-toplevel']
        git_root = subprocess.check_output(git_cmd, stderr=devnull)
        git_root = git_root[:-1]  # remove new_line
        return git_root.decode()
    except:
        return False

git_root = find_git_root(cwd)
svn_root = find_svn_root(cwd)

if git_root:
    repo_name = os.path.split(git_root)[-1]
    cwd = cwd.replace(git_root, repo_name)
elif svn_root:
    repo_name = svn_root.split('/')[-1]
    cwd = cwd.replace(svn_root, repo_name)
elif cwd.startswith(home):
    cwd = cwd.replace(home,'~')

components = cwd.split('/')
if len(components) > 3:
    first = components[0]
    last = components[-1]
    cwd = "{}/…/{}".format(first, last)

print(cwd)
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import subprocess
import sys

cwd = os.getcwd()
home = os.getenv('HOME')
devnull = open(os.devnull, 'w')


def find_svn_root(path):
    try:
        svn_cmd = ['/usr/bin/svn', 'info']
        svn_info = subprocess.check_output(svn_cmd, stderr=devnull).decode()
        info = dict()
        for line in svn_info.splitlines():
            if ':' in line:
                key, value = line.split(':', maxsplit=1)
                info[key]=value.strip()
        return info.get('Working Copy Root Path')
    except:
        return False


def find_git_root(path):
    try:
        git_cmd = ['/usr/bin/git', 'rev-parse', '--show-toplevel']
        git_root = subprocess.check_output(git_cmd, stderr=devnull)
        git_root = git_root[:-1]  # remove new_line
        return git_root.decode()
    except:
        return False

git_root = find_git_root(cwd)
svn_root = find_svn_root(cwd)

if git_root:
    repo_name = os.path.split(git_root)[-1]
    git_tag = "\033[1;31m{0}\033[1;37m".format(repo_name)
    cwd = cwd.replace(git_root, repo_name)
elif svn_root:
    repo_name = svn_root.split('/')[-1]
    svn_tag = "\033[1;34m{0}\033[1;37m".format(repo_name)
    cwd = cwd.replace(svn_root, svn_tag)
elif cwd.startswith(home):
    cwd = cwd.replace(home,'~')

components = cwd.split('/')
if len(components) > 3:
    first = components[0]
    last = components[-1]
    cwd = "{}/…/{}".format(first, last)

print("\033[1;37m{cwd}\033[0m".format(cwd=cwd))
mit
Python
c9ed81608ea6d017dbe23e012d0e137c1ce9ef10
remove eddy from test
kaczmarj/neurodocker,kaczmarj/neurodocker
neurodocker/interfaces/tests/test_fsl.py
neurodocker/interfaces/tests/test_fsl.py
"""Tests for neurodocker.interfaces.FSL""" from neurodocker.interfaces.tests import utils class TestFSL(object): def test_docker(self): specs = { 'pkg_manager': 'yum', 'instructions': [ ('base', 'centos:7'), ('fsl', {'version': '5.0.11'}), ('user', 'neuro'), ] } bash_test_file = "test_fsl.sh" utils.test_docker_container_from_specs( specs=specs, bash_test_file=bash_test_file) def test_singularity(self): specs = { 'pkg_manager': 'yum', 'instructions': [ ('base', 'docker://centos:7'), ('fsl', {'version': '5.0.10', 'eddy_5011': True}), ('user', 'neuro'), ] } utils.test_singularity_container_from_specs(specs=specs)
"""Tests for neurodocker.interfaces.FSL""" from neurodocker.interfaces.tests import utils class TestFSL(object): def test_docker(self): specs = { 'pkg_manager': 'yum', 'instructions': [ ('base', 'centos:7'), ('fsl', {'version': '5.0.10', 'eddy_5011': True}), ('user', 'neuro'), ] } bash_test_file = "test_fsl.sh" utils.test_docker_container_from_specs( specs=specs, bash_test_file=bash_test_file) def test_singularity(self): specs = { 'pkg_manager': 'yum', 'instructions': [ ('base', 'docker://centos:7'), ('fsl', {'version': '5.0.10', 'eddy_5011': True}), ('user', 'neuro'), ] } utils.test_singularity_container_from_specs(specs=specs)
apache-2.0
Python
4824b929bfa7a18a9f7796a9b93ad17909feeb56
Switch parameter has been added.
jbosboom/streamjit,jbosboom/streamjit
lib/opentuner/streamjit/sjparameters.py
lib/opentuner/streamjit/sjparameters.py
import deps  #fix sys.path

import opentuner
from opentuner.search.manipulator import (IntegerParameter,
                                          FloatParameter, SwitchParameter)


class sjIntegerParameter(IntegerParameter):
    def __init__(self, name, min, max, value, javaClassPath=None, **kwargs):
        self.value = value
        self.javaClassPath = javaClassPath
        super(sjIntegerParameter, self).__init__(name, min, max, **kwargs)

    def getValue(self):
        return self.value

    def getJavaClassPath(self):
        return self.javaClassPath


class sjFloatParameter(FloatParameter):
    def __init__(self, name, min, max, value, javaClassPath=None, **kwargs):
        self.value = value
        self.javaClassPath = javaClassPath
        super(sjIntegerParameter, self).__init__(name, min, max, **kwargs)

    def getValue(self):
        return self.value

    def getJavaClassPath(self):
        return self.javaClassPath


class sjSwitchParameter(SwitchParameter):
    def __init__(self, name, universeType, universe, value, javaClassPath=None, **kwargs):
        self.value = value
        self.javaClassPath = javaClassPath
        self.universeType = universeType
        self.universe = universe
        super(sjSwitchParameter, self).__init__(name, len(universe), **kwargs)

    def getValue(self):
        return self.value

    def getJavaClassPath(self):
        return self.javaClassPath

    def getUniverse(self):
        return self.universe

    def getUniverseType(self):
        return self.universeType


if __name__ == '__main__':
    ip = IntegerParameter("suman", 2, 7)
    sjip = sjIntegerParameter("ss", 3, 56, 45)
    sjsw = sjSwitchParameter('sjswtch', 'java.lang.Integer', [1, 2, 3, 4], 2,
                             'edu.mit.streamjit.impl.common.Configuration$SwitchParameter')
    print sjsw.getUniverse()
    print sjip.getValue()
import deps  #fix sys.path

import opentuner
from opentuner.search.manipulator import (IntegerParameter, FloatParameter)


class sjIntegerParameter(IntegerParameter):
    def __init__(self, name, min, max, value, javaClassPath=None, **kwargs):
        self.value = value
        self.javaClassPath = javaClassPath
        super(sjIntegerParameter, self).__init__(name, min, max, **kwargs)

    def getValue(self):
        return self.value

    def getJavaClassPath(self):
        return self.javaClassPath


class sjFloatParameter(FloatParameter):
    def __init__(self, name, min, max, value, javaClassPath=None, **kwargs):
        self.value = value
        self.javaClassPath = javaClassPath
        super(sjIntegerParameter, self).__init__(name, min, max, **kwargs)

    def getValue(self):
        return self.value

    def getJavaClassPath(self):
        return self.javaClassPath


if __name__ == '__main__':
    ip = IntegerParameter("suman", 2, 7)
    sjip = sjIntegerParameter("ss", 3, 56, 45)
    print sjip.getValue()
mit
Python
531dcc85b3579712ab5576a50e7dd10457444fb4
remove old class definitions
TUW-GEO/ecmwf_models
ecmwf_models/__init__.py
ecmwf_models/__init__.py
import pkg_resources

try:
    __version__ = pkg_resources.get_distribution(__name__).version
except:
    __version__ = 'unknown'
import pkg_resources

try:
    __version__ = pkg_resources.get_distribution(__name__).version
except:
    __version__ = 'unknown'

from ecmwf_models.interface import ERAInterimImg
from ecmwf_models.interface import ERAInterimDs
mit
Python
977cf58125a204010197c95827457843503e2c5b
Disable BSF Campus for RCA Alliance Française
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
ideascube/conf/kb_rca_alliancefrancaise.py
ideascube/conf/kb_rca_alliancefrancaise.py
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import *  # noqa

LANGUAGE_CODE = 'fr'
IDEASCUBE_NAME = 'Alliance française de Bangui'
# Disable BSF Campus for now
HOME_CARDS = [card for card in HOME_CARDS if card['id'] != 'bsfcampus']
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import *  # noqa

LANGUAGE_CODE = 'fr'
IDEASCUBE_NAME = 'Alliance française de Bangui'
agpl-3.0
Python
5ba36cb51fc2d93dd05430c3f5a0d24262b32985
Remove unnecessary type change
oreilly-japan/deep-learning-from-scratch
common/gradient.py
common/gradient.py
# coding: utf-8
import numpy as np


def _numerical_gradient_1d(f, x):
    h = 1e-4  # 0.0001
    grad = np.zeros_like(x)

    for idx in range(x.size):
        tmp_val = x[idx]
        x[idx] = float(tmp_val) + h
        fxh1 = f(x)  # f(x+h)

        x[idx] = tmp_val - h
        fxh2 = f(x)  # f(x-h)
        grad[idx] = (fxh1 - fxh2) / (2*h)

        x[idx] = tmp_val  # restore the original value

    return grad


def numerical_gradient_2d(f, X):
    if X.ndim == 1:
        return _numerical_gradient_1d(f, X)
    else:
        grad = np.zeros_like(X)

        for idx, x in enumerate(X):
            grad[idx] = _numerical_gradient_1d(f, x)

        return grad


def numerical_gradient(f, x):
    h = 1e-4  # 0.0001
    grad = np.zeros_like(x)

    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        idx = it.multi_index
        tmp_val = x[idx]
        x[idx] = tmp_val + h
        fxh1 = f(x)  # f(x+h)

        x[idx] = tmp_val - h
        fxh2 = f(x)  # f(x-h)
        grad[idx] = (fxh1 - fxh2) / (2*h)

        x[idx] = tmp_val  # restore the original value
        it.iternext()

    return grad
# coding: utf-8
import numpy as np


def _numerical_gradient_1d(f, x):
    h = 1e-4  # 0.0001
    grad = np.zeros_like(x)

    for idx in range(x.size):
        tmp_val = x[idx]
        x[idx] = float(tmp_val) + h
        fxh1 = f(x)  # f(x+h)

        x[idx] = tmp_val - h
        fxh2 = f(x)  # f(x-h)
        grad[idx] = (fxh1 - fxh2) / (2*h)

        x[idx] = tmp_val  # restore the original value

    return grad


def numerical_gradient_2d(f, X):
    if X.ndim == 1:
        return _numerical_gradient_1d(f, X)
    else:
        grad = np.zeros_like(X)

        for idx, x in enumerate(X):
            grad[idx] = _numerical_gradient_1d(f, x)

        return grad


def numerical_gradient(f, x):
    h = 1e-4  # 0.0001
    grad = np.zeros_like(x)

    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        idx = it.multi_index
        tmp_val = x[idx]
        x[idx] = float(tmp_val) + h
        fxh1 = f(x)  # f(x+h)

        x[idx] = tmp_val - h
        fxh2 = f(x)  # f(x-h)
        grad[idx] = (fxh1 - fxh2) / (2*h)

        x[idx] = tmp_val  # restore the original value
        it.iternext()

    return grad
mit
Python
ab42cc26e8a994974cb5beb1550715e6f838d7cb
fix file outputting
Treeki/network-q-formats
BUN/bunpack.py
BUN/bunpack.py
#!/usr/bin/env python
import struct, sys
from PIL import Image

u8 = struct.Struct('<B')

def convert_palette(pal):
    result = []
    s = struct.Struct('<BBB')
    for i in range(len(pal) // 3):
        result.append(s.unpack_from(pal, i * 3))
    return result

def extract_image(data, offset, pal):
    width = u8.unpack_from(data, offset + 2)[0]
    height = u8.unpack_from(data, offset + 3)[0]
    if width == 0 or height == 0:
        return None

    colours = convert_palette(pal)
    img = Image.new('RGB', (width,height))
    pix = img.load()

    offset += 4
    for y in range(height):
        x = 0
        while True:
            block_width = u8.unpack_from(data, offset)[0]
            if block_width == 0:
                # end of row
                offset += 1
                break
            spacing_before = u8.unpack_from(data, offset + 1)[0]
            offset += 2
            x += spacing_before
            for _ in range(block_width):
                index = u8.unpack_from(data, offset + 1)[0]
                pix[x,y] = colours[index]
                x += 1
                offset += 1
    return img

def main(argv):
    import argparse, string
    import os.path

    parser = argparse.ArgumentParser(description='Converts Network Q .BUN files into .PNG')
    parser.add_argument('input', metavar='infile', type=str, nargs=1,
                        help='the input file (.BUN)')
    parser.add_argument('-p', '--pal', type=str,
                        help='optional palette file (.PAL)')
    args = parser.parse_args()

    path,ext = os.path.splitext(args.input[0])
    try:
        palpath = args.input[1]
    except:
        palpath = path

    if ext != '.BUN':
        print('File does not have .BUN extension!')
        return

    filename = os.path.split(path)[1]

    try:
        f = open(path + '.BUN', 'rb')
    except IOError as e:
        print('Unable to open BUN file!')
        return
    else:
        data = f.read()
        f.close()

    try:
        f = open(palpath + '.PAL', 'rb')
    except IOError as e:
        print('Unable to open PAL file!')
        return
    else:
        pal_data = f.read()
        f.close()

    # read the file header (list of offsets)
    first_offset = struct.unpack_from('<I', data, 0)[0]
    image_count = first_offset // 4

    offsets = []
    for i in range(image_count):
        offsets.append(struct.unpack_from('<I', data, i * 4)[0])

    if not os.path.exists(filename):
        try:
            os.mkdir(filename)
        except:
            print('Unable to create path ' + filename + '!')
            return

    for i, offset in enumerate(offsets):
        img = extract_image(data, offset, pal_data)
        if img is not None:
            img.save(filename + '/%d.png' % i)

if __name__ == "__main__":
    main(sys.argv[1:])
#!/usr/bin/env python
import struct, sys
from PIL import Image

u8 = struct.Struct('<B')

def convert_palette(pal):
    result = []
    s = struct.Struct('<BBB')
    for i in range(len(pal) // 3):
        result.append(s.unpack_from(pal, i * 3))
    return result

def extract_image(data, offset, pal):
    width = u8.unpack_from(data, offset + 2)[0]
    height = u8.unpack_from(data, offset + 3)[0]
    if width == 0 or height == 0:
        return None

    colours = convert_palette(pal)
    img = Image.new('RGB', (width,height))
    pix = img.load()

    offset += 4
    for y in range(height):
        x = 0
        while True:
            block_width = u8.unpack_from(data, offset)[0]
            if block_width == 0:
                # end of row
                offset += 1
                break
            spacing_before = u8.unpack_from(data, offset + 1)[0]
            offset += 2
            x += spacing_before
            for _ in range(block_width):
                index = u8.unpack_from(data, offset + 1)[0]
                pix[x,y] = colours[index]
                x += 1
                offset += 1
    return img

def main(argv):
    import argparse, string
    import os.path

    parser = argparse.ArgumentParser(description='Converts Network Q .BUN files into .PNG')
    parser.add_argument('input', metavar='infile', type=str, nargs=1,
                        help='the input file (.BUN)')
    parser.add_argument('-p', '--pal', type=str,
                        help='optional palette file (.PAL)')
    args = parser.parse_args()

    path,ext = os.path.splitext(args.input[0])
    try:
        palpath = args.input[1]
    except:
        palpath = path

    if ext != '.BUN':
        print('File does not have .BUN extension!')
        return

    filename = os.path.split(path)

    try:
        f = open(path + '.BUN', 'rb')
    except IOError as e:
        print('Unable to open BUN file!')
        return
    else:
        data = f.read()
        f.close()

    try:
        f = open(palpath + '.PAL', 'rb')
    except IOError as e:
        print('Unable to open PAL file!')
        return
    else:
        pal_data = f.read()
        f.close()

    # read the file header (list of offsets)
    first_offset = struct.unpack_from('<I', data, 0)[0]
    image_count = first_offset // 4

    offsets = []
    for i in range(image_count):
        offsets.append(struct.unpack_from('<I', data, i * 4)[0])

    for i, offset in enumerate(offsets):
        img = extract_image(data, offset, pal_data)
        if img is not None:
            img.save(filename + '/%d.png' % i)

if __name__ == "__main__":
    main(sys.argv[1:])
mit
Python
d5cb8ea39236f52f3ee9d2f9f8485dc5f737a5bb
Send a message every few minutes to keep Travis happy
benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus
allTests.py
allTests.py
#!/usr/bin/env python

#Copyright (C) 2011 by Benedict Paten ([email protected])
#
#Released under the MIT license, see LICENSE.txt

import unittest
import os
from threading import Thread
import time

from cactus.setup.cactus_setupTest import TestCase as setupTest
from cactus.blast.cactus_blastTest import TestCase as blastTest
from cactus.pipeline.cactus_workflowTest import TestCase as workflowTest
from cactus.pipeline.cactus_evolverTest import TestCase as evolverTest
from cactus.bar.cactus_barTest import TestCase as barTest
from cactus.phylogeny.cactus_phylogenyTest import TestCase as phylogenyTest
from cactus.faces.cactus_fillAdjacenciesTest import TestCase as adjacenciesTest
from cactus.reference.cactus_referenceTest import TestCase as referenceTest
from cactus.hal.cactus_halTest import TestCase as halTest
from cactus.api.allTests import TestCase as apiTest
from cactus.caf.allTests import TestCase as cafTest
from cactus.normalisation.cactus_normalisationTest import TestCase as normalisationTest
from cactus.progressive.allTests import allSuites as progressiveSuite
from cactus.shared.commonTest import TestCase as commonTest
from cactus.preprocessor.allTests import allSuites as preprocessorTest

def keepAlive():
    """Keep Travis tests from failing prematurely by outputting to
    stdout every few minutes."""
    while True:
        time.sleep(240)
        print "Still working..."

def allSuites():
    allTests = unittest.TestSuite()
    allTests.addTests([unittest.makeSuite(i) for i in
                       [setupTest, workflowTest, evolverTest, barTest,
                        phylogenyTest, adjacenciesTest, referenceTest, apiTest,
                        normalisationTest, halTest, commonTest]] + [progressiveSuite()])
    if "SON_TRACE_DATASETS" in os.environ:
        allTests.addTests([unittest.makeSuite(blastTest), preprocessorTest()])
    return allTests

def main():
    keepAliveThread = Thread(target=keepAlive)
    # The keepalive thread will die when the main thread dies
    keepAliveThread.daemon = True
    keepAliveThread.start()
    suite = allSuites()
    runner = unittest.TextTestRunner(verbosity=2)
    i = runner.run(suite)
    return len(i.failures) + len(i.errors)

if __name__ == '__main__':
    import sys
    sys.exit(main())
#!/usr/bin/env python

#Copyright (C) 2011 by Benedict Paten ([email protected])
#
#Released under the MIT license, see LICENSE.txt

import unittest
import os

from cactus.setup.cactus_setupTest import TestCase as setupTest
from cactus.blast.cactus_blastTest import TestCase as blastTest
from cactus.pipeline.cactus_workflowTest import TestCase as workflowTest
from cactus.pipeline.cactus_evolverTest import TestCase as evolverTest
from cactus.bar.cactus_barTest import TestCase as barTest
from cactus.phylogeny.cactus_phylogenyTest import TestCase as phylogenyTest
from cactus.faces.cactus_fillAdjacenciesTest import TestCase as adjacenciesTest
from cactus.reference.cactus_referenceTest import TestCase as referenceTest
from cactus.hal.cactus_halTest import TestCase as halTest
from cactus.api.allTests import TestCase as apiTest
from cactus.caf.allTests import TestCase as cafTest
from cactus.normalisation.cactus_normalisationTest import TestCase as normalisationTest
from cactus.progressive.allTests import allSuites as progressiveSuite
from cactus.shared.commonTest import TestCase as commonTest
from cactus.preprocessor.allTests import allSuites as preprocessorTest

def allSuites():
    allTests = unittest.TestSuite()
    allTests.addTests([unittest.makeSuite(i) for i in
                       [setupTest, workflowTest, evolverTest, barTest,
                        phylogenyTest, adjacenciesTest, referenceTest, apiTest,
                        normalisationTest, halTest, commonTest]] + [progressiveSuite()])
    if "SON_TRACE_DATASETS" in os.environ:
        allTests.addTests([unittest.makeSuite(blastTest), preprocessorTest()])
    return allTests

def main():
    suite = allSuites()
    runner = unittest.TextTestRunner(verbosity=2)
    i = runner.run(suite)
    return len(i.failures) + len(i.errors)

if __name__ == '__main__':
    import sys
    sys.exit(main())
mit
Python
a94bbac73a40f85e0239bbab72c0ffce5258f707
Update test_geocoding.py
sdpython/actuariat_python,sdpython/actuariat_python,sdpython/actuariat_python
_unittests/ut_data/test_geocoding.py
_unittests/ut_data/test_geocoding.py
# -*- coding: utf-8 -*-
"""
@brief      test log(time=16s)
"""
import os
import unittest
import warnings
import pandas
from pyquickhelper.loghelper import fLOG, get_password
from pyquickhelper.pycode import (
    add_missing_development_version, get_temp_folder,
    is_travis_or_appveyor, ExtTestCase)


class TestGeocoding(ExtTestCase):

    def setUp(self):
        add_missing_development_version(["pyensae", "pymyinstall", "pyrsslocal"],
                                        __file__, hide=__name__ == "__main__")

    @unittest.skipIf(is_travis_or_appveyor() is not None, "no keys")
    def test_geocoding(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")

        temp = get_temp_folder(__file__, "temp_geocoding")
        from actuariat_python.data import geocode
        data = os.path.join(os.path.abspath(
            os.path.dirname(__file__)), "data", "bureau.txt")
        df = pandas.read_csv(data, sep="\t", encoding="utf-8")
        he = df.head(n=5)
        every = os.path.join(temp, "every.csv")

        # we retrieve an encrypted key
        bing_key = get_password("bing", "actuariat_python,key")
        self.assertNotEmpty(bing_key)
        fLOG(bing_key)
        coders = ["Nominatim"]
        if bing_key:
            coders.append(("bing", bing_key))
        fLOG("geocoding 1", len(he))

        # test
        res = geocode(he, save_every=every, every=1, index=False,
                      encoding="utf-8", coders=coders, fLOG=fLOG)
        self.assertExists(every)
        # fLOG(res)
        out = os.path.join(temp, "geo.csv")
        res.to_csv(out, sep="\t", encoding="utf-8", index=False)
        res.to_excel(out + ".xlsx", index=False)
        fLOG("geocoding 2", len(res))
        res = geocode(he, save_every=every, every=1, index=False,
                      encoding="utf-8", coders=coders, fLOG=fLOG)
        self.assertExists(every)
        fLOG(res)


if __name__ == "__main__":
    unittest.main()
# -*- coding: utf-8 -*-
"""
@brief      test log(time=16s)
"""
import os
import unittest
import warnings
import pandas
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import add_missing_development_version, get_temp_folder, is_travis_or_appveyor, ExtTestCase


class TestGeocoding(ExtTestCase):

    def setUp(self):
        add_missing_development_version(["pyensae", "pymyinstall", "pyrsslocal"],
                                        __file__, hide=__name__ == "__main__")

    @unittest.skipIf(is_travis_or_appveyor() is not None, "no keys")
    def test_geocoding(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")

        temp = get_temp_folder(__file__, "temp_geocoding")
        from actuariat_python.data import geocode
        data = os.path.join(os.path.abspath(
            os.path.dirname(__file__)), "data", "bureau.txt")
        df = pandas.read_csv(data, sep="\t", encoding="utf-8")
        he = df.head(n=5)
        every = os.path.join(temp, "every.csv")

        # we retrieve an encrypted key
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            import keyring
            bing_key = keyring.get_password("bing", "actuariat_python,key")
        self.assertNotEmpty(bing_key)
        fLOG(bing_key)
        coders = ["Nominatim"]
        if bing_key:
            coders.append(("bing", bing_key))
        fLOG("geocoding 1", len(he))

        # test
        res = geocode(he, save_every=every, every=1, index=False,
                      encoding="utf-8", coders=coders, fLOG=fLOG)
        self.assertExists(every)
        # fLOG(res)
        out = os.path.join(temp, "geo.csv")
        res.to_csv(out, sep="\t", encoding="utf-8", index=False)
        res.to_excel(out + ".xlsx", index=False)
        fLOG("geocoding 2", len(res))
        res = geocode(he, save_every=every, every=1, index=False,
                      encoding="utf-8", coders=coders, fLOG=fLOG)
        self.assertExists(every)
        fLOG(res)


if __name__ == "__main__":
    unittest.main()
mit
Python
82acbc312b36bfdf4e1a0a1c26019d2c5879e036
Fix context processor settings to support Django 1.7
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
nodeconductor/server/admin/settings.py
nodeconductor/server/admin/settings.py
ADMIN_INSTALLED_APPS = (
    'fluent_dashboard',
    'admin_tools',
    'admin_tools.theming',
    'admin_tools.menu',
    'admin_tools.dashboard',
    'django.contrib.admin',
)

# FIXME: Move generic (not related to admin) context processors to base_settings
# Note: replace 'django.core.context_processors' with 'django.template.context_processors' in Django 1.8+
ADMIN_TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.request',  # required by django-admin-tools >= 0.7.0
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
)

ADMIN_TEMPLATE_LOADERS = (
    'admin_tools.template_loaders.Loader',  # required by django-admin-tools >= 0.7.0
)

FLUENT_DASHBOARD_APP_ICONS = {
    'structure/customer': 'system-users.png',
    'structure/servicesettings': 'preferences-other.png',
    'structure/project': 'folder.png',
    'structure/projectgroup': 'folder-bookmark.png',
    'backup/backup': 'document-export-table.png',
    'backup/backupschedule': 'view-resource-calendar.png',
    'billing/invoice': 'help-donate.png',
    'cost_tracking/pricelistitem': 'view-bank-account.png',
    'cost_tracking/priceestimate': 'feed-subscribe.png',
    'cost_tracking/defaultpricelistitem': 'view-calendar-list.png'
}

ADMIN_TOOLS_INDEX_DASHBOARD = 'nodeconductor.server.admin.dashboard.CustomIndexDashboard'
ADMIN_TOOLS_APP_INDEX_DASHBOARD = 'nodeconductor.server.admin.dashboard.CustomAppIndexDashboard'
ADMIN_TOOLS_MENU = 'nodeconductor.server.admin.menu.CustomMenu'

# Should be specified, otherwise all Applications dashboard will be included.
FLUENT_DASHBOARD_APP_GROUPS = ()
ADMIN_INSTALLED_APPS = (
    'fluent_dashboard',
    'admin_tools',
    'admin_tools.theming',
    'admin_tools.menu',
    'admin_tools.dashboard',
    'django.contrib.admin',
)

# FIXME: Move generic (not related to admin) context processors to base_settings
ADMIN_TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.request',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.template.context_processors.request',  # required by django-admin-tools >= 0.7.0
)

ADMIN_TEMPLATE_LOADERS = (
    'admin_tools.template_loaders.Loader',  # required by django-admin-tools >= 0.7.0
)

FLUENT_DASHBOARD_APP_ICONS = {
    'structure/customer': 'system-users.png',
    'structure/servicesettings': 'preferences-other.png',
    'structure/project': 'folder.png',
    'structure/projectgroup': 'folder-bookmark.png',
    'backup/backup': 'document-export-table.png',
    'backup/backupschedule': 'view-resource-calendar.png',
    'billing/invoice': 'help-donate.png',
    'cost_tracking/pricelistitem': 'view-bank-account.png',
    'cost_tracking/priceestimate': 'feed-subscribe.png',
    'cost_tracking/defaultpricelistitem': 'view-calendar-list.png'
}

ADMIN_TOOLS_INDEX_DASHBOARD = 'nodeconductor.server.admin.dashboard.CustomIndexDashboard'
ADMIN_TOOLS_APP_INDEX_DASHBOARD = 'nodeconductor.server.admin.dashboard.CustomAppIndexDashboard'
ADMIN_TOOLS_MENU = 'nodeconductor.server.admin.menu.CustomMenu'

# Should be specified, otherwise all Applications dashboard will be included.
FLUENT_DASHBOARD_APP_GROUPS = ()
mit
Python
a0e65ec74447984b97afa7a3405199eebd49269a
Add DB calls
habibmasuro/omniwallet,OmniLayer/omniwallet,habibmasuro/omniwallet,VukDukic/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,Nevtep/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,habibmasuro/omniwallet,achamely/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,VukDukic/omniwallet,achamely/omniwallet,Nevtep/omniwallet,achamely/omniwallet
api/mastercoin_verify.py
api/mastercoin_verify.py
import os
import glob
import re
from flask import Flask, request, jsonify, abort, json
import psycopg2, psycopg2.extras
import msc_apps

sqlconn = msc_apps.sql_connect()

data_dir_root = os.environ.get('DATADIR')

app = Flask(__name__)
app.debug = True


@app.route('/properties')
def properties():
    prop_glob = glob.glob(data_dir_root + '/properties/*.json')
    response = []
    for property_file in prop_glob:
        with open(property_file, 'r') as f:
            prop = json.load(f)[0]
            response.append({ 'currencyID': prop['currencyId'], 'name': prop['propertyName'] })

    json_response = json.dumps(sorted(response, key=lambda x: int(x['currencyID'])))
    return json_response


@app.route('/addresses')
def addresses():
    currency_id = request.args.get('currency_id')
    response = []
    #addr_glob = glob.glob(data_dir_root + '/addr/*.json')
    currency_id = re.sub(r'\D+', '', currency_id)  #check alphanumeric
    sqlconn.execute("select * from addressbalances where propertyid=" + str(currency_id))
    ROWS= sqlconn.fetchall()
    for addrrow in ROWS:
        res = { 'address': addrrow[0] }
        if currency_id == '0':  #BTC
            res['balance'] = ('%.8f' % float(addrrow[4])).rstrip('0').rstrip('.')
            response.append(res)
        else:
            res['balance'] = ('%.8f' % float(addrrow[4])).rstrip('0').rstrip('.')
            res['reserved_balance'] = ('%.8f' % float(addrrow[5])).rstrip('0').rstrip('.')
            response.append(res)

    json_response = json.dumps(response)
    return json_response


@app.route('/transactions/<address>')
def transactions(address=None):
    currency_id = request.args.get('currency_id')
    print address, currency_id
    if address == None:
        abort(400)
    currency_id = re.sub(r'\D+', '', currency_id)  #check alphanumeric
    sqlconn.execute("select * from addressesintxs a, transactions t where a.address=\'"+address+"\' and a.txdbserialnum = t.txdbserialnum and a.propertyid=" + str(currency_id))
    ROWS= sqlconn.fetchall()
    transactions = []
    for txrow in ROWS:
        transactions.append(txrow[9])

    return jsonify({ 'address': address, 'transactions': transactions })
import os
import glob
import re
from flask import Flask, request, jsonify, abort, json

data_dir_root = os.environ.get('DATADIR')

app = Flask(__name__)
app.debug = True


@app.route('/properties')
def properties():
    prop_glob = glob.glob(data_dir_root + '/properties/*.json')
    response = []
    for property_file in prop_glob:
        with open(property_file, 'r') as f:
            prop = json.load(f)[0]
            response.append({ 'currencyID': prop['currencyId'], 'name': prop['propertyName'] })

    json_response = json.dumps(sorted(response, key=lambda x: int(x['currencyID'])))
    return json_response


@app.route('/addresses')
def addresses():
    currency_id = request.args.get('currency_id')
    response = []
    addr_glob = glob.glob(data_dir_root + '/addr/*.json')
    for address_file in addr_glob:
        with open(address_file, 'r') as f:
            addr = json.load(f)
            res = { 'address': addr['address'] }
            if currency_id == '0':  #BTC
                btc_balance = [x['value'] for x in addr['balance'] if x['symbol'] == 'BTC'][0]
                res['balance'] = ('%.8f' % float(btc_balance)).rstrip('0').rstrip('.')
                response.append(res)
            else:
                adjust_currency_id = currency_id
                if currency_id == '1' or currency_id == '2':
                    adjust_currency_id = str(int(currency_id) - 1)  # Mastercoin-tools is off by one on currency id from the spec
                if adjust_currency_id in addr:
                    res['balance'] = ('%.8f' % float(addr[adjust_currency_id]['balance'])).rstrip('0').rstrip('.')
                    res['reserved_balance'] = ('%.8f' % float(addr[adjust_currency_id]['total_reserved'])).rstrip('0').rstrip('.')
                    response.append(res)

    json_response = json.dumps(response)
    return json_response


@app.route('/transactions/<address>')
def transactions(address=None):
    currency_id = request.args.get('currency_id')
    print address, currency_id
    if address == None:
        abort(400)
    if not exists(address):
        abort(404)

    addr = read(address)
    transactions = []
    tx_lists = ['accept_transactions', 'bought_transactions', 'exodus_transactions',
                'offer_transactions', 'received_transactions', 'sent_transactions',
                'sold_transactions']

    if currency_id == '0':
        return jsonify({ 'address': address, 'transactions': transactions })  # Punt on bitcoin transactions since we don't store them

    if currency_id == '1' or currency_id == '2':
        currency_id = str(int(currency_id) - 1)  # Mastercoin-tools is off by one on currency id from the spec

    if currency_id in addr:
        for tx_i in tx_lists:
            for tx in addr[currency_id][tx_i]:
                transactions.append(tx_clean(tx))

    return jsonify({ 'address': address, 'transactions': transactions })


# Utilities
def tx_clean(tx):
    clean = { 'tx_hash': tx['tx_hash'], 'valid': True, 'accepted_amount': tx['formatted_amount'] }
    if 'bitcoin_required' in tx:
        clean['bought_amount'] = tx['bitcoin_required']
    return clean


def read(address):
    if not re.match('^[a-zA-Z0-9_]+$', address):
        raise ValueError('Non Alphanumeric address')
    filename = data_dir_root + '/addr/' + address + '.json'
    with open(filename, 'r') as f:
        return json.load(f)


def exists(address):
    filename = data_dir_root + '/addr/' + address + '.json'
    return os.path.exists(filename)
agpl-3.0
Python
23865e7155974dbc9a9be3d9e6c51ed7b96200ea
add next to profile form
felinx/poweredsites,felinx/poweredsites,felinx/poweredsites
poweredsites/forms/profile.py
poweredsites/forms/profile.py
# -*- coding: utf-8 -*-
#
# Copyright(c) 2010 poweredsites.org
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

from formencode import validators
from tornado.options import options

from poweredsites.forms.base import BaseForm, URL
from poweredsites.libs import const


class ProfileForm(BaseForm):
    email = validators.Email(not_empty=True, resolve_domain=False, max=120)
    username = validators.PlainText(not_empty=True, strip=True)
    blog_name = validators.String(not_empty=False, max=40, strip=True)
    blog_url = URL(not_empty=False, max=600, add_http=True)
    next = validators.String(not_empty=False, max=600)

    def __after__(self):
        try:
            v = self._values
            length = len(v["username"])
            if length < 3 or length > 40:
                self.add_error("username", "Username should be more than three and less than forty charaters.")

            self._handler.db.execute(
                    "UPDATE user SET username = %s, email = %s, status_ = %s, \
                    blog_name = %s, blog_url = %s WHERE id = %s",
                    v['username'].lower(), v['email'], const.Status.ACTIVE, \
                    v['blog_name'], v['blog_url'], self._handler.current_user.id
                    )
            self._handler.set_secure_cookie("user", v['username'], domain=options.cookie_domain)
        except Exception, e:
            logging.error(str(e))
            self.add_error("username", "Save profile error, please try it later.")
# -*- coding: utf-8 -*-
#
# Copyright(c) 2010 poweredsites.org
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging

from formencode import validators
from tornado.options import options

from poweredsites.forms.base import BaseForm, URL
from poweredsites.libs import const


class ProfileForm(BaseForm):
    email = validators.Email(not_empty=True, resolve_domain=False, max=120)
    username = validators.PlainText(not_empty=True, strip=True)
    blog_name = validators.String(not_empty=False, max=40, strip=True)
    blog_url = URL(not_empty=False, max=600, add_http=True)

    def __after__(self):
        try:
            v = self._values
            length = len(v["username"])
            if length < 3 or length > 40:
                self.add_error("username", "Username should be more than three and less than forty charaters.")

            self._handler.db.execute(
                    "UPDATE user SET username = %s, email = %s, status_ = %s, \
                    blog_name = %s, blog_url = %s WHERE id = %s",
                    v['username'].lower(), v['email'], const.Status.ACTIVE, \
                    v['blog_name'], v['blog_url'], self._handler.current_user.id
                    )
            self._handler.set_secure_cookie("user", v['username'], domain=options.cookie_domain)
        except Exception, e:
            logging.error(str(e))
            self.add_error("username", "Save profile error, please try it later.")
apache-2.0
Python
c22d4f5aa412b6aa624212bf5728c94fbef5d375
Modify attributes for Bucketlist Model, Modify relationship between User model and Bucketlist Model
brayoh/bucket-list-api
app/models.py
app/models.py
from datetime import datetime

from passlib.apps import custom_app_context as pwd_context

from app import db


class User(db.Model):
    """This class represents the users database table."""

    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255), unique=True)
    password = db.Column(db.String(255), nullable=False)
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())
    bucketlist = db.relationship('BucketList', backref='user')

    def __init__(self, username, password):
        self.username = username
        self.password = self.hash_password(password)

    def hash_password(self, password):
        return pwd_context.encrypt(password)

    def verify_password(self, password):
        return pwd_context.verify(password, self.password)

    def __repr__(self):
        return '<User %r>' % self.username


class BucketList(db.Model):
    """This is class represents the bucketlist database table."""

    __tablename__ = 'bucketlist'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
    items = db.relationship('Item', backref='bucketlist',
                            cascade='all, delete', lazy='dynamic')
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())


class Item(db.Model):
    """This class represents bucketlist items table. """

    __tablename__ = 'bucketlist_items'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), nullable=False)
    bucketlist_id = db.Column(db.Integer, db.ForeignKey(
        'bucketlist.id', ondelete='CASCADE'), nullable=False)
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())
    done = db.Column(db.Boolean, default=False)

    def __repr__(self):
        return '<Item %s>' % (self.name)
from datetime import datetime

from passlib.apps import custom_app_context as pwd_context

from app import db


class User(db.Model):
    """This class represents the users database table."""

    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255), unique=True)
    password = db.Column(db.String(255), nullable=False)
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())
    bucketlist = db.relationship('BucketList', backref='user')

    def __init__(self, username, password):
        self.username = username
        self.password = self.hash_password(password)

    def hash_password(self, password):
        return pwd_context.encrypt(password)

    def verify_password(self, password):
        return pwd_context.verify(password, self.password)

    def __repr__(self):
        return '<User %r>' % self.username


class Bucketlist(db.Model):
    """This is class represents the bucketlist database table."""

    __tablename__ = 'bucketlists'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"))
    items = db.relationship('Item', backref='bucketlist',
                            cascade='all, delete', lazy='dynamic')
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())


class Item(db.Model):
    """This class represents bucketlist items table. """

    __tablename__ = 'bucketlist_items'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), nullable=False)
    bucketlist_id = db.Column(db.Integer, db.ForeignKey(
        'bucketlist.id', ondelete='CASCADE'), nullable=False)
    created_at = db.Column(db.DateTime, default=datetime.utcnow().isoformat())
    done = db.Column(db.Boolean, default=False)

    def __repr__(self):
        return '<Item %s>' % (self.name)
mit
Python
4ceb0b3cb2b952a491b4173313559c7b4bc06c2b
Update __init__.py
CENDARI/editorsnotes,CENDARI/editorsnotes,CENDARI/editorsnotes,CENDARI/editorsnotes,CENDARI/editorsnotes
editorsnotes/__init__.py
editorsnotes/__init__.py
__version__ = '0.2.1'

VERSION = __version__
__version__ = '2.0.1'

VERSION = __version__
agpl-3.0
Python
053785b92dc925b27ba036a2b560ab509180fd1e
Add Lowdown to Sphinx extensions load list.
4degrees/lucidity,nebukadhezer/lucidity
doc/conf.py
doc/conf.py
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

'''Lucidity documentation build configuration file'''

import os
import re

# -- General ------------------------------------------------------------------

# Extensions
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.extlinks',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'lowdown'
]

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Lucidity'
copyright = u'2013, Martin Pengelly-Phillips'

# Version
with open(
    os.path.join(
        os.path.dirname(__file__), '..', 'source', 'lucidity', '_version.py'
    )
) as _version_file:
    _version = re.match(
        r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL
    ).group(1)

version = _version
release = _version

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['static', 'template']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of prefixes to ignore for module listings
modindex_common_prefix = ['lucidity.']

# -- HTML output --------------------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named 'default.css' will overwrite the builtin 'default.css'.
html_static_path = ['static']

# If True, copy source rst files to output for reference
html_copy_source = True

# -- Autodoc ------------------------------------------------------------------

autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
autodoc_member_order = 'bysource'


def autodoc_skip(app, what, name, obj, skip, options):
    '''Don't skip __init__ method for autodoc.'''
    if name == '__init__':
        return False

    return skip

# -- Intersphinx --------------------------------------------------------------

intersphinx_mapping = {'python':('http://docs.python.org/', None)}

# -- Setup --------------------------------------------------------------------

def setup(app):
    app.connect('autodoc-skip-member', autodoc_skip)
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

'''Lucidity documentation build configuration file'''

import os
import re

# -- General ------------------------------------------------------------------

# Extensions
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.extlinks',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
]

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Lucidity'
copyright = u'2013, Martin Pengelly-Phillips'

# Version
with open(
    os.path.join(
        os.path.dirname(__file__), '..', 'source', 'lucidity', '_version.py'
    )
) as _version_file:
    _version = re.match(
        r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL
    ).group(1)

version = _version
release = _version

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['static', 'template']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of prefixes to ignore for module listings
modindex_common_prefix = ['lucidity.']

# -- HTML output --------------------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named 'default.css' will overwrite the builtin 'default.css'.
html_static_path = ['static']

# If True, copy source rst files to output for reference
html_copy_source = True

# -- Autodoc ------------------------------------------------------------------

autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
autodoc_member_order = 'bysource'


def autodoc_skip(app, what, name, obj, skip, options):
    '''Don't skip __init__ method for autodoc.'''
    if name == '__init__':
        return False

    return skip

# -- Intersphinx --------------------------------------------------------------

intersphinx_mapping = {'python':('http://docs.python.org/', None)}

# -- Setup --------------------------------------------------------------------

def setup(app):
    app.connect('autodoc-skip-member', autodoc_skip)
apache-2.0
Python
2d8644d5cc0085db4615de6bfabdd024a6a19469
fix demo issue
nextoa/comb
comb/demo/redis.py
comb/demo/redis.py
# -*- coding: utf-8 -*-
import comb.slot
import comb.mq.redis as RedisHelper
import redis


class Slot(comb.slot.Slot):
    def initialize(self):
        """
        This block is execute before thread initial

        Example::

            class UserSlot(Slot):
                def initialize(self,*args,**kwargs):
                    self.attr = kwargs.get('attr',None)

                def slot(self, result):
                    ...

        """

        if self.extra_loader.options.get('--force1'):
            self.threads_num = 1
            print "Force thread nums to 1"

        self.db = redis.Redis()

    def __enter__(self):
        data = RedisHelper.push(self.db,'mq1','aaaa')
        if not data:
            return False

        return data['_id']

    def __exit__(self, exc_type, exc_val, exc_tb):
        data = RedisHelper.pop(self.db,'mq1')

    def slot(self, result):
        print "call slot,current data is:", result
        pass

    @staticmethod
    def options():
        return (
            "Extra options:",
            ('--force1','force 1 thread'),
        )
# -*- coding: utf-8 -*-
import comb.slot
import comb.mq.redis as RedisHelper
import redis


class Slot(comb.slot.Slot):
    def initialize(self):
        """
        This block is execute before thread initial

        Example::

            class UserSlot(Slot):
                def initialize(self,*args,**kwargs):
                    self.attr = kwargs.get('attr',None)

                def slot(self, result):
                    ...

        """

        if self.extra_loader.options.get('--force1'):
            self.threads_num = 1
            print "Force thread nums to 1"

        self.db = redis.Redis()

    def __enter__(self):
        data = RedisHelper.pop(self.db,'mq1','aaaa')
        if not data:
            return False

        return data['_id']

    def __exit__(self, exc_type, exc_val, exc_tb):
        data = RedisHelper.push(self.db,'mq1')

    def slot(self, result):
        print "call slot,current data is:", result
        pass

    @staticmethod
    def options():
        return (
            "Extra options:",
            ('--force1','force 1 thread'),
        )
mit
Python
379ae8c6dc026ff33d28b4df00e5d435fc4fc85a
FIX depends
ingadhoc/account-invoicing
account_invoice_control/__openerp__.py
account_invoice_control/__openerp__.py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2015  ADHOC SA  (http://www.adhoc.com.ar)
#    All Rights Reserved.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Account Invoice Control',
    'author': 'ADHOC SA',
    'version': '8.0.0.0.0',
    'license': 'AGPL-3',
    'category': 'Accounting & Finance',
    'depends': ['sale', 'purchase'],
    'description': '''
Account Invoice Control
=======================
''',
    'test': [],
    'data': [
        'security/security.xml',
        'views/invoice_view.xml',
    ],
    'website': 'www.adhoc.com.ar',
    'installable': True,
}
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2015  ADHOC SA  (http://www.adhoc.com.ar)
#    All Rights Reserved.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Account Invoice Control',
    'author': 'ADHOC SA',
    'version': '8.0.0.0.0',
    'license': 'AGPL-3',
    'category': 'Accounting & Finance',
    'depends': ['account'],
    'description': '''
Account Invoice Control
=======================
''',
    'test': [],
    'data': [
        'security/security.xml',
        'views/invoice_view.xml',
    ],
    'website': 'www.adhoc.com.ar',
    'installable': True,
}
agpl-3.0
Python
889159bf9f8d8a067ad0e7c740b68f73da83ef6c
test some floats too.
synthicity/activitysim,synthicity/activitysim
activitysim/abm/test/test_skims.py
activitysim/abm/test/test_skims.py
from collections import OrderedDict

from future.utils import iteritems
import numpy as np
import pytest

from activitysim.abm.tables import skims


@pytest.fixture(scope="session")
def matrix_dimension():
    return 5922


@pytest.fixture(scope="session")
def num_of_matrices():
    return 845


@pytest.fixture(scope="session")
def skim_info(num_of_matrices, matrix_dimension):
    time_periods = ['EA', 'AM', 'MD', 'PM', 'NT']

    omx_keys = OrderedDict()
    omx_key1_block_offsets = OrderedDict()
    omx_block_offsets = OrderedDict()
    omx_blocks = OrderedDict()
    omx_blocks['skim_arc_skims_0'] = num_of_matrices

    for i in range(0, num_of_matrices + 1):
        key1_name = 'm{}'.format(i // len(time_periods) + 1)
        time_period = time_periods[i % len(time_periods)]

        omx_keys[(key1_name, time_period)] = '{}__{}'.format(key1_name, time_period)
        omx_block_offsets[(key1_name, time_period)] = (0, i)

        if 0 == i % len(time_periods):
            omx_key1_block_offsets[key1_name] = (0, i)

    skim_info = {
        'omx_name': 'arc_skims',
        'omx_shape': (matrix_dimension, matrix_dimension),
        'num_skims': num_of_matrices,
        'dtype': np.float32,
        'omx_keys': omx_keys,
        'key1_block_offsets': omx_key1_block_offsets,
        'block_offsets': omx_block_offsets,
        'blocks': omx_blocks
    }

    return skim_info


def test_multiply_large_numbers(skim_info, num_of_matrices, matrix_dimension):
    omx_shape = skim_info['omx_shape']
    blocks = skim_info['blocks']

    for block_name, block_size in iteritems(blocks):
        # If overflow, this number will go negative
        assert int(skims.multiply_large_numbers(omx_shape) * block_size) == \
            num_of_matrices * matrix_dimension ** 2


def test_multiple_large_floats():
    calculated_value = skims.multiply_large_numbers([6205.1, 5423.2, 932.4, 15.4])
    actual_value = 483200518316.9472
    assert abs(calculated_value - actual_value) < 0.0001
from collections import OrderedDict

from future.utils import iteritems
import numpy as np
import pytest

from activitysim.abm.tables import skims


@pytest.fixture(scope="session")
def matrix_dimension():
    return 5922


@pytest.fixture(scope="session")
def num_of_matrices():
    return 845


@pytest.fixture(scope="session")
def skim_info(num_of_matrices, matrix_dimension):
    time_periods = ['EA', 'AM', 'MD', 'PM', 'NT']

    omx_keys = OrderedDict()
    omx_key1_block_offsets = OrderedDict()
    omx_block_offsets = OrderedDict()
    omx_blocks = OrderedDict()
    omx_blocks['skim_arc_skims_0'] = num_of_matrices

    for i in range(0, num_of_matrices + 1):
        key1_name = 'm{}'.format(i // len(time_periods) + 1)
        time_period = time_periods[i % len(time_periods)]

        omx_keys[(key1_name, time_period)] = '{}__{}'.format(key1_name, time_period)
        omx_block_offsets[(key1_name, time_period)] = (0, i)

        if 0 == i % len(time_periods):
            omx_key1_block_offsets[key1_name] = (0, i)

    skim_info = {
        'omx_name': 'arc_skims',
        'omx_shape': (matrix_dimension, matrix_dimension),
        'num_skims': num_of_matrices,
        'dtype': np.float32,
        'omx_keys': omx_keys,
        'key1_block_offsets': omx_key1_block_offsets,
        'block_offsets': omx_block_offsets,
        'blocks': omx_blocks
    }

    return skim_info


def test_multiply_large_numbers(skim_info, num_of_matrices, matrix_dimension):
    omx_shape = skim_info['omx_shape']
    blocks = skim_info['blocks']

    for block_name, block_size in iteritems(blocks):
        # If overflow, this number will go negative
        assert int(skims.multiply_large_numbers(omx_shape) * block_size) == \
            num_of_matrices * matrix_dimension ** 2
agpl-3.0
Python
387e0729ea7c92920f15abcc04eaa52a320447fd
return url for eval.
mzweilin/HashTag-Understanding,mzweilin/HashTag-Understanding,mzweilin/HashTag-Understanding
job.py
job.py
import lib.search.bing_search as bing
import lib.tweet.parseTwitter as twitter
from lib.querygen.tweets2query import QueryGenerator
import lib.summarization.tagdef as tagdef
from lib.summarization import extractor
import string

import logging
logging.basicConfig(level=logging.INFO)
logger = logging


def main():
    import sys
    if len(sys.argv) >= 2:
        hashtag = '#'+sys.argv[1]
        job = Job(hashtag)
        urls = job.execute()
        print(urls)


class Job:
    def __init__(self, hashtag):
        self.hashtag = hashtag.strip("#" + string.whitespace)

    def execute(self):
        results = {}
        results['references'] = self.getURLs()
        #results['similar-tags'] = self.getSimilarHashTags()
        #results['tagdef-summary'] = self.getTagDefSummary()
        urls = results['references']['ubd'] + results['references']['wiki'] + results['references']['web'] + results['references']['news']
        #results['summary'] = self.getSummary(urls)
        return urls

    def getSimilarHashTags(self):
        return twitter.retrieveRelatedHashtags('#' + self.hashtag)

    def getSummary(self, urls):
        num_sentences = 10
        return extractor.summarize(urls, num_sentences)

    def getTagDefSummary(self):
        return tagdef.lookup(self.hashtag)

    def getURLs(self):
        generator = QueryGenerator()
        tweets = twitter.retrieveTweetText('#'+self.hashtag, 5)
        queries = generator.gen_query_list('#'+self.hashtag, tweets)
        logger.info(generator.preview_counters())
        logger.info(queries)
        urls_ubd = bing.group_search(queries, 2, on_ubd=True, weight_step=3)
        urls_wiki = bing.group_search(queries, 2, on_wiki=True)
        urls_news = bing.group_search(queries, 2, category='News', on_wiki=False)
        urls_web = bing.group_search(queries, 2, on_wiki=False)
        return {'ubd':urls_ubd, 'wiki': urls_wiki, 'news': urls_news, 'web': urls_web}


if __name__ == "__main__":
    main()
import lib.search.bing_search as bing
import lib.tweet.parseTwitter as twitter
from lib.querygen.tweets2query import QueryGenerator
import lib.summarization.tagdef as tagdef
from lib.summarization import extractor
import string
import logging

logging.basicConfig(level=logging.INFO)
logger = logging


def main():
    import sys
    if len(sys.argv) >= 2:
        hashtag = '#'+sys.argv[1]
        job = Job(hashtag)
        urls = job.execute()
        print(urls)


class Job:
    def __init__(self, hashtag):
        self.hashtag = hashtag.strip("#" + string.whitespace)

    def execute(self):
        results = {}
        results['references'] = self.getURLs()
        #results['similar-tags'] = self.getSimilarHashTags()
        #results['tagdef-summary'] = self.getTagDefSummary()
        urls = results['references']['ubd'] + results['references']['wiki'] + results['references']['web'] + results['references']['news']
        #results['summary'] = self.getSummary(urls)
        return results

    def getSimilarHashTags(self):
        return twitter.retrieveRelatedHashtags('#' + self.hashtag)

    def getSummary(self, urls):
        num_sentences = 10
        return extractor.summarize(urls, num_sentences)

    def getTagDefSummary(self):
        return tagdef.lookup(self.hashtag)

    def getURLs(self):
        generator = QueryGenerator()
        tweets = twitter.retrieveTweetText('#'+self.hashtag, 5)
        queries = generator.gen_query_list('#'+self.hashtag, tweets)
        logger.info(generator.preview_counters())
        logger.info(queries)
        urls_ubd = bing.group_search(queries, 2, on_ubd=True, weight_step=3)
        urls_wiki = bing.group_search(queries, 2, on_wiki=True)
        urls_news = bing.group_search(queries, 2, category='News', on_wiki=False)
        urls_web = bing.group_search(queries, 2, on_wiki=False)
        return {'ubd':urls_ubd, 'wiki': urls_wiki, 'news': urls_news, 'web': urls_web}


if __name__ == "__main__":
    main()
apache-2.0
Python
e4649b40ee5ba1bb9c7d43acb4e599b210f9dd4a
Rename test and function to a more appropriate ones.
bsamorodov/selenium-py-training-samorodov
php4dvd/test_deletefilm.py
php4dvd/test_deletefilm.py
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest


class DeleteFilm(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(10)
        self.base_url = "http://hub.wart.ru/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_deletefilm(self):
        driver = self.driver
        driver.get(self.base_url + "php4dvd/")
        driver.find_element_by_id("username").clear()
        driver.find_element_by_id("username").send_keys("admin")
        driver.find_element_by_name("password").clear()
        driver.find_element_by_name("password").send_keys("admin")
        driver.find_element_by_name("submit").click()
        driver.find_element_by_css_selector(u"img[alt=\"Солнце\"]").click()
        driver.find_element_by_css_selector("img[alt=\"Remove\"]").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to remove this[\s\S]$")
        driver.find_element_by_link_text("Home").click()
        driver.find_element_by_link_text("Log out").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException, e:
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)


if __name__ == "__main__":
    unittest.main()
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest


class AddFilm(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(10)
        self.base_url = "http://hub.wart.ru/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_addfilm(self):
        driver = self.driver
        driver.get(self.base_url + "php4dvd/")
        driver.find_element_by_id("username").clear()
        driver.find_element_by_id("username").send_keys("admin")
        driver.find_element_by_name("password").clear()
        driver.find_element_by_name("password").send_keys("admin")
        driver.find_element_by_name("submit").click()
        driver.find_element_by_css_selector(u"img[alt=\"Солнце\"]").click()
        driver.find_element_by_css_selector("img[alt=\"Remove\"]").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to remove this[\s\S]$")
        driver.find_element_by_link_text("Home").click()
        driver.find_element_by_link_text("Log out").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException, e:
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)


if __name__ == "__main__":
    unittest.main()
bsd-2-clause
Python
4644a70f20901f221fe307adc94d7cfb9059649a
Bump version
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
pytablereader/__version__.py
pytablereader/__version__.py
# encoding: utf-8

from datetime import datetime


__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.23.1"
__maintainer__ = __author__
__email__ = "[email protected]"
# encoding: utf-8

from datetime import datetime


__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.23.0"
__maintainer__ = __author__
__email__ = "[email protected]"
mit
Python
c4a77bf510bf23e25f259aaae8c1effa65e45a85
fix bug when trying to get a slice (of pizza)
ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt
python/ccxtpro/base/cache.py
python/ccxtpro/base/cache.py
import collections


class Delegate:
    def __init__(self, name):
        self.name = name

    def __get__(self, instance, owner):
        return getattr(instance, self.name)


class ArrayCache(list):
    # implicitly called magic methods don't invoke __getattribute__
    # https://docs.python.org/3/reference/datamodel.html#special-method-lookup
    # all method lookups obey the descriptor protocol
    # this is how the implicit api is defined in ccxt under the hood
    __iter__ = Delegate('__iter__')
    __setitem__ = Delegate('__setitem__')
    __delitem__ = Delegate('__delitem__')
    __len__ = Delegate('__len__')
    __contains__ = Delegate('__contains__')
    __reversed__ = Delegate('__reversed__')

    def __init__(self, max_size):
        super(list, self).__init__()
        self._deque = collections.deque([], max_size)

    def __eq__(self, other):
        return list(self) == other

    def __getattribute__(self, item):
        deque = super(list, self).__getattribute__('_deque')
        return getattr(deque, item)

    def __repr__(self):
        return str(list(self))

    def __add__(self, other):
        return list(self) + other

    def __getitem__(self, item):
        deque = super(list, self).__getattribute__('_deque')
        if isinstance(item, slice):
            start, stop, step = item.indices(len(deque))
            return [deque[i] for i in range(start, stop, step)]
        else:
            return deque[item]
import collections


class Delegate:
    def __init__(self, name):
        self.name = name

    def __get__(self, instance, owner):
        return getattr(instance, self.name)


class ArrayCache(list):
    # implicitly called magic methods don't invoke __getattribute__
    # https://docs.python.org/3/reference/datamodel.html#special-method-lookup
    # all method lookups obey the descriptor protocol
    # this is how the implicit api is defined in ccxt under the hood
    __iter__ = Delegate('__iter__')
    __getitem__ = Delegate('__getitem__')
    __setitem__ = Delegate('__setitem__')
    __delitem__ = Delegate('__delitem__')
    __len__ = Delegate('__len__')
    __contains__ = Delegate('__contains__')
    __reversed__ = Delegate('__reversed__')

    def __init__(self, max_size):
        super(list, self).__init__()
        self._deque = collections.deque([], max_size)

    def __eq__(self, other):
        return list(self) == other

    def __getattribute__(self, item):
        deque = super(list, self).__getattribute__('_deque')
        return getattr(deque, item)

    def __repr__(self):
        return str(list(self))

    def __add__(self, other):
        return list(self) + other
mit
Python
07fa886690539d097b212375598d7ca3239664ba
Make option group items appear the same in the cart as text options for consistency
chrisglass/django-shop-simplevariations,chrisglass/django-shop-simplevariations
shop_simplevariations/cart_modifier.py
shop_simplevariations/cart_modifier.py
#-*- coding: utf-8 -*-
from shop.cart.cart_modifiers_base import BaseCartModifier

from shop_simplevariations.models import CartItemOption, CartItemTextOption


class ProductOptionsModifier(BaseCartModifier):
    '''
    This modifier adds an extra field to the cart to let the lineitem "know"
    about product options and their respective price.
    '''
    def process_cart_item(self, cart_item, request):
        '''
        This adds a list of price modifiers depending on the product options
        the client selected for the current cart_item (if any)
        '''
        selected_options = CartItemOption.objects.filter(cartitem=cart_item)
        for selected_opt in selected_options:
            option_obj = selected_opt.option
            price = option_obj.price * cart_item.quantity
            # Render like text options (group name: "chosen option") for consistency
            data = ('%s: "%s"' % (option_obj.group.name, option_obj.name), price)
            # Don't forget to update the running total!
            cart_item.current_total = cart_item.current_total + price
            cart_item.extra_price_fields.append(data)
        return cart_item


class TextOptionsModifier(BaseCartModifier):
    """
    This price modifier appends all the text options it finds in the database
    for a given cart item to the item's extra_price_fields.
    """
    def process_cart_item(self, cart_item, request):
        text_options = CartItemTextOption.objects.filter(cartitem=cart_item)
        for text_opt in text_options:
            price = text_opt.text_option.price
            data = ('%s: "%s"' % (text_opt.text_option.name,text_opt.text), price)
            # Don't forget to update the running total!
            cart_item.current_total = cart_item.current_total + price
            #Append to the cart_item's list now.
            cart_item.extra_price_fields.append(data)
        return cart_item
#-*- coding: utf-8 -*-
from shop.cart.cart_modifiers_base import BaseCartModifier

from shop_simplevariations.models import CartItemOption, CartItemTextOption


class ProductOptionsModifier(BaseCartModifier):
    '''
    This modifier adds an extra field to the cart to let the lineitem "know"
    about product options and their respective price.
    '''
    def process_cart_item(self, cart_item, request):
        '''
        This adds a list of price modifiers depending on the product options
        the client selected for the current cart_item (if any)
        '''
        selected_options = CartItemOption.objects.filter(cartitem=cart_item)
        for selected_opt in selected_options:
            option_obj = selected_opt.option
            price = option_obj.price * cart_item.quantity
            data = (option_obj.name, price)
            # Don't forget to update the running total!
            cart_item.current_total = cart_item.current_total + price
            cart_item.extra_price_fields.append(data)
        return cart_item


class TextOptionsModifier(BaseCartModifier):
    """
    This price modifier appends all the text options it finds in the database
    for a given cart item to the item's extra_price_fields.
    """
    def process_cart_item(self, cart_item, request):
        text_options = CartItemTextOption.objects.filter(cartitem=cart_item)
        for text_opt in text_options:
            price = text_opt.text_option.price
            data = ('%s: "%s"' % (text_opt.text_option.name,text_opt.text), price)
            # Don't forget to update the running total!
            cart_item.current_total = cart_item.current_total + price
            #Append to the cart_item's list now.
            cart_item.extra_price_fields.append(data)
        return cart_item
bsd-3-clause
Python
8ebba5de25de289046bdca46f1613a337f1aacbf
Improve CommentForm tests
pbanaszkiewicz/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy
amy/extcomments/tests.py
amy/extcomments/tests.py
from django.test import TestCase
from django.urls import reverse
import django_comments

from workshops.models import Organization, Person


class TestEmailFieldRequiredness(TestCase):
    def test_email_field_requiredness(self):
        """Regression test for #1944.

        Previously a user without email address would not be able to add
        a comment."""
        # Arrange
        organization = Organization.objects.create(
            domain="example.org",
            fullname="Example Organisation",
        )
        CommentForm = django_comments.get_form()
        data = {
            "honeypot": "",
            "comment": "Content",
            "name": "Ron",  # required outside the request cycle
            **CommentForm(organization).generate_security_data(),
        }

        # Act
        form = CommentForm(organization, data)

        # Assert
        self.assertTrue(form.is_valid())

    def test_email_field_requiredness_POST(self):
        """Regression test for #1944.

        Previously a user without email address would not be able to add
        a comment. This test makes a POST request with comment data."""
        # Arrange
        person = Person.objects.create(
            personal="Ron",
            family="Weasley",
            username="rw",
            is_active=True,
            email="",
            data_privacy_agreement=True,
        )
        organization = Organization.objects.create(
            domain="example.org",
            fullname="Example Organisation",
        )
        CommentModel = django_comments.get_model()
        CommentForm = django_comments.get_form()
        data = {
            "honeypot": "",
            "comment": "Content",
            **CommentForm(organization).generate_security_data(),
        }

        # Act
        self.client.force_login(person)
        self.client.post(reverse("comments-post-comment"), data=data, follow=True)

        # Assert
        self.assertEqual(CommentModel.objects.for_model(organization).count(), 1)
from django.test import TestCase
import django_comments

from workshops.models import Organization, Person


class TestEmailFieldRequiredness(TestCase):
    def test_email_field_requiredness(self):
        """Regression test for #1944.

        Previously a user without email address would not be able to add
        a comment."""
        # Arrange
        person = Person.objects.create(
            personal="Ron",
            family="Weasley",
            username="rw",
            is_active=True,
            email="",
        )
        person.set_password("testrwpassword")
        self.client.login(username="rw", password="testrwpassword")
        organization = Organization.objects.create(
            domain="example.org",
            fullname="Example Organisation"
        )
        CommentForm = django_comments.get_form()
        data = {
            "honeypot": "",
            "comment": "Content",
            "name": "Ron",
            **CommentForm(organization).generate_security_data(),
        }

        # Act
        form = CommentForm(organization, data)

        # Assert
        self.assertTrue(form.is_valid())
mit
Python
afbc63d29a23170d17ce18e0c39a403de974aede
Use of websockets for the episodes listing
rkohser/gustaf,rkohser/gustaf,rkohser/gustaf
app/handlers/__init__.py
app/handlers/__init__.py
__author__ = 'roland'

from handlers.mainhandler import MainHandler
from handlers.showhandler import ShowHandler
__author__ = 'roland'
mit
Python
36f2c75f177b076ce54cb1d056b715edb15377f8
Bump app version number.
kernelci/kernelci-backend,kernelci/kernelci-backend
app/handlers/__init__.py
app/handlers/__init__.py
__version__ = "2015.7.4" __versionfull__ = __version__
__version__ = "2015.7.3" __versionfull__ = __version__
lgpl-2.1
Python
dd791f210379907b909c1a52492a380d17c88058
add arguments
jmlero/python-compressandmove
compressandmove.py
compressandmove.py
#!/usr/bin/env python

# file.py Code
#
# Copyright (c) Jose M. Molero
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

"""
Code sample to

Notes:
- Insert notes

TODO list:
- TODO
"""

# stdlib imports
import argparse
import errno
import os

# global variables
_SCRIPT_VERSION = '0.0.1'


def main():
    """Main function
    Parameters:
        None
    Returns:
        Nothing
    Raises:
        ValueError for invalid arguments
    """
    # get args
    args = parseargs()

    # check parameters
    if args.folder is None or len(args.folder) < 1:
        raise ValueError('no folder to compress was specified')
    if not os.path.isdir(args.folder):
        raise ValueError('folder does not exist: %s' % args.folder)


def parseargs():  # pragma: no cover
    """Sets up command-line arguments and parser
    Parameters:
        Nothing
    Returns:
        Parsed arguments
    Raises:
        Nothing
    """
    parser = argparse.ArgumentParser(description='Compress and move folders')
    parser.add_argument("-v", "--version",
                        help="show program's version number and exit",
                        action='version', version=_SCRIPT_VERSION)
    parser.add_argument("-f", "--folder", help='specify the folder to compress')
    parser.add_argument("-d", "--delete", help='delete folder at the end')
    return parser.parse_args()


if __name__ == '__main__':
    main()
#!/usr/bin/env python

# file.py Code
#
# Copyright (c) Jose M. Molero
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

"""
Code sample to

Notes:
- Insert notes

TODO list:
- TODO
"""

# stdlib imports
import argparse
import errno
import os

# global variables
_SCRIPT_VERSION = '0.0.1'


def main():
    """Main function
    Parameters:
        None
    Returns:
        Nothing
    Raises:
        ValueError for invalid arguments
    """
    # get args
    args = parseargs()

    # check parameters


def parseargs():  # pragma: no cover
    """Sets up command-line arguments and parser
    Parameters:
        Nothing
    Returns:
        Parsed arguments
    Raises:
        Nothing
    """
    parser = argparse.ArgumentParser(description='Compress and move folders')
    parser.add_argument("-v", "--version",
                        help="show program's version number and exit",
                        action='version', version=_SCRIPT_VERSION)
    return parser.parse_args()


if __name__ == '__main__':
    main()
mit
Python
e17efc67e20e1db1f11c00853dd26da250e3655e
add access rule for event_moodle
steedos/odoo,GauravSahu/odoo,joariasl/odoo,Endika/odoo,FlorianLudwig/odoo,Drooids/odoo,n0m4dz/odoo,mustafat/odoo-1,dkubiak789/odoo,kittiu/odoo,ccomb/OpenUpgrade,0k/OpenUpgrade,numerigraphe/odoo,ingadhoc/odoo,jfpla/odoo,alexcuellar/odoo,jfpla/odoo,Endika/odoo,FlorianLudwig/odoo,Drooids/odoo,n0m4dz/odoo,hoatle/odoo,vnsofthe/odoo,JonathanStein/odoo,mustafat/odoo-1,dkubiak789/odoo,kittiu/odoo,odoo-turkiye/odoo,blaggacao/OpenUpgrade,jeasoft/odoo,nuuuboo/odoo,naousse/odoo,ubic135/odoo-design,Ichag/odoo,RafaelTorrealba/odoo,alexcuellar/odoo,jeasoft/odoo,nuuuboo/odoo,naousse/odoo,ubic135/odoo-design,Ichag/odoo,RafaelTorrealba/odoo,hoatle/odoo,vnsofthe/odoo,JonathanStein/odoo,odoo-turkiye/odoo,blaggacao/OpenUpgrade,bakhtout/odoo-educ,mvaled/OpenUpgrade,fdvarela/odoo8,Ichag/odoo,odoousers2014/odoo,JCA-Developpement/Odoo,bguillot/OpenUpgrade,laslabs/odoo,kittiu/odoo,bakhtout/odoo-educ,mvaled/OpenUpgrade,fdvarela/odoo8,odoousers2014/odoo,JCA-Developpement/Odoo,bguillot/OpenUpgrade,laslabs/odoo,savoirfairelinux/OpenUpgrade,savoirfairelinux/odoo,ygol/odoo,blaggacao/OpenUpgrade,jeasoft/odoo,nuuuboo/odoo,naousse/odoo,credativUK/OCB,mmbtba/odoo,microcom/odoo,abdellatifkarroum/odoo,alexcuellar/odoo,jiangzhixiao/odoo,chiragjogi/odoo,dkubiak789/odoo,BT-ojossen/odoo,juanalfonsopr/odoo,fuhongliang/odoo,joshuajan/odoo,CubicERP/odoo,ShineFan/odoo,alqfahad/odoo,shingonoide/odoo,gorjuce/odoo,odoo-turkiye/odoo,tinkhaven-organization/odoo,tinkerthaler/odoo,QianBIG/odoo,havt/odoo,rahuldhote/odoo,oihane/odoo,windedge/odoo,omprakasha/odoo,vnsofthe/odoo,funkring/fdoo,srsman/odoo,minhtuancn/odoo,hubsaysnuaa/odoo,abdellatifkarroum/odoo,odootr/odoo,hmen89/odoo,takis/odoo,gsmartway/odoo,abstract-open-solutions/OCB,tinkhaven-organization/odoo,odootr/odoo,lgscofield/odoo,jeasoft/odoo,rahuldhote/odoo,CopeX/odoo,jpshort/odoo,datenbetrieb/odoo,tinkerthaler/odoo,bakhtout/odoo-educ,mustafat/odoo-1,dkubiak789/odoo,kittiu/odoo,BT-fgarbely/odoo,Danisan/odoo-1,tangyiyong/odoo,nagyistoce/odoo-dev-odoo,eino-makitalo/odoo,sysadminmatmoz/OCB,hopeall/odoo,odoousers2014/odoo,elmerdpadilla/iv,jiangzhixiao/odoo,chiragjogi/odoo,dkubiak789/odoo,BT-ojossen/odoo,juanalfonsopr/odoo,fuhongliang/odoo,odoousers2014/odoo,PongPi/isl-odoo,agrista/odoo-saas,vrenaville/ngo-addons-backport,luistorresm/odoo,ChanduERP/odoo,waytai/odoo,abdellatifkarroum/odoo,mustafat/odoo-1,odoo-turkiye/odoo,VielSoft/odoo,hoatle/odoo,apocalypsebg/odoo,nexiles/odoo,Maspear/odoo,eino-makitalo/odoo,incaser/odoo-odoo,BT-rmartin/odoo,alexteodor/odoo,rowemoore/odoo,luiseduardohdbackup/odoo,rgeleta/odoo,JonathanStein/odoo,dezynetechnologies/odoo,odoousers2014/odoo,salaria/odoo,odooindia/odoo,ramitalat/odoo,joariasl/odoo,jfpla/odoo,brijeshkesariya/odoo,odoo-turkiye/odoo,blaggacao/OpenUpgrade,bealdav/OpenUpgrade,steedos/odoo,GauravSahu/odoo,joariasl/odoo,Endika/odoo,FlorianLudwig/odoo,Drooids/odoo,n0m4dz/odoo,funkring/fdoo,feroda/odoo,kirca/OpenUpgrade,Kilhog/odoo,Gitlab11/odoo,luiseduardohdbackup/odoo,bguillot/OpenUpgrade,chiragjogi/odoo,datenbetrieb/odoo,stephen144/odoo,KontorConsulting/odoo,sve-odoo/odoo,x111ong/odoo,demon-ru/iml-crm,havt/odoo,alexteodor/odoo,ccomb/OpenUpgrade,ujjwalwahi/odoo,nuncjo/odoo,hip-odoo/odoo,alqfahad/odoo,shingonoide/odoo,grap/OCB,nexiles/odoo,guewen/OpenUpgrade,cedk/odoo,steedos/odoo,srsman/odoo,srimai/odoo,kirca/OpenUpgrade,Kilhog/odoo,Gitlab11/odoo,Gitlab11/odoo,luiseduardohdbackup/odoo,bguillot/OpenUpgrade,chiragjogi/odoo,datenbetrieb/odoo,stephen144/odoo,KontorConsulting/odoo,sve-odoo/odoo,x111ong/odoo,demon-ru/iml-crm,havt/odoo,alexteodor/odoo,ccomb/OpenUpgrade,ujjwalwahi/odoo,nuncjo/odoo,hip-odoo/odoo,alqfahad/odoo,shingonoide/odoo,grap/OCB,alhashash/odoo,savoirfairelinux/odoo,jusdng/odoo,jaxkodex/odoo,cloud9UG/odoo,christophlsa/odoo,thanhacun/odoo,janocat/odoo,dgzurita/odoo,joshuajan/odoo,ujjwalwahi/odoo,ovnicraft/odoo,osvalr/odoo,slevenhagen/odoo-npg,Ernesto99/odoo,windedge/odoo,hopeall/odoo,odoo-turkiye/odoo,janocat/odoo,dsfsdgsbngfggb/odoo,odoo-turkiye/odoo,KontorConsulting/odoo,sve-odoo/odoo,waytai/odoo,spadae22/odoo,0k/OpenUpgrade,BT-fgarbely/odoo,xujb/odoo,agrista/odoo-saas,brijeshkesariya/odoo,dalegregory/odoo,sebalix/OpenUpgrade,bealdav/OpenUpgrade,spadae22/odoo,ovnicraft/odoo,srsman/odoo,oliverhr/odoo,CatsAndDogsbvba/odoo,Ernesto99/odoo,ramitalat/odoo,dkubiak789/odoo,cloud9UG/odoo,slevenhagen/odoo,mmbtba/odoo,ojengwa/odoo,prospwro/odoo,SerpentCS/odoo,gsmartway/odoo,diagramsoftware/odoo,brijeshkesariya/odoo,dezynetechnologies/odoo,waytai/odoo,ygol/odoo,Bachaco-ve/odoo,frouty/odoogoeen,hanicker/odoo,bguillot/OpenUpgrade,blaggacao/OpenUpgrade,ramitalat/odoo,kybriainfotech/iSocioCRM,nuncjo/odoo,markeTIC/OCB,lgscofield/odoo,florentx/OpenUpgrade,dllsf/odootest,idncom/odoo,jaxkodex/odoo,sv-dev1/odoo,windedge/odoo,hmen89/odoo,andreparames/odoo,QianBIG/odoo,vrenaville/ngo-addons-backport,Nick-OpusVL/odoo,gvb/odoo,RafaelTorrealba/odoo,dariemp/odoo,ecosoft-odoo/odoo,osvalr/odoo,fjbatresv/odoo,savoirfairelinux/odoo,sysadminmatmoz/OCB,ChanduERP/odoo,OSSESAC/odoopubarquiluz,blaggacao/OpenUpgrade,cdrooom/odoo,diagramsoftware/odoo,guewen/OpenUpgrade,tvibliani/odoo,pplatek/odoo,FlorianLudwig/odoo,rdeheele/odoo,jpshort/odoo,Gitlab11/odoo,kifcaliph/odoo,JCA-Developpement/Odoo,slevenhagen/odoo,RafaelTorrealba/odoo,bplancher/odoo,virgree/odoo,mlaitinen/odoo,mkieszek/odoo,bplancher/odoo,nitinitprof/odoo,poljeff/odoo,Eric-Zhong/odoo,frouty/odoo_oph,gorjuce/odoo,tarzan0820/odoo,ShineFan/odoo,sv-dev1/odoo,jeasoft/odoo,hopeall/odoo,VitalPet/odoo,ygol/odoo,credativUK/OCB,NL66278/OCB,CopeX/odoo,provaleks/o8,JGarcia-Panach/odoo,salaria/odoo,OpenPymeMx/OCB,storm-computers/odoo,hifly/OpenUpgrade,blaggacao/OpenUpgrade,ChanduERP/odoo,idncom/odoo,fossoult/odoo,cpyou/odoo,klunwebale/odoo,ChanduERP/odoo,Nowheresly/odoo,ccomb/OpenUpgrade,Eric-Zhong/odoo,funkring/fdoo,ramitalat/odoo,luiseduardohdbackup/odoo,doomsterinc/odoo,KontorConsulting/odoo,spadae22/odoo,aviciimaxwell/odoo,ojengwa/odoo,arthru/OpenUpgrade,0k/OpenUpgrade,incaser/odoo-odoo,Endika/OpenUpgrade,dalegregory/odoo,collex100/odoo,havt/odoo,SAM-IT-SA/odoo,hopeall/odoo,abdellatifkarroum/odoo,OpenUpgrade/OpenUpgrade,omprakasha/odoo,shaufi10/odoo,Maspear/odoo,damdam-s/OpenUpgrade,luistorresm/odoo,tvibliani/odoo,luiseduardohdbackup/odoo,nagyistoce/odoo-dev-odoo,makinacorpus/odoo,rahuldhote/odoo,lightcn/odoo,grap/OpenUpgrade,papouso/odoo,nitinitprof/odoo,apocalypsebg/odoo,factorlibre/OCB,gorjuce/odoo,csrocha/OpenUpgrade,christophlsa/odoo,oasiswork/odoo,demon-ru/iml-crm,OpenPymeMx/OCB,nhomar/odoo-mirror,mkieszek/odoo,ecosoft-odoo/odoo,makinacorpus/odoo,sv-dev1/odoo,nexiles/odoo,rowemoore/odoo,arthru/OpenUpgrade,0k/odoo,lombritz/odoo,hassoon3/odoo,hanicker/odoo,Grirrane/odoo,TRESCLOUD/odoopub,odoousers2014/odoo,savoirfairelinux/OpenUpgrade,fjbatresv/odoo,dezynetechnologies/odoo,sebalix/OpenUpgrade,massot/odoo,rschnapka/odoo,poljeff/odoo,bwrsandman/OpenUpgrade,acshan/odoo,omprakasha/odoo,dkubiak789/odoo,jolevq/odoopub,shivam1111/odoo,jesramirez/odoo,Ernesto99/odoo,nagyistoce/odoo-dev-odoo,tinkhaven-organization/odoo,Danisan/odoo-1,FlorianLudwig/odoo,hip-odoo/odoo,joshuajan/odoo,Gitlab11/odoo,cloud9UG/odoo,CopeX/odoo,sadleader/odoo,tvtsoft/odoo8,pedrobaeza/OpenUpgrade,pedrobaeza/odoo,rubencabrera/odoo,abdellatifkarroum/odoo,rschnapka/odoo,camptocamp/ngo-addons-backport,fgesora/odoo,jpshort/odoo,shingonoide/odoo,OpusVL/odoo,hassoon3/odoo,Eric-Zhong/odoo,apocalypsebg/odoo,rahuldhote/odoo,Ichag/odoo,fjbatresv/odoo,jpshort/odoo,sinbazhou/odoo,QianBIG/odoo,makinacorpus/odoo,datenbetrieb/odoo,shaufi/odoo,mustafat/odoo-1,sinbazhou/odoo,JGarcia-Panach/odoo,csrocha/OpenUpgrade,GauravSahu/odoo,OpenUpgrade-dev/OpenUpgrade,kifcaliph/odoo,fjbatresv/odoo,frouty/odoo_oph,microcom/odoo,dgzurita/odoo,cedk/odoo,ygol/odoo,nuuuboo/odoo,kirca/OpenUpgrade,andreparames/odoo,CubicERP/odoo,realsaiko/odoo,OpenUpgrade/OpenUpgrade,sebalix/OpenUpgrade,grap/OCB,hoatle/odoo,NeovaHealth/odoo,spadae22/odoo,ChanduERP/odoo,diagramsoftware/odoo,mvaled/OpenUpgrade,poljeff/odoo,bkirui/odoo,mszewczy/odoo,bguillot/OpenUpgrade,VitalPet/odoo,numerigraphe/odoo,hip-odoo/odoo,BT-rmartin/odoo,prospwro/odoo,feroda/odoo,ccomb/OpenUpgrade,wangjun/odoo,matrixise/odoo,sysadminmatmoz/OCB,rgeleta/odoo,alexcuellar/odoo,slevenhagen/odoo-npg,dkubiak789/odoo,Danisan/odoo-1,hubsaysnuaa/odoo,credativUK/OCB,tvtsoft/odoo8,ApuliaSoftware/odoo,wangjun/odoo,addition-it-solutions/project-all,numerigraphe/odoo,apanju/odoo,odoousers2014/odoo,TRESCLOUD/odoopub,acshan/odoo,incaser/odoo-odoo,gdgellatly/OCB1,luistorresm/odoo,ClearCorp-dev/odoo,wangjun/odoo,0k/odoo,apanju/GMIO_Odoo,shaufi/odoo,hip-odoo/odoo,dgzurita/odoo,NeovaHealth/odoo,cedk/odoo,dllsf/odootest,nhomar/odoo-mirror,colinnewell/odoo,credativUK/OCB,bkirui/odoo,srimai/odoo,NeovaHealth/odoo,Ichag/odoo,Kilhog/odoo,thanhacun/odoo,realsaiko/odoo,glovebx/odoo,charbeljc/OCB,elmerdpadilla/iv,andreparames/odoo,oihane/odoo,AuyaJackie/odoo,lsinfo/odoo,guerrerocarlos/odoo,jfpla/odoo,naousse/odoo,hanicker/odoo,jiachenning/odoo,VitalPet/odoo,cedk/odoo,QianBIG/odoo,elmerdpadilla/iv,tvibliani/odoo,mvaled/OpenUpgrade,cysnake4713/odoo,ramadhane/odoo,VitalPet/odoo,sv-dev1/odoo,fuselock/odoo,0k/odoo,cpyou/odoo,laslabs/odoo,jusdng/odoo,minhtuancn/odoo,sysadminmatmoz/OCB,draugiskisprendimai/odoo,srsman/odoo,Ichag/odoo,feroda/odoo,dezynetechnologies/odoo,papouso/odoo,ApuliaSoftware/odoo,rowemoore/odoo,luiseduardohdbackup/odoo,optima-ict/odoo,avoinsystems/odoo,tinkerthaler/odoo,nagyistoce/odoo-dev-odoo,mmbtba/odoo,patmcb/odoo,collex100/odoo,hassoon3/odoo,jusdng/odoo,markeTIC/OCB,slevenhagen/odoo-npg,gvb/odoo,colinnewell/odoo,BT-ojossen/odoo,dezynetechnologies/odoo,arthru/OpenUpgrade,Bachaco-ve/odoo,tvibliani/odoo,hoatle/odoo,BT-rmartin/odoo,abstract-open-solutions/OCB,optima-ict/odoo,nhomar/odoo-mirror,diagramsoftware/odoo,slevenhagen/odoo-npg,hubsaysnuaa/odoo,collex100/odoo,abstract-open-solutions/OCB,glovebx/odoo,damdam-s/OpenUpgrade,makinacorpus/odoo,RafaelTorrealba/odoo,x111ong/odoo,stephen144/odoo,gavin-feng/odoo,abdellatifkarroum/odoo,charbeljc/OCB,OSSESAC/odoopubarquiluz,QianBIG/odoo,javierTerry/odoo,JCA-Developpement/Odoo,gorjuce/odoo,cdrooom/odoo,collex100/odoo,gdgellatly/OCB1,addition-it-solutions/project-all,kifcaliph/odoo,OpenUpgrade/OpenUpgrade,n0m4dz/odoo,frouty/odoogoeen,windedge/odoo,bwrsandman/OpenUpgrade,hifly/OpenUpgrade,dgzurita/odoo,lightcn/odoo,javierTerry/odoo,MarcosCommunity/odoo,Maspear/odoo,SAM-IT-SA/odoo,jesramirez/odoo,sve-odoo/odoo,ThinkOpen-Solutions/odoo,doomsterinc/odoo,CubicERP/odoo,sysadminmatmoz/OCB,kifcaliph/odoo,alexcuellar/odoo,xujb/odoo,KontorConsulting/odoo,christophlsa/odoo,dsfsdgsbngfggb/odoo,cloud9UG/odoo,Eric-Zhong/odoo,Bachaco-ve/odoo,RafaelTorrealba/odoo,provaleks/o8,rubencabrera/odoo,jeasoft/odoo,bobisme/odoo,slevenhagen/odoo,ojengwa/odoo,OpenPymeMx/OCB,stephen144/odoo,Daniel-CA/odoo,mmbtba/odoo,CatsAndDogsbvba/odoo,Endika/odoo,SerpentCS/odoo,highco-groupe/odoo,matrixise/odoo,chiragjogi/odoo,deKupini/erp,apanju/GMIO_Odoo,ubic135/odoo-design,aviciimaxwell/odoo,Noviat/odoo,oliverhr/odoo,erkrishna9/odoo,tvtsoft/odoo8,alqfahad/odoo,hanicker/odoo,mszewczy/odoo,CatsAndDogsbvba/odoo,mkieszek/odoo,Gitlab11/odoo,dfang/odoo,javierTerry/odoo,Elico-Corp/odoo_OCB,bplancher/odoo,mlaitinen/odoo,ThinkOpen-Solutions/odoo,chiragjogi/odoo,funkring/fdoo,vnsofthe/odoo,srimai/odoo,windedge/odoo,0k/OpenUpgrade,sadleader/odoo,frouty/odoogoeen,abenzbiria/clients_odoo,sve-odoo/odoo,cysnake4713/odoo,PongPi/isl-odoo,goliveirab/odoo,datenbetrieb/odoo,inspyration/odoo,rschnapka/odoo,naousse/odoo,Gitlab11/odoo,Kilhog/odoo,bobisme/odoo,KontorConsulting/odoo,fjbatresv/odoo,JonathanStein/odoo,MarcosCommunity/odoo,waytai/odoo,lgscofield/odoo,OpenPymeMx/OCB,odootr/odoo,sysadminmatmoz/OCB,doomsterinc/odoo,incaser/odoo-odoo,avoinsystems/odoo,synconics/odoo,dllsf/odootest,Noviat/odoo,jiangzhixiao/odoo,optima-ict/odoo,Maspear/odoo,pedrobaeza/odoo,Danisan/odoo-1,GauravSahu/odoo,guerrerocarlos/odoo,bwrsandman/OpenUpgrade,jiachenning/odoo,mustafat/odoo-1,massot/odoo,hbrunn/OpenUpgrade,jiachenning/odoo,hmen89/odoo,grap/OpenUpgrade,tinkhaven-organization/odoo,colinnewell/odoo,ccomb/OpenUpgrade,virgree/odoo,rdeheele/odoo,janocat/odoo,cysnake4713/odoo,fdvarela/odoo8,kybriainfotech/iSocioCRM,nhomar/odoo,grap/OCB,RafaelTorrealba/odoo,poljeff/odoo,kybriainfotech/iSocioCRM,ehirt/odoo,Grirrane/odoo,Endika/OpenUpgrade,erkrishna9/odoo,oihane/odoo,draugiskisprendimai/odoo,aviciimaxwell/odoo,alqfahad/odoo,gavin-feng/odoo,mlaitinen/odoo,glovebx/odoo,apanju/odoo,cdrooom/odoo,Codefans-fan/odoo,Eric-Zhong/odoo,klunwebale/odoo,steedos/odoo,lsinfo/odoo,Codefans-fan/odoo,fossoult/odoo,apanju/odoo,charbeljc/OCB,sinbazhou/odoo,cysnake4713/odoo,blaggacao/OpenUpgrade,synconics/odoo,klunwebale/odoo,storm-computers/odoo,florentx/OpenUpgrade,jpshort/odoo,inspyration/odoo,alqfahad/odoo,BT-fgarbely/odoo,virgree/odoo,x111ong/odoo,apocalypsebg/odoo,Antiun/odoo,shivam1111/odoo,dgzurita/odoo,savoirfairelinux/odoo,leorochael/odoo,gdgellatly/OCB1,idncom/odoo,wangjun/odoo,xzYue/odoo,hifly/OpenUpgrade,fossoult/odoo,hoatle/odoo,dsfsdgsbngfggb/odoo,hoatle/odoo,alexcuellar/odoo,OpenUpgrade/OpenUpgrade,arthru/OpenUpgrade,odoo-turkiye/odoo,minhtuancn/odoo,xujb/odoo,PongPi/isl-odoo,ccomb/OpenUpgrade,mustafat/odoo-1,Adel-Magebinary/odoo,windedge/odoo,cpyou/odoo,BT-fgarbely/odoo,Bachaco-ve/odoo,ygol/odoo,hassoon3/odoo,ihsanudin/odoo,Codefans-fan/odoo,JGarcia-Panach/odoo,odooindia/odoo,acshan/odoo,rgeleta/odoo,shaufi10/odoo,gorjuce/odoo,osvalr/odoo,chiragjogi/odoo,osvalr/odoo,odootr/odoo,Gitlab11/odoo,bobisme/odoo,gavin-feng/odoo,sadleader/odoo,ojengwa/odoo,goliveirab/odoo,ihsanudin/odoo,mustafat/odoo-1,Ernesto99/odoo,bealdav/OpenUpgrade,vrenaville/ngo-addons-backport,OpenUpgrade-dev/OpenUpgrade,abstract-open-solutions/OCB,sinbazhou/odoo,RafaelTorrealba/odoo,Nowheresly/odoo,mmbtba/odoo,lightcn/odoo,Antiun/odoo,jusdng/odoo,virgree/odoo,colinnewell/odoo,bealdav/OpenUpgrade,spadae22/odoo,klunwebale/odoo,fossoult/odoo,CatsAndDogsbvba/odoo,ingadhoc/odoo,odoousers2014/odoo,fevxie/odoo,Maspear/odoo,credativUK/OCB,tinkhaven-organization/odoo,addition-it-solutions/project-all,hip-odoo/odoo,codekaki/odoo,Noviat/odoo,tvibliani/odoo,colinnewell/odoo,elmerdpadilla/iv,srimai/odoo,kittiu/odoo,prospwro/odoo,srimai/odoo,Antiun/odoo,ecosoft-odoo/odoo,Adel-Magebinary/odoo,abenzbiria/clients_odoo,fgesora/odoo,gsmartway/odoo,bguillot/OpenUpgrade,bealdav/OpenUpgrade,mmbtba/odoo,wangjun/odoo,BT-astauder/odoo,gsmartway/odoo,Ernesto99/odoo,havt/odoo,ojengwa/odoo,fgesora/odoo,avoinsystems/odoo,synconics/odoo,ApuliaSoftware/odoo,shaufi10/odoo,lsinfo/odoo,jaxkodex/odoo,makinacorpus/odoo,NL66278/OCB,odoo-turkiye/odoo,dariemp/odoo,apanju/GMIO_Odoo,steedos/odoo,odoo-turkiye/odoo,BT-ojossen/odoo,leorochael/odoo,codekaki/odoo,leoliujie/odoo,guewen/OpenUpgrade,agrista/odoo-saas,hifly/OpenUpgrade,virgree/odoo,florentx/OpenUpgrade,syci/OCB,hubsaysnuaa/odoo,hoatle/odoo,dgzurita/odoo,OSSESAC/odoopubarquiluz,rschnapka/odoo,cedk/odoo,kittiu/odoo,mkieszek/odoo,colinnewell/odoo,Nick-OpusVL/odoo,jiangzhixiao/odoo,mlaitinen/odoo,apocalypsebg/odoo,slevenhagen/odoo,jfpla/odoo,kirca/OpenUpgrade,nuuuboo/odoo,CopeX/odoo,guewen/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,prospwro/odoo,addition-it-solutions/project-all,brijeshkesariya/odoo,odoousers2014/odoo,Kilhog/odoo,xujb/odoo,gavin-feng/odoo,rubencabrera/odoo,BT-astauder/odoo,sergio-incaser/odoo,synconics/odoo,nuuuboo/odoo,salaria/odoo,factorlibre/OCB,dkubiak789/odoo,datenbetrieb/odoo,spadae22/odoo,markeTIC/OCB,sergio-incaser/odoo,slevenhagen/odoo,juanalfonsopr/odoo,fuhongliang/odoo,aviciimaxwell/odoo,dalegregory/odoo,highco-groupe/odoo,goliveirab/odoo,CubicERP/odoo,Endika/OpenUpgrade,dkubiak789/odoo,Ichag/odoo,leoliujie/odoo,ingadhoc/odoo,hopeall/odoo,hip-odoo/odoo,Elico-Corp/odoo_OCB,hbrunn/OpenUpgrade,leoliujie/odoo,BT-rmartin/odoo,rschnapka/odoo,oliverhr/odoo,dsfsdgsbngfggb/odoo,xujb/odoo,acshan/odoo,csrocha/OpenUpgrade,frouty/odoogoeen,salaria/odoo,luiseduardohdbackup/odoo,codekaki/odoo,ecosoft-odoo/odoo,shaufi10/odoo,leoliujie/odoo,mustafat/odoo-1,hassoon3/odoo,hbrunn/OpenUpgrade,guewen/OpenUpgrade,ovnicraft/odoo,sergio-incaser/odoo,fevxie/odoo,OpusVL/odoo,pedrobaeza/odoo,pedrobaeza/OpenUpgrade,takis/odoo,odooindia/odoo,CopeX/odoo,diagramsoftware/odoo,Daniel-CA/odoo,papouso/odoo,patmcb/odoo,joariasl/odoo,draugiskisprendimai/odoo,Daniel-CA/odoo,patmcb/odoo,shaufi10/odoo,nuncjo/odoo,AuyaJackie/odoo,wangjun/odoo,frouty/odoo_oph,grap/OCB,mvaled/OpenUpgrade,hopeall/odoo,jusdng/odoo,brijeshkesariya/odoo,ShineFan/odoo,pedrobaeza/OpenUpgrade,zchking/odoo,elmerdpadilla/iv,shivam1111/odoo,KontorConsulting/odoo,poljeff/odoo,srimai/odoo,goliveirab/odoo,hubsaysnuaa/odoo,idncom/odoo,jesramirez/odoo,credativUK/OCB,syci/OCB,kifcaliph/odoo,janocat/odoo,nuncjo/odoo,gsmartway/odoo,srsman/odoo,AuyaJackie/odoo,oihane/odoo,bwrsandman/OpenUpgrade,waytai/odoo,poljeff/odoo,avoinsystems/odoo,luiseduardohdbackup/odoo,guewen/OpenUpgrade,lsinfo/odoo,NeovaHealth/odoo,AuyaJackie/odoo,datenbetrieb/odoo,Bachaco-ve/odoo,stonegithubs/odoo,Grirrane/odoo,christophlsa/odoo,AuyaJackie/odoo,sadleader/odoo,erkrishna9/odoo,ecosoft-odoo/odoo,dalegregory/odoo,srsman/odoo,csrocha/OpenUpgrade,Elico-Corp/odoo_OCB,addition-it-solutions/project-all,rahuldhote/odoo,VielSoft/odoo,GauravSahu/odoo,idncom/odoo,jeasoft/odoo,rahuldhote/odoo,AuyaJackie/odoo,vrenaville/ngo-addons-backport,cpyou/odoo,codekaki/odoo,collex100/odoo,rowemoore/odoo,MarcosCommunity/odoo,fevxie/odoo,patmcb/odoo,frouty/odoogoeen,Adel-Magebinary/odoo,demon-ru/iml-crm,ygol/odoo,leorochael/odoo,osvalr/odoo,OpenUpgrade-dev/OpenUpgrade
addons/event_moodle/__openerp__.py
addons/event_moodle/__openerp__.py
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

{
    'name': 'Event Moodle',
    'version': '0.1',
    'category': 'Tools',
    'complexity': "easy",
    'description': """
    Configure your moodle server
    >site administration >plugins>web sevices >manage protocols
    activate the xmlrpc web service
    >site administration >plugins>web sevices >manage tokens
    create a token
    >site administration >plugins>web sevices >overview
    activate webservice
    """,
    'author': 'OpenERP SA',
    'depends': ['event'],
    'init_xml': [],
    'data': [
        'wizard_moodle.xml',
        'event_view.xml',
        'security/ir.model.access.csv'
    ],
    'demo_xml': [],
    'test': [],
    'installable': True,
    'active': False,
    'images': ['images/token.png','images/enable_webservice.png','images/active_xmlrpc.png'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

{
    'name': 'Event Moodle',
    'version': '0.1',
    'category': 'Tools',
    'complexity': "easy",
    'description': """
    Configure your moodle server
    >site administration >plugins>web sevices >manage protocols
    activate the xmlrpc web service
    >site administration >plugins>web sevices >manage tokens
    create a token
    >site administration >plugins>web sevices >overview
    activate webservice
    """,
    'author': 'OpenERP SA',
    'depends': ['event'],
    'init_xml': [],
    'data': [
        'wizard_moodle.xml',
        'event_view.xml'
    ],
    'demo_xml': [],
    'test': [],
    'installable': True,
    'active': False,
    'images': ['images/token.png','images/enable_webservice.png','images/active_xmlrpc.png'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Python
ea8ea2e6203ee6cb7580444207446c0bb82f7239
Add solution for Lesson_5_Analyzing_Data.10-Using_match_and_project.
krzyste/ud032,krzyste/ud032
Lesson_5_Analyzing_Data/10-Using_match_and_project/followers.py
Lesson_5_Analyzing_Data/10-Using_match_and_project/followers.py
#!/usr/bin/env python
"""
Write an aggregation query to answer this question:

Of the users in the "Brasilia" timezone who have tweeted 100 times or more,
who has the largest number of followers?

The following hints will help you solve this problem:
- Time zone is found in the "time_zone" field of the user object in each tweet.
- The number of tweets for each user is found in the "statuses_count" field.
  To access these fields you will need to use dot notation (from Lesson 4)
- Your aggregation query should return something like the following:
{u'ok': 1.0,
 u'result': [{u'_id': ObjectId('52fd2490bac3fa1975477702'),
              u'followers': 2597,
              u'screen_name': u'marbles',
              u'tweets': 12334}]}

Please modify only the 'make_pipeline' function so that it creates and returns
an aggregation pipeline that can be passed to the MongoDB aggregate function.
As in our examples in this lesson, the aggregation pipeline should be a list of
one or more dictionary objects. Please review the lesson examples if you are
unsure of the syntax.

Your code will be run against a MongoDB instance that we have provided.
If you want to run this code locally on your machine, you have to install
MongoDB, download and insert the dataset. For instructions related to MongoDB
setup and datasets please see Course Materials.

Please note that the dataset you are using here is a smaller version of the
twitter dataset used in examples in this lesson. If you attempt some of the
same queries that we looked at in the lesson examples, your results will be
different.
"""


def get_db(db_name):
    from pymongo import MongoClient
    client = MongoClient('localhost:27017')
    db = client[db_name]
    return db


def make_pipeline():
    # complete the aggregation pipeline
    pipeline = [{"$match": {"user.time_zone": "Brasilia"}},
                {"$match": {"user.statuses_count": {"$gte": 100}}},
                {"$project": {"tweets": "$user.statuses_count",
                              "screen_name": "$user.screen_name",
                              "followers": "$user.followers_count"}},
                {"$sort": {"followers": -1}},
                {"$limit": 1}]
    return pipeline


def aggregate(db, pipeline):
    result = db.tweets.aggregate(pipeline)
    return result


if __name__ == '__main__':
    db = get_db('twitter')
    pipeline = make_pipeline()
    result = aggregate(db, pipeline)
    assert len(result["result"]) == 1
    assert result["result"][0]["followers"] == 17209
    import pprint
    pprint.pprint(result)
#!/usr/bin/env python
"""
Write an aggregation query to answer this question:

Of the users in the "Brasilia" timezone who have tweeted 100 times or more,
who has the largest number of followers?

The following hints will help you solve this problem:
- Time zone is found in the "time_zone" field of the user object in each tweet.
- The number of tweets for each user is found in the "statuses_count" field.
  To access these fields you will need to use dot notation (from Lesson 4)
- Your aggregation query should return something like the following:
{u'ok': 1.0,
 u'result': [{u'_id': ObjectId('52fd2490bac3fa1975477702'),
              u'followers': 2597,
              u'screen_name': u'marbles',
              u'tweets': 12334}]}

Please modify only the 'make_pipeline' function so that it creates and returns
an aggregation pipeline that can be passed to the MongoDB aggregate function.
As in our examples in this lesson, the aggregation pipeline should be a list of
one or more dictionary objects. Please review the lesson examples if you are
unsure of the syntax.

Your code will be run against a MongoDB instance that we have provided.
If you want to run this code locally on your machine, you have to install
MongoDB, download and insert the dataset. For instructions related to MongoDB
setup and datasets please see Course Materials.

Please note that the dataset you are using here is a smaller version of the
twitter dataset used in examples in this lesson. If you attempt some of the
same queries that we looked at in the lesson examples, your results will be
different.
"""


def get_db(db_name):
    from pymongo import MongoClient
    client = MongoClient('localhost:27017')
    db = client[db_name]
    return db


def make_pipeline():
    # complete the aggregation pipeline
    pipeline = [ ]
    return pipeline


def aggregate(db, pipeline):
    result = db.tweets.aggregate(pipeline)
    return result


if __name__ == '__main__':
    db = get_db('twitter')
    pipeline = make_pipeline()
    result = aggregate(db, pipeline)
    assert len(result["result"]) == 1
    assert result["result"][0]["followers"] == 17209
    import pprint
    pprint.pprint(result)
agpl-3.0
Python
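A quick sanity check for the pipeline in the record above: the two consecutive $match stages are equivalent to one combined stage, and the pipeline can be built and pretty-printed without a live MongoDB. A minimal sketch (the combined form is an illustration, not the graded solution):

import pprint

# Equivalent single-$match form of the pipeline above: both filter
# conditions can live in one stage, which MongoDB treats the same way.
combined = [
    {"$match": {"user.time_zone": "Brasilia",
                "user.statuses_count": {"$gte": 100}}},
    {"$project": {"tweets": "$user.statuses_count",
                  "screen_name": "$user.screen_name",
                  "followers": "$user.followers_count"}},
    {"$sort": {"followers": -1}},
    {"$limit": 1},
]
pprint.pprint(combined)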
93cc7c44efdd01c0e6d5a218301da7686b4f7289
implement postmaster redirection
cloudfleet/despatch
app/server.py
app/server.py
import requests from salmon.routing import nolocking, route, stateless, route_like from salmon.mail import MailResponse from config import settings import json import logging log = logging.getLogger(__name__) log.level = logging.DEBUG def forward_postmaster(message, domain): log.info("===============================") log.info("received mail for %s@%s. Forwarding ..." % ("postmaster", domain)) log.debug("Content: \n %s" % message.to_message().as_string()) try: settings.relay.deliver(MailResponse(To='[email protected]', From=message.From, Subject="[%s] %s" % (domain, message['subject']), Body=message.body())) except Exception, e: log.error(str(e)) log.info("===============================") log.info("forwarded mail to admiralty") def deliver_to_blimp(message, inbox, domain): log.info("===============================") log.info("received mail for %s@%s" % (inbox, domain)) target_url = "http://blimp." + domain + "/mailbox/raw/" + inbox # FIXME change to https r = requests.post(target_url, headers={"Content-transfer-encoding": "binary"}, data=message.to_message().as_string()) log.info("Server Response: %s" % r.text) @route("(inbox)@(domain)", inbox=".+", domain=".+") @stateless def START(message, inbox=None, domain=None): if inbox == 'postmaster': return forward_postmaster(message, domain) else: return deliver_to_blimp(message, inbox, domain)
import requests from salmon.routing import nolocking, route, stateless from salmon.mail include MailResponse from config import settings import json import logging log = logging.getLogger(__name__) log.level = logging.DEBUG @route("postmaster@(domain)", inbox=".+", domain=".+") @stateless def forward_postmaster(message, to=None, host=None): logging.debug("MESSAGE to %s@%s forwarded to the relay host.", to, host) settings.relay.deliver(MailResponse(To='[email protected]', From=message.From, Subject="[%s] %s" % (host, message.Subject), Body=message.body())) @route("(inbox)@(domain)", inbox=".+", domain=".+") @stateless @nolocking def START(message, inbox=None, domain=None): log.info("===============================") log.info("received mail for %s@%s" % (inbox, domain)) target_url = "http://blimp." + domain + "/mailbox/raw/" + inbox # FIXME change to https r = requests.post(target_url, headers={"Content-transfer-encoding": "binary"}, data=message.to_message().as_string()) log.info("Server Response: %s" % r.text)
agpl-3.0
Python
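The @route patterns in the record above turn named captures like "(inbox)@(domain)" into handler keyword arguments. A rough, salmon-free illustration of that capture-to-kwargs idea using plain re — the names dispatch and handler are hypothetical, and this is not salmon's actual implementation:

import re

def dispatch(address, handler):
    # Mimic @route("(inbox)@(domain)"): capture the local part and the
    # domain, then pass both to the handler as keyword arguments.
    m = re.match(r"(?P<inbox>.+)@(?P<domain>.+)$", address)
    if m:
        return handler(**m.groupdict())

print(dispatch("postmaster@example.org",
               lambda inbox, domain: (inbox, domain)))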
cc12728d7160a10f0c182c0cccfde0fd15cadb75
Add a reset function stub
mozilla/spicedham,mozilla/spicedham
spicedham/basewrapper.py
spicedham/basewrapper.py
class BaseWrapper(object):
    """
    A base class for backend plugins.
    """

    def reset(self, really):
        """
        Resets the training data to a blank slate.
        """
        if really:
            raise NotImplementedError()

    def get_key(self, tag, key, default=None):
        """
        Gets the value held by the tag, key composite key. If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, tag, keys, default=None):
        """
        Given a list of key, tag tuples get all values. If key, tag doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk requests.
        """
        return [self.get_key(tag, key, default) for tag, key in keys]

    def set_key_list(self, tag_key_value_tuples):
        """
        Given a list of tuples of tag, key, value set them all.
        Subclasses can override this to make more efficient queries for bulk requests.
        """
        return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples]

    def set_key(self, tag, key, value):
        """
        Set the value held by the tag, key composite key.
        """
        raise NotImplementedError()
class BaseWrapper(object): """ A base class for backend plugins. """ def get_key(self, tag, key, default=None): """ Gets the value held by the tag, key composite key. If it doesn't exist, return default. """ raise NotImplementedError() def get_key_list(self, tag, keys, default=None): """ Given a list of key, tag tuples get all values. If key, tag doesn't exist, return default. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.get_key(tag, key, default) for tag, key in key_tag_pairs] def set_key_list(self, tag, key_value_tuples): """ Given a list of tuples of tag, key, value set them all. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.set_key(tag, key, value) for tag, key, value in tag_key_value_tuples] def set_key(self, tag, key, value): """ Set the value held by the tag, key composite key. """ raise NotImplementedError()
mpl-2.0
Python
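A minimal in-memory subclass makes the BaseWrapper contract above concrete: get_key/set_key store under a (tag, key) composite, and the bulk helpers inherited from the base class fall back to them. Illustrative only — DictWrapper is not part of spicedham, and it assumes the BaseWrapper class defined in the record above:

class DictWrapper(BaseWrapper):
    def __init__(self):
        self._store = {}

    def get_key(self, tag, key, default=None):
        # Composite (tag, key) lookup with an explicit default.
        return self._store.get((tag, key), default)

    def set_key(self, tag, key, value):
        self._store[(tag, key)] = value

backend = DictWrapper()
backend.set_key('spam', 'word:viagra', 0.99)
print(backend.get_key('spam', 'word:viagra'))   # 0.99
print(backend.get_key('spam', 'missing', 0.0))  # 0.0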
88cc2242ccd91d7574dab0f687c3a0c755a9a4aa
convert stock prices from strings to floats before saving/returning
jakemmarsh/neural-network-stock-predictor,jakemmarsh/neural-network-stock-predictor
analyzer.py
analyzer.py
import json, urllib2 from neuralNetwork import NN def getHistoricalData(stockSymbol): historicalPrices = [] # login to API urllib2.urlopen("http://api.kibot.com/?action=login&user=guest&password=guest") # get 10 days of data from API (business days only, could be < 10) url = "http://api.kibot.com/?action=history&symbol=" + stockSymbol + "&interval=daily&period=10&unadjusted=1&regularsession=1" apiData = urllib2.urlopen(url).read().split("\n") # get price for each day returned from API for line in apiData: if(len(line) > 0): tempLine = line.split(',') historicalPrices.append(float(tempLine[1])) return historicalPrices def analyzeSymbol(stockSymbol): historicalPrices = getHistoricalData(stockSymbol) network = NN(ni = 2, nh = 2, no = 1) # train neural network with historical prices # return prediction return True print getHistoricalData("GOOG")
import json, urllib2 from neuralNetwork import NN def getHistoricalData(stockSymbol): historicalPrices = [] # login to API urllib2.urlopen("http://api.kibot.com/?action=login&user=guest&password=guest") # get 10 days of data from API (business days only, could be < 10) url = "http://api.kibot.com/?action=history&symbol=" + stockSymbol + "&interval=daily&period=10&unadjusted=1&regularsession=1" apiData = urllib2.urlopen(url).read().split("\n") # get price for each day returned from API for line in apiData: if(len(line) > 0): tempLine = line.split(',') historicalPrices.append(tempLine[1]) return historicalPrices def analyzeSymbol(stockSymbol): historicalPrices = getHistoricalData(stockSymbol) network = NN(ni = 2, nh = 2, no = 1) # train neural network with historical prices # return prediction return True getHistoricalData("GOOG")
mit
Python
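The float() cast added by the commit above matters because string prices compare lexicographically rather than numerically. A two-line demonstration of the pitfall the fix avoids:

prices = ["9.50", "10.25"]
print(max(prices))                    # '9.50' -- string comparison is lexicographic
print(max(float(p) for p in prices))  # 10.25  -- numeric comparison after casting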
ccdc17645440cf191f9cca27f32b2211fad4ccd0
Load coordinates info into the main table
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
luigi/tasks/release/load_coordinates.py
luigi/tasks/release/load_coordinates.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import luigi from tasks.config import output from tasks.utils.pgloader import PGLoader from .utils.generic import file_pattern CONTROL_FILE = """ LOAD CSV FROM ALL FILENAMES MATCHING ~<{pattern}> IN DIRECTORY '{directory}' HAVING FIELDS ( accession, primary_accession, local_start, local_end, strand ) INTO {db_url} TARGET COLUMNS ( accession, primary_accession, local_start, local_end, strand ) WITH truncate, batch rows = 500, batch size = 32MB, prefetch rows = 500, workers = 2, concurrency = 1, skip header = 0, fields escaped by double-quote, fields terminated by ',' SET work_mem to '256 MB', maintenance_work_mem to '256 GB', search_path = '{search_path}' BEFORE LOAD DO $$ ALTER TABLE rnacen.load_rnc_coordinates SET ( autovacuum_enabled = false, toast.autovacuum_enabled = false ); $$ AFTER LOAD DO $$ ALTER TABLE rnacen.load_rnc_coordinates SET ( autovacuum_enabled = true, toast.autovacuum_enabled = true ); $$ , $$ INSERT INTO rnacen.rnc_coordinates AS t1 ( accession, primary_accession, local_start, local_end, strand, id ) SELECT accession, primary_accession, local_start, local_end, strand, NEXTVAL('rnc_coordinates_pk_seq') FROM rnacen.load_rnc_coordinates as t2 ON CONFLICT (accession, primary_accession, local_start, local_end) DO NOTHING; $$ ; """ class LoadCoordinates(PGLoader): # pylint: disable=R0904 """ This will load coordinates. The database parameter defaults to all coordinates, if a value is given then it is assumed to be the name of the database to load. All files that begin with that name will be loaded. """ database = luigi.Parameter(default='all') def control_file(self): config = output() directory = os.path.join(config.base, 'genomic_locations') return CONTROL_FILE.format( pattern=file_pattern(self.database), db_url=self.db_url(table='load_rnc_coordinates'), search_path=self.db_search_path(), directory=directory, )
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import luigi from tasks.config import output from tasks.utils.pgloader import PGLoader from .utils.generic import file_pattern CONTROL_FILE = """ LOAD CSV FROM ALL FILENAMES MATCHING ~<{pattern}> IN DIRECTORY '{directory}' HAVING FIELDS ( accession, primary_accession, local_start, local_end, strand ) INTO {db_url} TARGET COLUMNS ( accession, primary_accession, local_start, local_end, strand ) WITH truncate, batch rows = 500, batch size = 32MB, prefetch rows = 500, workers = 2, concurrency = 1, skip header = 0, fields escaped by double-quote, fields terminated by ',' SET work_mem to '256 MB', maintenance_work_mem to '256 GB', search_path = '{search_path}' BEFORE LOAD DO $$ ALTER TABLE rnacen.load_rnc_coordinates SET ( autovacuum_enabled = false, toast.autovacuum_enabled = false ); $$ AFTER LOAD DO $$ ALTER TABLE rnacen.load_rnc_coordinates SET ( autovacuum_enabled = true, toast.autovacuum_enabled = true ); $$ ; """ class LoadCoordinates(PGLoader): # pylint: disable=R0904 """ This will load coordinates. The database parameter defaults to all coordinates, if a value is given then it is assumed to be the name of the database to load. All files that begin with that name will be loaded. """ database = luigi.Parameter(default='all') def control_file(self): config = output() directory = os.path.join(config.base, 'genomic_locations') return CONTROL_FILE.format( pattern=file_pattern(self.database), db_url=self.db_url(table='load_rnc_coordinates'), search_path=self.db_search_path(), directory=directory, )
apache-2.0
Python
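CONTROL_FILE in the record above is a plain str.format template; rendering it shows the ON CONFLICT upsert that this commit adds to the AFTER LOAD block. A sketch that assumes the CONTROL_FILE constant defined above — every value passed in is an invented placeholder, not a real RNAcentral path or connection string:

rendered = CONTROL_FILE.format(
    pattern='ensembl.*csv',              # hypothetical file pattern
    directory='/tmp/genomic_locations',  # hypothetical directory
    db_url='postgresql://user@host/rnacentral?load_rnc_coordinates',
    search_path='rnacen',
)
print(rendered)  # the AFTER LOAD block now INSERTs ... ON CONFLICT DO NOTHING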
ff6b9eddc27ee2b897ab20198d562ef1dfe257d5
support get docker info
dc-project/blog,dc-project/blog
app/dash.py
app/dash.py
#!/usr/bin/env python3 # coding=utf-8 """ @version:0.1 @author: ysicing @file: blog/dash.py @time: 2017/9/20 22:46 """ from flask import Blueprint, render_template,jsonify from app.plugins.docker import DockerApi dash = Blueprint('dash', __name__) docker = DockerApi(host=None, timeout=None) @dash.route('/dash/') def dash_index(): return render_template('dash.html') @dash.route('/dash/docker') def dash_docker_info(): return jsonify(docker.get_docker_version())
#!/usr/bin/env python3 # coding=utf-8 """ @version:0.1 @author: ysicing @file: blog/dash.py @time: 2017/9/20 22:46 """ from flask import Blueprint, render_template dash = Blueprint('dash', __name__) @dash.route('/dash/') def dash_index(): return render_template('dash.html')
agpl-3.0
Python
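The blueprint-plus-jsonify pattern above is standard Flask; DockerApi is the project's own plugin, so the sketch below stubs it with a plain function returning a dict (the stub and its values are hypothetical):

from flask import Blueprint, Flask, jsonify

dash = Blueprint('dash', __name__)

def get_docker_version():
    # Stand-in for DockerApi.get_docker_version(); returns a plain dict
    # so that jsonify can serialize it.
    return {'Version': '17.09.0-ce', 'ApiVersion': '1.32'}

@dash.route('/dash/docker')
def dash_docker_info():
    return jsonify(get_docker_version())

app = Flask(__name__)
app.register_blueprint(dash)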
95945f98b3c4689dc1fb5066f5102154cc4a6a28
bump version
objectified/vdist,objectified/vdist
setup.py
setup.py
from setuptools import setup, find_packages setup( name='vdist', version='0.3.5', description='Create OS packages from Python projects using Docker containers', long_description='Create OS packages from Python projects using Docker containers', author='L. Brouwer', author_email='[email protected]', license='MIT', url='https://github.com/objectified/vdist', packages=find_packages(), install_requires=['jinja2==2.7.3', 'docker-py==0.7.2'], package_data={'': ['internal_profiles.json', '*.sh']}, tests_require=['pytest'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Unix', 'Operating System :: POSIX', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], keywords='python docker deployment packaging', )
from setuptools import setup, find_packages setup( name='vdist', version='0.3.4', description='Create OS packages from Python projects using Docker containers', long_description='Create OS packages from Python projects using Docker containers', author='L. Brouwer', author_email='[email protected]', license='MIT', url='https://github.com/objectified/vdist', packages=find_packages(), install_requires=['jinja2==2.7.3', 'docker-py==0.7.2'], package_data={'': ['internal_profiles.json', '*.sh']}, tests_require=['pytest'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Unix', 'Operating System :: POSIX', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], keywords='python docker deployment packaging', )
mit
Python
7dcbb064e9bd87e30d322e695452ab140c30b5ed
Support for --version
benjamin-hodgson/Contexts
src/contexts/__main__.py
src/contexts/__main__.py
import sys from .plugin_discovery import load_plugins from . import run_with_plugins, main def cmd(): if '--version' in sys.argv: print_version() sys.exit(0) try: import colorama except ImportError: pass else: colorama.init() plugin_list = load_plugins() exit_code = run_with_plugins(plugin_list) sys.exit(exit_code) def print_version(): import pkg_resources version = pkg_resources.require('contexts')[0].version py_version = '.'.join(str(i) for i in sys.version_info[0:3]) print("Contexts version " + version) print("Running on Python version " + py_version) if __name__ == "__main__": cmd()
import sys from .plugin_discovery import load_plugins from . import run_with_plugins, main def cmd(): try: import colorama except ImportError: pass else: colorama.init() plugin_list = load_plugins() exit_code = run_with_plugins(plugin_list) sys.exit(exit_code) if __name__ == "__main__": cmd()
mit
Python
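pkg_resources.require('contexts')[0].version, as used above, reads the installed distribution's metadata. On Python 3.8+ the same lookup is usually done with importlib.metadata; a sketch of the equivalent (the distribution name is whatever happens to be installed):

import sys
from importlib.metadata import version, PackageNotFoundError  # Python 3.8+

def print_version(dist_name):
    try:
        pkg_version = version(dist_name)
    except PackageNotFoundError:
        pkg_version = 'unknown (not installed)'
    py_version = '.'.join(str(i) for i in sys.version_info[:3])
    print('%s version %s on Python %s' % (dist_name, pkg_version, py_version))

print_version('pip')  # any installed distribution works here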
cdaa708e185b252ddebb542e89a9c4d5e6740f2c
Include old (>24h) messages in news feed
balanceofcowards/pyttrss
feedline.py
feedline.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Rapidly display fresh headlines from a TinyTinyRSS instance on the command line.

(c) 2017 Andreas Fischer <[email protected]>
"""
import subprocess
import argparse
import getpass
import json
import os.path
import readchar

from ttrss import TinyTinyRSS


def get_conn():
    """
    Get connection details either from a config file, the commandline, or via user input.
    """
    conn = {}
    if os.path.isfile('pyttrss.cfg'):
        with open('pyttrss.cfg', 'r') as cfgfile:
            conn = json.load(cfgfile)
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-u', '--user', help='Username')
    parser.add_argument('-p', '--password', help='Password')
    parser.add_argument('-U', '--url', help='Server URL')
    args = parser.parse_args()
    # Preference: Commandline > Configfile > User input
    # (conn.get() avoids a KeyError when the config file is absent)
    conn['user'] = args.user or conn.get('user') or raw_input("Enter username: ")
    conn['password'] = args.password or conn.get('password') or getpass.getpass()
    conn['url'] = args.url or conn.get('url') or raw_input("Enter server URL: ")
    return conn

if __name__ == "__main__":
    with TinyTinyRSS(get_conn()) as ttrss:
        print "Unread articles:", ttrss.getUnread()
        read_art_ids = []
        for article in ttrss.getHeadlines(feed_id=-4, view_mode="unread"):
            outstr = u"{:>20} | {}".format(article['feed_title'][:20], article['title'])
            print outstr
            #print article['feed_title'][:20], "\t", article['title']
            char = readchar.readchar()
            if char == "o":
                subprocess.call(['xdg-open', article['link']])
            elif char == "s":
                continue
            elif char == "q":
                break
            read_art_ids.append(article['id'])
        ttrss.updateArticle(read_art_ids, 0, 2)
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Rapidly display fresh headlines from a TinyTinyRSS instance on the command line. (c) 2017 Andreas Fischer <[email protected]> """ import subprocess import argparse import getpass import json import os.path import readchar from ttrss import TinyTinyRSS def get_conn(): """ Get connection details either from a config file, the commandline, or via user input. """ conn = {} if os.path.isfile('pyttrss.cfg'): with open('pyttrss.cfg', 'r') as cfgfile: conn = json.load(cfgfile) parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('-u', '--user', help='Username') parser.add_argument('-p', '--password', help='Password') parser.add_argument('-U', '--url', help='Server URL') args = parser.parse_args() # Preference: Commandline > Configfile > User input conn['user'] = args.user or conn['user'] or raw_input("Enter username: ") conn['password'] = args.password or conn['password'] or getpass.getpass() conn['url'] = args.url or conn['url'] or raw_input("Enter server URL: ") return conn if __name__ == "__main__": with TinyTinyRSS(get_conn()) as ttrss: print "Unread articles:", ttrss.getUnread() read_art_ids = [] for article in ttrss.getHeadlines(feed_id=-3): outstr = u"{:>20} | {}".format(article['feed_title'][:20], article['title']) print outstr #print article['feed_title'][:20], "\t", article['title'] char = readchar.readchar() if char == "o": subprocess.call(['xdg-open', article['link']]) elif char == "s": continue elif char == "q": break read_art_ids.append(article['id']) ttrss.updateArticle(read_art_ids, 0, 2)
mit
Python
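The `x or y or z` preference chain above treats any falsy value (for example an empty string passed deliberately) as missing. An explicit first-non-None helper is the usual fix for that; a small illustrative sketch, not part of pyttrss:

def first_set(*candidates):
    # Return the first candidate that is not None, so empty strings
    # and 0 still count as explicitly-provided values.
    for value in candidates:
        if value is not None:
            return value
    return None

print(first_set(None, '', 'fallback'))  # '' -- unlike ('' or 'fallback')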
d0b6e2b9b3a936ea16a7c48fd951bb4f297c1190
Update setup.py to point to correct site
python-daisychain/daisychain
setup.py
setup.py
try: from setuptools import setup, find_packages except: from distutils.core import setup, find_packages install_requires = ['py3compat >= 0.2'] setup( name='daisychain', version='0.1', description='Configuration-based OO-dependency resolution workflow engine', author='Jeff Edwards', author_email='[email protected]', url='https://github.com/python-daisychain/daisychain', license='MIT License', packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), scripts = ['bin/daisy-chain'], install_requires=install_requires )
try: from setuptools import setup, find_packages except: from distutils.core import setup, find_packages install_requires = ['py3compat >= 0.2'] setup( name='daisychain', version='0.1', description='Configuration-based OO-dependency resolution workflow engine', author='Jeff Edwards', author_email='[email protected]', url='https://github.com/python-daisy/daisychain', license='MIT License', packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), scripts = ['bin/daisy-chain'], install_requires=install_requires )
mit
Python
a88a9ad6ed64c3bf4b5a9e40a41a68e9581654e7
Fix nox config. (#4599)
googleapis/python-bigquery-datatransfer,googleapis/python-bigquery-datatransfer
nox.py
nox.py
# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import os import nox @nox.session @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) def unit_tests(session, python_version): """Run the unit test suite.""" session.interpreter = 'python{}'.format(python_version) session.virtualenv_dirname = 'unit-' + python_version session.install('pytest') session.install('-e', '.') session.run('py.test', '--quiet', os.path.join('tests', 'unit')) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' session.install('docutils', 'pygments') session.run('python', 'setup.py', 'check', '--restructuredtext', '--strict') @nox.session @nox.parametrize('py', ['2.7', '3.6']) def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest') session.install('../test_utils/') session.install('.') # Run py.test against the system tests. session.run('py.test', '--quiet', 'tests/system/')
# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import os import nox @nox.session @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) def unit_tests(session, python_version): """Run the unit test suite.""" session.interpreter = 'python{}'.format(python_version) session.virtualenv_dirname = 'unit-' + python_version session.install('pytest') session.install('-e', '.') session.run('py.test', '--quiet', os.path.join('tests', 'unit')) @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' session.install('docutils', 'pygments') session.run('python', 'setup.py', 'check', '--restructuredtext', '--strict') @nox.session @nox.parametrize('py', ['2.7', '3.6']) def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) session.install('../test_utils/') session.install('.') # Run py.test against the system tests. session.run('py.test', '--quiet', 'tests/system.py')
apache-2.0
Python
82cc05e882698bdf2248ae0ae1589bb6455d0ca5
update fetch
jmrnilsson/beei
fetch/rb.py
fetch/rb.py
import robotparser

from utils import config
from splinter import Browser


def index(session):
    url = config.rb_url()
    _robot_can_fetch(session, url)

    def fetch():
        with Browser(**config.browser_kwargs()) as browser:
            browser.visit(url)
            styles = []
            group_names = browser.find_by_xpath("//*[contains(@class, 'groupname')]")
            for group_name in group_names:
                elements = group_name.find_by_xpath('following-sibling::ul[1]/li/a')
                for el in elements:
                    styles.append({'group': group_name.text, 'name': el.text, 'href': el['href']})
            return styles

    session.visit(3, url, fetch)


def _robot_can_fetch(session, url):
    robots_text = session.get(5, config.rb_robots(), map_to=lambda r: r.text)
    rp = robotparser.RobotFileParser()
    # RobotFileParser.parse() expects an iterable of lines, not one string
    rp.parse(robots_text.splitlines())
    if not rp.can_fetch('*', url):
        raise ValueError('Robot is not allowed to fetch {}'.format(url))
import robotparser from utils import config from splinter import Browser def index(session): url = config.rb_url() _robot_can_fetch(session, url) def fetch(): with Browser(**config.browser_kwargs()) as browser: browser.visit(url) styles = [] group_names = browser.find_by_xpath("//*[contains(@class, 'groupname')]") for group_name in group_names: elements = group_name.find_by_xpath('following-sibling::ul[1]/li/a') for el in elements: styles.append({'group': group_name.text, 'name': el.text, 'href': el['href']}) return styles session.visit(5, url, fetch) def _robot_can_fetch(session, url): robots_text = session.get(5, config.rb_robots(), map_to=lambda r: r.text) rp = robotparser.RobotFileParser() rp.parse(robots_text) if not rp.can_fetch('*', url): raise ValueError('Robot is not allowed to fetch {}'.format(url))
mit
Python
5839d76a0e29a3fa6b07a460ff3f0d8cf9b889b7
Remove alpha release
remind101/stacker_blueprints,remind101/stacker_blueprints
setup.py
setup.py
import os from setuptools import setup, find_packages src_dir = os.path.dirname(__file__) install_requires = [ "troposphere~=1.8.0", "awacs~=0.6.0", "stacker~=0.8.1", ] tests_require = [ "nose~=1.0", "mock~=2.0.0", ] def read(filename): full_path = os.path.join(src_dir, filename) with open(full_path) as fd: return fd.read() if __name__ == "__main__": setup( name="stacker_blueprints", version="0.7.0", author="Michael Barrett", author_email="[email protected]", license="New BSD license", url="https://github.com/remind101/stacker_blueprints", description="Default blueprints for stacker", long_description=read("README.rst"), packages=find_packages(), install_requires=install_requires, tests_require=tests_require, test_suite="nose.collector", )
import os from setuptools import setup, find_packages src_dir = os.path.dirname(__file__) install_requires = [ "troposphere~=1.8.0", "awacs~=0.6.0", "stacker~=0.8.1", ] tests_require = [ "nose~=1.0", "mock~=2.0.0", ] def read(filename): full_path = os.path.join(src_dir, filename) with open(full_path) as fd: return fd.read() if __name__ == "__main__": setup( name="stacker_blueprints", version="0.7.1a1", author="Michael Barrett", author_email="[email protected]", license="New BSD license", url="https://github.com/remind101/stacker_blueprints", description="Default blueprints for stacker", long_description=read("README.rst"), packages=find_packages(), install_requires=install_requires, tests_require=tests_require, test_suite="nose.collector", )
bsd-2-clause
Python
38791c7bb480ea5c9efdb4bab3a9c785e5078153
bump to version 0.1alpha9
hookbox/hookbox,hookbox/hookbox,hookbox/hookbox,gameclosure/hookbox,gameclosure/hookbox,gameclosure/hookbox,hookbox/hookbox,gameclosure/hookbox
setup.py
setup.py
from setuptools import setup, find_packages import os, sys static_types = [ '*.js', '*.html', '*.css', '*.ico', '*.gif', '*.jpg', '*.png', '*.txt*', '*.py', '*.template' ] #if sys.platform != "win32": # _install_requires.append("Twisted") _install_requires = [ 'csp>=0.1alpha9', 'rtjp>=0.1alpha2', 'eventlet', 'paste', 'static' ] # python <= 2.5 if sys.version_info[1] <= 5: _install_requires.append('simplejson') setup( name='hookbox', version='0.1a5', author='Michael Carter', author_email='[email protected]', license='MIT License', description='HookBox is a Comet server and message queue that tightly integrates with your existing web application via web hooks and a REST interface.', long_description='', packages= find_packages(), package_data = {'': reduce(list.__add__, [ '.git' not in d and [ os.path.join(d[len('hookbox')+1:], e) for e in static_types ] or [] for (d, s, f) in os.walk(os.path.join('hookbox', 'static')) ]) }, zip_safe = False, install_requires = _install_requires, entry_points = ''' [console_scripts] hookbox = hookbox.start:main ''', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ], )
from setuptools import setup, find_packages import os, sys static_types = [ '*.js', '*.html', '*.css', '*.ico', '*.gif', '*.jpg', '*.png', '*.txt*', '*.py', '*.template' ] #if sys.platform != "win32": # _install_requires.append("Twisted") _install_requires = [ 'csp>=0.1alpha8', 'rtjp>=0.1alpha2', 'eventlet', 'paste', 'static' ] # python <= 2.5 if sys.version_info[1] <= 5: _install_requires.append('simplejson') setup( name='hookbox', version='0.1a4', author='Michael Carter', author_email='[email protected]', license='MIT License', description='HookBox is a Comet server and message queue that tightly integrates with your existing web application via web hooks and a REST interface.', long_description='', packages= find_packages(), package_data = {'': reduce(list.__add__, [ '.git' not in d and [ os.path.join(d[len('hookbox')+1:], e) for e in static_types ] or [] for (d, s, f) in os.walk(os.path.join('hookbox', 'static')) ]) }, zip_safe = False, install_requires = _install_requires, entry_points = ''' [console_scripts] hookbox = hookbox.start:main ''', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ], )
mit
Python
7156cc172b3ba87e3247367c6bf51cc24ce9a902
Update PyPI usage
urschrei/convertbng,urschrei/convertbng
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ setup.py Created by Stephan Hügel on 2015-06-21 """ from __future__ import unicode_literals import os import re import io from setuptools import setup, find_packages, Distribution def read(*names, **kwargs): with io.open( os.path.join(os.path.dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8") ) as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search( r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Unable to find version string.") class BinaryDistribution(Distribution): def is_pure(self): return False version=find_version("convertbng/util.py") setup( name='convertbng', version=version, description='Fast lon, lat to BNG conversion', author='Stephan Hügel', author_email='[email protected]', license='MIT License', url='https://github.com/urschrei/convertbng', include_package_data=True, distclass=BinaryDistribution, download_url='https://github.com/urschrei/convertbng/tarball/v%s' % version, keywords=['Geo', 'BNG'], classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Education', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', ], packages=find_packages(), long_description="""\ =============================== Fast lon, lat to BNG conversion =============================== Uses a Rust 1.x binary to perform fast lon, lat to BNG conversion\n This module exposes two methods:\n util.convertbng() – pass a lon, lat. Returns a tuple of Eastings, Northings\n util.convertbng_list() – pass lists (or Numpy arrays) of lons, lats. Returns a list of Easting, Northing tuples\n\n Usage ===== .. code-block:: python from convertbng.util import convertbng, convertbng_list res = convertbng(lon, lat) lons = [lon1, lon2, lon3] lats = [lat1, lat2, lat3] # assumes import numpy as np lons_np = np.array(lons) lats_np = np.array(lats) res_list = convertbng_list(lons, lats) res_list_np = convertbng_list(lons_np, lats_np) This version requires Python 2.7.x / 3.4.x""" )
#!/usr/bin/env python # -*- coding: utf-8 -*- """ setup.py Created by Stephan Hügel on 2015-06-21 """ from __future__ import unicode_literals import os import re import io from setuptools import setup, find_packages, Distribution def read(*names, **kwargs): with io.open( os.path.join(os.path.dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8") ) as fp: return fp.read() def find_version(*file_paths): version_file = read(*file_paths) version_match = re.search( r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Unable to find version string.") class BinaryDistribution(Distribution): def is_pure(self): return False version=find_version("convertbng/util.py") setup( name='convertbng', version=version, description='Fast lon, lat to BNG conversion', author='Stephan Hügel', author_email='[email protected]', license='MIT License', url='https://github.com/urschrei/convertbng', include_package_data=True, distclass=BinaryDistribution, download_url='https://github.com/urschrei/convertbng/tarball/v%s' % version, keywords=['Geo', 'BNG'], classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Education', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Software Development :: Libraries :: Python Modules', ], packages=find_packages(), long_description="""\ Fast lon, lat to BNG conversion --------------------------------------------- Uses a Rust 1.0 binary to perform fast lon, lat to BNG conversion\n This module exposes two methods:\n util.convertbng() – pass a lon, lat. Returns a tuple of Eastings, Northings\n util.convertbng_list() – pass lists (or Numpy arrays) of lons, lats. Returns a list of Easting, Northing tuples\n\n Call them like so:\n from convertbng.util import convertbng, convertbng_list\n\n res = convertbng(lon, lat)\n res_list = convertbng_list([lons], [lats])\n\n This version requires Python 2.7.x / 3.4.x""" )
mit
Python
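find_version in the setup.py above single-sources the package version by regexing the module text rather than importing it. The same regex can be checked in isolation on a made-up sample string:

import re

sample = "__version__ = '0.5.1'\n"
match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", sample, re.M)
print(match.group(1))  # 0.5.1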
2f766e439b9d91ab4d4682245a2360bc1e5c2bb5
Update version
mpld3/mplexporter
setup.py
setup.py
import matplotlib import os MPLBE = os.environ.get('MPLBE') if MPLBE: matplotlib.use(MPLBE) try: from setuptools import setup except ImportError: from distutils.core import setup DESCRIPTION = "General Matplotlib Exporter" LONG_DESCRIPTION = open('README.md').read() NAME = "mplexporter" AUTHOR = "Jake VanderPlas" AUTHOR_EMAIL = "[email protected]" MAINTAINER = "Jake VanderPlas" MAINTAINER_EMAIL = "[email protected]" DOWNLOAD_URL = 'https://github.com/mpld3/mplexporter' URL = DOWNLOAD_URL LICENSE = 'BSD 3-clause' VERSION = '0.1.0' setup(name=NAME, version=VERSION, description=DESCRIPTION, long_description=LONG_DESCRIPTION, author=AUTHOR, author_email=AUTHOR_EMAIL, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, url=URL, download_url=DOWNLOAD_URL, license=LICENSE, packages=['mplexporter', 'mplexporter.renderers'], )
import matplotlib import os MPLBE = os.environ.get('MPLBE') if MPLBE: matplotlib.use(MPLBE) try: from setuptools import setup except ImportError: from distutils.core import setup DESCRIPTION = "General Matplotlib Exporter" LONG_DESCRIPTION = open('README.md').read() NAME = "mplexporter" AUTHOR = "Jake VanderPlas" AUTHOR_EMAIL = "[email protected]" MAINTAINER = "Jake VanderPlas" MAINTAINER_EMAIL = "[email protected]" DOWNLOAD_URL = 'https://github.com/mpld3/mplexporter' URL = DOWNLOAD_URL LICENSE = 'BSD 3-clause' VERSION = '0.0.1' setup(name=NAME, version=VERSION, description=DESCRIPTION, long_description=LONG_DESCRIPTION, author=AUTHOR, author_email=AUTHOR_EMAIL, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, url=URL, download_url=DOWNLOAD_URL, license=LICENSE, packages=['mplexporter', 'mplexporter.renderers'], )
bsd-3-clause
Python
6797300eeeb014debc5472927c5b5711597881ea
bump to 0.2.1
mwhooker/jones,mwhooker/jones,mwhooker/jones,mwhooker/jones
setup.py
setup.py
""" Copyright 2012 DISQUS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from setuptools import setup VERSION = '0.2.1' NAME = 'jones' install_requires = [ 'zc-zookeeper-static', 'kazoo>=0.2b1' ] web_requires = install_requires + [ 'flask', 'raven' ] tests_require = web_requires + [ 'nose', 'unittest2', 'mock', ] if __name__ == '__main__': setup( name=NAME, version=VERSION, author='Matthew Hooker', author_email='[email protected]', url='https://github.com/disqus/jones', description='Configuration frontend for Zookeeper.', license='Apache License 2.0', py_modules = ['jones.client'], zip_safe=False, install_requires=install_requires, tests_require=tests_require, extras_require={ 'test': tests_require, 'web': web_requires }, test_suite='nose.collector', include_package_data=True, )
""" Copyright 2012 DISQUS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from setuptools import setup VERSION = '0.2' NAME = 'jones' install_requires = [ 'zc-zookeeper-static', 'kazoo>=0.2b1' ] web_requires = install_requires + [ 'flask', 'raven' ] tests_require = web_requires + [ 'nose', 'unittest2', 'mock', ] if __name__ == '__main__': setup( name=NAME, version=VERSION, author='Matthew Hooker', author_email='[email protected]', url='https://github.com/disqus/jones', description='Configuration frontend for Zookeeper.', license='Apache License 2.0', py_modules = ['jones.client'], zip_safe=False, install_requires=install_requires, tests_require=tests_require, extras_require={ 'test': tests_require, 'web': web_requires }, test_suite='nose.collector', include_package_data=True, )
apache-2.0
Python
d611830525e93e1c1a364ed88695d62003490e07
Bump version number
mpharrigan/trajprocess,mpharrigan/trajprocess
setup.py
setup.py
from setuptools import setup, find_packages setup( name="trajprocess", version='2.0.5', packages=find_packages(), requires=['numpy', 'mdtraj', 'nose'], zip_safe=False, include_package_data=True, )
from setuptools import setup, find_packages setup( name="trajprocess", version='2.0.4', packages=find_packages(), requires=['numpy', 'mdtraj', 'nose'], zip_safe=False, include_package_data=True, )
mit
Python
abfdbaee5f80c7c02436268016718a5362f9083d
make setup.py pypi conform
ecoron/SerpScrap,ecoron/SerpScrap
setup.py
setup.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from setuptools import setup, find_packages version = '0.1.5' setup( name='SerpScrap', version=version, description='''A python module to scrape and extract data like links, titles, descriptions, ratings, from search engine result pages and listed urls.''', long_description=open('README.md').read(), author='Ronald Schmidt', author_email='[email protected]', url='https://github.com/ecoron/SerpScrap', license='MIT', packages=find_packages(), dependency_links=[ 'git+git://github.com/ecoron/GoogleScraper#egg=GoogleScraper' ], install_requires=[ 'GoogleScraper', 'chardet==2.3.0', 'beautifulsoup4==4.4.1', 'html2text==2016.4.2', ], classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'Topic :: Internet', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], keywords='serp url scraper', )
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from setuptools import setup, find_packages version = '0.1.5' setup( name='SerpScrap', version=version, description='A python module to scrape and extract data like links, titles, descriptions, ratings, from search engine result pages.', long_description=open('README.md').read(), author='Ronald Schmidt', author_email='[email protected]', url='https://github.com/ecoron/SerpScrap', packages=find_packages(), dependency_links=[ 'git+git://github.com/ecoron/GoogleScraper#egg=GoogleScraper' ], install_requires=[ 'GoogleScraper', 'chardet==2.3.0', 'beautifulsoup4==4.4.1', 'html2text==2016.4.2', ], )
mit
Python
8649b296e05c432dd3841d8c5dc8d9aebd6d09db
update global test script
architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS
cea/test.py
cea/test.py
""" Test all the main scripts in one go - drink coffee while you wait :) """ import properties import demand import emissions import embodied import graphs properties.test_properties() demand.test_demand() emissions.test_lca_operation() embodied.test_lca_embodied() graphs.test_graph_demand() print 'full test completed'
""" Test all the main scripts in one go - drink coffee while you wait :) """ import properties import demand import emissions import embodied import graphs properties.test_properties() demand.test_demand() emissions.test_lca_operation() embodied.test_lca_embodied() graphs.test_graph_demand()
mit
Python
90c07db3c507e1394cf0a72e73f9c7cc425b20a4
return False
mgaitan/one
one.py
one.py
def one(iterable): """Return the object in the given iterable that evaluates to True. If the given iterable has more than one object that evaluates to True, or if there is no object that fulfills such condition, return False. >>> one((True, False, False)) True >>> one((True, False, True)) False >>> one((0, 0, 'a')) 'a' >>> one((0, False, None)) False >>> one((True, True)) False >>> bool(one(('', 1))) True """ iterable = iter(iterable) for item in iterable: if item: break else: return False if any(iterable): return False return item if __name__ == "__main__": import doctest doctest.testmod()
def one(iterable): """ Return X if X is the only one value where bool(i) is True for each every i in the iterable. In any other case return None. >>> one((True, False, False)) True >>> one((True, False, True)) False >>> one((0, 0, 'a')) 'a' >>> one((0, False, None)) False >>> bool(one((True, True))) False >>> bool(one((False, True))) True """ iterable = iter(iterable) for item in iterable: if item: break else: return False if any(iterable): return False return item if __name__ == "__main__": import doctest doctest.testmod()
bsd-3-clause
Python
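An equivalent, non-lazy formulation of one() makes its semantics easy to check against the doctests above: keep the truthy items and demand exactly one. A sketch only — the original's iterator version is preferable because it avoids materializing the list and can stop early:

def one_eager(iterable):
    # Exactly one truthy item -> return it; otherwise -> False.
    truthy = [item for item in iterable if item]
    return truthy[0] if len(truthy) == 1 else False

assert one_eager((0, 0, 'a')) == 'a'
assert one_eager((True, False, True)) is False
assert one_eager((0, False, None)) is False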
6726af1a15c3b64ea9cbb68e18a7983477713842
Update 0.91
Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC
src/cerberus_ac/admin.py
src/cerberus_ac/admin.py
# -*- coding: utf-8 -*- """Admin module.""" # from cerberus_ac.views import EditUserPermissions # from .models import * # # # class SecurityAdmin(AdminSite): # pass # # class DataAdmin(AdminSite): # pass # # class AuditAdmin(AdminSite): # pass # # security_admin_site = SecurityAdmin(name='SecurityAdmin') # data_admin_site = DataAdmin(name='DataAdmin') # audit_admin_site = AuditAdmin(name='AuditAdmin') # # Security Admin Pages # # Logs # @security_admin_site.register(AccessHistory) # class ObjectAccessHistoryAdmin(admin.ModelAdmin): # pass # # @security_admin_site.register(PrivilegeHistory) # class PrivChangesHistoryAdmin(admin.ModelAdmin): # pass # # # User Permissions # @security_admin_site.register(RolePrivilege) # class PermissionsAdmin(admin.ModelAdmin): # pass # Data Admin Pages
# -*- coding: utf-8 -*- """Admin module.""" # from django.contrib import admin # from django.contrib.admin.sites import AdminSite # # from cerberus_ac.views import EditUserPermissions # from .models import * # # # class SecurityAdmin(AdminSite): # pass # # class DataAdmin(AdminSite): # pass # # class AuditAdmin(AdminSite): # pass # # security_admin_site = SecurityAdmin(name='SecurityAdmin') # data_admin_site = DataAdmin(name='DataAdmin') # audit_admin_site = AuditAdmin(name='AuditAdmin') # # # Security Admin Pages # # Logs # @security_admin_site.register(AccessHistory) # class ObjectAccessHistoryAdmin(admin.ModelAdmin): # pass # # @security_admin_site.register(PrivilegeHistory) # class PrivChangesHistoryAdmin(admin.ModelAdmin): # pass # # # User Permissions # @security_admin_site.register(RolePrivilege) # class PermissionsAdmin(admin.ModelAdmin): # pass from cerberus_ac.views import EditUserPermissions from .models import * class SecurityAdmin(AdminSite): pass class DataAdmin(AdminSite): pass class AuditAdmin(AdminSite): pass security_admin_site = SecurityAdmin(name='SecurityAdmin') data_admin_site = DataAdmin(name='DataAdmin') audit_admin_site = AuditAdmin(name='AuditAdmin') # # Security Admin Pages # # Logs # @security_admin_site.register(AccessHistory) # class ObjectAccessHistoryAdmin(admin.ModelAdmin): # pass # # @security_admin_site.register(PrivilegeHistory) # class PrivChangesHistoryAdmin(admin.ModelAdmin): # pass # # # User Permissions # @security_admin_site.register(RolePrivilege) # class PermissionsAdmin(admin.ModelAdmin): # pass # Data Admin Pages
isc
Python
baab52477f637364f0a1a974b4ee13114c667bca
allow multiple encodings in headers (i.e. "From: =?iso-8859-2?Q?...?= <[email protected]>")
valhallasw/cia-mail
cia-mail.py
cia-mail.py
#!/usr/local/bin/python # # Copyright (C) Merlijn van Deen <[email protected]>, 2009 # # Distributed under the terms of the MIT license. # import sys, time from email.Parser import Parser from email.Header import Header, decode_header from xml.sax.saxutils import escape from xmlrpclib import ServerProxy e = Parser().parse(sys.stdin) # Stupid email library. This parses all headers into nice unicode strings... headers = dict([(header, ' '.join([text.decode(encoding if encoding else 'ascii') for (text, encoding) in decode_header(e[header])])) for header in e.keys()]) author = headers['From'] author = author[:author.find('<')].strip() # remove email address author = author.strip("\"\'") subject = headers['Subject'] subject = subject.replace('\n', ' ') message = """ <message> <generator> <name>CIA Python client for mail</name> <version>0.2</version> </generator> <source> <project>%(project)s</project> </source> <timestamp>%(timestamp)s</timestamp> <body> <commit> <author>%(author)s</author> <log>%(subject)s</log> </commit> </body> </message>""" % { 'project' : escape(sys.argv[1]), 'timestamp': int(time.time()), 'author' : escape(author.encode('utf-8')), 'subject' : escape(subject.encode('utf-8')) } print message print ServerProxy('http://cia.vc/RPC2').hub.deliver(message)
#!/usr/local/bin/python # # Copyright (C) Merlijn van Deen <[email protected]>, 2009 # # Distributed under the terms of the MIT license. # import sys, time from email.Parser import Parser from email.Header import Header, decode_header from xml.sax.saxutils import escape from xmlrpclib import ServerProxy e = Parser().parse(sys.stdin) # Stupid email library. This parses all headers into nice unicode strings... headers = dict([(header, Header(*decode_header(e[header])[0]).__unicode__()) for header in e.keys()]) author = headers['From'] author = author[:author.find('<')].strip() # remove email address author = author.strip("\"\'") subject = headers['Subject'] subject = subject.replace('\n', ' ') message = """ <message> <generator> <name>CIA Python client for mail</name> <version>0.2</version> </generator> <source> <project>%(project)s</project> </source> <timestamp>%(timestamp)s</timestamp> <body> <commit> <author>%(author)s</author> <log>%(subject)s</log> </commit> </body> </message>""" % { 'project' : escape(sys.argv[1]), 'timestamp': int(time.time()), 'author' : escape(author.encode('utf-8')), 'subject' : escape(subject.encode('utf-8')) } print message print ServerProxy('http://cia.vc/RPC2').hub.deliver(message)
mit
Python
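decode_header(), as used in the fix above, splits a header into (text, encoding) chunks, which is exactly what handles From: lines that mix several encoded words. A Python 3 spelling of the same trick (the record's script is Python 2; in Python 3 the chunks come back as bytes whenever encoded words are present, hence the isinstance guard; the sample name is made up):

from email.header import decode_header

raw = '=?iso-8859-2?Q?Miko=B3aj?= <j.q.public@example.org>'
decoded = ' '.join(
    text.decode(encoding or 'ascii') if isinstance(text, bytes) else text
    for text, encoding in decode_header(raw)
)
print(decoded)  # the iso-8859-2 name is decoded, the plain address kept as-is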
8453607c1fb1cb1835bc1323f4c59366015e93fe
Create a command-line vulgarizer
Rosuav/shed,Rosuav/shed,Rosuav/shed,Rosuav/shed,Rosuav/shed
estimate.py
estimate.py
import sys from fractions import Fraction from math import log10 def vulgarize(rpt): """Calculate a vulgar fraction for a given continued fraction""" f = Fraction(0) if tuple(rpt) == (0,): return f # Avoid dividing by zero for term in reversed(rpt): f = 1 / (term + f) return 1/f def magnitude(x): """Give an indication of the magnitude of a number Bigger numbers have bigger magnitudes, and you can see the direction of the number in the result (so -4 is further from zero than +2 is). """ if x < 0: return -log10(-x) if x == 0: return 0 return log10(x) digits = sys.argv[1] if "," in digits: digits = [int(d.strip()) for d in digits.split(",")] frac = vulgarize(digits) print(frac, digits, float(frac)) sys.exit(0) print("Estimating %s as a fraction..." % digits) frac = [] orig = Fraction(digits) residue = 1/orig while residue: t = 1/residue frac.append(int(t)) residue = t - int(t) vulg = vulgarize(frac) error = magnitude(vulg - orig) print(f"%{len(digits)*2}s %+6.2f %r" % (vulg, error, frac))
import sys from fractions import Fraction from math import log10 digits = sys.argv[1] print("Estimating %s as a fraction..." % digits) def vulgarize(rpt): """Calculate a vulgar fraction for a given continued fraction""" f = Fraction(0) if tuple(rpt) == (0,): return f # Avoid dividing by zero for term in reversed(rpt): f = 1 / (term + f) return 1/f def magnitude(x): """Give an indication of the magnitude of a number Bigger numbers have bigger magnitudes, and you can see the direction of the number in the result (so -4 is further from zero than +2 is). """ if x < 0: return -log10(-x) if x == 0: return 0 return log10(x) frac = [] orig = Fraction(digits) residue = 1/orig while residue: t = 1/residue frac.append(int(t)) residue = t - int(t) vulg = vulgarize(frac) error = magnitude(vulg - orig) print(f"%{len(digits)*2}s %+6.2f %r" % (vulg, error, frac))
mit
Python
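The loop above is the standard continued-fraction expansion, and vulgarize() folds the terms back into a vulgar fraction — a round-trip that is easy to test. A self-contained sketch of both directions (cf_terms and fold are illustrative names, not the script's own):

from fractions import Fraction

def cf_terms(x):
    # Continued-fraction expansion of a positive Fraction: repeatedly
    # take the integer part, then invert the remainder.
    terms = []
    while True:
        whole = x.numerator // x.denominator
        terms.append(whole)
        x = x - whole
        if x == 0:
            return terms
        x = 1 / x

def fold(terms):
    # Rebuild the vulgar fraction from continued-fraction terms.
    value = Fraction(terms[-1])
    for term in reversed(terms[:-1]):
        value = term + 1 / value
    return value

pi_ish = Fraction('3.14159265')
terms = cf_terms(pi_ish)
assert fold(terms) == pi_ish          # the expansion round-trips exactly
print(terms[:4], fold(terms[:4]))     # [3, 7, 15, 1] 355/113 -- the classic estimate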
0e2ef0a70fa6627c0eb4a292e69d3ed1f8500f36
Add the ability to graph the results
Rosuav/shed,Rosuav/shed,Rosuav/shed,Rosuav/shed,Rosuav/shed
estimate.py
estimate.py
import sys from fractions import Fraction from math import log10 def vulgarize(rpt): """Calculate a vulgar fraction for a given continued fraction""" f = Fraction(0) if tuple(rpt) == (0,): return f # Avoid dividing by zero for term in reversed(rpt): f = 1 / (term + f) return 1/f def magnitude(x): """Give an indication of the magnitude of a number Bigger numbers have bigger magnitudes, and you can see the direction of the number in the result (so -4 is further from zero than +2 is). """ if x < 0: return -log10(-x) if x == 0: return 0 return log10(x) digits = sys.argv[1] if "," in digits: digits = [int(d.strip()) for d in digits.split(",")] frac = vulgarize(digits) print(frac, digits, float(frac)) sys.exit(0) print("Estimating %s as a fraction..." % digits) frac = [] orig = Fraction(digits) residue = 1/orig accuracy = [] while residue: t = 1/residue frac.append(int(t)) residue = t - int(t) vulg = vulgarize(frac) error = magnitude(vulg - orig) print(f"%{len(digits)*2}s %+6.2f %r" % (vulg, error, frac)) if vulg != orig: # Estimate the accuracy by showing, in effect, how many # correct digits there are before there's an error. # (Accuracy becomes immeasurable for the last term.) accuracy.append(-log10(abs(vulg - orig))) if "--graph" in sys.argv: import matplotlib.pyplot as plt # Convert accuracy into accuracy-gained-last-time # From three terms [a, b, c], we look at the accuracy gained by # adding term b, and then plot that alongside c. from operator import sub accuracy = [0] + list(map(sub, accuracy, [0] + accuracy[:-1])) # Different y-scales - see https://matplotlib.org/gallery/api/two_scales.html fig, ax1 = plt.subplots() ax1.set_xlabel("N Terms") ax1.set_ylabel("Term", color="tab:red") ax1.set_yscale("log") # Since accuracy is already, in effect, logarithmic, do the same here. ax1.plot(frac, color="tab:red") ax1.tick_params(axis="y", labelcolor="tab:red") ax2 = ax1.twinx() ax2.set_ylabel("Accuracy gained", color="tab:blue") ax2.plot(accuracy, color="tab:blue") ax2.tick_params(axis="y", labelcolor="tab:blue") fig.tight_layout() plt.show()
import sys from fractions import Fraction from math import log10 def vulgarize(rpt): """Calculate a vulgar fraction for a given continued fraction""" f = Fraction(0) if tuple(rpt) == (0,): return f # Avoid dividing by zero for term in reversed(rpt): f = 1 / (term + f) return 1/f def magnitude(x): """Give an indication of the magnitude of a number Bigger numbers have bigger magnitudes, and you can see the direction of the number in the result (so -4 is further from zero than +2 is). """ if x < 0: return -log10(-x) if x == 0: return 0 return log10(x) digits = sys.argv[1] if "," in digits: digits = [int(d.strip()) for d in digits.split(",")] frac = vulgarize(digits) print(frac, digits, float(frac)) sys.exit(0) print("Estimating %s as a fraction..." % digits) frac = [] orig = Fraction(digits) residue = 1/orig while residue: t = 1/residue frac.append(int(t)) residue = t - int(t) vulg = vulgarize(frac) error = magnitude(vulg - orig) print(f"%{len(digits)*2}s %+6.2f %r" % (vulg, error, frac))
mit
Python
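The `map(sub, accuracy, [0] + accuracy[:-1])` line in the graphing commit above computes successive differences — the accuracy gained by each extra term. A more explicit spelling of the same transform on made-up numbers:

from operator import sub

acc = [2.1, 3.9, 6.4, 7.0]                       # cumulative digits of accuracy
gained = list(map(sub, acc, [0] + acc[:-1]))
same = [b - a for a, b in zip([0] + acc[:-1], acc)]
print(gained == same)  # True -- both give ~[2.1, 1.8, 2.5, 0.6]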
a54cb5529e5611b2d21c837d5422e31abd8d2819
Add :q alias for quit
bebraw/Placidity
placidity/commands/quit/quit.py
placidity/commands/quit/quit.py
class Quit: aliases = ('quit', 'quit()', ':q', ) description = 'Quits the application' def execute(self): raise SystemExit
class Quit: aliases = ('quit', 'quit()', ) description = 'Quits the application' def execute(self): raise SystemExit
mit
Python
e2d009c2e64340d101319824af1130bb92b4b021
Add debug logger method to utils
data-8/DS8-Interact,data-8/DS8-Interact,data-8/DS8-Interact
app/util.py
app/util.py
import os def chown(username, path, destination): """Set owner and group of file to that of the parent directory.""" s = os.stat(path) os.chown(os.path.join(path, destination), s.st_uid, s.st_gid) def construct_path(path, format, *args): """Constructs a path using locally available variables.""" return os.path.join(path.format(**format), *args) def logger(config): """ Returns a logger if development mode, else a no-op. """ def log(message): print('[Debug]: {}'.format(message)) if config['DEBUG']: return log else: return lambda x: None
import os def chown(username, path, destination): """Set owner and group of file to that of the parent directory.""" s = os.stat(path) os.chown(os.path.join(path, destination), s.st_uid, s.st_gid) def construct_path(path, format, *args): """Constructs a path using locally available variables.""" return os.path.join(path.format(**format), *args)
apache-2.0
Python
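logger() above returns either a real closure or a no-op depending on the config dict — a conditional-callable pattern that keeps call sites free of DEBUG checks. Usage, assuming the logger function from the record above:

debug_log = logger({'DEBUG': True})
quiet_log = logger({'DEBUG': False})

debug_log('starting copy')  # prints: [Debug]: starting copy
quiet_log('starting copy')  # does nothing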
cf6ddfdac8a56194ad1297921a390be541d773cc
Remove last digit of version number if it's 0.
shaurz/devo
app_info.py
app_info.py
# coding=UTF8 import datetime name = "Devo" release_date = datetime.date(2012, 12, 13) version = (1, 0, 0) version_string = ".".join(str(x) for x in (version if version[2] != 0 else version[:2])) identifier = "com.iogopro.devo" copyright = u"Copyright © 2010-2012 Luke McCarthy" developer = "Developer: Luke McCarthy <[email protected]>" company_name = "Iogopro Software" url = "http://iogopro.com/devo"
# coding=UTF8 import datetime name = "Devo" release_date = datetime.date(2012, 12, 13) version = (1, 0, 0) version_string = ".".join(str(x) for x in version) identifier = "com.iogopro.devo" copyright = u"Copyright © 2010-2012 Luke McCarthy" developer = "Developer: Luke McCarthy <[email protected]>" company_name = "Iogopro Software" url = "http://iogopro.com/devo"
mit
Python
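The conditional expression in the new version_string drops a trailing zero patch number while leaving full versions intact; a tiny check of that expression alone:

version = (1, 0, 0)
print(".".join(str(x) for x in (version if version[2] != 0 else version[:2])))  # 1.0
version = (1, 2, 3)
print(".".join(str(x) for x in (version if version[2] != 0 else version[:2])))  # 1.2.3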
1e7c95ee7d920a5d0f312608b323c7449ca4fe1c
Bump version.
Floobits/floobits-emacs
floobits.py
floobits.py
#!/usr/bin/env python
# coding: utf-8

import os

from floo import emacs_handler
from floo.common import migrations
from floo.common import reactor
from floo.common import utils
from floo.common import shared as G


def cb(port):
    print('Now listening on %s' % port)


def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.5.6'
    utils.reload_settings()

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
        utils.reload_settings()
    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()
    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# coding: utf-8

import os

from floo import emacs_handler
from floo.common import migrations
from floo.common import reactor
from floo.common import utils
from floo.common import shared as G


def cb(port):
    print('Now listening on %s' % port)


def main():
    G.__VERSION__ = '0.11'
    G.__PLUGIN_VERSION__ = '1.5.5'
    utils.reload_settings()

    if not os.path.exists(G.FLOORC_JSON_PATH):
        migrations.migrate_floorc()
        utils.reload_settings()
    migrations.rename_floobits_dir()
    migrations.migrate_symlinks()
    try:
        utils.normalize_persistent_data()
    except Exception:
        pass

    emacs = emacs_handler.EmacsHandler()
    G.emacs = emacs
    _, port = reactor.reactor.listen(emacs)
    utils.set_timeout(cb, 100, port)
    reactor.reactor.block()


if __name__ == '__main__':
    main()
apache-2.0
Python
5950997c8925804338f224f1278c3018479dab09
scale pixels to 16 shades
KFW/peggy.pi.pic
ppp.py
ppp.py
#! /usr/bin/python
"""
ppp.py
peggy.pi.pic
Take picture with Raspberry Pi camera and then display
as 25 x 25 pixel image (16 shades) on Peggy2
"""
# http://picamera.readthedocs.org/en/release-1.9/recipes1.html#capturing-to-a-pil-image

import io
import time
import picamera
from PIL import Image

# Create the in-memory stream
stream = io.BytesIO()
with picamera.PiCamera() as camera:
    camera.hflip = True
    camera.vflip = True
    camera.start_preview()
    time.sleep(2)
    camera.capture(stream, format='bmp')
# "Rewind" the stream to the beginning so we can read its content
stream.seek(0)
image = Image.open(stream)

# crop square
image = image.crop((280, 0, 1000, 720))
# convert to grey
image = image.convert('L')

image.thumbnail((25, 25))
pxls = list(image.getdata())

# convert pixels to 16 levels from 256
# note: may want to check range of values and rescale
# in order to preserve as much info as possible
maxpxl = max(pxls)  # stretch contrast over the observed pixel range
minpxl = min(pxls)
deltapxl = maxpxl - minpxl
for i, p in enumerate(pxls):
    scaledpxl = (pxls[i] - minpxl) * 255 / deltapxl
    pxls[i] = scaledpxl // 16

# # look at pixel values in 25 x 25 array
# i = 0
# for p in pxls:
#     print p,
#     if i % 25 == 24:
#         print '\n'
#     i += 1

image.putdata(pxls, scale=16)  # scale by 16 for regular display

# save image to file as test
imgout = open('/home/pi/temp.bmp', 'w')
image.save(imgout)
imgout.close()
#! /usr/bin/python
"""
ppp.py
peggy.pi.pic
Take picture with Raspberry Pi camera and then display
as 25 x 25 pixel image (16 shades) on Peggy2
"""
# http://picamera.readthedocs.org/en/release-1.9/recipes1.html#capturing-to-a-pil-image

import io
import time
import picamera
from PIL import Image

# Create the in-memory stream
stream = io.BytesIO()
with picamera.PiCamera() as camera:
    camera.hflip = True
    camera.vflip = True
    camera.start_preview()
    time.sleep(2)
    camera.capture(stream, format='bmp')
# "Rewind" the stream to the beginning so we can read its content
stream.seek(0)
image = Image.open(stream)

# crop square
image = image.crop((280, 0, 1000, 720))
# convert to grey
image = image.convert('L')

# # test - show image
# image.show()

image.thumbnail((25, 25))

# # save image to file as test
# imgout = open('/home/pi/temp.bmp', 'w')
# image.save(imgout)
# imgout.close()

pxls = list(image.getdata())

# convert pixels to 16 levels from 256
for i, p in enumerate(pxls):
    pxls[i] = p // 16

# look at pixel values in 25 x 25 array
i = 0
for p in pxls:
    print p,
    if i % 25 == 24:
        print '\n'
    i += 1
mit
Python
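The contrast stretch in the new version maps the darkest pixel to shade 0 and the brightest to shade 15 before quantizing. A small worked sketch of the same arithmetic, with made-up grey levels (note the script itself has no guard for a perfectly uniform frame, where deltapxl would be zero):

pxls = [40, 100, 220]  # hypothetical grey levels from a low-contrast frame
minpxl, maxpxl = min(pxls), max(pxls)
deltapxl = maxpxl - minpxl
print([((p - minpxl) * 255 / deltapxl) // 16 for p in pxls])
# Python 3 prints [0.0, 5.0, 15.0]; under the script's Python 2, [0, 5, 15]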
7bf84875d5999a537a5689df4c1bb9ff6ce950ae
Remove forgotten test link
Data2Semantics/linkitup,Data2Semantics/linkitup,Data2Semantics/linkitup
src/app/sameas/plugin.py
src/app/sameas/plugin.py
'''
Created on 4 Nov 2013

@author: cmarat
'''

from flask import request, jsonify
from flask.ext.login import login_required
import requests

from app import app

SAMEAS_URL = "http://sameas.org/json"


@app.route('/sameas', methods=['POST'])
@login_required
def link_to_sameas():
    # Retrieve the article from the post
    article = request.get_json()
    article_id = article['article_id']

    app.logger.debug("Running sameAs.org plugin for article {}".format(article_id))

    # Get article links
    match_items = article['links']

    matches = {}
    for item in match_items:
        item_link = item['link']
        original_qname = "figshare_{}".format(item['id'])

        # Query sameas.org
        response = requests.get(SAMEAS_URL, params={'uri': item_link})
        hits = response.json()

        # Make a list of all links returned by sameas.org
        sameas_links = [uri for h in hits for uri in h['duplicates'] if uri != item_link]

        for uri in sameas_links:
            # Create the match dictionary
            match = {'type': "mapping",
                     'uri': uri,
                     'web': uri,
                     'show': uri,
                     'original': original_qname}

            # Append it to all matches
            matches[uri] = match

    if matches == {}:
        matches = None

    # Return the matches
    return jsonify({'title': 'sameAs.org links', 'urls': matches})
'''
Created on 4 Nov 2013

@author: cmarat
'''

from flask import request, jsonify
from flask.ext.login import login_required
import requests

from app import app

SAMEAS_URL = "http://sameas.org/json"


@app.route('/sameas', methods=['POST'])
@login_required
def link_to_sameas():
    # Retrieve the article from the post
    article = request.get_json()
    article_id = article['article_id']

    app.logger.debug("Running sameAs.org plugin for article {}".format(article_id))

    # Get article links
    match_items = article['links']
    match_items.append({u'link': u'http://dbpedia.org/resource/Resource_Description_Framework', u'id': 9999})

    matches = {}
    for item in match_items:
        item_link = item['link']
        original_qname = "figshare_{}".format(item['id'])

        # Query sameas.org
        response = requests.get(SAMEAS_URL, params={'uri': item_link})
        hits = response.json()

        # Make a list of all links returned by sameas.org
        sameas_links = [uri for h in hits for uri in h['duplicates'] if uri != item_link]

        for uri in sameas_links:
            # Create the match dictionary
            match = {'type': "mapping",
                     'uri': uri,
                     'web': uri,
                     'show': uri,
                     'original': original_qname}

            # Append it to all matches
            matches[uri] = match

    if matches == {}:
        matches = None

    # Return the matches
    return jsonify({'title': 'sameAs.org links', 'urls': matches})
mit
Python
e8c71806e5f10c46fe4ac3e81322e9a44b42f933
Simplify todo example
hhatto/autopep8,SG345/autopep8,MeteorAdminz/autopep8,vauxoo-dev/autopep8,Vauxoo/autopep8,Vauxoo/autopep8,vauxoo-dev/autopep8,MeteorAdminz/autopep8,SG345/autopep8,hhatto/autopep8
test/todo.py
test/todo.py
"""Incomplete fixes.""" # E501: This should be wrapped similar to how pprint does it {'2323k2323': 24232323, '2323323232323': 3434343434343434, '34434343434535535': 3434343434343434, '4334343434343': 3434343434} # See below {'2323323232323': 3434343434343434, '2323k2323': 24232323, '34434343434535535': 3434343434343434, '4334343434343': 3434343434} # W601: Handle complicated cases x = {1: 2} y = {} y.has_key(0) + x.has_key(x.has_key(0) + x.has_key(x.has_key(0) + x.has_key(1))) # E702: Indent correctly when breaking at semicolon ( 1, 2, 3); 4; 5; 5 # pyflakes
"""Incomplete fixes.""" # E501: This should be wrapped similar to how pprint does it {'2323k2323': 24232323, '2323323232323': 3434343434343434, '34434343434535535': 3434343434343434, '4334343434343': 3434343434} # See below {'2323323232323': 3434343434343434, '2323k2323': 24232323, '34434343434535535': 3434343434343434, '4334343434343': 3434343434} # W601: Handle complicated cases x = {1: 2} y = {} y.has_key(0) + x.has_key(x.has_key(0) + x.has_key(x.has_key(0) + x.has_key(1))) # E702: Indent correctly when breaking at semicolon from alnair.exception import ( NoSuchDirectoryError, NoSuchFileError, UndefinedPackageError, ); NoSuchDirectoryError; NoSuchFileError; UndefinedPackageError # pyflakes
mit
Python
631a231fc2a63dfc0b6d051aa6cef49bd67d80a6
add WSGI
gabisurita/99party,gabisurita/99party,gabisurita/99party
run.py
run.py
from app import app
import mapping
import models

wsgi = app.wsgifunc()

if __name__ == "__main__":
    models.createDB()
    app.run()
from app import app
import mapping
import models

if __name__ == "__main__":
    models.createDB()
    app.run()
apache-2.0
Python
e7df2d658cdb0a3664b914d0577ea08da2845f08
fix run.py python interpreter
TransitSurveyor/API,TransitSurveyor/API,TransitSurveyor/Dashboard,TransitSurveyor/Dashboard,TransitSurveyor/Dashboard
run.py
run.py
#!flask/bin/python
from api import app
app.run(debug = True)
#!env/bin/python
from api import app
app.run(debug = True)
mit
Python
603cf4cab90e6655a5aa26269a93376d13dd7fe1
Fix package installing
colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager
lib/pacman/sync.py
lib/pacman/sync.py
from lib.pacman.command import execute


def refresh_force():
    """pacman -Syy"""
    execute('sudo -S pacman -Syy', discard_output=True)


def system_upgrade():
    """pacman -Syu"""
    execute('sudo -S pacman -Syu --noconfirm', discard_output=True)


def install(package, asdeps=False):
    """pacman -S [--asdeps] package"""
    option = ['--noconfirm', '--needed']
    if asdeps:
        option.append('--asdeps')
    command = 'sudo -S pacman -S {} {}'.format(' '.join(option), ' '.join(package))
    execute(command, discard_output=True)


def exist(package):
    """check 'pacman -Si package' return code"""
    result = execute('pacman -Ss "^{}$"'.format(package))
    return not bool(result.returncode)
from lib.pacman.command import execute


def refresh_force():
    """pacman -Syy"""
    execute('sudo -S pacman -Syy', discard_output=True)


def system_upgrade():
    """pacman -Syu"""
    execute('sudo -S pacman -Syu --noconfirm', discard_output=True)


def install(package, asdeps=False):
    """pacman -S [--asdeps] package"""
    option = ['--noconfirm', '--needed']
    if asdeps:
        option.append('--asdeps')
    command = 'sudo -S pacman -S {} {}'.format(' '.join(option), package)
    execute(command, discard_output=True)


def exist(package):
    """check 'pacman -Si package' return code"""
    result = execute('pacman -Ss "^{}$"'.format(package))
    return not bool(result.returncode)
mit
Python
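The fix above matters because install() receives an iterable of package names; the old code interpolated the list object itself into the shell command. A quick illustration of the two format calls (package names are examples, not from the repo):

option = ['--noconfirm', '--needed']
package = ['python', 'python-pip']
print('sudo -S pacman -S {} {}'.format(' '.join(option), ' '.join(package)))
# fixed:  sudo -S pacman -S --noconfirm --needed python python-pip
print('sudo -S pacman -S {} {}'.format(' '.join(option), package))
# broken: sudo -S pacman -S --noconfirm --needed ['python', 'python-pip']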
612c809a68640fea9130952fdd626ee0118646bb
Fix code style for task group model
selahssea/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core
src/ggrc_workflows/models/task_group.py
src/ggrc_workflows/models/task_group.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from ggrc import db
from ggrc.models.mixins import (
    Titled, Slugged, Described, Timeboxed, WithContact
)
from ggrc.models.associationproxy import association_proxy
from ggrc.models.reflection import PublishOnly


class TaskGroup(
        WithContact, Timeboxed, Described, Titled, Slugged, db.Model):
    __tablename__ = 'task_groups'
    _title_uniqueness = False

    workflow_id = db.Column(
        db.Integer, db.ForeignKey('workflows.id'), nullable=False)
    lock_task_order = db.Column(db.Boolean(), nullable=True)

    task_group_objects = db.relationship(
        'TaskGroupObject', backref='task_group', cascade='all, delete-orphan')

    objects = association_proxy(
        'task_group_objects', 'object', 'TaskGroupObject')

    task_group_tasks = db.relationship(
        'TaskGroupTask', backref='task_group', cascade='all, delete-orphan')

    cycle_task_groups = db.relationship(
        'CycleTaskGroup', backref='task_group')

    sort_index = db.Column(
        db.String(length=250), default="", nullable=False)

    _publish_attrs = [
        'workflow',
        'task_group_objects',
        PublishOnly('objects'),
        'task_group_tasks',
        'lock_task_order',
        'sort_index',
        # Intentionally do not include `cycle_task_groups`
        # 'cycle_task_groups',
    ]

    def copy(self, _other=None, **kwargs):
        columns = [
            'title', 'description', 'workflow', 'sort_index', 'modified_by',
            'context'
        ]
        if(kwargs.get('clone_people', False)):
            columns.append('contact')
        target = self.copy_into(_other, columns, **kwargs)

        if kwargs.get('clone_objects', False):
            self.copy_objects(target, **kwargs)

        if kwargs.get('clone_tasks', False):
            self.copy_tasks(target, **kwargs)

        return target

    def copy_objects(self, target, **kwargs):
        for task_group_object in self.task_group_objects:
            target.task_group_objects.append(task_group_object.copy(
                task_group=target,
                context=target.context,
            ))
        return target

    def copy_tasks(self, target, **kwargs):
        for task_group_task in self.task_group_tasks:
            target.task_group_tasks.append(task_group_task.copy(
                None,
                task_group=target,
                context=target.context,
                clone_people=kwargs.get("clone_people", False),
            ))
        return target
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from ggrc import db
from ggrc.models.mixins import (
    Titled, Slugged, Described, Timeboxed, WithContact
)
from ggrc.models.associationproxy import association_proxy
from ggrc.models.reflection import PublishOnly


class TaskGroup(
        WithContact, Timeboxed, Described, Titled, Slugged, db.Model):
    __tablename__ = 'task_groups'
    _title_uniqueness = False

    workflow_id = db.Column(
        db.Integer, db.ForeignKey('workflows.id'), nullable=False)
    lock_task_order = db.Column(db.Boolean(), nullable=True)

    task_group_objects = db.relationship(
        'TaskGroupObject', backref='task_group', cascade='all, delete-orphan')

    objects = association_proxy(
        'task_group_objects', 'object', 'TaskGroupObject')

    task_group_tasks = db.relationship(
        'TaskGroupTask', backref='task_group', cascade='all, delete-orphan')

    cycle_task_groups = db.relationship(
        'CycleTaskGroup', backref='task_group')

    sort_index = db.Column(
        db.String(length=250), default="", nullable=False)

    _publish_attrs = [
        'workflow',
        'task_group_objects',
        PublishOnly('objects'),
        'task_group_tasks',
        'lock_task_order',
        'sort_index',
        # Intentionally do not include `cycle_task_groups`
        #'cycle_task_groups',
    ]

    def copy(self, _other=None, **kwargs):
        columns = [
            'title', 'description', 'workflow', 'sort_index', 'modified_by',
            'context'
        ]
        if(kwargs.get('clone_people', False)):
            columns.append('contact')
        target = self.copy_into(_other, columns, **kwargs)

        if kwargs.get('clone_objects', False):
            self.copy_objects(target, **kwargs)

        if kwargs.get('clone_tasks', False):
            self.copy_tasks(target, **kwargs)

        return target

    def copy_objects(self, target, **kwargs):
        for task_group_object in self.task_group_objects:
            target.task_group_objects.append(
                task_group_object.copy(
                    task_group=target,
                    context=target.context,
                ))
        return target

    def copy_tasks(self, target, **kwargs):
        for task_group_task in self.task_group_tasks:
            target.task_group_tasks.append(
                task_group_task.copy(None,
                    task_group=target,
                    context=target.context,
                    clone_people=kwargs.get("clone_people", False),
                ))
        return target
apache-2.0
Python
e37174e733b7b186a40cc82ffe95c3d10014bd2f
Check for errors
innogames/igcollect
src/redis.py
src/redis.py
#!/usr/bin/env python
#
# igcollect - Redis
#
# Copyright (c) 2016 InnoGames GmbH
#

from argparse import ArgumentParser
from subprocess import check_output
from time import time


def parse_args():
    parser = ArgumentParser()
    parser.add_argument('--prefix', default='redis')
    return parser.parse_args()


def main():
    args = parse_args()
    redis_info = check_output(['redis-cli', '-a', redis_pwd(), 'info'])
    redis_stats = {}

    for x in redis_info.splitlines():
        if x.find(':') != -1:
            key, value = x.split(':')
            redis_stats[key] = value

    template = args.prefix + '.{} {} ' + str(int(time()))
    headers = (
        'total_connections_received',
        'total_commands_processed',
        'keyspace_hits',
        'keyspace_misses',
        'used_memory',
        'used_cpu_sys',
        'used_cpu_user',
        'used_cpu_sys_children',
        'used_cpu_user_children',
    )
    for metric in headers:
        print(template.format(metric, redis_stats[metric]))


def redis_pwd():
    """Get the Redis password from the configuration"""
    with open("/etc/redis/redis.conf") as fd:
        secret_cfg = fd.read().splitlines()
    for line in secret_cfg:
        line = line.strip()
        if line.startswith("requirepass"):
            return line.split(" ")[1].strip()
    return ''


if __name__ == '__main__':
    main()
#!/usr/bin/env python
#
# igcollect - Redis
#
# Copyright (c) 2016 InnoGames GmbH
#

from argparse import ArgumentParser
from subprocess import Popen, PIPE
from time import time


def parse_args():
    parser = ArgumentParser()
    parser.add_argument('--prefix', default='redis')
    return parser.parse_args()


def main():
    args = parse_args()
    redis_info = Popen(('redis-cli', '-a', redis_pwd(), 'info'),
                       stdout=PIPE).stdout.read()
    redis_info = redis_info.splitlines()
    redis_stats = {}

    for x in redis_info:
        if x.find(':') != -1:
            key, value = x.split(':')
            redis_stats[key] = value

    template = args.prefix + '.{} {} ' + str(int(time()))
    headers = (
        'total_connections_received',
        'total_commands_processed',
        'keyspace_hits',
        'keyspace_misses',
        'used_memory',
        'used_cpu_sys',
        'used_cpu_user',
        'used_cpu_sys_children',
        'used_cpu_user_children',
    )
    for metric in headers:
        print(template.format(metric, redis_stats[metric]))


def redis_pwd():
    """Get the Redis password from the configuration"""
    with open("/etc/redis/redis.conf") as fd:
        secret_cfg = fd.read().splitlines()
    for line in secret_cfg:
        line = line.strip()
        if line.startswith("requirepass"):
            return line.split(" ")[1].strip()
    return ''


if __name__ == '__main__':
    main()
mit
Python
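The point of switching to check_output() above is error checking: it raises CalledProcessError on a non-zero exit status, whereas reading Popen(...).stdout silently hands back whatever was printed. A hedged sketch of the failure path (the command and handling here are illustrative, not from the repo):

from subprocess import CalledProcessError, check_output

try:
    output = check_output(['redis-cli', '-a', 'secret', 'info'])
except CalledProcessError as err:
    # Raised whenever redis-cli exits non-zero, so a failed collection
    # cannot be mistaken for valid stats output.
    print('redis-cli failed with exit code {}'.format(err.returncode))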
9d8de33a72e821bf0fb7415f73ba8cfabc3ba93b
Update version 0.7.6 -> 0.7.7
XiaonuoGantan/pywebsocket,XiaonuoGantan/pywebsocket
src/setup.py
src/setup.py
#!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


"""Set up script for mod_pywebsocket.
"""


from distutils.core import setup
import sys


_PACKAGE_NAME = 'mod_pywebsocket'

if sys.version < '2.3':
    print >> sys.stderr, '%s requires Python 2.3 or later.' % _PACKAGE_NAME
    sys.exit(1)

setup(author='Yuzo Fujishima',
      author_email='[email protected]',
      description='WebSocket extension for Apache HTTP Server.',
      long_description=(
          'mod_pywebsocket is an Apache HTTP Server extension for '
          'WebSocket (http://tools.ietf.org/html/rfc6455). '
          'See mod_pywebsocket/__init__.py for more detail.'),
      license='See COPYING',
      name=_PACKAGE_NAME,
      packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'],
      url='http://code.google.com/p/pywebsocket/',
      # See the source of distutils.version, distutils.versionpredicate and
      # distutils.dist to understand how to name version numbers.
      version='0.7.7',
      )


# vi:sts=4 sw=4 et
#!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


"""Set up script for mod_pywebsocket.
"""


from distutils.core import setup
import sys


_PACKAGE_NAME = 'mod_pywebsocket'

if sys.version < '2.3':
    print >> sys.stderr, '%s requires Python 2.3 or later.' % _PACKAGE_NAME
    sys.exit(1)

setup(author='Yuzo Fujishima',
      author_email='[email protected]',
      description='WebSocket extension for Apache HTTP Server.',
      long_description=(
          'mod_pywebsocket is an Apache HTTP Server extension for '
          'WebSocket (http://tools.ietf.org/html/rfc6455). '
          'See mod_pywebsocket/__init__.py for more detail.'),
      license='See COPYING',
      name=_PACKAGE_NAME,
      packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'],
      url='http://code.google.com/p/pywebsocket/',
      # See the source of distutils.version, distutils.versionpredicate and
      # distutils.dist to understand how to name version numbers.
      version='0.7.6',
      )


# vi:sts=4 sw=4 et
bsd-3-clause
Python
394581407a7788b315da97f870a6dcd0bfe4bd54
fix js MIME type
adrian7/ExchangeRates,landsurveyorsunited/ExchangeRates,adrian7/ExchangeRates,hippasus/ExchangeRates,landsurveyorsunited/ExchangeRates
src/utils.py
src/utils.py
#!/usr/local/bin/python
# -*- coding: utf-8 -*-

import json


def is_none(target):
    return target is None


def is_none_or_empty(target):
    return is_none(target) or len(target) == 0


def write_json_output(response, dic):
    response_text, content_type = json.dumps(dic), "application/json"
    _do_write(response, response_text, content_type)


def write_jsonp_output(response, dic, jsonp_callback):
    if is_none_or_empty(jsonp_callback):
        write_json_output(response, dic)
    else:
        response_text, content_type = jsonp_callback + "(" + json.dumps(dic) + ")", "application/javascript"
        _do_write(response, response_text, content_type)


def _do_write(response, response_text, content_type):
    response.headers['Content-Type'] = content_type
    response.out.write(response_text)
#!/usr/local/bin/python
# -*- coding: utf-8 -*-

import json


def is_none(target):
    return target is None


def is_none_or_empty(target):
    return is_none(target) or len(target) == 0


def write_json_output(response, dic):
    response_text, content_type = json.dumps(dic), "application/json"
    _do_write(response, response_text, content_type)


def write_jsonp_output(response, dic, jsonp_callback):
    if is_none_or_empty(jsonp_callback):
        write_json_output(response, dic)
    else:
        response_text, content_type = jsonp_callback + "(" + json.dumps(dic) + ")", "application/x-javascript"
        _do_write(response, response_text, content_type)


def _do_write(response, response_text, content_type):
    response.headers['Content-Type'] = content_type
    response.out.write(response_text)
mit
Python
6226c17ed1e9313dc202b44fc69b098a09140983
fix translation
cderici/pycket,magnusmorton/pycket,vishesh/pycket,magnusmorton/pycket,vishesh/pycket,cderici/pycket,samth/pycket,vishesh/pycket,pycket/pycket,krono/pycket,samth/pycket,samth/pycket,krono/pycket,magnusmorton/pycket,pycket/pycket,pycket/pycket,krono/pycket,cderici/pycket
pycket/values_regex.py
pycket/values_regex.py
from pycket.base import W_Object
from pycket.error import SchemeException
from pycket import values, values_string
from pycket import regexp

from rpython.rlib.rsre import rsre_core

CACHE = regexp.RegexpCache()


class W_AnyRegexp(W_Object):
    _immutable_fields_ = ["source"]
    errorname = "regexp"

    def __init__(self, source):
        self.source = source
        self.code = None

    def ensure_compiled(self):
        if self.code is None:
            code, flags, groupcount, groupindex, indexgroup, group_offsets = regexp.compile(CACHE, self.source, 0)
            self.code = code
            self.flags = flags
            self.groupcount = groupcount
            self.groupindex = groupindex
            self.indexgroup = indexgroup
            self.group_offsets = group_offsets

    def match_string(self, s):
        self.ensure_compiled()
        endpos = len(s)
        ctx = rsre_core.search(self.code, s)
        if not ctx:
            return None
        result = [ctx.group(i) for i in range(self.groupcount + 1)]
        return result

    def match_port(self, w_port):
        max_match = w_port._length_up_to_end()
        pos = w_port.tell()
        for i in range(max_match):
            w_port.seek(pos)
            s = w_port.read(i)
            result = self.match_string(s)
            if result:
                return result
        return None

    def eqv(self, other):
        if not isinstance(other, W_AnyRegexp):
            return False
        if type(self) is type(other):
            if (self.code or other.code):
                return self.code == other.code
            else:
                return self.source == other.source
        return False


class W_Regexp(W_AnyRegexp): pass
class W_PRegexp(W_AnyRegexp): pass
class W_ByteRegexp(W_AnyRegexp): pass
class W_BytePRegexp(W_AnyRegexp): pass
from pycket.base import W_Object
from pycket.error import SchemeException
from pycket import values, values_string
from pycket import regexp

from rpython.rlib.rsre import rsre_core

CACHE = regexp.RegexpCache()


class W_AnyRegexp(W_Object):
    _immutable_fields_ = ["source"]
    errorname = "regexp"

    def __init__(self, source):
        self.source = source
        self.code = None

    def ensure_compiled(self):
        if self.code is None:
            code, flags, groupcount, groupindex, indexgroup, group_offsets = regexp.compile(CACHE, self.source, 0)
            self.code = code
            self.flags = flags
            self.groupcount = groupcount
            self.groupindex = groupindex
            self.indexgroup = indexgroup
            self.group_offsets = group_offsets

    def match_string(self, s):
        self.ensure_compiled()
        endpos = len(s)
        ctx = rsre_core.search(self.code, s)
        if not ctx:
            return None
        result = [ctx.group(i) for i in range(self.groupcount + 1)]
        return result

    def match_port(self, w_port):
        max_match = w_port._length_up_to_end()
        pos = w_port.tell()
        for i in range(max_match):
            w_port.seek(pos)
            s = w_port.read(i)
            result = self.match_string(s)
            if result:
                return result
        return None

    def eqv(self, other):
        return type(self) is type(other) and \
            (self.code == other.code if (self.code or other.code) \
             else self.source == other.source)


class W_Regexp(W_AnyRegexp): pass
class W_PRegexp(W_AnyRegexp): pass
class W_ByteRegexp(W_AnyRegexp): pass
class W_BytePRegexp(W_AnyRegexp): pass
mit
Python
1069574db36d86745fa4357ff2bc35334883ad86
Bump app version to 2019.7.1
kernelci/kernelci-backend,kernelci/kernelci-backend
app/handlers/__init__.py
app/handlers/__init__.py
__version__ = "2019.7.1" __versionfull__ = __version__
__version__ = "2019.7.0" __versionfull__ = __version__
lgpl-2.1
Python
20d5e52221713ef1ab1bc9cd74b47520bea69ac6
Add tasks TODO
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
api/experiments/tasks.py
api/experiments/tasks.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

import logging

from api.settings import CeleryTasks
from api.celery_api import app
from experiments.models import Experiment
from experiments.task_status import ExperimentStatus

logger = logging.getLogger('polyaxon.api.experiments')


def get_experiment_run_status(experiment):
    job_id, status = ExperimentStatus.get_status(experiment.id)
    return {'job_id': job_id, 'status': status}


def start_experiment(experiment):
    job_id, status = ExperimentStatus.get_status(experiment.id)
    if not status or ExperimentStatus.is_final_status(status):
        job = execute.delay(experiment.id)
        ExperimentStatus.set_status(experiment.id, job.id, 'PENDING')
        return {'status': 'PENDING'}
    return {'status': status}


@app.task(name=CeleryTasks.START_EXPERIMENT)
def execute(experiment_id):
    try:
        experiment = Experiment.objects.get(id=experiment_id)
    except Experiment.DoesNotExist:
        logger.info('Experiment id `{}` does not exist'.format(experiment_id))
        return

    # TODO: fix me
    return experiment
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

import logging

import polyaxon as plx

from api.settings import CeleryTasks
from api.celery_api import app
from experiments.models import Experiment
from experiments.task_status import ExperimentStatus

logger = logging.getLogger('polyaxon.api.experiments')


def get_experiment_run_status(experiment):
    job_id, status = ExperimentStatus.get_status(experiment.id)
    return {'job_id': job_id, 'status': status}


def start_experiment(experiment):
    job_id, status = ExperimentStatus.get_status(experiment.id)
    if not status or ExperimentStatus.is_final_status(status):
        job = execute.delay(experiment.id)
        ExperimentStatus.set_status(experiment.id, job.id, 'PENDING')
        return {'status': 'PENDING'}
    return {'status': status}


@app.task(name=CeleryTasks.START_EXPERIMENT)
def execute(experiment_id):
    try:
        experiment = Experiment.objects.get(id=experiment_id)
    except Experiment.DoesNotExist:
        logger.info('Experiment id `{}` does not exist'.format(experiment_id))
        return

    plx_xp = plx.experiments.create_experiment(experiment.to_config())
    plx_xp.train()
apache-2.0
Python
9e8b6f47f25d4445031e0c996bc8c92ba6da4cd3
Remove unnecessary else
AndyDeany/pygame-template
pygametemplate/core.py
pygametemplate/core.py
"""Module containing the core functions of pygametemplate.""" import os import sys import traceback from datetime import datetime import ctypes import pygame from pygametemplate.exceptions import CaughtFatalException TEST = bool(int(os.environ.get("TEST", "0"))) PATH = os.getcwd() def path_to(*path): """Return the complete absolute path of the path given.""" return os.path.join(PATH, *"/".join(path).split("/")) LOG_FILE = path_to("log.txt") def log(*error_message, **options): """Takes 1 or more variables and concatenates them to create the error message.""" fatal = options.get("fatal", True) # `fatal` option defaults to True error_message = "".join(map(str, error_message)) with open(LOG_FILE, "a") as log_file: log_file.write("{} - {}.\n".format(datetime.utcnow(), error_message)) log_file.write(traceback.format_exc() + "\n") if fatal: if TEST: raise # pylint: disable=misplaced-bare-raise text = ("An error has occurred:\n\n {}.\n\n\n" "Please check log.txt for details.").format(error_message) ctypes.windll.user32.MessageBoxW(0, text, "Error", 0) raise CaughtFatalException(sys.exc_info()[1]) # TODO: Add some code here to show an error message in game # Asset loading def load_image(image_name, fix_alphas=True, file_extension=".png"): """Load the image with the given `image_name` (excluding file extension). Setting `fix_alphas` to False enables the image to be able to fade. A different file extension can be specified via the `file_extension` keyword argument, which defaults to ".png". """ # TODO: Add stuff for loading images of the correct resolution # depending on the player's resolution settings. image_path = path_to("assets/images", image_name + file_extension) try: try: image = pygame.image.load(image_path) except pygame.error: raise IOError except IOError: log("Image file not found: ", image_name, file_extension) if fix_alphas: return image.convert_alpha() # Fixes per pixel alphas permanently return image.convert() def load_font(font_name, font_size, file_extension=".ttf"): """Load the font with the given `font_name` with the given `font_size`.""" font_path = path_to("assets/fonts", font_name + file_extension) try: return pygame.font.Font(font_path, font_size) except IOError: log("Font file not found: ", font_name, file_extension)
"""Module containing the core functions of pygametemplate.""" import os import sys import traceback from datetime import datetime import ctypes import pygame from pygametemplate.exceptions import CaughtFatalException TEST = bool(int(os.environ.get("TEST", "0"))) PATH = os.getcwd() def path_to(*path): """Return the complete absolute path of the path given.""" return os.path.join(PATH, *"/".join(path).split("/")) LOG_FILE = path_to("log.txt") def log(*error_message, **options): """Takes 1 or more variables and concatenates them to create the error message.""" fatal = options.get("fatal", True) # `fatal` option defaults to True error_message = "".join(map(str, error_message)) with open(LOG_FILE, "a") as log_file: log_file.write("{} - {}.\n".format(datetime.utcnow(), error_message)) log_file.write(traceback.format_exc() + "\n") if fatal: if TEST: raise # pylint: disable=misplaced-bare-raise text = ("An error has occurred:\n\n {}.\n\n\n" "Please check log.txt for details.").format(error_message) ctypes.windll.user32.MessageBoxW(0, text, "Error", 0) raise CaughtFatalException(sys.exc_info()[1]) else: pass # TODO: Add some code here to show an error message in game # Asset loading def load_image(image_name, fix_alphas=True, file_extension=".png"): """Load the image with the given `image_name` (excluding file extension). Setting `fix_alphas` to False enables the image to be able to fade. A different file extension can be specified via the `file_extension` keyword argument, which defaults to ".png". """ # TODO: Add stuff for loading images of the correct resolution # depending on the player's resolution settings. image_path = path_to("assets/images", image_name + file_extension) try: try: image = pygame.image.load(image_path) except pygame.error: raise IOError except IOError: log("Image file not found: ", image_name, file_extension) if fix_alphas: return image.convert_alpha() # Fixes per pixel alphas permanently return image.convert() def load_font(font_name, font_size, file_extension=".ttf"): """Load the font with the given `font_name` with the given `font_size`.""" font_path = path_to("assets/fonts", font_name + file_extension) try: return pygame.font.Font(font_path, font_size) except IOError: log("Font file not found: ", font_name, file_extension)
mit
Python
c306c3b408e880edfc6d49e31575dc91a31783bd
Switch reset command to use call_command instead of subprocess.
cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive
kive/metadata/management/commands/reset.py
kive/metadata/management/commands/reset.py
from optparse import make_option
import os
import shutil

from django.core.management.base import BaseCommand
from django.core.management import call_command

import kive.settings  # @UnresolvedImport


class Command(BaseCommand):
    help = 'Resets the database and loads sample data.'

    option_list = BaseCommand.option_list + (
        make_option('--load', '-l', help="fixture name to load"),
    )

    def handle(self, *args, **options):
        fixture = options['load']
        targets = ["CodeResources", "Datasets", "Logs", "Sandboxes",
                   "VerificationLogs", "VerificationScripts"]
        for target in targets:
            target_path = os.path.join(kive.settings.MEDIA_ROOT, target)
            if os.path.isdir(target_path):
                shutil.rmtree(target_path)
        call_command("flush", interactive=False)
        call_command("migrate")

        # flush truncates all tables, so we need to re-load this stuff.
        call_command("loaddata", "initial_groups")
        call_command("loaddata", "initial_user")
        call_command("loaddata", "initial_data")

        os.mkdir(os.path.join(kive.settings.MEDIA_ROOT, "Sandboxes"))

        if fixture:
            call_command("loaddata", fixture)
            fixture_folder = os.path.join("FixtureFiles", fixture)
            if os.path.isdir(fixture_folder):
                for child in os.listdir(fixture_folder):
                    source = os.path.join(fixture_folder, child)
                    if os.path.isdir(source):
                        destination = os.path.join(kive.settings.MEDIA_ROOT, child)
                        shutil.copytree(source, destination)
from optparse import make_option
import os
import shutil
import subprocess
import sys

from django.core.management.base import BaseCommand
from django.core.management import call_command

import kive.settings  # @UnresolvedImport


class Command(BaseCommand):
    help = 'Resets the database and loads sample data.'

    option_list = BaseCommand.option_list + (
        make_option('--load', '-l', help="fixture name to load"),
    )

    def handle(self, *args, **options):
        python = sys.executable
        manage_script = sys.argv[0]
        fixture = options['load']
        targets = ["CodeResources", "Datasets", "Logs", "Sandboxes",
                   "VerificationLogs", "VerificationScripts"]
        for target in targets:
            target_path = os.path.join(kive.settings.MEDIA_ROOT, target)
            if os.path.isdir(target_path):
                shutil.rmtree(target_path)
        subprocess.check_call([python, manage_script, "flush", "--noinput"])
        call_command("migrate")

        # flush truncates all tables, so we need to re-load this stuff.
        call_command("loaddata", "initial_groups")
        call_command("loaddata", "initial_user")
        call_command("loaddata", "initial_data")

        os.mkdir(os.path.join(kive.settings.MEDIA_ROOT, "Sandboxes"))

        if fixture:
            call_command("loaddata", fixture)
            fixture_folder = os.path.join("FixtureFiles", fixture)
            if os.path.isdir(fixture_folder):
                for child in os.listdir(fixture_folder):
                    source = os.path.join(fixture_folder, child)
                    if os.path.isdir(source):
                        destination = os.path.join(kive.settings.MEDIA_ROOT, child)
                        shutil.copytree(source, destination)
bsd-3-clause
Python
d77e3fbc0d59ca31ce028cc31f5d1e06f08900f4
Fix color effect decorator in GoL example (#20)
a5kin/hecate,a5kin/hecate
examples/game_of_life.py
examples/game_of_life.py
from hecate import core
from hecate import seeds
from hecate.core import color_effects


class GameOfLife(core.CellularAutomaton):
    """ The Idea of classic CA built with HECATE framework """
    state = core.IntegerProperty(max_val=1)

    class Topology:
        dimensions = 2
        lattice = core.OrthogonalLattice()
        neighborhood = core.MooreNeighborhood()
        border = core.TorusBorder()

    def emit(self):
        for i in range(len(self.buffers)):
            self.buffers[i].state = self.main.state

    def absorb(self):
        neighbors_alive = core.IntegerVariable()
        for i in range(len(self.buffers)):
            neighbors_alive += self.neighbors[i].buffer.state
        is_born = (8 >> neighbors_alive) & 1
        is_sustain = (12 >> neighbors_alive) & 1
        self.main.state = is_born | is_sustain & self.main.state

    @color_effects.MovingAverage
    def color(self):
        r = self.main.state * 255
        g = self.main.state * 255
        b = self.main.state * 255
        return (r, g, b)


class GameOfLifeStatic(GameOfLife):

    class Topology:
        dimensions = 2
        lattice = core.OrthogonalLattice()
        neighborhood = core.MooreNeighborhood()
        border = core.StaticBorder(1)


class GOLExperiment(core.Experiment):
    """ Particular experiment, to be loaded at runtime in future """
    word = "HECATE FIRST EXPERIMENT"
    size = (640, 360, )
    zoom = 3
    pos = [0, 0]
    seed = seeds.patterns.BigBang(
        pos=(320, 180),
        size=(100, 100),
        vals={
            "state": seeds.random.RandInt(0, 1),
        }
    )


class GOLExperiment2(core.Experiment):
    """ Experiment initialized with Primordial Soup pattern. """
    word = "HECATE FIRST EXPERIMENT"
    size = (640, 360, )
    zoom = 3
    pos = [0, 0]
    seed = seeds.patterns.PrimordialSoup(
        vals={
            "state": seeds.random.RandInt(0, 1),
        }
    )


if __name__ == "__main__":
    import moire
    ca = GameOfLife(GOLExperiment)
    gui = moire.GUI(runnable=ca)
    gui.run()
from hecate import core
from hecate import seeds
from hecate.core import color_effects


class GameOfLife(core.CellularAutomaton):
    """ The Idea of classic CA built with HECATE framework """
    state = core.IntegerProperty(max_val=1)

    class Topology:
        dimensions = 2
        lattice = core.OrthogonalLattice()
        neighborhood = core.MooreNeighborhood()
        border = core.TorusBorder()

    def emit(self):
        for i in range(len(self.buffers)):
            self.buffers[i].state = self.main.state

    def absorb(self):
        neighbors_alive = core.IntegerVariable()
        for i in range(len(self.buffers)):
            neighbors_alive += self.neighbors[i].buffer.state
        is_born = (8 >> neighbors_alive) & 1
        is_sustain = (12 >> neighbors_alive) & 1
        self.main.state = is_born | is_sustain & self.main.state

    @color_effects.moving_average
    def color(self):
        r = self.main.state * 255
        g = self.main.state * 255
        b = self.main.state * 255
        return (r, g, b)


class GameOfLifeStatic(GameOfLife):

    class Topology:
        dimensions = 2
        lattice = core.OrthogonalLattice()
        neighborhood = core.MooreNeighborhood()
        border = core.StaticBorder(1)


class GOLExperiment(core.Experiment):
    """ Particular experiment, to be loaded at runtime in future """
    word = "HECATE FIRST EXPERIMENT"
    size = (640, 360, )
    zoom = 3
    pos = [0, 0]
    seed = seeds.patterns.BigBang(
        pos=(320, 180),
        size=(100, 100),
        vals={
            "state": seeds.random.RandInt(0, 1),
        }
    )


class GOLExperiment2(core.Experiment):
    """ Experiment initialized with Primordial Soup pattern. """
    word = "HECATE FIRST EXPERIMENT"
    size = (640, 360, )
    zoom = 3
    pos = [0, 0]
    seed = seeds.patterns.PrimordialSoup(
        vals={
            "state": seeds.random.RandInt(0, 1),
        }
    )


if __name__ == "__main__":
    import moire
    ca = GameOfLife(GOLExperiment)
    gui = moire.GUI(runnable=ca)
    gui.run()
mit
Python
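The absorb() rule in the entry above encodes Conway's birth/survival conditions as bitmasks: 8 is 0b1000, so (8 >> n) & 1 is 1 exactly when n == 3, and 12 is 0b1100, so (12 >> n) & 1 is 1 when n is 2 or 3. A plain-Python check of that truth table, independent of the framework:

for n in range(9):
    print(n, (8 >> n) & 1, (12 >> n) & 1)
# is_born is 1 only for n == 3; is_sustain is 1 for n in (2, 3),
# which is exactly the classic B3/S23 Game of Life rule.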