commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
4bc55a6b1bdef357acd24e6aba34a57f689e9da0
|
bokeh/command/subcommands/__init__.py
|
bokeh/command/subcommands/__init__.py
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
return results
all = _collect()
del _collect
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
results = sorted(results, key=lambda attr: attr.name)
return results
all = _collect()
del _collect
|
Sort subcommands.all so the tested results are deterministic
|
Sort subcommands.all so the tested results are deterministic
|
Python
|
bsd-3-clause
|
phobson/bokeh,clairetang6/bokeh,aiguofer/bokeh,jakirkham/bokeh,msarahan/bokeh,mindriot101/bokeh,philippjfr/bokeh,schoolie/bokeh,stonebig/bokeh,azjps/bokeh,percyfal/bokeh,bokeh/bokeh,draperjames/bokeh,percyfal/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,msarahan/bokeh,quasiben/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,bokeh/bokeh,justacec/bokeh,dennisobrien/bokeh,ericmjl/bokeh,philippjfr/bokeh,dennisobrien/bokeh,aavanian/bokeh,azjps/bokeh,clairetang6/bokeh,ptitjano/bokeh,ericmjl/bokeh,azjps/bokeh,timsnyder/bokeh,timsnyder/bokeh,rs2/bokeh,mindriot101/bokeh,clairetang6/bokeh,jakirkham/bokeh,DuCorey/bokeh,bokeh/bokeh,azjps/bokeh,mindriot101/bokeh,aavanian/bokeh,aavanian/bokeh,azjps/bokeh,bokeh/bokeh,clairetang6/bokeh,aavanian/bokeh,philippjfr/bokeh,justacec/bokeh,aiguofer/bokeh,mindriot101/bokeh,schoolie/bokeh,draperjames/bokeh,phobson/bokeh,DuCorey/bokeh,aiguofer/bokeh,philippjfr/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,rs2/bokeh,stonebig/bokeh,ericmjl/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,jakirkham/bokeh,DuCorey/bokeh,jakirkham/bokeh,ptitjano/bokeh,quasiben/bokeh,percyfal/bokeh,percyfal/bokeh,justacec/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,rs2/bokeh,phobson/bokeh,justacec/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,schoolie/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,ptitjano/bokeh,phobson/bokeh,dennisobrien/bokeh,philippjfr/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,msarahan/bokeh,aiguofer/bokeh,stonebig/bokeh,DuCorey/bokeh,draperjames/bokeh,jakirkham/bokeh,aavanian/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,timsnyder/bokeh,draperjames/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh
|
4fce2955ce76c1f886b2a234fe9d0c576843fefd
|
Dice.py
|
Dice.py
|
import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
class DiceBag(object):
def __init__(self):
self.dice = []
self.dice_roll = []
def add_die_obj(self, die_obj):
self.dice.append(die_obj)
def remove_die(self, die_obj):
self.dice.remove(die_obj)
def remove_die_index(self, index):
del self.dice[index]
def add_die_notation(self, standard_die_notation):
lst_notation = standard_die_notation.split("d")
i = 0
while (i<int(lst_notation[0])):
die1 = Die(int(lst_notation[1]))
self.dice.append(die1)
i = i +1
def roll_all(self):
for obj in self.dice:
obj.roll_die()
self.dice_roll.append(obj.get_die_face())
def get_dice_roll(self):
return self.dice_roll
|
import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
class DiceBag(object):
def __init__(self):
self.dice = []
self.dice_roll = []
def add_die_obj(self, die_obj):
self.dice.append(die_obj)
def remove_die(self, die_obj):
self.dice.remove(die_obj)
def remove_die_index(self, index):
del self.dice[index]
def add_die_notation(self, standard_die_notation):
lst_notation = standard_die_notation.split("d")
i = 0
while (i<int(lst_notation[0])):
die1 = Die(int(lst_notation[1]))
self.dice.append(die1)
i = i +1
def roll_all(self):
for obj in self.dice:
obj.roll_die()
self.dice_roll.append(obj.get_die_face())
def hold_all(self, held):
for obj in self.dice:
obj.change_held(held)
def get_dice_roll(self):
return self.dice_roll
|
Add hold all function to dicebag
|
Add hold all function to dicebag
|
Python
|
mit
|
achyutreddy24/DiceGame
|
628de346d3cf22342bf09e9ad3337a4408ed5662
|
properties/files.py
|
properties/files.py
|
from __future__ import absolute_import, unicode_literals, print_function, division
from builtins import open
from future import standard_library
standard_library.install_aliases()
import six
import json, numpy as np, os, io
from .base import Property
from . import exceptions
class File(Property):
mode = 'r' #: mode for opening the file.
def validator(self, instance, value):
if hasattr(value, 'read'):
prev = getattr(self, '_p_' + self.name, None)
if prev is not None and value is not prev:
prev.close()
return value
if isinstance(value, six.string_types) and os.path.isfile(value):
return open(value, self.mode)
raise ValueError('The value for "%s" must be an open file or a string.'%self.name)
class Image(File):
def validator(self, instance, value):
import png
if getattr(value, '__valid__', False):
return value
reader = png.Reader(value)
reader.validate_signature()
output = io.BytesIO()
output.name = 'texture.png'
output.__valid__ = True
if hasattr(value, 'read'):
fp = value
fp.seek(0)
else:
fp = open(value, 'rb')
output.write(fp.read())
output.seek(0)
fp.close()
return output
|
from __future__ import absolute_import, print_function, division
from builtins import open
from future import standard_library
standard_library.install_aliases()
import six
import json, numpy as np, os, io
from .base import Property
from . import exceptions
class File(Property):
mode = 'r' #: mode for opening the file.
def validator(self, instance, value):
if hasattr(value, 'read'):
prev = getattr(self, '_p_' + self.name, None)
if prev is not None and value is not prev:
prev.close()
return value
if isinstance(value, six.string_types) and os.path.isfile(value):
return open(value, self.mode)
raise ValueError('The value for "%s" must be an open file or a string.'%self.name)
class Image(File):
def validator(self, instance, value):
import png
if getattr(value, '__valid__', False):
return value
if hasattr(value, 'read'):
png.Reader(value).validate_signature()
else:
with open(value, 'rb') as v:
png.Reader(v).validate_signature()
output = io.BytesIO()
output.name = 'texture.png'
output.__valid__ = True
if hasattr(value, 'read'):
fp = value
fp.seek(0)
else:
fp = open(value, 'rb')
output.write(fp.read())
output.seek(0)
fp.close()
return output
|
Fix png for python 2/3 compatibility
|
Fix png for python 2/3 compatibility
|
Python
|
mit
|
aranzgeo/properties,3ptscience/properties
|
7fcccea5d7fdfb823d17f1db56f5ece42ef2fd8b
|
tools/bundle.py
|
tools/bundle.py
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything.
|
Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.
|
Python
|
apache-2.0
|
kans/zirgo,kans/zirgo,kans/zirgo
|
dfd6793f16d0128b3d143d0f1ebc196bb79505c2
|
chnnlsdmo/chnnlsdmo/models.py
|
chnnlsdmo/chnnlsdmo/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Voter(models.Model):
'''
Models someone who may vote
'''
user = models.OneToOneField(User)
def __str__(self):
return self.user.username
class Flag(models.Model):
'''
Models a flag which may be voted on
'''
name = models.CharField(max_length=200)
designer = models.CharField(max_length=200)
image_url = models.URLField(max_length=1024)
def __str__(self):
return self.name
class Vote(models.Model):
'''
Models a single vote cast by a `Voter` for a `Flag`
'''
flag = models.ForeignKey(Flag, on_delete=models.CASCADE)
voter = models.ForeignKey(Voter, on_delete=models.CASCADE)
|
from django.db import models
from django.contrib.auth.models import User
class Voter(models.Model):
'''
Models someone who may vote
'''
user = models.OneToOneField(User)
def __str__(self):
return self.user.username
class Flag(models.Model):
'''
Models a flag which may be voted on
'''
name = models.CharField(max_length=200)
designer = models.CharField(max_length=200)
image_url = models.URLField(max_length=1024)
def __str__(self):
return self.name
class Vote(models.Model):
'''
Models a single vote cast by a `Voter` for a `Flag`
'''
flag = models.ForeignKey(Flag, on_delete=models.CASCADE)
voter = models.ForeignKey(Voter, on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return "{0} voted for {1} at {2}".format( self.voter.user.username,
self.flag.name,
self.created )
|
Add date/time created timestamp to Vote model
|
Add date/time created timestamp to Vote model
|
Python
|
bsd-3-clause
|
shearichard/django-channels-demo,shearichard/django-channels-demo,shearichard/django-channels-demo
|
fbf8b169cceb4c9a78d114880d5ce0eb59108a38
|
rsr/cmd.py
|
rsr/cmd.py
|
import locale
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr import paths
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument(
'--experimental', action='store_true',
help='Enable experimental features.')
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
locale.setlocale(locale.LC_ALL, '.'.join(locale.getlocale()))
def main():
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load(
os.path.join(paths.data_dir, 'runsqlrun.gresource'))
Gio.Resource._register(resource)
#if args.dark_theme:
# Gtk.Settings.get_default().set_property(
# 'gtk-application-prefer-dark-theme', True)
app = Application(args)
sys.exit(app.run([]))
|
import locale
import os
import signal
import sys
from argparse import ArgumentParser
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '3.0')
from gi.repository import Gio, GLib, Gtk
from rsr import __version__
from rsr import paths
from rsr.app import Application
parser = ArgumentParser(prog='runsqlrun', description='Run SQL statements')
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument(
'--experimental', '-e', action='store_true',
help='Enable experimental features.')
# See issue3. Unfortunately this needs to be done before opening
# any Oracle connection.
os.environ.setdefault('NLS_LANG', '.AL32UTF8')
locale.setlocale(locale.LC_ALL, '.'.join(locale.getlocale()))
def main():
args = parser.parse_args()
signal.signal(signal.SIGINT, signal.SIG_DFL)
GLib.set_application_name('RunSQLRun')
GLib.set_prgname('runsqlrun')
resource = Gio.resource_load(
os.path.join(paths.data_dir, 'runsqlrun.gresource'))
Gio.Resource._register(resource)
#if args.dark_theme:
# Gtk.Settings.get_default().set_property(
# 'gtk-application-prefer-dark-theme', True)
app = Application(args)
sys.exit(app.run([]))
|
Add short shortcut for experimental flag.
|
Add short shortcut for experimental flag.
|
Python
|
mit
|
andialbrecht/runsqlrun
|
34d7a7ea41843ef4761804e973ec9ded1bb2a03b
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case, EligibilityCheck, CaseNotesHistory, Person, Income, Savings,
Deductions, PersonalDetails, ThirdPartyDetails, AdaptationDetails,
CaseKnowledgebaseAssignment, EODDetails, EODDetailsCategory, Property
)
from timer.models import Timer
from ...qs_to_file import QuerysetToFile
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
class Command(BaseCommand):
help = 'Attempts to re-load data that was deleted in the housekeeping'
def add_arguments(self, parser):
parser.add_argument('directory', nargs=1, type=str)
def handle(self, *args, **options):
d = args[0]
path = os.path.join(settings.TEMP_DIR, d)
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case, EligibilityCheck, CaseNotesHistory, Person, Income, Savings,
Deductions, PersonalDetails, ThirdPartyDetails, AdaptationDetails,
CaseKnowledgebaseAssignment, EODDetails, EODDetailsCategory, Property
)
from timer.models import Timer
from ...qs_to_file import QuerysetToFile
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
class Command(BaseCommand):
help = 'Attempts to re-load data that was deleted in the housekeeping'
def add_arguments(self, parser):
parser.add_argument('directory', nargs=1)
def handle(self, *args, **options):
path = os.path.join(settings.TEMP_DIR, args[0])
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
Refactor args in manage task
|
Refactor args in manage task
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
f7777c858baf049af83bd39168d0640e4dedf29c
|
main.py
|
main.py
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
print(os.environ)
while True:
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
body = os.environ["WELCOME_MESSAGE"].format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=body, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
lastPingTime = 0
while True:
raise Exception(str(os.environ))
for message in slackClient.rtm_read():
if message["type"] == "team_join":
username = message["user"]["name"]
body = os.environ["WELCOME_MESSAGE"].format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=body, username="The New Ro-Bot", icon_emoji=":wave:",
link_names = True)
now = time.time()
if now - lastPingTime >= 3:
slackClient.server.ping()
lastPingTime = now
time.sleep(.1)
|
Change debug message to exception
|
Change debug message to exception
|
Python
|
mit
|
ollien/Slack-Welcome-Bot
|
7d26f7c16b7b33ae0c011bb2db588b056fe90e3e
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
import webapp2
# Importing request handlers
from signup import Signup
from login import Login
from logout import Logout
from wikipage import WikiPage
from editpage import EditPage
PAGE_RE = r'(/(?:[a-zA-Z0-9_-]+/?)*)'
app = webapp2.WSGIApplication([
('/signup', Signup),
('/login', Login),
('/logout', Logout),
(PAGE_RE, WikiPage),
('/_edit' + PAGE_RE, EditPage)
], debug=True)
|
# -*- coding: utf-8 -*-
import webapp2
# Importing request handlers
from signup import Signup
from login import Login
from logout import Logout
from wikipage import WikiPage
from editpage import EditPage
from historypage import HistoryPage
PAGE_RE = r'(/(?:[a-zA-Z0-9_-]+/?)*)'
app = webapp2.WSGIApplication([
('/signup', Signup),
('/login', Login),
('/logout', Logout),
('/_edit' + PAGE_RE, EditPage),
('/_history' + PAGE_RE, HistoryPage),
(PAGE_RE, WikiPage),
], debug=True)
|
Fix logic bugs and add historypage handler
|
Fix logic bugs and add historypage handler
|
Python
|
mit
|
lttviet/udacity-final
|
f5cc3275a11c809bb6f5ab097414d0a5ccda2341
|
main.py
|
main.py
|
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
print("%s, %s" % (website, url))
if __name__ == '__main__':
main()
|
def main():
website = input("Input website(cnn, nytimes, bbc, nzherald): ")
url = input("Input url: ")
scraper(website, url)
def scraper(website, url):
if ".com" not in url:
print("Invalid url")
exit()
print("%s, %s" % (website, url))
if __name__ == '__main__':
main()
|
Check for .com in url
|
Check for .com in url
|
Python
|
mit
|
Alex-Gurung/ScrapeTheNews
|
e3e98b0533460837c4ea2eac67c4281eb0ba0012
|
test/requests/parametrized_test.py
|
test/requests/parametrized_test.py
|
import logging
import unittest
from wqflask import app
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
|
import logging
import unittest
from wqflask import app
from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
sleep(1)
|
Use existing code. Delay after delete.
|
Use existing code. Delay after delete.
* Use existing code to get the elasticsearch connection. This should
prevent tests from failing in case the way connections to
elasticsearch are made change.
* Delay a while after deleting to allow elasticsearch to re-index the
data, thus preventing subtle bugs in the test.
|
Python
|
agpl-3.0
|
DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2
|
cdb4fa00328f3bc5852b9cae799d4d3ed99f1280
|
pyramid_authsanity/util.py
|
pyramid_authsanity/util.py
|
from pyramid.interfaces import (
ISessionFactory,
)
from .interfaces import (
IAuthService,
IAuthSourceService,
)
def int_or_none(x):
return int(x) if x is not None else x
def kw_from_settings(settings, from_prefix='authsanity.'):
return dict((k.replace(from_prefix, ''), v) for (k, v) in settings.items() if k.startswith(from_prefix))
def add_vary_callback(vary_by):
def vary_add(request, response):
vary = set(response.vary if response.vary is not None else [])
vary |= set(vary_by)
response.vary = list(vary)
return vary_add
def _find_services(request):
sourcesvc = request.find_service(IAuthSourceService)
authsvc = request.find_service(IAuthService)
return (sourcesvc, authsvc)
def _session_registered(request):
registry = request.registry
factory = registry.queryUtility(ISessionFactory)
return (False if factory is None else True)
|
from pyramid.interfaces import (
ISessionFactory,
)
from .interfaces import (
IAuthService,
IAuthSourceService,
)
def int_or_none(x):
return int(x) if x is not None else x
def kw_from_settings(settings, from_prefix='authsanity.'):
return { k.replace(from_prefix, ''): v for k, v in settings.items() if k.startswith(from_prefix) }
def add_vary_callback(vary_by):
def vary_add(request, response):
vary = set(response.vary if response.vary is not None else [])
vary |= set(vary_by)
response.vary = list(vary)
return vary_add
def _find_services(request):
sourcesvc = request.find_service(IAuthSourceService)
authsvc = request.find_service(IAuthService)
return (sourcesvc, authsvc)
def _session_registered(request):
registry = request.registry
factory = registry.queryUtility(ISessionFactory)
return (False if factory is None else True)
|
Revert "Py 2.6 support is back"
|
Revert "Py 2.6 support is back"
This reverts commit 463c1ab6a7f5a7909b967e0dfa0320a77e166b95.
|
Python
|
isc
|
usingnamespace/pyramid_authsanity
|
977c8cc25c3978931e0d908589232db1bcac5b3f
|
fitizen/body_weight_workout/views.py
|
fitizen/body_weight_workout/views.py
|
# from datetime import datetime
from django.views.generic import RedirectView
from django.core.urlresolvers import reverse_lazy
from .models import BodyWeightWorkout
from braces import views
# Create your views here.
class CreateWorkout(
views.LoginRequiredMixin,
views.MessageMixin,
RedirectView
):
url = reverse_lazy('home')
login_url = reverse_lazy('login')
def get(self, request, *args, **kwargs):
user = request.user
workout = BodyWeightWorkout(user=user)
workout.save()
self.messages.success("New workout created!")
return super(CreateWorkout, self).get(request, *args, **kwargs)
|
from datetime import datetime
from django.utils import timezone
from django.shortcuts import redirect
from django.views.generic import View
from django.core.urlresolvers import reverse_lazy
from .models import BodyWeightWorkout
from braces import views
# Create your views here.
class CreateWorkout(
views.LoginRequiredMixin,
views.MessageMixin,
View
):
url = reverse_lazy('home')
login_url = reverse_lazy('login')
def get(self, request, *args, **kwargs):
now = timezone.now()
recent_workout = list(BodyWeightWorkout.objects.filter(user=request.user.id).datetimes('created', 'day', order='DESC')[:1])
difference = (now - recent_workout[0])
# check to see if they already worked out today
if difference.days == 0:
self.messages.success("You already worked out today!")
return redirect('home')
else:
user = request.user
workout = BodyWeightWorkout(user=user)
workout.save()
self.messages.success("New workout created!")
return redirect('home')
|
Create Workout now checks to see if you worked out once today already, if so tells user they already worked out on that day. fixed error where redirect would not re-instantiate get request to createview
|
Create Workout now checks to see if you worked out once today already, if so tells user they already worked out on that day. fixed error where redirect would not re-instantiate get request to createview
|
Python
|
mit
|
johnshiver/fitizen,johnshiver/fitizen
|
6567120249b82477bcf0ef82554b057f93618e7e
|
tools/gyp/find_mac_gcc_version.py
|
tools/gyp/find_mac_gcc_version.py
|
#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
|
#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
|
Revert "Use clang on mac if XCode >= 4.5"
|
Revert "Use clang on mac if XCode >= 4.5"
We cannot build v8 after this change because clang reports a warning in
v8/src/parser.cc about an unused field (and we turn warnings into errors).
We can enable this change again after we update to a new v8 version (this seems
to be fixed in v3.17).
Review URL: https://codereview.chromium.org//14477015
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@21984 260f80e4-7a28-3924-810f-c04153c831b5
|
Python
|
bsd-3-clause
|
dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk
|
c7c1f63836e052b7a63e35956a74d03f1be30677
|
webapp-django/questionnaire/views.py
|
webapp-django/questionnaire/views.py
|
from django.shortcuts import render
from .models import Question, MultipleChoiceQuestion
def index(request):
pass
def questions(request):
ques = MultipleChoiceQuestion.objects.all() + Question.objects.all()
questions = []
p = 0
for h in que:
p += 1
q = 0
qu = [h['question']]
ch = []
for i in h.choices:
q += 1
ch += [[q, i]]
qu += [[ch]]
questions += [qu]
return render(request, 'questions.html', {'questions': questions})
|
from django.shortcuts import render
from .models import Question, MultipleChoiceQuestion
def index(request):
pass
def questions(request):
ques = MultipleChoiceQuestion.objects.all() # + Question.objects.all()
questions = []
p = 0
for h in ques:
p += 1
q = 0
qu = [h['question']]
ch = []
for i in h.choices:
q += 1
ch += [[q, i]]
qu += [[ch]]
questions += [qu]
return render(request, 'questions.html', {'questions': questions})
|
Remove Questions from queried objects
|
Remove Questions from queried objects
|
Python
|
mit
|
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
|
8218b398731e8d9093a91de9bb127e2e933fa6db
|
json_editor/admin.py
|
json_editor/admin.py
|
import json
import copy
from django import forms
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(forms.Widget):
template_name = 'django_json_editor/django_json_editor.html'
def __init__(self, schema, collapsed=True):
super().__init__()
self._schema = schema
self._collapsed = collapsed
def render(self, name, value, attrs=None):
if callable(self._schema):
schema = self._schema(self)
else:
schema = copy.copy(self._schema)
schema['title'] = ' '
schema['options'] = {'collapsed': int(self._collapsed)}
context = {
'name': name,
'schema': schema,
'data': json.loads(value),
}
return mark_safe(render_to_string(self.template_name, context))
class Media:
css = {'all': (
'django_json_editor/bootstrap/css/bootstrap.min.css',
'django_json_editor/fontawesome/css/font-awesome.min.css',
'django_json_editor/style.css',
)}
js = (
'django_json_editor/jquery/jquery.min.js',
'django_json_editor/bootstrap/js/bootstrap.min.js',
'django_json_editor/jsoneditor/jsoneditor.min.js',
)
|
import json
import copy
from django import forms
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(forms.Widget):
template_name = 'django_json_editor/django_json_editor.html'
def __init__(self, schema, collapsed=True):
super().__init__()
self._schema = schema
self._collapsed = collapsed
def render(self, name, value, attrs=None):
if callable(self._schema):
schema = self._schema(self)
else:
schema = copy.copy(self._schema)
schema['title'] = ' '
schema['options'] = {'collapsed': int(self._collapsed)}
context = {
'name': name,
'schema': schema,
'data': value,
}
return mark_safe(render_to_string(self.template_name, context))
class Media:
css = {'all': (
'django_json_editor/bootstrap/css/bootstrap.min.css',
'django_json_editor/fontawesome/css/font-awesome.min.css',
'django_json_editor/style.css',
)}
js = (
'django_json_editor/jquery/jquery.min.js',
'django_json_editor/bootstrap/js/bootstrap.min.js',
'django_json_editor/jsoneditor/jsoneditor.min.js',
)
|
Load value from json field as string.
|
Load value from json field as string.
|
Python
|
mit
|
abogushov/django-admin-json-editor,abogushov/django-admin-json-editor
|
414f6e9174b8c7b88866319af19a5e36fcec643d
|
kk/admin/__init__.py
|
kk/admin/__init__.py
|
from django.contrib import admin
from kk.models import Hearing, Label, Introduction, Scenario, Comment
admin.site.register(Label)
admin.site.register(Hearing)
admin.site.register(Introduction)
admin.site.register(Scenario)
admin.site.register(Comment)
|
from django.contrib import admin
from kk import models
### Inlines
class IntroductionInline(admin.StackedInline):
model = models.Introduction
extra = 0
exclude = ["id"]
class ScenarioInline(admin.StackedInline):
model = models.Scenario
extra = 0
exclude = ["id"]
class HearingImageInline(admin.StackedInline):
model = models.HearingImage
extra = 0
class IntroductionImageInline(admin.StackedInline):
model = models.IntroductionImage
extra = 0
class ScenarioImageInline(admin.StackedInline):
model = models.ScenarioImage
extra = 0
### Admins
class HearingAdmin(admin.ModelAdmin):
inlines = [HearingImageInline, IntroductionInline, ScenarioInline]
class IntroductionAdmin(admin.ModelAdmin):
inlines = [IntroductionImageInline]
class ScenarioAdmin(admin.ModelAdmin):
inlines = [ScenarioImageInline]
### Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
admin.site.register(models.Introduction, IntroductionAdmin)
admin.site.register(models.Scenario, ScenarioAdmin)
|
Make the admin a little bit more palatable
|
Make the admin a little bit more palatable
Refs #25
|
Python
|
mit
|
stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi
|
b0029cffae96e25611d7387e699774de4d9682d3
|
corehq/apps/es/tests/utils.py
|
corehq/apps/es/tests/utils.py
|
import json
from nose.plugins.attrib import attr
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
import json
from nose.plugins.attrib import attr
from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
@nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
Mark es_test decorator as nottest
|
Mark es_test decorator as nottest
Second try...
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
913ae38e48591000195166a93e18e96a82d1d222
|
lily/messaging/email/migrations/0013_fix_multple_default_templates.py
|
lily/messaging/email/migrations/0013_fix_multple_default_templates.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def fix_multiple_default_templates(apps, schema_editor):
# Some users have more than 1 default template.
# This shouldn't be possible, make sure is will be just 1.
User = apps.get_model('users', 'LilyUser')
DefaultEmailTemplate = apps.get_model('email', 'DefaultEmailTemplate')
print('\nFixing default template for the following users:')
for user in User.objects.all():
templates = DefaultEmailTemplate.objects.filter(user=user.pk).order_by('id')
if templates.count() > 1:
# User has more than one default template.
# Best guess would be that the user prefers the last set template to be the default.
# So remove all except the last one.
template_to_keep = templates.last()
templates.exclude(id=template_to_keep.id).delete()
print('%d:\t%s' % (user.pk, user.email))
class Migration(migrations.Migration):
dependencies = [
('email', '0012_auto_20160715_1423'),
]
operations = [
migrations.RunPython(fix_multiple_default_templates),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def fix_multiple_default_templates(apps, schema_editor):
# Some users have more than 1 default template.
# This shouldn't be possible, make sure is will be just 1.
User = apps.get_model('users', 'LilyUser')
DefaultEmailTemplate = apps.get_model('email', 'DefaultEmailTemplate')
for user in User.objects.all():
templates = DefaultEmailTemplate.objects.filter(user=user.pk).order_by('id')
if templates.count() > 1:
# User has more than one default template.
# Best guess would be that the user prefers the last set template to be the default.
# So remove all except the last one.
template_to_keep = templates.last()
templates.exclude(id=template_to_keep.id).delete()
class Migration(migrations.Migration):
dependencies = [
('email', '0012_auto_20160715_1423'),
]
operations = [
migrations.RunPython(fix_multiple_default_templates),
]
|
Remove print statements, not usefull anymore.
|
Remove print statements, not usefull anymore.
|
Python
|
agpl-3.0
|
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
|
517e22b331f63e80cb344e257789463627b44508
|
utilities/rename-random-number.py
|
utilities/rename-random-number.py
|
''' rename files in local directory with random integer names.
windows screen saver isn't very good at randomizing fotos shown.
Change file names regularly to provide more variety
'''
import os
import re
import random
random.seed()
new_names = set()
original_files = []
for entry in os.listdir():
if os.path.isfile(entry):
if re.match(".*jpg", entry):
original_files.append(entry)
for counter in range(0, len(original_files)):
new_value = random.randint(0,100000)
while new_value in new_names:
new_value = random.randint(0,100000)
new_names.add(new_value)
for of in original_files:
nf = str(new_names.pop()).zfill(6) + ".jpg"
try:
os.rename(of, nf)
except Exception as e:
print("{}: {}".format(of, e))
|
#! python3
''' rename files in local directory with random integer names.
windows screen saver isn't very good at randomizing fotos shown.
Change file names regularly to provide more variety
'''
import os
import re
import random
import time
random.seed()
new_names = set()
original_files = []
for entry in os.listdir():
if os.path.isfile(entry):
if re.match(".*jpg", entry):
original_files.append(entry)
for counter in range(0, len(original_files)):
new_value = random.randint(0,1000000000)
# Make sure the new names are unique
# -- note this is only the new set, the new name
# may still duplicate an old name. The set is
# to minimize this chance
while new_value in new_names:
new_value = random.randint(0,1000000000)
new_names.add(new_value)
for of in original_files:
nf = str(new_names.pop()).zfill(10) + ".jpg"
try:
os.rename(of, nf)
except Exception as e:
print("{}: {}".format(of, e))
time.sleep(5)
|
Increase namespace, sleep before cmd window closes
|
Increase namespace, sleep before cmd window closes
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
69ae2f5b825ae6a404d78120b60727b59dbbcbac
|
xos/model_policies/model_policy_ControllerSlice.py
|
xos/model_policies/model_policy_ControllerSlice.py
|
def handle(controller_slice):
from core.models import ControllerSlice, Slice
try:
my_status_code = int(controller_slice.backend_status[0])
try:
his_status_code = int(controller_slice.slice.backend_status[0])
except:
his_status_code = 0
if (my_status_code not in [0,his_status_code]):
controller_slice.slice.backend_status = controller_slice.backend_status
controller_slice.slice.save(update_fields = ['backend_status'])
except Exception,e:
print str(e)
pass
|
def handle(controller_slice):
from core.models import ControllerSlice, Slice
try:
my_status_code = int(controller_slice.backend_status[0])
try:
his_status_code = int(controller_slice.slice.backend_status[0])
except:
his_status_code = 0
fields = []
if (my_status_code not in [0,his_status_code]):
controller_slice.slice.backend_status = controller_slice.backend_status
fields+=['backend_status']
if (controller_slice.backend_register != controller_slice.slice.backend_register):
controller_slice.slice.backend_register = controller_slice.backend_register
fields+=['backend_register']
controller_slice.slice.save(update_fields = fields)
except Exception,e:
print str(e)
pass
|
Copy backend_register from ControllerSlice to Slice
|
Copy backend_register from ControllerSlice to Slice
|
Python
|
apache-2.0
|
jermowery/xos,xmaruto/mcord,xmaruto/mcord,jermowery/xos,cboling/xos,jermowery/xos,cboling/xos,cboling/xos,jermowery/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,cboling/xos
|
4de0da9c28351047b1de6f728da5e68d9e73b3fd
|
satori.ars/setup.py
|
satori.ars/setup.py
|
# vim:ts=4:sts=4:sw=4:expandtab
from setuptools import setup, find_packages
setup(name='satori.ars',
packages=find_packages(),
namespace_packages=[
'satori',
],
install_requires=[
'setuptools',
'satori.objects',
]
)
|
# vim:ts=4:sts=4:sw=4:expandtab
from setuptools import setup, find_packages
setup(name='satori.ars',
packages=find_packages(),
namespace_packages=[
'satori',
],
install_requires=[
'setuptools',
'pyparsing',
'satori.objects',
]
)
|
Add pyparsing to satori.ars dependencies.
|
Add pyparsing to satori.ars dependencies.
|
Python
|
mit
|
zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori
|
5d6d2a02963cadd9b0a5c148fb6906fa63148052
|
booster_bdd/features/environment.py
|
booster_bdd/features/environment.py
|
"""Module with code to be run before and after certain events during the testing."""
import os
from src.support import helpers
def before_all(_context):
"""Perform the setup before the first event."""
if not helpers.is_user_logged_in():
username = os.getenv("OSIO_USERNAME")
password = os.getenv("OSIO_PASSWORD")
assert username != ""
assert password != ""
print("Loggin user {} in...".format(username))
helpers.login_user(username, password)
|
"""Module with code to be run before and after certain events during the testing."""
import os
from src.support import helpers
def before_all(_context):
"""Perform the setup before the first event."""
if not helpers.is_user_logged_in():
username = os.getenv("OSIO_USERNAME")
password = os.getenv("OSIO_PASSWORD")
assert username is not None
assert password is not None
assert username != ""
assert password != ""
print("Loggin user {} in...".format(username))
helpers.login_user(username, password)
|
Check for env. variable existence
|
Check for env. variable existence
|
Python
|
apache-2.0
|
ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test
|
08113ee79785f394a1c5244cdb87bef9f7fc5ff3
|
catplot/__init__.py
|
catplot/__init__.py
|
__all__ = ['en_profile']
__version__ = '0.1.0'
|
__all__ = ['en_profile', 'functions', 'chem_parser']
__version__ = '0.1.0'
|
Add more modules to __all__
|
Add more modules to __all__
|
Python
|
mit
|
PytLab/catplot
|
338a6e8da75a5b950949638b1a810510419450e9
|
scripts/state_and_transition.py
|
scripts/state_and_transition.py
|
#!/usr/bin/env python
#
# Copyright 2017 Robot Garden, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# States and Transitions
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
|
#!/usr/bin/env python
#
# Copyright 2017 Robot Garden, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# States and Transitions
from auto_number import AutoNumber
class STATE(AutoNumber):
Start = ()
Following_waypoint = ()
Avoiding_obstacle = ()
Driving_toward_cone = ()
Driving_away_from_cone = ()
Success = ()
Failure = ()
End = ()
class TRANSITION(AutoNumber):
obstacle_seen = ()
near_cone = ()
obstacle_cleared = ()
touched_cone = ()
passed_cone = ()
segment_timeout = ()
touched_last_cone = ()
passed_last_cone = ()
course_timeout = ()
cleared_cone = ()
|
Add new state for driving away from cone
|
Add new state for driving away from cone
|
Python
|
apache-2.0
|
ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan,ProgrammingRobotsStudyGroup/robo_magellan
|
7f5392d2581e789917b8ba5352d821277d5de8ab
|
numpy/typing/_scalars.py
|
numpy/typing/_scalars.py
|
from typing import Union, Tuple, Any
import numpy as np
# NOTE: `_StrLike` and `_BytesLike` are pointless, as `np.str_` and `np.bytes_`
# are already subclasses of their builtin counterpart
_CharLike = Union[str, bytes]
_BoolLike = Union[bool, np.bool_]
_IntLike = Union[int, np.integer]
_FloatLike = Union[_IntLike, float, np.floating]
_ComplexLike = Union[_FloatLike, complex, np.complexfloating]
_NumberLike = Union[int, float, complex, np.number, np.bool_]
_ScalarLike = Union[
int,
float,
complex,
str,
bytes,
np.generic,
]
# `_VoidLike` is technically not a scalar, but it's close enough
_VoidLike = Union[Tuple[Any, ...], np.void]
|
from typing import Union, Tuple, Any
import numpy as np
# NOTE: `_StrLike` and `_BytesLike` are pointless, as `np.str_` and `np.bytes_`
# are already subclasses of their builtin counterpart
_CharLike = Union[str, bytes]
# The 6 `<X>Like` type-aliases below represent all scalars that can be
# coerced into `<X>` (with the casting rule `same_kind`)
_BoolLike = Union[bool, np.bool_]
_IntLike = Union[_BoolLike, int, np.integer]
_FloatLike = Union[_IntLike, float, np.floating]
_ComplexLike = Union[_FloatLike, complex, np.complexfloating]
_NumberLike = Union[int, float, complex, np.number, np.bool_]
_ScalarLike = Union[
int,
float,
complex,
str,
bytes,
np.generic,
]
# `_VoidLike` is technically not a scalar, but it's close enough
_VoidLike = Union[Tuple[Any, ...], np.void]
|
Add `_BoolLike` to the union defining `_IntLike`
|
ENH: Add `_BoolLike` to the union defining `_IntLike`
|
Python
|
bsd-3-clause
|
seberg/numpy,pdebuyl/numpy,mhvk/numpy,pbrod/numpy,madphysicist/numpy,endolith/numpy,mattip/numpy,simongibbons/numpy,numpy/numpy,endolith/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,madphysicist/numpy,mhvk/numpy,jakirkham/numpy,pdebuyl/numpy,simongibbons/numpy,charris/numpy,simongibbons/numpy,anntzer/numpy,charris/numpy,numpy/numpy,mhvk/numpy,anntzer/numpy,jakirkham/numpy,anntzer/numpy,mattip/numpy,charris/numpy,madphysicist/numpy,pbrod/numpy,pbrod/numpy,jakirkham/numpy,endolith/numpy,seberg/numpy,pbrod/numpy,mhvk/numpy,jakirkham/numpy,rgommers/numpy,charris/numpy,mattip/numpy,rgommers/numpy,simongibbons/numpy,seberg/numpy,numpy/numpy,seberg/numpy,pbrod/numpy,pdebuyl/numpy,mhvk/numpy,numpy/numpy,pdebuyl/numpy,rgommers/numpy,endolith/numpy,madphysicist/numpy,madphysicist/numpy,simongibbons/numpy,anntzer/numpy
|
47faa5797e5d017848d695bc2ed960d6b8228bd8
|
openxc/sources/serial.py
|
openxc/sources/serial.py
|
"""A virtual serial port data source."""
from __future__ import absolute_import
import logging
from .base import BytestreamDataSource, DataSourceError
LOG = logging.getLogger(__name__)
try:
import serial
except ImportError:
LOG.debug("serial library not installed, can't use serial interface")
class SerialDataSource(BytestreamDataSource):
"""A data source reading from a serial port, which could be implemented
with a USB to Serial or Bluetooth adapter.
"""
DEFAULT_PORT = "/dev/ttyUSB0"
DEFAULT_BAUDRATE = 115200
def __init__(self, callback=None, port=None, baudrate=None):
"""Initialize a connection to the serial device.
Kwargs:
port - optionally override the default virtual COM port
baudrate - optionally override the default baudrate
Raises:
DataSourceError if the serial device cannot be opened.
"""
super(SerialDataSource, self).__init__(callback)
port = port or self.DEFAULT_PORT
baudrate = baudrate or self.DEFAULT_BAUDRATE
try:
self.device = serial.Serial(port, baudrate, rtscts=True)
except serial.SerialException as e:
raise DataSourceError("Unable to open serial device at port "
"%s: %s" % (port, e))
else:
LOG.debug("Opened serial device at %s", port)
def _read(self):
return self.device.readline()
|
"""A virtual serial port data source."""
from __future__ import absolute_import
import logging
from .base import BytestreamDataSource, DataSourceError
LOG = logging.getLogger(__name__)
try:
import serial
except ImportError:
LOG.debug("serial library not installed, can't use serial interface")
class SerialDataSource(BytestreamDataSource):
"""A data source reading from a serial port, which could be implemented
with a USB to Serial or Bluetooth adapter.
"""
DEFAULT_PORT = "/dev/ttyUSB0"
DEFAULT_BAUDRATE = 230400
def __init__(self, callback=None, port=None, baudrate=None):
"""Initialize a connection to the serial device.
Kwargs:
port - optionally override the default virtual COM port
baudrate - optionally override the default baudrate
Raises:
DataSourceError if the serial device cannot be opened.
"""
super(SerialDataSource, self).__init__(callback)
port = port or self.DEFAULT_PORT
baudrate = baudrate or self.DEFAULT_BAUDRATE
try:
self.device = serial.Serial(port, baudrate, rtscts=True)
except serial.SerialException as e:
raise DataSourceError("Unable to open serial device at port "
"%s: %s" % (port, e))
else:
LOG.debug("Opened serial device at %s", port)
def _read(self):
return self.device.readline()
|
Change default baud rate to 230400 to match cantranslator.
|
Change default baud rate to 230400 to match cantranslator.
|
Python
|
bsd-3-clause
|
openxc/openxc-python,openxc/openxc-python,openxc/openxc-python
|
239e759eed720f884e492e47b82e64f25fdc215f
|
core/views.py
|
core/views.py
|
# views.py
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
# Search
search_query = request.GET.get("q", None)
if search_query:
search_results = Page.objects.live().search(search_query)
# Log the query so Wagtail can suggest promoted results
Query.get(search_query).add_hit()
else:
search_results = Page.objects.none()
# Render template
return render(
request,
"core/search_results.html",
{
"search_query": search_query,
"search_results": search_results,
},
)
|
# views.py
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
# Search
search_query = request.GET.get("q", None)
page = request.GET.get("page", 1)
if search_query:
search_results = Page.objects.live().search(search_query)
# Log the query so Wagtail can suggest promoted results
Query.get(search_query).add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
# Render template
return render(
request,
"core/search_results.html",
{
"search_query": search_query,
"search_results": search_results,
},
)
|
Add pagination to the reinstated search view
|
Add pagination to the reinstated search view
|
Python
|
mit
|
springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail
|
24ba796dde4ce414d7fe72ccf553f687e13039f4
|
shopify/product/tasks.py
|
shopify/product/tasks.py
|
from decimal import Decimal
from django.conf import settings
from django.core.mail import EmailMessage
from celery.utils.log import get_task_logger
from .csv_attach import CSVAttachmentWriter
from .models import Transaction
from celeryapp import app
logger = get_task_logger(__name__)
@app.task(max_retries=3)
def email_journal_vouchers_import():
"""
"""
try:
credits = Transaction.objects.export_transactions()
debit = Decimal(0)
attachment = CSVAttachmentWriter()
for credit in credits:
attachment.writerow([credit['product__account_number'], '',
credit['price__sum']])
debit += credit['price__sum']
debit_account = getattr(settings, 'SHOPIFY_DEBIT_ACCOUNT_NUMBER', None)
attachment.writerow([debit_account, debit, ''])
message = EmailMessage('Journal Vouchers Import', '',
to=[m[1] for m in settings.MANAGERS])
message.attach(attachment.getname(), attachment.getvalue(), 'text/csv')
message.send()
except Exception as exc:
logger.debug("MIP export failed: %s" % exc)
logger.warn('MIP export failed, retrying')
raise email_mip_import_file.retry(exc=exc)
|
from decimal import Decimal
from django.conf import settings
from django.core.mail import EmailMessage
from celery.utils.log import get_task_logger
from .csv_attach import CSVAttachmentWriter
from .models import Transaction
from celeryapp import app
logger = get_task_logger(__name__)
@app.task(max_retries=3)
def email_journal_vouchers_import():
"""
"""
try:
transactions = Transaction.objects.export_transactions()
debit_sum = Decimal(0)
attachment = CSVAttachmentWriter()
for transaction in transactions:
attachment.writerow([transaction['product__account_number'], '',
transaction['price__sum']])
debit_sum += transaction['price__sum']
debit_account = getattr(settings, 'SHOPIFY_DEBIT_ACCOUNT_NUMBER', None)
attachment.writerow([debit_account, debit_sum, ''])
message = EmailMessage('Journal Vouchers Import', '',
to=[m[1] for m in settings.MANAGERS])
message.attach(attachment.getname(), attachment.getvalue(), 'text/csv')
message.send()
except Exception as exc:
logger.debug("MIP export failed: %s" % exc)
logger.warn('MIP export failed, retrying')
raise email_mip_import_file.retry(exc=exc)
|
Refactor email task variable names
|
Refactor email task variable names
|
Python
|
bsd-3-clause
|
CorbanU/corban-shopify,CorbanU/corban-shopify
|
f3d3c0ce81ba8717f5839b502e57d75ebbc1f6e7
|
meetuppizza/views.py
|
meetuppizza/views.py
|
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render, redirect
from meetuppizza.forms import RegistrationForm
from meetup.models import Meetup
from meetup.services.meetup_service import MeetupService
def index(request):
meetups = Meetup.objects.all()
meetup_presenters = []
for meetup in meetups:
service = MeetupService(meetup)
meetup_presenters.append(service.get_decorated_meetup())
return render(request, 'index.html', {"meetups": meetup_presenters})
|
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render, redirect
from meetuppizza.forms import RegistrationForm
from meetup.models import Meetup
from meetup.services.meetup_service import MeetupService
def index(request):
meetups = Meetup.objects.all()
meetup_presenters = [MeetupService(meetup).get_decorated_meetup() for meetup in meetups]
return render(request, 'index.html', {"meetups": meetup_presenters})
|
Use list comprehension to generate MeetupPresentor list in index view
|
Use list comprehension to generate MeetupPresentor list in index view
|
Python
|
mit
|
nicole-a-tesla/meetup.pizza,nicole-a-tesla/meetup.pizza
|
c6ef5bcac4d5daddac97ff30ff18645249928ac0
|
nap/engine.py
|
nap/engine.py
|
import json
try:
import msgpack
except ImportError:
pass
from decimal import Decimal
from datetime import date, datetime, time
class Engine(object):
# The list of content types we match
CONTENT_TYPES = []
def dumps(self, data): # pragma: no cover
'''How to serialiser an object'''
raise NotImplementedError
def loads(self, data): # pragma: no cover
'''How to deserialise a string'''
raise NotImplementedError
class JsonEngine(Engine):
CONTENT_TYPES = ['application/json',]
def dumps(self, data):
return json.dumps(data)
def loads(self, data):
return json.loads(data)
class MsgPackEngine(Engine):
CONTENT_TYPES = ['application/x-msgpack',]
def dumps(self, data):
return msgpack.dumps(data)
def loads(self, data):
return msgpack.loads(data)
|
import json
class Engine(object):
# The list of content types we match
CONTENT_TYPES = []
def dumps(self, data): # pragma: no cover
'''How to serialiser an object'''
raise NotImplementedError
def loads(self, data): # pragma: no cover
'''How to deserialise a string'''
raise NotImplementedError
class JsonEngine(Engine):
CONTENT_TYPES = ['application/json',]
def dumps(self, data):
return json.dumps(data)
def loads(self, data):
return json.loads(data)
try:
import msgpack
except ImportError:
pass
else:
class MsgPackEngine(Engine):
CONTENT_TYPES = ['application/x-msgpack',]
def dumps(self, data):
return msgpack.dumps(data)
def loads(self, data):
return msgpack.loads(data)
|
Remove unused imports Only define MsgPackEngine if we can import MsgPack
|
Remove unused imports
Only define MsgPackEngine if we can import MsgPack
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
ed69ace7f6065ec1b3dd2f2de3a0d5b56ac28366
|
climatemaps/data.py
|
climatemaps/data.py
|
import numpy
def import_climate_data():
ncols = 720
nrows = 360
digits = 5
with open('./data/cloud/ccld6190.dat') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if i >= nrows: # read one month
break
value = ''
values = []
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
Z[i][j] = float(value)
values.append(value)
value = ''
j += 1
counter += 1
i += 1
return latrange, lonrange, Z
|
import numpy
def import_climate_data():
ncols = 720
nrows = 360
digits = 5
monthnr = 3
with open('./data/cloud/ccld6190.dat', 'r') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
rown = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if rown < (monthnr-1)*nrows or rown >= monthnr*nrows: # read one month
rown += 1
continue
value = ''
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
value = float(value)
if value < 0:
value = numpy.nan
Z[i][j] = value
value = ''
j += 1
counter += 1
i += 1
rown += 1
return latrange, lonrange, Z
|
Create argument to select month to import
|
Create argument to select month to import
|
Python
|
mit
|
bartromgens/climatemaps,bartromgens/climatemaps,bartromgens/climatemaps
|
127e53b4aa125163765b8fa762669e717badd07b
|
seqfile/__init__.py
|
seqfile/__init__.py
|
from __future__ import absolute_import
from .seqfile import findNextFile
import pgk_ressources as _pkg
__version__ = _pkg.get_distribution("seqfile").version
__all__ = [ 'findNextFile' ]
|
from __future__ import absolute_import
from .seqfile import findNextFile
import pkg_resources as _pkg
__version__ = _pkg.get_distribution("seqfile").version
__all__ = [ 'findNextFile' ]
|
Fix typos in name of imports.
|
Fix typos in name of imports.
|
Python
|
mit
|
musically-ut/seqfile
|
015d18ddcf26a875e20bffbb2d52646799da9cf4
|
climatemaps/data.py
|
climatemaps/data.py
|
import numpy
def import_climate_data(filepath, monthnr):
ncols = 720
nrows = 360
digits = 5
with open(filepath, 'r') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
rown = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if rown < (monthnr-1)*nrows or rown >= monthnr*nrows: # read one month
rown += 1
continue
value = ''
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
value = float(value)
if value < 0:
value = numpy.nan
Z[i][j] = value
value = ''
j += 1
counter += 1
i += 1
rown += 1
return latrange, lonrange, Z
|
import numpy
def import_climate_data(filepath, monthnr):
ncols = 720
nrows = 360
digits = 5
with open(filepath, 'r') as filein:
lines = filein.readlines()
line_n = 0
grid_size = 0.50
xmin = 0.25
xmax = 360.25
ymin = -89.75
ymax = 90.25
lonrange = numpy.arange(xmin, xmax, grid_size)
latrange = numpy.arange(ymin, ymax, grid_size)
Z = numpy.zeros((int(latrange.shape[0]), int(lonrange.shape[0])))
print(len(lonrange))
print(len(latrange))
i = 0
rown = 0
for line in lines:
line_n += 1
if line_n < 3: # skip header
continue
if rown < (monthnr-1)*nrows or rown >= monthnr*nrows: # read one month
rown += 1
continue
value = ''
counter = 1
j = 0
for char in line:
value += char
if counter % digits == 0:
value = float(value)
if value == -9999:
value = numpy.nan
Z[i][j] = value
value = ''
j += 1
counter += 1
i += 1
rown += 1
return latrange, lonrange, Z
|
Allow negative values, only mask -9999
|
Allow negative values, only mask -9999
|
Python
|
mit
|
bartromgens/climatemaps,bartromgens/climatemaps,bartromgens/climatemaps
|
c15174d9bd7728dd5d397e6de09291853e65ed4d
|
scripts/test_deployment.py
|
scripts/test_deployment.py
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"key": "iw", "lines": ["test", "deployment"]}
response = requests.post(f"{url}/api/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/api/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/api/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/api/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
Update tests for new API routes
|
Update tests for new API routes
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
057d7a95031ba8c51ae10ea1b742534fcb5e82a3
|
bidb/keys/tasks.py
|
bidb/keys/tasks.py
|
import celery
import subprocess
from bidb.utils.tempfile import TemporaryDirectory
from bidb.utils.subprocess import check_output2
from .models import Key
@celery.task(soft_time_limit=60)
def update_or_create_key(uid):
with TemporaryDirectory() as homedir:
try:
check_output2((
'gpg',
'--homedir', homedir,
'--keyserver', 'http://p80.pool.sks-keyservers.net/',
'--recv-keys', uid,
))
except subprocess.CalledProcessError as exc:
print "E: {}: {}".format(exc, exc.output)
return None, False
data = check_output2((
'gpg',
'--homedir', homedir,
'--with-colons',
'--fixed-list-mode',
'--fingerprint',
uid,
))
for line in data.splitlines():
if line.startswith('uid:'):
name = line.split(':')[9]
break
else:
raise ValueError("Could not parse name from key: {}".format(data))
return Key.objects.update_or_create(uid=uid, defaults={
'name': name,
})
@celery.task()
def refresh_all():
for x in Key.objects.all():
update_or_create_key.delay(x.uid)
|
import celery
import subprocess
from bidb.utils.tempfile import TemporaryDirectory
from bidb.utils.subprocess import check_output2
from .models import Key
@celery.task(soft_time_limit=60)
def update_or_create_key(uid):
with TemporaryDirectory() as homedir:
try:
check_output2((
'gpg',
'--homedir', homedir,
'--keyserver', 'pgpkeys.mit.edu',
'--recv-keys', uid,
))
except subprocess.CalledProcessError as exc:
print "E: {}: {}".format(exc, exc.output)
return None, False
data = check_output2((
'gpg',
'--homedir', homedir,
'--with-colons',
'--fixed-list-mode',
'--fingerprint',
uid,
))
for line in data.splitlines():
if line.startswith('uid:'):
name = line.split(':')[9]
break
else:
raise ValueError("Could not parse name from key: {}".format(data))
return Key.objects.update_or_create(uid=uid, defaults={
'name': name,
})
@celery.task()
def refresh_all():
for x in Key.objects.all():
update_or_create_key.delay(x.uid)
|
Use pgpkeys.mit.edu as our keyserver; seems to work.
|
Use pgpkeys.mit.edu as our keyserver; seems to work.
|
Python
|
agpl-3.0
|
lamby/buildinfo.debian.net,lamby/buildinfo.debian.net
|
63f40971f8bc4858b32b41595d14315d2261169f
|
proselint/checks/garner/mondegreens.py
|
proselint/checks/garner/mondegreens.py
|
# -*- coding: utf-8 -*-
"""Mondegreens.
---
layout: post
source: Garner's Modern American Usage
source_url: http://amzn.to/15wF76r
title: mondegreens
date: 2014-06-10 12:31:19
categories: writing
---
Points out preferred form.
"""
from tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "garner.mondegreens"
msg = "'{}' is the preferred form."
list = [
["a girl with colitis goes by", "a girl with kaleidascope eyes"],
["a partridge in a pear tree", "a part-red gingerbread tree"],
["attorney and not a republic", "attorney and notary public"],
["beck and call", "beckon call"],
["for all intents and purposes", "for all intensive purposes"],
["laid him on the green", "Lady Mondegreen"],
["Olive, the other reindeer", "all of the other reindeer"],
["to the manner born", "to the manor born"],
]
return preferred_forms_check(text, list, err, msg)
|
# -*- coding: utf-8 -*-
"""Mondegreens.
---
layout: post
source: Garner's Modern American Usage
source_url: http://amzn.to/15wF76r
title: mondegreens
date: 2014-06-10 12:31:19
categories: writing
---
Points out preferred form.
"""
from tools import memoize, preferred_forms_check
@memoize
def check(text):
"""Suggest the preferred forms."""
err = "garner.mondegreens"
msg = "'{}' is the preferred form."
list = [
["a girl with kaleidascope eyes", "a girl with colitis goes by"],
["a partridge in a pear tree", "a part-red gingerbread tree"],
["attorney and not a republic", "attorney and notary public"],
["beck and call", "beckon call"],
["for all intents and purposes", "for all intensive purposes"],
["laid him on the green", "Lady Mondegreen"],
["Olive, the other reindeer", "all of the other reindeer"],
["to the manner born", "to the manor born"],
]
return preferred_forms_check(text, list, err, msg)
|
Fix bug in mondegreen rule
|
Fix bug in mondegreen rule
(The correct versions should all be in the left column.)
|
Python
|
bsd-3-clause
|
jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
abaa882aaa1b7e251d989d60391bd2e06801c2a2
|
py/desiUtil/install/most_recent_tag.py
|
py/desiUtil/install/most_recent_tag.py
|
# License information goes here
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0])[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
# License information goes here
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
# The line above will help with 2to3 support.
def most_recent_tag(tags,username=None):
"""Scan an SVN tags directory and return the most recent tag.
Parameters
----------
tags : str
A URL pointing to an SVN tags directory.
username : str, optional
If set, pass the value to SVN's ``--username`` option.
Returns
-------
most_recent_tag : str
The most recent tag found in ``tags``.
"""
from distutils.version import StrictVersion as V
from subprocess import Popen, PIPE
command = ['svn']
if username is not None:
command += ['--username', username]
command += ['ls',tags]
proc = Popen(command,stdout=PIPE,stderr=PIPE)
out, err = proc.communicate()
try:
mrt = sorted([v.rstrip('/') for v in out.split('\n') if len(v) > 0],
key=lambda x: V(x))[-1]
except IndexError:
mrt = '0.0.0'
return mrt
|
Add more careful version checks
|
Add more careful version checks
|
Python
|
bsd-3-clause
|
desihub/desiutil,desihub/desiutil
|
f60fe11653d71f278aa04e71a522a89fc86c284a
|
bse/api.py
|
bse/api.py
|
'''
Main interface to BSE functionality
'''
from . import io
def get_basis_set(name):
'''Reads a json basis set file given only the name
The path to the basis set file is taken to be the 'data' directory
in this project
'''
return io.read_table_basis_by_name(name)
def get_metadata(keys=None, key_filter=None):
if key_filter:
raise RuntimeError("key_filter not implemented")
avail_names = io.get_available_names()
metadata = {}
for n in avail_names:
bs = io.read_table_basis_by_name(n)
common_name = bs['basisSetName']
defined_elements = list(bs['basisSetElements'].keys())
function_types = set()
for e in bs['basisSetElements'].values():
for s in e['elementElectronShells']:
function_types.add(s['shellFunctionType'])
metadata[common_name] = {
'mangled_name': n,
'elements': defined_elements,
'functiontypes': list(function_types),
}
return metadata
|
'''
Main interface to BSE functionality
'''
from . import io
def get_basis_set(name):
'''Reads a json basis set file given only the name
The path to the basis set file is taken to be the 'data' directory
in this project
'''
return io.read_table_basis_by_name(name)
def get_metadata(keys=None, key_filter=None):
if key_filter:
raise RuntimeError("key_filter not implemented")
avail_names = io.get_available_names()
metadata = {}
for n in avail_names:
bs = io.read_table_basis_by_name(n)
displayname = bs['basisSetName']
defined_elements = list(bs['basisSetElements'].keys())
function_types = set()
for e in bs['basisSetElements'].values():
for s in e['elementElectronShells']:
function_types.add(s['shellFunctionType'])
metadata[n] = {
'displayname': displayname,
'elements': defined_elements,
'functiontypes': list(function_types),
}
return metadata
|
Switch which name is used as a metadata key
|
Switch which name is used as a metadata key
|
Python
|
bsd-3-clause
|
MOLSSI-BSE/basis_set_exchange
|
8d46e411b2e7091fc54c676665905da8ec6906f3
|
controllers/dotd.py
|
controllers/dotd.py
|
def form():
db.raw_log.uuid.default = uuid_generator()
db.raw_log.date.default = dbdate()
#don't display form items that are part of table, but not facing end user
db.raw_log.uuid.readable = db.raw_log.uuid.writable = False
db.raw_log.date.readable = db.raw_log.date.writable = False
form = SQLFORM(db.raw_log, showid=False, formstyle='divs').process()
if form.accepted:
redirect(URL('dotd', 'parsed', args=db.raw_log.uuid.default))
return dict(form=form)
def parsed():
if request.args:
uuid = request.args[0]
rows = db(db.raw_log.uuid==uuid).select()
if len(rows) == 0:
redirect(URL('form'))
for row in rows:
experience, obtained_items, proc_items, found_items, log_file, max_hit, hit_list=parser(row.data)
# hit_list=parser(row.data)
return locals()
else:
redirect(URL('form'))
|
def form():
db.raw_log.uuid.default = uuid_generator()
db.raw_log.date.default = dbdate()
#don't display form items that are part of table, but not facing end user
db.raw_log.uuid.readable = db.raw_log.uuid.writable = False
db.raw_log.date.readable = db.raw_log.date.writable = False
if form.accepted:
redirect(URL('dotd', 'parsed', args=db.raw_log.uuid.default))
return dict(form=form)
def parsed():
if request.args:
uuid = request.args[0]
rows = db(db.raw_log.uuid==uuid).select()
if len(rows) == 0:
redirect(URL('form'))
for row in rows:
experience, obtained_items, proc_items, found_items, log_file, max_hit, hit_list=parser(row.data)
# hit_list=parser(row.data)
return locals()
else:
redirect(URL('form'))
|
Remove selection of all raw_log rows, since it was used for debugging purposes only
|
Remove selection of all raw_log rows, since it was used for debugging
purposes only
|
Python
|
mit
|
tsunam/dotd_parser,tsunam/dotd_parser,tsunam/dotd_parser,tsunam/dotd_parser
|
627217b13482fff5451d3aa03867923925c49ec8
|
sale_order_add_variants/__openerp__.py
|
sale_order_add_variants/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Hugo Santos
# Copyright 2015 FactorLibre
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Sale Order Add Variants',
'summary': 'Add variants from template into sale order',
'version': '0.1',
'author': 'Factorlibre,Odoo Community Association (OCA)',
'category': 'Sale',
'license': 'AGPL-3',
'website': 'http://factorlibre.com',
'depends': [
'sale'
],
'demo': [],
'data': [
'security/sale_order_add_variants_security.xml',
'view/sale_add_variants_view.xml',
'view/sale_view.xml',
'view/res_config_view.xml'
],
'installable': True
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Hugo Santos
# Copyright 2015 FactorLibre
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Sale Order Add Variants',
'summary': 'Add variants from template into sale order',
'version': '0.1',
'author': 'FactorLibre,Odoo Community Association (OCA)',
'category': 'Sale',
'license': 'AGPL-3',
'website': 'http://factorlibre.com',
'depends': [
'sale'
],
'demo': [],
'data': [
'security/sale_order_add_variants_security.xml',
'view/sale_add_variants_view.xml',
'view/sale_view.xml',
'view/res_config_view.xml'
],
'installable': True
}
|
Fix typo in author FactorLibre
|
Fix typo in author FactorLibre
|
Python
|
agpl-3.0
|
kittiu/sale-workflow,Endika/sale-workflow,alexsandrohaag/sale-workflow,xpansa/sale-workflow,diagramsoftware/sale-workflow,BT-ojossen/sale-workflow,brain-tec/sale-workflow,brain-tec/sale-workflow,luistorresm/sale-workflow,numerigraphe/sale-workflow,anybox/sale-workflow,open-synergy/sale-workflow,BT-fgarbely/sale-workflow,anas-taji/sale-workflow,Rona111/sale-workflow,numerigraphe/sale-workflow,akretion/sale-workflow,jabibi/sale-workflow,acsone/sale-workflow,jjscarafia/sale-workflow,kittiu/sale-workflow,Antiun/sale-workflow,akretion/sale-workflow,adhoc-dev/sale-workflow,fevxie/sale-workflow,ddico/sale-workflow,factorlibre/sale-workflow,BT-jmichaud/sale-workflow,BT-cserra/sale-workflow,Eficent/sale-workflow,thomaspaulb/sale-workflow,acsone/sale-workflow,richard-willowit/sale-workflow
|
47b52333a74aeeb0ec2d7184455f70aa07633e62
|
createGlyphsPDF.py
|
createGlyphsPDF.py
|
# Some configuration
page_format = 'A4'
newPage(page_format)
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
print 'Registered', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
print self.glyph.width
for g in CurrentFont():
glyph = RegisterGlyph(g)
|
# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
my_selection = CurrentFont() # May also be CurrentFont.selection or else
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
print 'Registered', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
print self.glyph.width
def createPage(self):
newPage(page_format)
for g in my_selection:
glyph = RegisterGlyph(g)
glyph.createPage()
|
Create Page for every glyph
|
Create Page for every glyph
|
Python
|
mit
|
AlphabetType/DrawBot-Scripts
|
10aaa22cbcbb844a4393ac9eae526c3e50c121ab
|
src/ggrc/migrations/versions/20131209164454_49c670c7d705_add_private_column_t.py
|
src/ggrc/migrations/versions/20131209164454_49c670c7d705_add_private_column_t.py
|
"""Add private column to programs table.
Revision ID: 49c670c7d705
Revises: a3afeab3302
Create Date: 2013-12-09 16:44:54.222398
"""
# revision identifiers, used by Alembic.
revision = '49c670c7d705'
down_revision = 'a3afeab3302'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'programs',
sa.Column('private', sa.Boolean(), default=False, nullable=False),
)
def downgrade():
op.drop_column('programs', 'private')
|
"""Add private column to programs table.
Revision ID: 49c670c7d705
Revises: a3afeab3302
Create Date: 2013-12-09 16:44:54.222398
"""
# revision identifiers, used by Alembic.
revision = '49c670c7d705'
down_revision = 'a3afeab3302'
from alembic import op
from sqlalchemy.sql import table, column
import sqlalchemy as sa
def upgrade():
op.add_column(
'programs',
sa.Column('private', sa.Boolean(), default=False, nullable=False),
)
programs_table = table('programs',
column('id', sa.Integer),
column('context_id', sa.Integer),
column('private', sa.Boolean),
)
op.execute(programs_table.update().values(private=True)\
.where(programs_table.c.context_id != None))
def downgrade():
op.drop_column('programs', 'private')
|
Make sure to properly set private for existing private programs.
|
Make sure to properly set private for existing private programs.
|
Python
|
apache-2.0
|
hyperNURb/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,vladan-m/ggrc-core
|
63af9aa63dac1b3601ab5bfee5fd29b5e3602389
|
bonfiremanager/models.py
|
bonfiremanager/models.py
|
from django.db import models
class Event(models.Model):
name = models.CharField(max_length=1024, unique=True)
slug = models.SlugField(max_length=1024)
def __str__(self):
return self.name
class TimeSlot(models.Model):
event = models.ForeignKey(Event)
bookable = models.BooleanField(default=True)
end = models.DateTimeField()
name = models.CharField(max_length=1024)
start = models.DateTimeField()
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Room(models.Model):
event = models.ForeignKey(Event)
directions = models.TextField()
name = models.CharField(max_length=1024)
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Talk(models.Model):
room = models.ForeignKey(Room, null=True, blank=True)
description = models.TextField()
slug = models.SlugField(max_length=1024)
timeslot = models.IntegerField(default=0)
title = models.CharField(max_length=1024, unique=True)
def __str__(self):
return "{0} in {1}".format(self.title, self.room)
|
from django.db import models
class Event(models.Model):
name = models.CharField(max_length=1024, unique=True)
slug = models.SlugField(max_length=1024)
def __str__(self):
return self.name
class TimeSlot(models.Model):
event = models.ForeignKey(Event)
bookable = models.BooleanField(default=True)
end = models.DateTimeField()
name = models.CharField(max_length=1024)
start = models.DateTimeField()
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Room(models.Model):
event = models.ForeignKey(Event)
directions = models.TextField()
name = models.CharField(max_length=1024)
def __str__(self):
return "{0} ({1})".format(self.name, self.event)
class Talk(models.Model):
room = models.ForeignKey(Room, null=True, blank=True)
timeslot = models.ForeignKey(TimeSlot, null=True, blank=True)
description = models.TextField()
slug = models.SlugField(max_length=1024)
title = models.CharField(max_length=1024, unique=True)
def __str__(self):
return "{0} in {1}".format(self.title, self.room)
|
Make timeslot a FK on talk model
|
Make timeslot a FK on talk model
|
Python
|
agpl-3.0
|
yamatt/bonfiremanager
|
67ade89e301d11ca4c7ebfe6746cc262631b6cce
|
src/neighborhood_flow.py
|
src/neighborhood_flow.py
|
#!/usr/bin/env python
import sys
import os
import collections
import data
import figs
class Counter(collections.Counter):
year_range = range(2004, 2016)
def restrict_to_year_range(self, multiplier=1):
output = []
for year in self.year_range:
output.append(multiplier * self[year])
return output
out_dir = sys.argv[-1]
with open(sys.argv[1]) as stream:
reader = data.RawReader(stream)
neighborhood = None
new_counts, old_counts = Counter(), Counter()
for row in reader:
year = int(row['year'])
if neighborhood is None:
neighborhood = row['neighborhood']
if neighborhood != row['neighborhood']:
if not neighborhood:
neighborhood = "unknown"
fig = figs.FlowOverTime(
Counter.year_range,
new_counts.restrict_to_year_range(),
old_counts.restrict_to_year_range(multiplier=-1),
)
filename = os.path.join(
out_dir,
neighborhood.lower().replace(' ', '_') + '.png',
)
fig.save(filename)
fig.close()
print "saved", filename
neighborhood = row['neighborhood']
new_counts, old_counts = Counter(), Counter()
new_counts[year] = int(row['new'])
old_counts[year] = int(row['old'])
|
#!/usr/bin/env python
import sys
import os
import collections
import data
import figs
class Counter(collections.Counter):
year_range = range(2004, 2015)
def restrict_to_year_range(self, multiplier=1):
output = []
for year in self.year_range:
output.append(multiplier * self[year])
return output
out_dir = sys.argv[-1]
with open(sys.argv[1]) as stream:
reader = data.RawReader(stream)
neighborhood = None
new_counts, old_counts = Counter(), Counter()
for row in reader:
year = int(row['year'])
if neighborhood is None:
neighborhood = row['neighborhood']
if neighborhood != row['neighborhood']:
if not neighborhood:
neighborhood = "unknown"
fig = figs.FlowOverTime(
Counter.year_range,
new_counts.restrict_to_year_range(),
old_counts.restrict_to_year_range(multiplier=-1),
)
filename = os.path.join(
out_dir,
neighborhood.lower().replace(' ', '_') + '.png',
)
fig.save(filename)
fig.close()
print "saved", filename
neighborhood = row['neighborhood']
new_counts, old_counts = Counter(), Counter()
new_counts[year] = int(row['new'])
old_counts[year] = int(row['old'])
|
Revert year range end back to 2015 (2016 is not over)
|
Revert year range end back to 2015 (2016 is not over)
|
Python
|
unlicense
|
datascopeanalytics/chicago-new-business,datascopeanalytics/chicago-new-business
|
2fb0678363479c790e5a63de8b92a19de3ac2359
|
src/Camera.py
|
src/Camera.py
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
from traits.api import HasTraits, Int, Str, Tuple, Array, Range
from traitsui.api import View, Label
class CameraError(Exception):
def __init__(self, msg, cam):
self.msg = msg
self.camera_number = cam
def __str__(self):
return '{0} on camera {1}'.format(self.msg, self.camera_number)
class Camera(HasTraits):
camera_number = Int(-1)
id_string = Str()
resolution = Tuple(Int(), Int())
roi = Tuple(Int(), Int(), Int(), Int())
frame_rate = Range(1, 500, 30)
frame = Array()
# Default configuration panel
view = View(Label('No settings to configure'))
def __enter__(self):
self.open()
return self
def __exit__(self, *args):
self.close()
return False # don't suppress exceptions
def open(self):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
def query_frame(self):
raise NotImplementedError()
def find_resolutions(self):
'''
Returns a list of resolution tuples that this camera supports.
'''
# Default: return the camera's own default resolution
return [self.resolution]
def configure(self):
"""Opens a dialog to set the camera's parameters."""
pass
|
Add default view for camera
|
Add default view for camera
|
Python
|
mit
|
ptomato/Beams
|
0a05f423ad591454a25c515d811556d10e5fc99f
|
Browser.py
|
Browser.py
|
from Zeroconf import *
import socket
class MyListener(object):
def __init__(self):
self.r = Zeroconf()
pass
def removeService(self, zeroconf, type, name):
print "Service", name, "removed"
def addService(self, zeroconf, type, name):
print "Service", name, "added"
print "Type is", type
info = self.r.getServiceInfo(type, name)
print "Address is", str(socket.inet_ntoa(info.getAddress()))
print "Port is", info.getPort()
print "Weight is", info.getWeight()
print "Priority is", info.getPriority()
print "Server is", info.getServer()
print "Text is", info.getText()
print "Properties are", info.getProperties()
if __name__ == '__main__':
print "Multicast DNS Service Discovery for Python Browser test"
r = Zeroconf()
print "1. Testing browsing for a service..."
type = "_http._tcp.local."
listener = MyListener()
browser = ServiceBrowser(r, type, listener)
|
from Zeroconf import *
import socket
class MyListener(object):
def __init__(self):
self.r = Zeroconf()
pass
def removeService(self, zeroconf, type, name):
print "Service", name, "removed"
def addService(self, zeroconf, type, name):
print "Service", name, "added"
print "Type is", type
info = self.r.getServiceInfo(type, name)
if info:
print "Address is", str(socket.inet_ntoa(info.getAddress()))
print "Port is", info.getPort()
print "Weight is", info.getWeight()
print "Priority is", info.getPriority()
print "Server is", info.getServer()
print "Text is", info.getText()
print "Properties are", info.getProperties()
if __name__ == '__main__':
print "Multicast DNS Service Discovery for Python Browser test"
r = Zeroconf()
print "1. Testing browsing for a service..."
type = "_http._tcp.local."
listener = MyListener()
browser = ServiceBrowser(r, type, listener)
|
Allow for the failure of getServiceInfo(). Not sure why it's happening, though.
|
Allow for the failure of getServiceInfo(). Not sure why it's happening,
though.
|
Python
|
lgpl-2.1
|
jantman/python-zeroconf,decabyte/python-zeroconf,nameoftherose/python-zeroconf,balloob/python-zeroconf,AndreaCensi/python-zeroconf,giupo/python-zeroconf,jstasiak/python-zeroconf,wmcbrine/pyzeroconf,basilfx/python-zeroconf,daid/python-zeroconf,gbiddison/python-zeroconf
|
e155d7b96c5b834f4c062b93cbd564a5317905f1
|
tools/po2js.py
|
tools/po2js.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
|
Add the language code to the translated file
|
Add the language code to the translated file
|
Python
|
apache-2.0
|
operasoftware/dragonfly,operasoftware/dragonfly,operasoftware/dragonfly,operasoftware/dragonfly
|
23ee65e3eaa52e8e4ffcc294d2160bdd5451d490
|
scalyr_agent/tests/run_monitor_test.py
|
scalyr_agent/tests/run_monitor_test.py
|
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Imron Alston <[email protected]>
__author__ = '[email protected]'
import scalyr_agent.scalyr_logging as scalyr_logging
from scalyr_agent.run_monitor import run_standalone_monitor
from scalyr_agent.test_base import ScalyrTestCase
class RunMonitorTest(ScalyrTestCase):
def test_run_standalone_monitor( self ):
config = """{
}"""
run_standalone_monitor( "scalyr_agent.tests.empty_monitor", ".", config, float(0.5), scalyr_logging.DEBUG_LEVEL_1 )
|
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Imron Alston <[email protected]>
__author__ = '[email protected]'
import scalyr_agent.scalyr_logging as scalyr_logging
from scalyr_agent.run_monitor import run_standalone_monitor
from scalyr_agent.test_base import ScalyrTestCase
class RunMonitorTest(ScalyrTestCase):
def test_run_standalone_monitor( self ):
config = """{
}"""
run_standalone_monitor("scalyr_agent.tests.empty_monitor", ".", config, float(0.5),
scalyr_logging.DEBUG_LEVEL_0)
|
Fix test so it does not emit output.
|
Fix test so it does not emit output.
|
Python
|
apache-2.0
|
imron/scalyr-agent-2,scalyr/scalyr-agent-2,scalyr/scalyr-agent-2,scalyr/scalyr-agent-2,scalyr/scalyr-agent-2
|
afc658c6ae125042182976dd95af68881865a2da
|
handoverservice/handover_api/views.py
|
handoverservice/handover_api/views.py
|
from handover_api.models import User, Handover, Draft
from rest_framework import viewsets
from serializers import UserSerializer, HandoverSerializer, DraftSerializer
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = User.objects.all()
serializer_class = UserSerializer
class HandoverViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows handovers to be viewed or edited.
"""
queryset = Handover.objects.all()
serializer_class = HandoverSerializer
class DraftViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows drafts to be viewed or edited.
"""
queryset = Draft.objects.all()
serializer_class = DraftSerializer
|
from rest_framework import viewsets
from handover_api.models import User, Handover, Draft
from handover_api.serializers import UserSerializer, HandoverSerializer, DraftSerializer
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
"""
queryset = User.objects.all()
serializer_class = UserSerializer
class HandoverViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows handovers to be viewed or edited.
"""
queryset = Handover.objects.all()
serializer_class = HandoverSerializer
class DraftViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows drafts to be viewed or edited.
"""
queryset = Draft.objects.all()
serializer_class = DraftSerializer
|
Update import of serializers for python3 compatibility
|
Update import of serializers for python3 compatibility
|
Python
|
mit
|
Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService
|
a50edf34659acb63f1fa6dda5494812fa1c4ff7d
|
models/ras_pathway/run_ras_pathway.py
|
models/ras_pathway/run_ras_pathway.py
|
import sys
import pickle
from indra import reach
from indra.assemblers import GraphAssembler
if len(sys.argv) < 2:
process_type = 'text'
else:
process_type = sys.argv[1]
if process_type == 'text':
txt = open('ras_pathway.txt', 'rt').read()
rp = reach.process_text(txt, offline=True)
st = rp.statements
elif process_type == 'json':
rp = reach.process_json_file('reach_output.json')
st = rp.statements
else:
st = pickle.load(open('statements.pkl', 'rb'))
for s in st:
print '%s\t%s' % (s, s.evidence[0].text)
graphpr = {'rankdir': 'TD'}
nodepr = {'fontsize': 12, 'shape': 'plaintext', 'margin': '0,0', 'pad': 0}
ga = GraphAssembler(st, graph_properties=graphpr, node_properties=nodepr)
ga.make_model()
ga.save_dot('ras_pathway.dot')
ga.save_pdf('ras_pathway.pdf')
|
import sys
import pickle
from indra import trips
from indra import reach
from indra.assemblers import GraphAssembler
def process_reach(txt, reread):
if reread:
rp = reach.process_text(txt, offline=True)
st = rp.statements
else:
rp = reach.process_json_file('reach_output.json')
st = rp.statements
for s in st:
print('%s\t%s' % (s, s.evidence[0].text))
return st
def process_trips(txt, reread):
if reread:
tp = trips.process_text(txt)
st = tp.statements
else:
tp = trips.process_xml(open('trips_output.xml', 'r').read())
st = tp.statements
for s in st:
print('%s\t%s' % (s, s.evidence[0].text))
return st
def draw_graph(stmts):
graphpr = {'rankdir': 'TD'}
nodepr = {'fontsize': 12, 'shape': 'plaintext', 'margin': '0,0', 'pad': 0}
ga = GraphAssembler(st, graph_properties=graphpr, node_properties=nodepr)
ga.make_model()
ga.save_dot('ras_pathway.dot')
ga.save_pdf('ras_pathway.pdf')
if __name__ == '__main__':
reread = True
txt = open('ras_pathway.txt', 'rt').read()
print('-----')
print(txt)
print('-----')
if len(sys.argv) < 2:
print('Reader not specified')
sys.exit()
reader = sys.argv[1]
if reader == 'reach':
print('Using REACH')
stmts = process_reach(txt, reread)
elif reader == 'trips':
print('Using TRIPS')
stmts = process_trips(txt, reread)
|
Add TRIPS reading option to RAS pathway map
|
Add TRIPS reading option to RAS pathway map
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,bgyori/indra
|
c7825a2ec9be702b05c58118249fe13e7e231ecb
|
cheroot/test/conftest.py
|
cheroot/test/conftest.py
|
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
yield from cheroot_server(cheroot.wsgi.Server)
@pytest.fixture(scope='module')
def native_server():
yield from cheroot_server(cheroot.server.HTTPServer)
|
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
|
Drop `yield from` to keep compat w/ 2.7
|
Drop `yield from` to keep compat w/ 2.7
|
Python
|
bsd-3-clause
|
cherrypy/cheroot
|
2e3045ed1009a60fe6e236387cae68ddf63bb9b5
|
distarray/core/tests/test_distributed_array_protocol.py
|
distarray/core/tests/test_distributed_array_protocol.py
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_export_buffer(self):
"""See if we actually export a buffer."""
export_data = self.larr.__distarray__()
memoryview(export_data['buffer'])
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Test if `__distarray__()['buffer']` returns a buffer.
|
Test if `__distarray__()['buffer']` returns a buffer.
|
Python
|
bsd-3-clause
|
enthought/distarray,enthought/distarray,RaoUmer/distarray,RaoUmer/distarray
|
52443c468a446638171f45b080dcf62f73e62866
|
src/wirecloud_fiware/tests/selenium.py
|
src/wirecloud_fiware/tests/selenium.py
|
from wirecloudcommons.test import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
tags = ('current',)
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
|
from wirecloudcommons.test import WirecloudSeleniumTestCase
__test__ = False
class FiWareSeleniumTestCase(WirecloudSeleniumTestCase):
def test_add_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
def test_delete_fiware_marketplace(self):
self.login()
self.add_marketplace('fiware', 'http://localhost:8080', 'fiware')
self.delete_marketplace('fiware')
|
Remove 'current' tag from FiWareSeleniumTestCase
|
Remove 'current' tag from FiWareSeleniumTestCase
|
Python
|
agpl-3.0
|
rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud
|
6fe2e1dfbce465fee8a12475b3bfcda3ea10594e
|
staticgen_demo/blog/staticgen_views.py
|
staticgen_demo/blog/staticgen_views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print response.status_code
print response.__dict__
if not response.status_code == 200:
pass
else:
try:
return response.context['paginator'], response.context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], response.context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
Add print statements to debug BlogPostListView
|
Add print statements to debug BlogPostListView
|
Python
|
bsd-3-clause
|
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
|
abfe0538769145ac83031062ce3b22d2622f18bf
|
opwen_email_server/utils/temporary.py
|
opwen_email_server/utils/temporary.py
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> str:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
from contextlib import contextmanager
from contextlib import suppress
from os import close
from os import remove
from tempfile import mkstemp
from typing import Generator
def create_tempfilename() -> str:
file_descriptor, filename = mkstemp()
close(file_descriptor)
return filename
@contextmanager
def removing(path: str) -> Generator[str, None, None]:
try:
yield path
finally:
_remove_if_exists(path)
def _remove_if_exists(path: str):
with suppress(FileNotFoundError):
remove(path)
|
Fix type annotation for context manager
|
Fix type annotation for context manager
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
d154cd852bdb02743e9752179559a91b9f1a7f8c
|
example/tests/unit/test_renderer_class_methods.py
|
example/tests/unit/test_renderer_class_methods.py
|
from django.contrib.auth import get_user_model
from rest_framework_json_api import serializers
from rest_framework_json_api.renderers import JSONRenderer
class ResourceSerializer(serializers.ModelSerializer):
class Meta:
fields = ('username',)
model = get_user_model()
def test_build_json_resource_obj():
resource = {
'pk': 1,
'username': 'Alice',
}
serializer = ResourceSerializer(data={'username': 'Alice'})
serializer.is_valid()
resource_instance = serializer.save()
output = {
'type': 'user',
'id': '1',
'attributes': {
'username': 'Alice'
},
}
assert JSONRenderer.build_json_resource_obj(
serializer.fields, resource, resource_instance, 'user') == output
def test_extract_attributes():
fields = {
'id': serializers.Field(),
'username': serializers.Field(),
'deleted': serializers.ReadOnlyField(),
}
resource = {'id': 1, 'deleted': None, 'username': 'jerel'}
expected = {
'username': 'jerel',
'deleted': None
}
assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted(expected), 'Regular fields should be extracted'
assert sorted(JSONRenderer.extract_attributes(fields, {})) == sorted(
{'username': ''}), 'Should not extract read_only fields on empty serializer'
|
from django.contrib.auth import get_user_model
from rest_framework_json_api import serializers
from rest_framework_json_api.renderers import JSONRenderer
pytestmark = pytest.mark.django_db
class ResourceSerializer(serializers.ModelSerializer):
class Meta:
fields = ('username',)
model = get_user_model()
def test_build_json_resource_obj():
resource = {
'pk': 1,
'username': 'Alice',
}
serializer = ResourceSerializer(data={'username': 'Alice'})
serializer.is_valid()
resource_instance = serializer.save()
output = {
'type': 'user',
'id': '1',
'attributes': {
'username': 'Alice'
},
}
assert JSONRenderer.build_json_resource_obj(
serializer.fields, resource, resource_instance, 'user') == output
def test_extract_attributes():
fields = {
'id': serializers.Field(),
'username': serializers.Field(),
'deleted': serializers.ReadOnlyField(),
}
resource = {'id': 1, 'deleted': None, 'username': 'jerel'}
expected = {
'username': 'jerel',
'deleted': None
}
assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted(expected), 'Regular fields should be extracted'
assert sorted(JSONRenderer.extract_attributes(fields, {})) == sorted(
{'username': ''}), 'Should not extract read_only fields on empty serializer'
|
Fix for Database access not allowed, use the "django_db" mark to enable it.
|
Fix for Database access not allowed, use the "django_db" mark to enable it.
|
Python
|
bsd-2-clause
|
django-json-api/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,schtibe/django-rest-framework-json-api,pombredanne/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,leo-naeka/rest_framework_ember,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/rest_framework_ember
|
d32ec29dfae5a3ea354266dfda0438d9c69398e3
|
daiquiri/wordpress/utils.py
|
daiquiri/wordpress/utils.py
|
from django.conf import settings
from .tasks import (
update_wordpress_user as update_wordpress_user_task,
update_wordpress_role as update_wordpress_role_task
)
def update_wordpress_user(user):
if not settings.ASYNC:
update_wordpress_user_task.apply((user.username, user.email, user.first_name, user.last_name), throw=True)
else:
update_wordpress_user_task.apply_async((user.username, user.email, user.first_name, user.last_name))
def update_wordpress_role(user):
if user.is_superuser:
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_admin').exists():
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_editor').exists():
wordpress_role = 'editor'
else:
wordpress_role = 'subscriber'
if not settings.ASYNC:
update_wordpress_role_task.apply((user.username, wordpress_role), throw=True)
else:
update_wordpress_role_task.apply_async((user.username, wordpress_role))
|
import random
import string
from django.conf import settings
from .tasks import (
update_wordpress_user as update_wordpress_user_task,
update_wordpress_role as update_wordpress_role_task
)
def update_wordpress_user(user):
if user.email:
email = user.email
else:
random_string = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
email = random_string + '@example.com'
if not settings.ASYNC:
update_wordpress_user_task.apply((user.username, email, user.first_name, user.last_name), throw=True)
else:
update_wordpress_user_task.apply_async((user.username, email, user.first_name, user.last_name))
def update_wordpress_role(user):
if user.is_superuser:
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_admin').exists():
wordpress_role = 'administrator'
elif user.groups.filter(name='wordpress_editor').exists():
wordpress_role = 'editor'
else:
wordpress_role = 'subscriber'
if not settings.ASYNC:
update_wordpress_role_task.apply((user.username, wordpress_role), throw=True)
else:
update_wordpress_role_task.apply_async((user.username, wordpress_role))
|
Fix update_wordpress_user for missing email
|
Fix update_wordpress_user for missing email
|
Python
|
apache-2.0
|
aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri
|
f6518a7bd554c87b4dcb68d1ca618babcf278c63
|
tests/extmod/machine1.py
|
tests/extmod/machine1.py
|
# test machine module
import machine
import uctypes
print(machine.mem8)
buf = bytearray(8)
addr = uctypes.addressof(buf)
machine.mem8[addr] = 123
print(machine.mem8[addr])
machine.mem16[addr] = 12345
print(machine.mem16[addr])
machine.mem32[addr] = 123456789
print(machine.mem32[addr])
try:
machine.mem16[1]
except ValueError:
print("ValueError")
try:
machine.mem16[1] = 1
except ValueError:
print("ValueError")
try:
del machine.mem8[0]
except TypeError:
print("TypeError")
|
# test machine module
try:
import machine
except ImportError:
print("SKIP")
import sys
sys.exit()
import uctypes
print(machine.mem8)
buf = bytearray(8)
addr = uctypes.addressof(buf)
machine.mem8[addr] = 123
print(machine.mem8[addr])
machine.mem16[addr] = 12345
print(machine.mem16[addr])
machine.mem32[addr] = 123456789
print(machine.mem32[addr])
try:
machine.mem16[1]
except ValueError:
print("ValueError")
try:
machine.mem16[1] = 1
except ValueError:
print("ValueError")
try:
del machine.mem8[0]
except TypeError:
print("TypeError")
|
Check that machine module exists and print SKIP if it doesn't.
|
tests: Check that machine module exists and print SKIP if it doesn't.
|
Python
|
mit
|
vitiral/micropython,lowRISC/micropython,adafruit/circuitpython,danicampora/micropython,ryannathans/micropython,pfalcon/micropython,mgyenik/micropython,adafruit/micropython,blazewicz/micropython,vitiral/micropython,chrisdearman/micropython,dxxb/micropython,dinau/micropython,tdautc19841202/micropython,utopiaprince/micropython,jmarcelino/pycom-micropython,tdautc19841202/micropython,noahchense/micropython,lowRISC/micropython,danicampora/micropython,neilh10/micropython,ahotam/micropython,vriera/micropython,kerneltask/micropython,mgyenik/micropython,selste/micropython,redbear/micropython,alex-robbins/micropython,adamkh/micropython,adafruit/micropython,danicampora/micropython,matthewelse/micropython,vitiral/micropython,cloudformdesign/micropython,mpalomer/micropython,torwag/micropython,cloudformdesign/micropython,galenhz/micropython,Timmenem/micropython,ganshun666/micropython,hiway/micropython,ganshun666/micropython,MrSurly/micropython,MrSurly/micropython-esp32,AriZuu/micropython,pramasoul/micropython,ryannathans/micropython,emfcamp/micropython,alex-robbins/micropython,ericsnowcurrently/micropython,omtinez/micropython,skybird6672/micropython,rubencabrera/micropython,swegener/micropython,feilongfl/micropython,turbinenreiter/micropython,puuu/micropython,noahwilliamsson/micropython,emfcamp/micropython,cloudformdesign/micropython,MrSurly/micropython,supergis/micropython,neilh10/micropython,ChuckM/micropython,ChuckM/micropython,bvernoux/micropython,utopiaprince/micropython,blazewicz/micropython,blmorris/micropython,xhat/micropython,hosaka/micropython,jlillest/micropython,martinribelotta/micropython,PappaPeppar/micropython,suda/micropython,mpalomer/micropython,kerneltask/micropython,xyb/micropython,alex-march/micropython,ryannathans/micropython,dinau/micropython,utopiaprince/micropython,martinribelotta/micropython,henriknelson/micropython,dhylands/micropython,noahwilliamsson/micropython,Timmenem/micropython,HenrikSolver/micropython,dxxb/micropython,MrSurly/micropython,pozetroninc/micropython,toolmacher/micropython,lbattraw/micropython,noahchense/micropython,skybird6672/micropython,infinnovation/micropython,dinau/micropython,ruffy91/micropython,turbinenreiter/micropython,Peetz0r/micropython-esp32,misterdanb/micropython,HenrikSolver/micropython,TDAbboud/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,dhylands/micropython,drrk/micropython,mpalomer/micropython,lbattraw/micropython,toolmacher/micropython,tralamazza/micropython,tdautc19841202/micropython,kerneltask/micropython,ruffy91/micropython,chrisdearman/micropython,adafruit/circuitpython,pozetroninc/micropython,emfcamp/micropython,mianos/micropython,supergis/micropython,firstval/micropython,heisewangluo/micropython,redbear/micropython,blmorris/micropython,kostyll/micropython,micropython/micropython-esp32,torwag/micropython,kostyll/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,tobbad/micropython,cnoviello/micropython,noahwilliamsson/micropython,tuc-osg/micropython,micropython/micropython-esp32,pramasoul/micropython,skybird6672/micropython,torwag/micropython,blmorris/micropython,praemdonck/micropython,omtinez/micropython,hiway/micropython,micropython/micropython-esp32,xyb/micropython,mianos/micropython,pozetroninc/micropython,suda/micropython,ruffy91/micropython,martinribelotta/micropython,infinnovation/micropython,deshipu/micropython,matthewelse/micropython,heisewangluo/micropython,supergis/micropython,dhylands/micropython,adafruit/circuitpython,mpalomer/micropython,PappaPeppar/micropython,bvernoux/micropython,alex-march/micropython,mianos/micropython,EcmaXp/micropython,selste/micropython,martinribelotta/micropython,dxxb/micropython,feilongfl/micropython,galenhz/micropython,micropython/micropython-esp32,MrSurly/micropython-esp32,EcmaXp/micropython,tobbad/micropython,alex-robbins/micropython,ceramos/micropython,puuu/micropython,PappaPeppar/micropython,xyb/micropython,trezor/micropython,dhylands/micropython,selste/micropython,oopy/micropython,xuxiaoxin/micropython,neilh10/micropython,infinnovation/micropython,adafruit/circuitpython,stonegithubs/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,torwag/micropython,skybird6672/micropython,tralamazza/micropython,matthewelse/micropython,hiway/micropython,ryannathans/micropython,tobbad/micropython,xhat/micropython,slzatz/micropython,noahchense/micropython,ceramos/micropython,deshipu/micropython,mianos/micropython,mhoffma/micropython,orionrobots/micropython,toolmacher/micropython,kerneltask/micropython,pfalcon/micropython,pozetroninc/micropython,misterdanb/micropython,galenhz/micropython,omtinez/micropython,jlillest/micropython,supergis/micropython,feilongfl/micropython,neilh10/micropython,Timmenem/micropython,mhoffma/micropython,PappaPeppar/micropython,hosaka/micropython,henriknelson/micropython,noahchense/micropython,martinribelotta/micropython,ChuckM/micropython,HenrikSolver/micropython,suda/micropython,ChuckM/micropython,rubencabrera/micropython,Peetz0r/micropython-esp32,deshipu/micropython,matthewelse/micropython,puuu/micropython,misterdanb/micropython,dxxb/micropython,PappaPeppar/micropython,danicampora/micropython,selste/micropython,pramasoul/micropython,tdautc19841202/micropython,turbinenreiter/micropython,firstval/micropython,blazewicz/micropython,HenrikSolver/micropython,ericsnowcurrently/micropython,noahchense/micropython,alex-robbins/micropython,deshipu/micropython,cnoviello/micropython,orionrobots/micropython,cnoviello/micropython,lowRISC/micropython,tuc-osg/micropython,torwag/micropython,xuxiaoxin/micropython,henriknelson/micropython,trezor/micropython,praemdonck/micropython,deshipu/micropython,jlillest/micropython,pramasoul/micropython,TDAbboud/micropython,vriera/micropython,matthewelse/micropython,HenrikSolver/micropython,xhat/micropython,pozetroninc/micropython,tralamazza/micropython,orionrobots/micropython,jmarcelino/pycom-micropython,adamkh/micropython,dmazzella/micropython,turbinenreiter/micropython,chrisdearman/micropython,TDAbboud/micropython,drrk/micropython,xhat/micropython,blazewicz/micropython,ChuckM/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,stonegithubs/micropython,galenhz/micropython,orionrobots/micropython,ryannathans/micropython,hosaka/micropython,lbattraw/micropython,firstval/micropython,ernesto-g/micropython,AriZuu/micropython,xyb/micropython,feilongfl/micropython,mianos/micropython,EcmaXp/micropython,noahwilliamsson/micropython,bvernoux/micropython,infinnovation/micropython,redbear/micropython,slzatz/micropython,slzatz/micropython,ahotam/micropython,dmazzella/micropython,ericsnowcurrently/micropython,pfalcon/micropython,redbear/micropython,feilongfl/micropython,misterdanb/micropython,vriera/micropython,lbattraw/micropython,ceramos/micropython,alex-robbins/micropython,henriknelson/micropython,AriZuu/micropython,hiway/micropython,lowRISC/micropython,adafruit/micropython,ericsnowcurrently/micropython,alex-march/micropython,dinau/micropython,skybird6672/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,trezor/micropython,MrSurly/micropython,dmazzella/micropython,ernesto-g/micropython,danicampora/micropython,omtinez/micropython,xuxiaoxin/micropython,bvernoux/micropython,praemdonck/micropython,kostyll/micropython,ruffy91/micropython,ruffy91/micropython,rubencabrera/micropython,adamkh/micropython,oopy/micropython,alex-march/micropython,utopiaprince/micropython,AriZuu/micropython,pfalcon/micropython,lbattraw/micropython,utopiaprince/micropython,ahotam/micropython,suda/micropython,heisewangluo/micropython,tobbad/micropython,MrSurly/micropython-esp32,suda/micropython,adafruit/micropython,hiway/micropython,cwyark/micropython,cnoviello/micropython,xhat/micropython,drrk/micropython,TDAbboud/micropython,ganshun666/micropython,adamkh/micropython,ahotam/micropython,oopy/micropython,jlillest/micropython,swegener/micropython,rubencabrera/micropython,tralamazza/micropython,tuc-osg/micropython,infinnovation/micropython,swegener/micropython,blmorris/micropython,omtinez/micropython,trezor/micropython,stonegithubs/micropython,adafruit/circuitpython,pramasoul/micropython,misterdanb/micropython,hosaka/micropython,ganshun666/micropython,matthewelse/micropython,ceramos/micropython,mhoffma/micropython,emfcamp/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,orionrobots/micropython,pfalcon/micropython,cloudformdesign/micropython,mgyenik/micropython,bvernoux/micropython,firstval/micropython,redbear/micropython,dinau/micropython,cwyark/micropython,MrSurly/micropython,galenhz/micropython,henriknelson/micropython,emfcamp/micropython,alex-march/micropython,firstval/micropython,mgyenik/micropython,jmarcelino/pycom-micropython,ericsnowcurrently/micropython,rubencabrera/micropython,oopy/micropython,cwyark/micropython,mgyenik/micropython,ganshun666/micropython,TDAbboud/micropython,mhoffma/micropython,noahwilliamsson/micropython,adamkh/micropython,xuxiaoxin/micropython,chrisdearman/micropython,turbinenreiter/micropython,heisewangluo/micropython,puuu/micropython,cwyark/micropython,drrk/micropython,puuu/micropython,kostyll/micropython,selste/micropython,oopy/micropython,praemdonck/micropython,toolmacher/micropython,cnoviello/micropython,slzatz/micropython,swegener/micropython,blmorris/micropython,drrk/micropython,EcmaXp/micropython,mhoffma/micropython,ceramos/micropython,ahotam/micropython,stonegithubs/micropython,SHA2017-badge/micropython-esp32,vriera/micropython,SHA2017-badge/micropython-esp32,stonegithubs/micropython,dhylands/micropython,jlillest/micropython,tuc-osg/micropython,xyb/micropython,heisewangluo/micropython,xuxiaoxin/micropython,AriZuu/micropython,hosaka/micropython,dxxb/micropython,cwyark/micropython,kostyll/micropython,swegener/micropython,slzatz/micropython,dmazzella/micropython,Timmenem/micropython,vriera/micropython,tdautc19841202/micropython,chrisdearman/micropython,supergis/micropython,tobbad/micropython,blazewicz/micropython,ernesto-g/micropython,ernesto-g/micropython,mpalomer/micropython,toolmacher/micropython,lowRISC/micropython,vitiral/micropython,ernesto-g/micropython,EcmaXp/micropython,praemdonck/micropython,jmarcelino/pycom-micropython,adafruit/micropython,trezor/micropython,cloudformdesign/micropython,neilh10/micropython,vitiral/micropython
|
ddd4a0d1ba607f49f75f9516c378159f1204d9fb
|
readthedocs/rtd_tests/tests/test_search_json_parsing.py
|
readthedocs/rtd_tests/tests/test_search_json_parsing.py
|
import os
from django.test import TestCase
from search.parse_json import process_file
base_dir = os.path.dirname(os.path.dirname(__file__))
class TestHacks(TestCase):
def test_h2_parsing(self):
data = process_file(
os.path.join(
base_dir,
'files/api.fjson',
)
)
self.assertEqual(data['sections'][0]['id'], 'a-basic-api-client-using-slumber')
# Only capture h2's
for obj in data['sections']:
self.assertEqual(obj['content'][:5], '\n<h2>')
|
import os
from django.test import TestCase
from search.parse_json import process_file
base_dir = os.path.dirname(os.path.dirname(__file__))
class TestHacks(TestCase):
def test_h2_parsing(self):
data = process_file(
os.path.join(
base_dir,
'files/api.fjson',
)
)
self.assertEqual(data['sections'][1]['id'], 'a-basic-api-client-using-slumber')
# Only capture h2's after the first section
for obj in data['sections'][1:]:
self.assertEqual(obj['content'][:5], '\n<h2>')
|
Fix tests now that we have H1 capturing
|
Fix tests now that we have H1 capturing
|
Python
|
mit
|
wanghaven/readthedocs.org,wijerasa/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,d0ugal/readthedocs.org,takluyver/readthedocs.org,wanghaven/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,attakei/readthedocs-oauth,agjohnson/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,dirn/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,michaelmcandrew/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,takluyver/readthedocs.org,stevepiercy/readthedocs.org,atsuyim/readthedocs.org,espdev/readthedocs.org,techtonik/readthedocs.org,davidfischer/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,pombredanne/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,SteveViss/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,kdkeyser/readthedocs.org,KamranMackey/readthedocs.org,kenshinthebattosai/readthedocs.org,singingwolfboy/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,kenwang76/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,kenshinthebattosai/readthedocs.org,atsuyim/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,mhils/readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,fujita-shintaro/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,emawind84/readthedocs.org,Tazer/readthedocs.org,hach-que/readthedocs.org,techtonik/readthedocs.org,jerel/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,attakei/readthedocs-oauth,mrshoki/readthedocs.org,stevepiercy/readthedocs.org,jerel/readthedocs.org,laplaceliu/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,titiushko/readthedocs.org,VishvajitP/readthedocs.org,agjohnson/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,sils1297/readthedocs.org,raven47git/readthedocs.org,takluyver/readthedocs.org,d0ugal/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,emawind84/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,agjohnson/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,cgourlay/readthedocs.org,agjohnson/readthedocs.org,CedarLogic/readthedocs.org,gjtorikian/readthedocs.org,kenwang76/readthedocs.org,fujita-shintaro/readthedocs.org,sid-kap/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,VishvajitP/readthedocs.org,LukasBoersma/readthedocs.org,dirn/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,gjtorikian/readthedocs.org,nikolas/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedocs.org,cgourlay/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,sid-kap/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,davidfischer/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,kdkeyser/readthedocs.org,davidfischer/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,istresearch/readthedocs.org,raven47git/readthedocs.org,nikolas/readthedocs.org,kdkeyser/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,VishvajitP/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,tddv/readthedocs.org,mhils/readthedocs.org,KamranMackey/readthedocs.org,sunnyzwh/readthedocs.org,LukasBoersma/readthedocs.org,hach-que/readthedocs.org,dirn/readthedocs.org,raven47git/readthedocs.org
|
135a97a58a95c04d2635fff68d2c080413f1d804
|
tests/test_conditions.py
|
tests/test_conditions.py
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
u"Statement": [
{
u"Action": [u"s3:ListBucket"],
u"Condition": {
u"ForAllValues:StringLike": {
u"dynamodb:requestedAttributes": [
u"PostDateTime",
u"Message",
u"Tags",
]
}
},
u"Effect": u"Allow",
u"Resource": [u"arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
import json
import unittest
import awacs.aws as aws
import awacs.s3 as s3
class TestConditions(unittest.TestCase):
def test_for_all_values(self):
c = aws.Condition(
aws.ForAllValuesStringLike(
"dynamodb:requestedAttributes", ["PostDateTime", "Message", "Tags"]
)
)
pd = aws.PolicyDocument(
Statement=[
aws.Statement(
Action=[s3.ListBucket],
Effect=aws.Allow,
Resource=[s3.ARN("myBucket")],
Condition=c,
)
]
)
self.assertEqual(
{
"Statement": [
{
"Action": ["s3:ListBucket"],
"Condition": {
"ForAllValues:StringLike": {
"dynamodb:requestedAttributes": [
"PostDateTime",
"Message",
"Tags",
]
}
},
"Effect": "Allow",
"Resource": ["arn:aws:s3:::myBucket"],
}
]
},
json.loads(pd.to_json()),
)
|
Remove 'u' prefix from strings
|
Remove 'u' prefix from strings
|
Python
|
bsd-2-clause
|
cloudtools/awacs
|
07cffdaa6e131c4f02c570de3925d6238656fc87
|
tests/test_invocation.py
|
tests/test_invocation.py
|
import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
import sys
import subprocess
import re
import pytest
@pytest.mark.skipif(sys.version_info < (2, 7), reason="check_output not available")
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
Disable test on Python 2.6.
|
Disable test on Python 2.6.
|
Python
|
mit
|
nickstenning/honcho,nickstenning/honcho
|
1e7306d31cc9f5423f9594257b631d5f1a6c0ced
|
swiftly/__init__.py
|
swiftly/__init__.py
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.4'
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.5'
|
Work on master is now 1.5 dev work
|
Work on master is now 1.5 dev work
|
Python
|
apache-2.0
|
dpgoetz/swiftly,rackerlabs/swiftly,gholt/swiftly
|
f052666502ef0108d991940ca713ebc0c5d0c036
|
MyBot.py
|
MyBot.py
|
from hlt import *
from networking import *
myID, gameMap = getInit()
sendInit("MyPythonBot")
while True:
moves = []
gameMap = getFrame()
for y in range(gameMap.height):
for x in range(gameMap.width):
location = Location(x, y)
if gameMap.getSite(location).owner == myID:
moves.append(Move(location, random.choice(DIRECTIONS)))
sendFrame(moves)
|
from hlt import *
from networking import *
myID, gameMap = getInit()
sendInit("dpetkerPythonBot")
def create_move(location):
site = gameMap.getSite(location)
# See if there's an enemy adjacent to us with less strength. If so, capture it
for d in CARDINALS:
neighbour_site = gameMap.getSite(location, d)
if neighbour_site.owner != myID and neighbour_site.strength < site.strength:
return Move(location, d)
# Don't move until we're sufficiently strong
if site.strength < site.production * 5:
return Move(location, STILL)
return Move(location, NORTH if random.random() > 0.5 else WEST)
while True:
moves = []
gameMap = getFrame()
for y in range(gameMap.height):
for x in range(gameMap.width):
location = Location(x, y)
if gameMap.getSite(location).owner == myID:
moves.append(create_move(location))
sendFrame(moves)
|
Improve my bot according to tutorial
|
Improve my bot according to tutorial
|
Python
|
mit
|
dpetker/halite,dpetker/halite
|
9eabdbc6b73661865c4d785cbc57d7ee51fe59cd
|
future/tests/test_imports_urllib.py
|
future/tests/test_imports_urllib.py
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
"""
This should perhaps fail: importing urllib first means that the import hooks
won't be consulted when importing urllib.response.
"""
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
import urllib
orig_file = urllib.__file__
from future.standard_library.urllib import response as urllib_response
self.assertEqual(orig_file, urllib.__file__)
print(urllib_response.__file__)
if __name__ == '__main__':
unittest.main()
|
Change urllib test to use an explicit import
|
Change urllib test to use an explicit import
|
Python
|
mit
|
QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future,krischer/python-future,krischer/python-future,QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future
|
cad4e7e9feaf7fefe9ef91dea18594b095861204
|
content_editor/models.py
|
content_editor/models.py
|
from types import SimpleNamespace
from django.db import models
__all__ = ("Region", "Template", "create_plugin_base")
class Region(SimpleNamespace):
key = ""
title = "unnamed"
inherited = False
class Template(SimpleNamespace):
key = ""
template_name = None
title = ""
regions = []
def create_plugin_base(content_base):
"""
This is purely an internal method. Here, we create a base class for
the concrete content types, which are built in
``create_plugin``.
The three fields added to build a concrete content type class/model
are ``parent``, ``region`` and ``ordering``.
"""
class PluginBase(models.Model):
parent = models.ForeignKey(
content_base,
related_name="%(app_label)s_%(class)s_set",
on_delete=models.CASCADE,
)
region = models.CharField(max_length=255)
ordering = models.IntegerField(default=0)
class Meta:
abstract = True
app_label = content_base._meta.app_label
ordering = ["ordering"]
def __str__(self):
return "%s<region=%s ordering=%s pk=%s>" % (
self._meta.label,
self.region,
self.ordering,
self.pk,
)
@classmethod
def get_queryset(cls):
return cls.objects.all()
return PluginBase
|
from types import SimpleNamespace
from django.db import models
__all__ = ("Region", "Template", "create_plugin_base")
class Region(SimpleNamespace):
key = ""
title = "unnamed"
inherited = False
class Template(SimpleNamespace):
key = ""
template_name = None
title = ""
regions = []
def create_plugin_base(content_base):
"""
Create and return a base class for plugins
The base class contains a ``parent`` foreign key and the required
``region`` and ``ordering`` fields.
"""
class PluginBase(models.Model):
parent = models.ForeignKey(
content_base,
related_name="%(app_label)s_%(class)s_set",
on_delete=models.CASCADE,
)
region = models.CharField(max_length=255)
ordering = models.IntegerField(default=0)
class Meta:
abstract = True
app_label = content_base._meta.app_label
ordering = ["ordering"]
def __str__(self):
return "%s<region=%s ordering=%s pk=%s>" % (
self._meta.label,
self.region,
self.ordering,
self.pk,
)
@classmethod
def get_queryset(cls):
return cls.objects.all()
return PluginBase
|
Fix the docstring of create_plugin_base: Not internal, it's the main API
|
Fix the docstring of create_plugin_base: Not internal, it's the main API
|
Python
|
bsd-3-clause
|
matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor
|
27d8f3d637e5071e8eb048cd36218977bf0772ff
|
javascript_configuration/configuration_builder.py
|
javascript_configuration/configuration_builder.py
|
import sys
from javascript_configuration import settings
class ConfigurationBuilder:
"""
Get javascript configurations from urls.py files from all installed apps.
"""
def __init__(self):
self.configuration = None
def fetch(self):
configuration = {}
for app_name, module_name in settings.SCAN_MODULES.iter_items():
try:
__import__(module_name)
urls = sys.modules[module_name]
if hasattr(urls, 'javascript_configuration'):
configuration[app_name] = urls.javascript_configuration()
except ImportError:
pass
return configuration
def get_configuration(self):
if self.configuration is None:
self.configuration = self.fetch()
return self.configuration
DEFAULT_CONFIGURATION_BUILDER = ConfigurationBuilder()
|
import sys
from javascript_configuration import settings
class ConfigurationBuilder:
"""
Get javascript configurations from urls.py files from all installed apps.
"""
def __init__(self):
self.configuration = None
def fetch(self):
configuration = {}
for app_name, module_name in settings.SCAN_MODULES.iteritems():
try:
__import__(module_name)
urls = sys.modules[module_name]
if hasattr(urls, 'javascript_configuration'):
configuration[app_name] = urls.javascript_configuration()
except ImportError:
pass
return configuration
def get_configuration(self):
if self.configuration is None:
self.configuration = self.fetch()
return self.configuration
DEFAULT_CONFIGURATION_BUILDER = ConfigurationBuilder()
|
Fix bug in iteritems on SCAN_MODULES.
|
Fix bug in iteritems on SCAN_MODULES.
Change-Id: Ifa58f29a9e69ad46b44c301244525d711b43faca
Reviewed-on: http://review.pozytywnie.pl:8080/2340
Reviewed-by: Tomasz Wysocki <[email protected]>
Tested-by: Tomasz Wysocki <[email protected]>
|
Python
|
mit
|
pozytywnie/django-javascript-settings
|
0ecff906f8d504576f00f28c46be6d4594008f38
|
parcels/interaction/distance_utils.py
|
parcels/interaction/distance_utils.py
|
import numpy as np
def fast_distance(lat1, lon1, lat2, lon2):
'''Compute the arc distance assuming the earth is a sphere.'''
g = np.sin(lat1)*np.sin(lat2)+np.cos(lat1)*np.cos(lat2)*np.cos(lon1-lon2)
return np.arccos(np.minimum(1, g))
def spherical_distance(depth1_m, lat1_deg, lon1_deg, depth2_m, lat2_deg,
lon2_deg):
"Compute the arc distance, uses degrees as input."
R_earth = 6371000
lat1 = np.pi*lat1_deg/180
lon1 = np.pi*lon1_deg/180
lat2 = np.pi*lat2_deg/180
lon2 = np.pi*lon2_deg/180
horiz_dist = R_earth*fast_distance(lat1, lon1, lat2, lon2)
vert_dist = np.abs(depth1_m-depth2_m)
return (vert_dist, horiz_dist)
|
import numpy as np
def fast_distance(lat1, lon1, lat2, lon2):
'''Compute the arc distance assuming the earth is a sphere.
This is not the only possible implementation. It was taken from:
https://www.mkompf.com/gps/distcalc.html
'''
g = np.sin(lat1)*np.sin(lat2)+np.cos(lat1)*np.cos(lat2)*np.cos(lon1-lon2)
return np.arccos(np.minimum(1, g))
def spherical_distance(depth1_m, lat1_deg, lon1_deg, depth2_m, lat2_deg,
lon2_deg):
"Compute the arc distance, uses degrees as input."
R_earth = 6371000
lat1 = np.pi*lat1_deg/180
lon1 = np.pi*lon1_deg/180
lat2 = np.pi*lat2_deg/180
lon2 = np.pi*lon2_deg/180
horiz_dist = R_earth*fast_distance(lat1, lon1, lat2, lon2)
vert_dist = np.abs(depth1_m-depth2_m)
return (vert_dist, horiz_dist)
|
Add link for distance computation
|
Add link for distance computation
|
Python
|
mit
|
OceanPARCELS/parcels,OceanPARCELS/parcels
|
c07234bb3142df96dc9e02a236975bc3de2415cc
|
nailgun/nailgun/test/test_plugin.py
|
nailgun/nailgun/test/test_plugin.py
|
# -*- coding: utf-8 -*-
from nailgun.test.base import BaseHandlers
class TestPluginStateMachine(BaseHandlers):
def test_attrs_creation(self):
pass
|
# -*- coding: utf-8 -*-
from nailgun.test.base import BaseHandlers
from nailgun.plugin.process import get_queue, PluginProcessor
from nailgun.api.models import Task
class TestPluginProcess(BaseHandlers):
def setUp(self):
super(TestPluginProcess, self).setUp()
self.plugin_processor = PluginProcessor()
self.plugin_processor.start()
def tearDown(self):
super(TestPluginProcess, self).tearDown()
self.plugin_processor.terminate()
def test_task_set_to_error_when_exception_raised(self):
queue = get_queue()
task = Task(name='install_plugin', cache={'plugin_id': -1})
self.env.db.add(task)
self.env.db.commit()
queue.put(task.uuid)
def check_task_status_is_error():
self.env.db.refresh(task)
return task.status == 'error'
self.env.wait_for_true(check_task_status_is_error, timeout=2)
self.assertEquals(task.progress, 100)
|
Implement plugin test on exception handling
|
Implement plugin test on exception handling
|
Python
|
apache-2.0
|
SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,zhaochao/fuel-main,zhaochao/fuel-main,huntxu/fuel-main,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-main,SmartInfrastructures/fuel-main-dev,huntxu/fuel-web,teselkin/fuel-main,ddepaoli3/fuel-main-dev,teselkin/fuel-main,SmartInfrastructures/fuel-web-dev,SergK/fuel-main,dancn/fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SergK/fuel-main,zhaochao/fuel-web,dancn/fuel-main-dev,nebril/fuel-web,dancn/fuel-main-dev,AnselZhangGit/fuel-main,Fiware/ops.Fuel-main-dev,AnselZhangGit/fuel-main,nebril/fuel-web,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,AnselZhangGit/fuel-main,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-main,SergK/fuel-main,prmtl/fuel-web,zhaochao/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,zhaochao/fuel-web,SmartInfrastructures/fuel-web-dev,koder-ua/nailgun-fcert,koder-ua/nailgun-fcert,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,zhaochao/fuel-web,zhaochao/fuel-web,teselkin/fuel-main,stackforge/fuel-main,prmtl/fuel-web,AnselZhangGit/fuel-main,zhaochao/fuel-main,eayunstack/fuel-web,eayunstack/fuel-main,koder-ua/nailgun-fcert,stackforge/fuel-web,nebril/fuel-web,stackforge/fuel-web,zhaochao/fuel-main,ddepaoli3/fuel-main-dev,Fiware/ops.Fuel-main-dev,prmtl/fuel-web,SmartInfrastructures/fuel-main-dev,Fiware/ops.Fuel-main-dev,zhaochao/fuel-main,koder-ua/nailgun-fcert,huntxu/fuel-web,dancn/fuel-main-dev,stackforge/fuel-main,eayunstack/fuel-web,prmtl/fuel-web,stackforge/fuel-web,huntxu/fuel-web,huntxu/fuel-main,Fiware/ops.Fuel-main-dev,teselkin/fuel-main,nebril/fuel-web,eayunstack/fuel-web,stackforge/fuel-main
|
593c00153b8634e4ea3817de2ef3592fe0540e58
|
spinach/contrib/spinachd/management/commands/spinach.py
|
spinach/contrib/spinachd/management/commands/spinach.py
|
from django.core.management.base import BaseCommand
from spinach.const import DEFAULT_QUEUE, DEFAULT_WORKER_NUMBER
from spinach.contrib.datadog import register_datadog_if_module_patched
from ...apps import spin
class Command(BaseCommand):
help = 'Run Spinach workers'
def add_arguments(self, parser):
parser.add_argument(
'--threads',
dest='threads',
type=int,
default=DEFAULT_WORKER_NUMBER,
help='Number of worker threads to launch'
)
parser.add_argument(
'--queue',
dest='queue',
default=DEFAULT_QUEUE,
help='Queue to consume'
)
parser.add_argument(
'--stop-when-queue-empty',
dest='stop_when_queue_empty',
default=False,
action='store_true',
help='Stop workers once the queue is empty'
)
def handle(self, *args, **options):
# Use the Datadog integration if Datadog is already used
# to trace Django.
register_datadog_if_module_patched(
'django',
namespance=spin.namespace
)
spin.start_workers(
number=options['threads'],
queue=options['queue'],
stop_when_queue_empty=options['stop_when_queue_empty']
)
|
from django.core.management.base import BaseCommand
from spinach.const import DEFAULT_QUEUE, DEFAULT_WORKER_NUMBER
from spinach.contrib.datadog import register_datadog_if_module_patched
from ...apps import spin
class Command(BaseCommand):
help = 'Run Spinach workers'
def add_arguments(self, parser):
parser.add_argument(
'--threads',
dest='threads',
type=int,
default=DEFAULT_WORKER_NUMBER,
help='Number of worker threads to launch'
)
parser.add_argument(
'--queue',
dest='queue',
default=DEFAULT_QUEUE,
help='Queue to consume'
)
parser.add_argument(
'--stop-when-queue-empty',
dest='stop_when_queue_empty',
default=False,
action='store_true',
help='Stop workers once the queue is empty'
)
def handle(self, *args, **options):
# Use the Datadog integration if Datadog is already used
# to trace Django.
register_datadog_if_module_patched(
'django',
namespace=spin.namespace
)
spin.start_workers(
number=options['threads'],
queue=options['queue'],
stop_when_queue_empty=options['stop_when_queue_empty']
)
|
Fix typo preventing the Django/Datadog integration from starting
|
Fix typo preventing the Django/Datadog integration from starting
|
Python
|
bsd-2-clause
|
NicolasLM/spinach
|
09edd3b548baaa4f6d1e31d5a9891f2b6eef45d6
|
noopy/project_template/dispatcher.py
|
noopy/project_template/dispatcher.py
|
from noopy import lambda_functions
from noopy.endpoint import Endpoint
from noopy.endpoint import methods
def dispatch(event, context):
print event
if event['type'] == 'APIGateway':
path = event['path']
method = getattr(methods, event['method'])
endpoint = Endpoint.endpoints[Endpoint(path, method)]
return endpoint(event.get('params', {}), context)
if event['type'] == 'Lambda':
funcs = [f for f in lambda_functions if f.func_name == event['function_name']]
if len(funcs) != 1:
raise ValueError('One and only one function "{}" needed.'.format(event['function_name']))
funcs[0](event.get('params', {}), context)
|
from noopy import lambda_functions
from noopy.endpoint import Endpoint
from noopy.endpoint import methods
def dispatch(event, context):
print event
if event['type'] == 'APIGateway':
path = event['path']
method = getattr(methods, event['method'])
endpoint = Endpoint.endpoints[Endpoint(path, method)]
return endpoint(event.get('params', {}), context)
if event['type'] == 'Lambda':
funcs = [f for f in lambda_functions if f.func_name == event['function_name']]
if len(funcs) != 1:
raise ValueError('One and only one function "{}" needed.'.format(event['function_name']))
return funcs[0](event.get('params', {}), context)
raise ValueError('Undefined type: "%s"' % event['type'])
|
Raise error on undefined type
|
Raise error on undefined type
|
Python
|
mit
|
acuros/noopy
|
e861e74374d22d3684dccfa5e0063ff37549bcfc
|
api/app.py
|
api/app.py
|
from flask import Flask
from flask import request
from flask import jsonify
from y_text_recommender_system.recommender import recommend
app = Flask(__name__)
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, payload=None):
Exception.__init__(self)
self.message = message
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/recommender/', methods=['POST'])
def recommender():
content = request.get_json()
if content is not None:
doc = content.get('doc', {})
docs = content.get('docs', [])
_verify_parameters(doc, docs)
result = recommend(doc, docs)
return jsonify(result)
else:
msg = 'You need to send the parameters: doc and docs'
raise InvalidUsage(msg)
def _verify_parameters(doc, docs):
if doc == {}:
msg = 'The parameter `doc` is missing or empty'
raise InvalidUsage(msg)
if not isinstance(doc, dict):
msg = 'The parameter `doc` should be a dict'
raise InvalidUsage(msg)
if len(docs) == 0:
msg = 'The parameter `docs` is missing or empty'
raise InvalidUsage(msg)
|
from flask import Flask
from flask import request
from flask import jsonify
from y_text_recommender_system.recommender import recommend
app = Flask(__name__)
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, payload=None):
Exception.__init__(self)
self.message = message
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/recommender/', methods=['POST'])
def recommender():
content = request.get_json()
if content is not None:
doc = content.get('doc', {})
docs = content.get('docs', [])
_verify_parameters(doc, docs)
result = recommend(doc, docs)
return jsonify(result)
else:
msg = 'You need to send the parameters: doc and docs'
raise InvalidUsage(msg)
def _verify_parameters(doc, docs):
if bool(doc) is False:
msg = 'The parameter `doc` is missing or empty'
raise InvalidUsage(msg)
if not isinstance(doc, dict):
msg = 'The parameter `doc` should be a dict'
raise InvalidUsage(msg)
if len(docs) == 0:
msg = 'The parameter `docs` is missing or empty'
raise InvalidUsage(msg)
|
Refactor to change the comparator of dict
|
Refactor to change the comparator of dict
|
Python
|
mit
|
joaojunior/y_text_recommender_system
|
89fd94bb06e81f38b40bd75d793107599a1b7c48
|
freedomain.py
|
freedomain.py
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def start(count):
return 'SETUP APP'
if __name__ == '__main__':
app.run(host="172.31.27.41", port=8080)
|
from flask import Flask
import time
app = Flask(__name__)
alphabet = 'abcdefghijklmnopqrstuwxyz'
number = '1234567890'
def numbering_system():
base_system = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
result = {}
for csc_n in base_system:
result[csc_n] = base_system.find(csc_n)
return result
ns = numbering_system()
def csc(sym):
result = ''
for s in sym:
result += str(ns[s])
return result
def r_csc(num):
for key in numbering_system().keys():
if ns[key] == int(num):
return key
return 'out_of_range'
def increment(csc_number):
csc_len = len(csc_number)
i = 0
while 1:
if i > csc_len:
csc_number += '0'
if i == csc_len:
csc_number += '0'
break
num = csc_number[i]
if num in ns.keys():
csc_result = r_csc(int(csc(num))+1)
if csc_result != 'out_of_range':
csc_number = csc_number[:i] + csc_result + csc_number[i+1:]
break
else:
csc_number = csc_number[:i] + '0' + csc_number[i+1:]
i += 1
else:
csc_number = csc_number[:i] + '0' + csc_number[i+1:]
i += 1
return csc_number
def word_generator(csc_number):
return 0
def getDifTime(s):
current_milli_time2 = lambda: int(round(time.time() * 1000))
endTime = current_milli_time2()
return float(endTime - s) / 1000
@app.route('/<count>')
def freedomain(count):
return 'TEST'
if __name__ == '__main__':
app.run(host="172.31.27.41", port=8080)
#app.run()
|
Add base methods for generation dictionary
|
Add base methods for generation dictionary
|
Python
|
mit
|
cludtk/freedomain,cludtk/freedomain
|
1f4ee4e9d978322938579abc4c6723fdc783937d
|
build.py
|
build.py
|
#!/usr/bin/env python
import os
import subprocess
import sys
def build(pkgpath):
os.chdir(pkgpath)
targets = [
'build',
'package',
'install',
'clean',
'clean-depends',
]
for target in targets:
p = subprocess.Popen(
['bmake', target],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = p.communicate()
log = 'bmake-' + target + '-log.txt'
with open(log, 'w+') as f:
f.write(out)
f.write(err)
assert p.returncode == 0, '%s %s' % (pkg, target)
if __name__ == '__main__':
home = os.environ['HOME']
localbase = os.path.join(home, 'usr', 'pkgsrc')
lines = sys.stdin.readlines()
pkgs = [line.rstrip('\n') for line in lines]
pkgpaths = [pkg.split(' ')[0] for pkg in pkgs]
for pkgpath in pkgpaths:
print pkgpath
os.chdir(localbase)
assert os.path.exists(os.path.join(localbase, pkgpath))
build(pkgpath)
|
#!/usr/bin/env python
from __future__ import print_function
import os
import subprocess
import sys
def build(pkgpath):
os.chdir(pkgpath)
targets = [
'build',
'package',
'install',
'clean',
'clean-depends',
]
for target in targets:
p = subprocess.Popen(
['bmake', target],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
out, err = p.communicate()
log = 'bmake-' + target + '-log.txt'
with open(log, 'w+') as f:
f.write(out)
f.write(err)
assert p.returncode == 0, '%s %s' % (pkg, target)
if __name__ == '__main__':
home = os.environ['HOME']
localbase = os.path.join(home, 'usr', 'pkgsrc')
lines = sys.stdin.readlines()
pkgs = [line.rstrip('\n') for line in lines]
pkgpaths = [pkg.split(' ')[0] for pkg in pkgs]
for pkgpath in pkgpaths:
print(pkgpath)
os.chdir(localbase)
assert os.path.exists(os.path.join(localbase, pkgpath))
build(pkgpath)
|
Use the Python 3 print function.
|
Use the Python 3 print function.
|
Python
|
isc
|
eliteraspberries/minipkg,eliteraspberries/minipkg
|
c12cbae226f42405a998b93c6fd7049aadc6a19c
|
build.py
|
build.py
|
import os
import string
if __name__ == '__main__':
patch_file = 'example.patch'
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
with open('kpatch-patch.spec') as f:
spec_template = string.Template(f.read())
print(spec_template.substitute(values))
|
import os
import string
def generate_rpm_spec(template, patch_file):
spec_template = string.Template(template)
base_name, _ = os.path.splitext(patch_file)
values = {
'name': 'kpatch-module-{}'.format(base_name),
'patch_file': patch_file,
'kmod_filename': 'kpatch-{}.ko'.format(base_name),
'description': 'Package generated from {} by '
'kpatch-package-builder'.format(patch_file),
}
return spec_template.substitute(values)
if __name__ == '__main__':
with open('kpatch-patch.spec') as f:
template = f.read()
print(generate_rpm_spec(template, 'example.patch'))
|
Split spec generation into function
|
Split spec generation into function
|
Python
|
mit
|
centos-livepatching/kpatch-package-builder
|
916a02a609af6dc125b0a82215adb94858f4d597
|
yutu.py
|
yutu.py
|
import discord
from discord.ext.commands import Bot
import json
client = Bot("~", game=discord.Game(name="~help"))
@client.event
async def on_ready():
print('We have logged in as {0.user}'.format(client))
@client.command()
async def highfive(ctx):
'''
Give Yutu a high-five
'''
await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@client.command()
async def cute(ctx, member: discord.Member = None):
if member is None:
first = ctx.me
second = ctx.author
else:
first = ctx.author
second = member
post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first, second))
post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
await ctx.send(embed=post)
if __name__ == "__main__":
with open("cfg.json") as fh:
token = json.load(fh)['token']
client.run(token)
|
import discord
from discord.ext.commands import Bot
import json
client = Bot("~", game=discord.Game(name="~help"))
@client.event
async def on_ready():
print('We have logged in as {0.user}'.format(client))
@client.command()
async def highfive(ctx):
'''
Give Yutu a high-five
'''
await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@client.command()
async def cute(ctx, user: discord.Member = None):
"""
Tell someone they are cute!
Tells a user that you think they are cute, if you don't give a user, then Yutu will let you know that you are cute.
"""
if user is None:
first = ctx.me
second = ctx.author
else:
first = ctx.author
second = user
post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first, second))
post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
await ctx.send(embed=post)
if __name__ == "__main__":
with open("cfg.json") as fh:
token = json.load(fh)['token']
client.run(token)
|
Add help text for cute
|
Add help text for cute
|
Python
|
mit
|
HarkonenBade/yutu
|
e35767544e7c6b4461e511eaad42c047abcbe911
|
openprocurement/tender/esco/utils.py
|
openprocurement/tender/esco/utils.py
|
# -*- coding: utf-8 -*-
from openprocurement.api.utils import get_now
def request_get_now(request):
return get_now()
|
# -*- coding: utf-8 -*-
from decimal import Decimal
from openprocurement.api.utils import get_now
def request_get_now(request):
return get_now()
def to_decimal(fraction):
return Decimal(fraction.numerator) / Decimal(fraction.denominator)
|
Add function to convert fraction to decimal
|
Add function to convert fraction to decimal
|
Python
|
apache-2.0
|
openprocurement/openprocurement.tender.esco
|
aeac11d889695f17aab3b972b64101eaefd322f2
|
fuzzycount.py
|
fuzzycount.py
|
from django.conf import settings
from django.db import connections
from django.db.models.query import QuerySet
from model_utils.managers import PassThroughManager
class FuzzyCountQuerySet(QuerySet):
def count(self):
is_postgresql = settings.DATABASES[self.db]["ENGINE"].endswith(("postgis", "postgresql"))
is_filtered = self.query.where or self.query.having
if not is_postgresql or is_filtered:
return super(FuzzyCountQuerySet, self).count()
cursor = connections[self.db].cursor()
cursor.execute("SELECT reltuples FROM pg_class "
"WHERE relname = '%s';" % self.model._meta.db_table)
return int(cursor.fetchone()[0])
FuzzyCountManager = PassThroughManager.for_queryset_class(FuzzyCountQuerySet)
|
from django.conf import settings
from django.db import connections
from django.db.models.query import QuerySet
from model_utils.managers import PassThroughManager
class FuzzyCountQuerySet(QuerySet):
def count(self):
postgres_engines = ("postgis", "postgresql", "django_postgrespool")
engine = settings.DATABASES[self.db]["ENGINE"].split(".")[-1]
is_postgres = engine.startswith(postgresql_engines)
is_filtered = self.query.where or self.query.having
if not is_postgres or is_filtered
return super(FuzzyCountQuerySet, self).count()
cursor = connections[self.db].cursor()
cursor.execute("SELECT reltuples FROM pg_class "
"WHERE relname = '%s';" % self.model._meta.db_table)
return int(cursor.fetchone()[0])
FuzzyCountManager = PassThroughManager.for_queryset_class(FuzzyCountQuerySet)
|
Fix engine check and added check for django_postgrespool.
|
Fix engine check and added check for django_postgrespool.
|
Python
|
bsd-2-clause
|
stephenmcd/django-postgres-fuzzycount
|
acce959e4885a52ba4a80beaed41a56aac63844e
|
tests/opwen_email_server/api/test_client_read.py
|
tests/opwen_email_server/api/test_client_read.py
|
from contextlib import contextmanager
from os import environ
from unittest import TestCase
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self._given_clients('{"client1": "id1"}') as download:
message, status = download('unknown_client')
self.assertEqual(status, 403)
@classmethod
@contextmanager
def _given_clients(cls, clients: str):
environ['LOKOLE_CLIENTS'] = clients
from opwen_email_server.api import client_read
yield client_read.download
del client_read
|
from contextlib import contextmanager
from unittest import TestCase
from opwen_email_server.api import client_read
from opwen_email_server.services.auth import EnvironmentAuth
class DownloadTests(TestCase):
def test_denies_unknown_client(self):
with self.given_clients({'client1': 'id1'}):
message, status = client_read.download('unknown_client')
self.assertEqual(status, 403)
@contextmanager
def given_clients(self, clients):
original_clients = client_read.CLIENTS
client_read.CLIENTS = EnvironmentAuth(clients)
yield
client_read.CLIENTS = original_clients
|
Remove need to set environment variables in test
|
Remove need to set environment variables in test
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
bfc7e08ba70ba0e3acb9e4cc69b70c816845b6cb
|
djofx/views/home.py
|
djofx/views/home.py
|
from django.db.models import Sum
from django.views.generic import TemplateView
from djofx.forms import OFXForm
from djofx.views.base import PageTitleMixin, UserRequiredMixin
from djofx import models
class HomePageView(PageTitleMixin, UserRequiredMixin, TemplateView):
template_name = "djofx/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
context['accounts'] = models.Account.objects.filter(
owner=self.request.user
)
context['form'] = OFXForm()
breakdown = models.Transaction.objects.filter(
amount__lt=0,
transaction_category__is_void=False
).values(
'transaction_category__pk',
'transaction_category__name'
).annotate(
total=Sum('amount')
).order_by('-total')
context['breakdown'] = [
(
abs(item['total']),
item['transaction_category__pk'],
item['transaction_category__name']
)
for item in breakdown
]
return context
|
from datetime import date, timedelta
from django.db.models import Sum
from django.views.generic import TemplateView
from djofx.forms import OFXForm
from djofx.views.base import PageTitleMixin, UserRequiredMixin
from djofx import models
from operator import itemgetter
class HomePageView(PageTitleMixin, UserRequiredMixin, TemplateView):
template_name = "djofx/home.html"
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
context['accounts'] = models.Account.objects.filter(
owner=self.request.user
)
context['form'] = OFXForm()
cutoff = date.today() - timedelta(days=120)
uncategorised_breakdown = models.Transaction.objects.filter(
amount__lt=0,
transaction_category__isnull=True,
date__gte=cutoff
).aggregate(
total=Sum('amount')
)
breakdown = models.Transaction.objects.filter(
amount__lt=0,
transaction_category__is_void=False,
date__gte=cutoff
).values(
'transaction_category__pk',
'transaction_category__name'
).annotate(
total=Sum('amount')
).order_by('-total')
context['breakdown'] = [
(
abs(item['total']),
item['transaction_category__pk'],
item['transaction_category__name']
)
for item in breakdown
]
context['breakdown'].append(
(
uncategorised_breakdown['total'] * -1,
0,
'Uncategorised'
)
)
context['breakdown'] = sorted(context['breakdown'],
key=itemgetter(0),
reverse=True)
return context
|
Include uncategorised spending in overview pie chart
|
Include uncategorised spending in overview pie chart
Also, only show last 120 days
|
Python
|
mit
|
dominicrodger/djofx,dominicrodger/djofx,dominicrodger/djofx
|
d0ce6af2bb893904e8a3e36dd725707bd6d9f201
|
indico/modules/attachments/tasks.py
|
indico/modules/attachments/tasks.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.attachments.models.attachments import Attachment
from indico.modules.files.models.files import File
@celery.task(ignore_result=False)
def generate_materials_package(attachment_ids, event):
from indico.modules.attachments.controllers.event_package import AttachmentPackageGeneratorMixin
attachments = Attachment.query.filter(Attachment.id.in_(attachment_ids)).all()
attachment_package_mixin = AttachmentPackageGeneratorMixin()
attachment_package_mixin.event = event
generated_zip = attachment_package_mixin._generate_zip_file(attachments, return_file=True)
f = File(filename='material-package.zip', content_type='application/zip', meta={'event_id': event.id})
context = ('event', event.id, 'attachment-package')
f.save(context, generated_zip)
db.session.add(f)
db.session.commit()
return f.signed_download_url
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import os
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.attachments.models.attachments import Attachment
from indico.modules.files.models.files import File
@celery.task(ignore_result=False)
def generate_materials_package(attachment_ids, event):
from indico.modules.attachments.controllers.event_package import AttachmentPackageGeneratorMixin
attachments = Attachment.query.filter(Attachment.id.in_(attachment_ids)).all()
attachment_package_mixin = AttachmentPackageGeneratorMixin()
attachment_package_mixin.event = event
generated_zip = attachment_package_mixin._generate_zip_file(attachments, return_file=True)
f = File(filename='material-package.zip', content_type='application/zip', meta={'event_id': event.id})
context = ('event', event.id, 'attachment-package')
f.save(context, generated_zip)
db.session.add(f)
db.session.commit()
os.unlink(generated_zip.name)
return f.signed_download_url
|
Delete material package temp file after creation
|
Delete material package temp file after creation
|
Python
|
mit
|
ThiefMaster/indico,ThiefMaster/indico,DirkHoffmann/indico,pferreir/indico,indico/indico,pferreir/indico,indico/indico,indico/indico,pferreir/indico,pferreir/indico,DirkHoffmann/indico,DirkHoffmann/indico,ThiefMaster/indico,DirkHoffmann/indico,indico/indico,ThiefMaster/indico
|
d3751ef64869ad37f8802eca933e0995773175a8
|
16/016_power_digit_sum.py
|
16/016_power_digit_sum.py
|
"""Power Digit Sum
2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
What is the sum of the digits of the number 21000?
"""
assert sum(int(x) for x in str(2 ** 1000)) == 1366
|
"""Power Digit Sum
2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
What is the sum of the digits of the number 21000?
"""
assert sum(int(x) for x in str(2 ** 1000)) == 1366
|
Remove redundant empty line at end of file
|
Remove redundant empty line at end of file
There is no need to have multiple empty lines in the end.
|
Python
|
mit
|
the-gigi/project-euler,the-gigi/project-euler,the-gigi/project-euler
|
b0e3e93c3af70e42edf110e09039815575195c00
|
packages/dependencies/spirv_tools.py
|
packages/dependencies/spirv_tools.py
|
{
'repo_type' : 'git',
'rename_folder' : 'spirv-tools',
'url' : 'https://github.com/KhronosGroup/SPIRV-Tools.git',
'branch' : 'aa270e568a3fd637f4a329611107b384a8023195',
'needs_make' : False,
'needs_make_install' : False,
'needs_configure' : False,
'recursive_git' : True,
'_info' : { 'version' : None, 'fancy_name' : 'SPIRV Tools' },
}
|
{
'repo_type' : 'git',
'rename_folder' : 'spirv-tools',
'url' : 'https://github.com/KhronosGroup/SPIRV-Tools.git',
'needs_make' : False,
'needs_make_install' : False,
'needs_configure' : False,
'recursive_git' : True,
'_info' : { 'version' : None, 'fancy_name' : 'SPIRV Tools' },
}
|
Revert "packages/spirvtools: stick to last working commit"
|
Revert "packages/spirvtools: stick to last working commit"
This reverts commit cbaac43c95226b4ca5c9c1478467069966e9ef35.
|
Python
|
mpl-2.0
|
DeadSix27/python_cross_compile_script
|
12914961c0c2851dd720e84ff811389b1cd936dd
|
wsgi.py
|
wsgi.py
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
if __name__ == '__main__':
app.run()
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
if __name__ == '__main__':
socketio.run(app)
|
Change to socketio.run() so WebSockets work on local runs.
|
[NG] Change to socketio.run() so WebSockets work on local runs.
|
Python
|
mit
|
SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev
|
e050864d333e4b332a21671cb5f08f2ffd9172fe
|
pipeline/archivebot/shared_config.py
|
pipeline/archivebot/shared_config.py
|
import os
import yaml
def config():
my_dir = os.path.dirname(__file__)
config_file = os.path.join(my_dir, '../../lib/shared_config.yml')
with open(config_file, 'r') as f:
return yaml.load(f.read())
def log_channel():
c = config()
return c['channels']['log']
def pipeline_channel():
c = config()
return c['channels']['pipeline']
def job_channel(ident):
return '%s%s' % (job_channel_prefix(), ident)
def job_channel_prefix():
c = config()
return c['channels']['job_prefix']
# vim:ts=4:sw=4:et:tw=78
|
import os
import yaml
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
def config():
my_dir = os.path.dirname(__file__)
config_file = os.path.join(my_dir, '../../lib/shared_config.yml')
with open(config_file, 'r') as f:
return yaml.load(f.read(), Loader = Loader)
def log_channel():
c = config()
return c['channels']['log']
def pipeline_channel():
c = config()
return c['channels']['pipeline']
def job_channel(ident):
return '%s%s' % (job_channel_prefix(), ident)
def job_channel_prefix():
c = config()
return c['channels']['job_prefix']
# vim:ts=4:sw=4:et:tw=78
|
Fix compatibility with PyYAML 6.0 (mandatory `Loader`)
|
Fix compatibility with PyYAML 6.0 (mandatory `Loader`)
|
Python
|
mit
|
ArchiveTeam/ArchiveBot,ArchiveTeam/ArchiveBot,ArchiveTeam/ArchiveBot,ArchiveTeam/ArchiveBot,ArchiveTeam/ArchiveBot
|
bb3d2927437a51d8144ec398085876bc3dedb5f6
|
project_generator/commands/clean.py
|
project_generator/commands/clean.py
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by progen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", help="Clean project files")
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by progen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", help="Clean project files for this tool")
|
Clean command - tool help fix
|
Clean command - tool help fix
|
Python
|
apache-2.0
|
sarahmarshy/project_generator,ohagendorf/project_generator,0xc0170/project_generator,project-generator/project_generator
|
d198b8d92ec42f8e5fc995f59c8776044b8950e8
|
analysis/data_process/uk_2017/config.py
|
analysis/data_process/uk_2017/config.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
count_na = True
normalise = False
class PlottingConfig(CleaningConfig):
plot_na = False
normalise = True
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
allow_errors = True
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './'}}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
count_na = True
normalise = False
class PlottingConfig(CleaningConfig):
plot_na = False
normalise = True
# Different than normalise, add a freq_table with percentage
# in addition of the table with counts
show_percent = True
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
allow_errors = True
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './'}}
|
Add the option for showing percent rather than count in plots
|
Add the option for showing percent rather than count in plots
|
Python
|
bsd-3-clause
|
softwaresaved/international-survey
|
2762599917362bc621e84a17ae922588ad4296ae
|
saleor/order/urls.py
|
saleor/order/urls.py
|
from django.conf.urls import patterns, url
from . import views
TOKEN_PATTERN = ('(?P<token>[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}'
'-[0-9a-z]{12})')
urlpatterns = patterns(
'',
url(r'^%s/$' % TOKEN_PATTERN, views.details, name='details'),
url(r'^%s/payment/(?P<variant>[a-z-]+)/$' % TOKEN_PATTERN,
views.start_payment, name='payment'),
url(r'^%s/cancel-payment/$' % TOKEN_PATTERN, views.cancel_payment,
name='cancel-payment'))
|
from django.conf.urls import patterns, url
from . import views
TOKEN_PATTERN = ('(?P<token>[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}'
'-[0-9a-z]{12})')
urlpatterns = patterns(
'',
url(r'^%s/$' % TOKEN_PATTERN, views.details, name='details'),
url(r'^%s/payment/(?P<variant>[-\w]+)/$' % TOKEN_PATTERN,
views.start_payment, name='payment'),
url(r'^%s/cancel-payment/$' % TOKEN_PATTERN, views.cancel_payment,
name='cancel-payment'))
|
Add all words and "-" to payment url
|
Add all words and "-" to payment url
|
Python
|
bsd-3-clause
|
laosunhust/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,HyperManTT/ECommerceSaleor,hongquan/saleor,KenMutemi/saleor,josesanch/saleor,rodrigozn/CW-Shop,Drekscott/Motlaesaleor,laosunhust/saleor,KenMutemi/saleor,KenMutemi/saleor,taedori81/saleor,paweltin/saleor,rchav/vinerack,avorio/saleor,itbabu/saleor,UITools/saleor,rchav/vinerack,maferelo/saleor,Drekscott/Motlaesaleor,Drekscott/Motlaesaleor,car3oon/saleor,spartonia/saleor,UITools/saleor,mociepka/saleor,arth-co/saleor,HyperManTT/ECommerceSaleor,laosunhust/saleor,UITools/saleor,jreigel/saleor,hongquan/saleor,laosunhust/saleor,arth-co/saleor,rodrigozn/CW-Shop,arth-co/saleor,taedori81/saleor,avorio/saleor,paweltin/saleor,paweltin/saleor,car3oon/saleor,tfroehlich82/saleor,avorio/saleor,car3oon/saleor,taedori81/saleor,mociepka/saleor,tfroehlich82/saleor,hongquan/saleor,avorio/saleor,josesanch/saleor,josesanch/saleor,maferelo/saleor,spartonia/saleor,dashmug/saleor,UITools/saleor,dashmug/saleor,spartonia/saleor,UITools/saleor,rodrigozn/CW-Shop,dashmug/saleor,Drekscott/Motlaesaleor,taedori81/saleor,jreigel/saleor,paweltin/saleor,mociepka/saleor,jreigel/saleor,rchav/vinerack,spartonia/saleor,arth-co/saleor,tfroehlich82/saleor,itbabu/saleor
|
da22196a8167a57c5edf39578ceece4efd8cfd63
|
app/views.py
|
app/views.py
|
from app import app
from flask import make_response
@app.route('/')
@app.route('/index')
def index():
return make_response(open(app.root_path + '/templates/index.html').read())
|
from flask import render_template
from app import app
@app.route('/')
@app.route('/index')
def index():
user = { 'nickname': 'Marvolo' } # fake user
posts = [ # fake array of posts
{
'author': { 'nickname': 'John' },
'body': 'Beautiful day in Portland!'
},
{
'author': { 'nickname': 'Susan' },
'body': 'The Avengers movie was so cool!'
}
]
return render_template("index.html",
title = 'Home',
user = user,
posts = posts)
|
Set up mock data for index
|
Set up mock data for index
|
Python
|
apache-2.0
|
happyraul/tv
|
158a2ffd17cf59a87092581dc0ce40cf0b82aa85
|
bigbuild/management/commands/publish.py
|
bigbuild/management/commands/publish.py
|
from django.conf import settings
from bakery.management.commands.publish import Command as BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
# When in BIGBUILD_BRANCH_BUILD don't delete because we'll be syncing
# a different subdirectory for each one of our git branches
if settings.BIGBUILD_BRANCH_BUILD:
options['no_delete'] = True
super(Command, self).handle(*args, **options)
|
from django.conf import settings
from bakery.management.commands.publish import Command as BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
# When in BIGBUILD_BRANCH_BUILD don't delete because we'll be syncing
# a different subdirectory for each one of our git branches
if getattr(settings, 'BIGBUILD_BRANCH_BUILD', False):
options['no_delete'] = True
super(Command, self).handle(*args, **options)
|
Make an attribute call a little smarter
|
Make an attribute call a little smarter
|
Python
|
mit
|
datadesk/django-bigbuild,datadesk/django-bigbuild,datadesk/django-bigbuild
|
939c5fd069fafbe353fc9a209d2bd376e8d9bbd6
|
gridded/gridded.py
|
gridded/gridded.py
|
class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
def load(cls, nc, *args, **kwargs):
for go in self._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
def load(cls, *args, **kwargs):
for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(*args, **kwargs):
return go(*args, **kwargs)
|
Fix self- > cls, make super generic (no `nc`)
|
Fix self- > cls, make super generic (no `nc`)
|
Python
|
mit
|
pyoceans/gridded
|
d2adf86767857e9b57527c3db1d720b1f8f086a2
|
openedx/stanford/djangoapps/register_cme/admin.py
|
openedx/stanford/djangoapps/register_cme/admin.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
"""
Admin interface for ExtraInfo model.
"""
readonly_fields = (
'user',
)
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
|
Make `user` field read-only in `ExtraInfo` detail
|
Make `user` field read-only in `ExtraInfo` detail
Previously, server would hang trying to load an `ExtraInfo` detail page,
because the `user` field was rendering as a dropdown select menu loading
all users in the system. We fix this by making the field read-only.
|
Python
|
agpl-3.0
|
caesar2164/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform
|
cf8621affe1e654bf5ec391d80f260cbce83445b
|
cli/cli.py
|
cli/cli.py
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'],help='List anayltics commands based on choice')
# An run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
import argparse
import os
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis')
parser.add_argument('-v', '--version', action='version', version='%(prog)s 0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_action', action='store', choices=['all', 'basic'],help='List anayltics commands based on choice')
# An run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('db_name', help='Name of database where each database corresponds to a course offering')
run_parser.add_argument('run_action', help='Run analytics based on argument', nargs='?', default='basic')
run_parser.add_argument('-o', '--output', help='Path to directory to save CSV report (defaults to current directory: %(default)s)', default=os.getcwd())
run_parser.add_argument('-j', '--json', help='Path to JSON file that may be needed for some analytics commands')
run_parser.add_argument('-c', '--csv', help='Path to CSV file that may be needed for some analytics commands')
run_parser.add_argument('-p', '--problem-id', help='Course specifc problem ID that may be needed for some analytics commands')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
Define more arguments for command line parser
|
Define more arguments for command line parser
|
Python
|
mit
|
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
|
749650a56347dad48bb2eabff139646ecf5c98d0
|
feedbot/__init__.py
|
feedbot/__init__.py
|
_author__ = 'Liav Koren'
__email__ = '[email protected]'
__version__ = '0.1.0rc1'
|
_author__ = 'Liav Koren'
__email__ = '[email protected]'
__version__ = '0.1.1'
|
Bump minor point version to 0.1.1
|
Bump minor point version to 0.1.1
|
Python
|
apache-2.0
|
liavkoren/feedbot
|
df1e5be22cd4c7cb95952c4419defeab0eb284a4
|
instance/config.py
|
instance/config.py
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
Revert test db to postgress
|
Revert test db to postgress
|
Python
|
mit
|
Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api
|
adfbd9b192316bc527259a1c0a01db0a8dbd5f3e
|
examples/rmg/liquid_phase/input.py
|
examples/rmg/liquid_phase/input.py
|
# Data sources
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = ['training'],
kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'],
kineticsEstimator = 'rate rules',
)
# List of species
species(
label='octane',
reactive=True,
structure=SMILES("C(CCCCC)CC"),
)
species(
label='oxygen',
reactive=True,
structure=SMILES("[O][O]"),
)
# Reaction systems
liquidReactor(
temperature=(500,'K'),
initialConcentrations={
"octane": (6.154e-3,'mol/cm^3'),
"oxygen": (4.953e-6,'mol/cm^3')
},
terminationConversion={
'octane': 0.9,
},
terminationTime=(1e6,'s'),
)
solvation(
solvent='octane'
)
simulator(
atol=1e-16,
rtol=1e-8,
)
model(
toleranceKeepInEdge=1E-9,
toleranceMoveToCore=0.001,
toleranceInterruptSimulation=0.1,
maximumEdgeSpecies=100000
)
options(
units='si',
saveRestartPeriod=None,
drawMolecules=False,
generatePlots=False,
saveConcentrationProfiles=True,
)
|
# Data sources
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = ['training'],
kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'],
kineticsEstimator = 'rate rules',
)
# List of species
species(
label='octane',
reactive=True,
structure=SMILES("C(CCCCC)CC"),
)
species(
label='oxygen',
reactive=True,
structure=SMILES("[O][O]"),
)
# Reaction systems
liquidReactor(
temperature=(500,'K'),
initialConcentrations={
"octane": (6.154e-3,'mol/cm^3'),
"oxygen": (4.953e-6,'mol/cm^3')
},
terminationTime=(5,'s'),
)
solvation(
solvent='octane'
)
simulator(
atol=1e-16,
rtol=1e-8,
)
model(
toleranceKeepInEdge=1E-9,
toleranceMoveToCore=0.001,
toleranceInterruptSimulation=0.1,
maximumEdgeSpecies=100000
)
options(
units='si',
saveRestartPeriod=None,
drawMolecules=False,
generatePlots=False,
saveConcentrationProfiles=True,
)
|
Change termination criteria for liquid phase examples to attainable value.
|
Change termination criteria for liquid phase examples to attainable value.
Conversion of octane was stuck at 8e-3 for quite some time...
|
Python
|
mit
|
nyee/RMG-Py,faribas/RMG-Py,enochd/RMG-Py,faribas/RMG-Py,comocheng/RMG-Py,chatelak/RMG-Py,pierrelb/RMG-Py,pierrelb/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,enochd/RMG-Py,nyee/RMG-Py,KEHANG/RMG-Py,KEHANG/RMG-Py,nickvandewiele/RMG-Py,comocheng/RMG-Py
|
e4e38ecd09b4c96e5b801b1bc9f7a943934c6485
|
cobertura_clover_transform/converter.py
|
cobertura_clover_transform/converter.py
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
args = parser.parse_args()
converted = convert(args.coverage_xml)
print(converted)
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
parser.add_argument('-o', '--output', required=False)
args = parser.parse_args()
converted = convert(args.coverage_xml)
if args.output:
with open(args.output, 'w') as out:
out.write(converted)
else:
print(converted)
|
Add support for output to a file
|
Add support for output to a file
|
Python
|
mit
|
cwacek/cobertura-clover-transform
|
270e222301cf8c61e7632b366fba349552356928
|
services/__init__.py
|
services/__init__.py
|
#!/usr/bin/env python
import os
import glob
__all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")]
class InvalidConfigException(Exception):
pass
class PluginMount(type):
def __init__(cls, name, bases, attrs):
if not hasattr(cls, 'plugins'):
# This branch only executes when processing the mount point itself.
# So, since this is a new plugin type, not an implementation, this
# class shouldn't be registered as a plugin. Instead, it sets up a
# list where plugins can be registered later.
cls.plugins = []
else:
# This must be a plugin implementation, which should be registered.
# Simply appending it to the list is all that's needed to keep
# track of it later.
cls.plugins.append(cls)
class Service:
"""
The constructor is passed a dictionary containing the configuration
options for the service
"""
__metaclass__ = PluginMount
def verify_up(self):
"""Returns a boolean representing whether the service is up or not"""
pass
def owner(self):
"""Returns a string containing the name of the owning team/player"""
pass
|
#!/usr/bin/env python
import os
import glob
__all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")]
class InvalidConfigException(Exception):
pass
class PluginMount(type):
def __init__(cls, name, bases, attrs):
if not hasattr(cls, 'plugins'):
# This branch only executes when processing the mount point itself.
# So, since this is a new plugin type, not an implementation, this
# class shouldn't be registered as a plugin. Instead, it sets up a
# list where plugins can be registered later.
cls.plugins = []
else:
# This must be a plugin implementation, which should be registered.
# Simply appending it to the list is all that's needed to keep
# track of it later.
cls.plugins.append(cls)
def get_plugin(cls, name):
"""Get an new plugin by name"""
for p in cls.plugins:
if p.name == name:
return p
return None
class Service:
"""
The constructor is passed a dictionary containing the configuration
options for the service.
All Services must specify the 'name' attribute.
"""
__metaclass__ = PluginMount
def verify_up(self):
"""Returns a boolean representing whether the service is up or not"""
pass
def owner(self):
"""Returns a string containing the name of the owning team/player"""
pass
|
Add get_plugin method to the plugin base.
|
Add get_plugin method to the plugin base.
|
Python
|
bsd-3-clause
|
vtcsec/wargame-scorer
|
3a3997b19966560b828efb1699ee29a58cacbfc8
|
spriteworld/configs/cobra/common.py
|
spriteworld/configs/cobra/common.py
|
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# python2 python3
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25, noise_scale=0.05)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
|
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# python2 python3
"""Shared definitions and methods across all COBRA tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from spriteworld import action_spaces
from spriteworld import renderers as spriteworld_renderers
def action_space():
return action_spaces.SelectMove(scale=0.25)
def renderers():
return {
'image':
spriteworld_renderers.PILRenderer(
image_size=(64, 64),
anti_aliasing=5,
color_to_rgb=spriteworld_renderers.color_maps.hsv_to_rgb,
)
}
|
Remove noise from default COBRA configs.
|
Remove noise from default COBRA configs.
PiperOrigin-RevId: 265733849
Change-Id: Ie0e7c0385497852fd85c769ee85c951542c14463
|
Python
|
apache-2.0
|
deepmind/spriteworld
|
6adfd6ee8f673a601a3d118a45d21d2941b1e0aa
|
buildlet/utils/hashutils.py
|
buildlet/utils/hashutils.py
|
import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s)
return m.hexdigest()
|
import hashlib
def hexdigest(strings):
m = hashlib.md5()
for s in strings:
m.update(s.encode())
return m.hexdigest()
|
Fix TypeError in Python 3
|
Fix TypeError in Python 3
|
Python
|
bsd-3-clause
|
tkf/buildlet
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.