commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---
04c3cac3054626773bc0434453378cb295f7e38c
|
Add handling of invalid values
|
timtroendle/pytus2000
|
pytus2000/read.py
|
pytus2000/read.py
|
import pandas as pd
from .datadicts import diary
def read_diary_file(path_to_file):
return pd.read_csv(
path_to_file,
delimiter='\t',
converters=_column_name_to_type_mapping(diary),
low_memory=False # some columns seem to have mixed types
)
def _column_name_to_type_mapping(module):
mapping = {}
for member in module.Variable:
        try:
            enumcls = module.__dict__[member.name]
        except KeyError:
            pass  # nothing to do; there is no enum for this variable
        else:
            mapping[member.name] = _enum_converter(enumcls)
return mapping
def _enum_converter(enumcls):
def enum_converter(value):
if value == ' ':
return None
else:
try:
value = enumcls(value)
except ValueError as ve:
print(ve)
return None
else:
return value
return enum_converter
|
import pandas as pd
from .datadicts import diary
def read_diary_file(path_to_file):
return pd.read_csv(
path_to_file,
delimiter='\t',
nrows=50,
converters=_column_name_to_type_mapping(diary),
low_memory=False # some columns seem to have mixed types
)
def _column_name_to_type_mapping(module):
mapping = {}
for member in module.Variable:
try:
module.__dict__[member.name]
mapping[member.name] = _enum_converter(module.__dict__[member.name])
except KeyError:
pass # nothing to do; there is no enum
return mapping
def _enum_converter(enumcls):
def enum_converter(value):
if value == ' ':
return None
else:
return enumcls(value)
return enum_converter
|
mit
|
Python
|
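The commit above routes raw TSV fields through per-column converter functions. A minimal, self-contained sketch of the same `pd.read_csv(converters=...)` technique — the `Quality` enum and the inline TSV are made up for illustration, not part of pytus2000:

```python
import enum
import io
import pandas as pd

class Quality(enum.Enum):
    GOOD = '1'
    BAD = '2'

def quality_converter(value):
    # Mirror the library's behaviour: blanks become None, and unknown
    # codes are reported and dropped instead of aborting the whole read.
    if value == ' ':
        return None
    try:
        return Quality(value)
    except ValueError as ve:
        print(ve)
        return None

tsv = "id\tquality\n1\t1\n2\t \n3\t9\n"
frame = pd.read_csv(io.StringIO(tsv), delimiter='\t',
                    converters={'quality': quality_converter})
print(frame['quality'].tolist())  # [<Quality.GOOD: '1'>, None, None]
```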
fd421a4c5f7cdacdc98aa049b4650c9d1d62267a
|
Fix some issues with open.
|
rec/grit,rec/grit
|
grit/command/Open.py
|
grit/command/Open.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import platform
import random
from grit import Call
from grit import Git
from grit import GitRoot
from grit import Settings
from grit.String import startswith
HELP = """
grit open [filename]
Open the filename as a Github URL in the browser.
Selects the first file that starts with filename. If filename is missing,
opens the current directory in the browser.
"""
"""
What should we be able to open?
* The current directory.
* A file.
* A found file.
in
* our repo
* the upstream repo
* some other repo.
And:
* A pull request.
* the pull request for this branch, if any.
"""
SAFE = True
_OPEN_COMMANDS = {
'Darwin': 'open',
'Linux': 'xdg-open',
}
_URL = 'https://github.com/{user}/{project}/tree/{branch}/{path}'
def open_url(branch, path,
project=Settings.PROJECT,
user=Settings.USER):
path = os.path.relpath(path, GitRoot.ROOT)
u = _URL.format(branch=branch, path=path, project=project, user=user)
Call.call('%s %s' % (_OPEN_COMMANDS[platform.system()], u))
def open(filename=''):
    if platform.system() not in _OPEN_COMMANDS:
        raise ValueError("Can't open a URL for platform.system() = " + platform.system())
branch = Git.branch()
full_path = os.getcwd()
if filename:
path, f = os.path.split(filename)
full_path = os.path.join(full_path, path)
if not os.path.exists(full_path):
raise ValueError("Path %s doesn't exist." % full_path)
if f:
for p in os.listdir(full_path):
if startswith(p, f):
full_path = os.path.join(full_path, p)
break
else:
raise ValueError("Can't find file matching " + filename)
open_url(branch=Git.branch(), path=full_path)
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import platform
import random
from grit import Call
from grit import Git
from grit import GitRoot
from grit import Settings
from grit.String import startswith
HELP = """
grit open [filename]
Open the filename as a Github URL in the browser.
Selects the first file that starts with filename. If filename is missing,
opens the current directory in the browser.
"""
"""
What should we be able to open?
* The current directory.
* A file.
* A found file.
in
* our repo
* the upstream repo
* some other repo.
And:
* A pull request.
* the pull request for this branch, if any.
"""
SAFE = True
_OPEN_COMMANDS = {
'Darwin': 'open',
'Linux': 'xdg-open',
}
_URL = 'https://github.com/{user}/{project}/tree/{branch}/{path}'
def open_url(url):
Call.call('%s %s' % (_OPEN_COMMANDS[platform.system()], url))
def open(filename=''):
    if platform.system() not in _OPEN_COMMANDS:
        raise ValueError("Can't open a URL for platform.system() = " + platform.system())
branch = Git.branch()
full_path = os.getcwd()
if filename:
path, f = os.path.split(filename)
full_path = os.path.join(full_path, path)
if not os.path.exists(full_path):
raise ValueError("Path %s doesn't exist." % full_path)
if f:
for p in os.listdir(full_path):
if startswith(p, f):
full_path = os.path.join(full_path, p)
break
else:
raise ValueError("Can't find file matching " + filename)
url = _URL.format(
branch=Git.branch(),
path=os.path.relpath(full_path, GitRoot.ROOT),
project=Settings.PROJECT,
user=Settings.USER)
open_url(url)
|
artistic-2.0
|
Python
|
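The refactor above moves URL construction out of `open_url` into a single `str.format` call. The same pattern in isolation — the repository layout and values below are invented for the example:

```python
import os.path

_URL = 'https://github.com/{user}/{project}/tree/{branch}/{path}'

# Hypothetical checkout used only for this sketch.
root = '/home/alice/grit'
full_path = '/home/alice/grit/grit/command/Open.py'

url = _URL.format(
    branch='master',
    path=os.path.relpath(full_path, root),  # -> 'grit/command/Open.py'
    project='grit',
    user='rec')
print(url)
# https://github.com/rec/grit/tree/master/grit/command/Open.py
```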
dbde102d14632bbaef7d6319d0742ac2819d6e38
|
Implement the given spec.
|
m-lab/ooni-support,m-lab/ooni-support,hellais/ooni-support,hellais/ooni-support
|
mlab-ns-simulator/mlabsim/update.py
|
mlab-ns-simulator/mlabsim/update.py
|
"""
This approximates the mlab-ns slice information gathering. The actual
system uses nagios and we're not certain about the details. This much
simplified version is just a web URL anyone may PUT data into.
Warning: This doesn't have any security properties! We need a way to
prevent the addition of malicious entries.
"""
import logging
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
DBEntryNames = [
'city',
'country',
'fqdn',
'ip',
'port',
'site',
'tool_extra',
]
class UpdateResource (resource.Resource):
def __init__(self, db):
"""db is a dict which will be modified to map { fqdn -> other_details }"""
resource.Resource.__init__(self)
self._db = db
self._log = logging.getLogger(type(self).__name__)
def render_PUT(self, request):
body = request.content.read()
self._log.debug('Request body: %r', body)
try:
dbentry = json.loads(body)
except ValueError:
self._send_response(request, 400, 'invalid', 'Malformed JSON body.')
return NOT_DONE_YET
try:
fqdn = dbentry['fqdn']
except KeyError:
self._send_response(request, 400, 'invalid', "Missing 'fqdn' field.")
return NOT_DONE_YET
self._db[fqdn] = dbentry
self._send_response(request, 200, 'ok', 'Ok.')
return NOT_DONE_YET
def _send_response(self, request, code, status, message):
request.setResponseCode(code, status)
request.setHeader('content-type', 'text/plain')
request.write(message)
request.finish()
|
"""
This approximates the mlab-ns slice information gathering. The actual
system uses nagios and we're not certain about the details. This much
simplified version is just a web URL anyone may PUT data into.
Warning: This doesn't have any security properties! We need a way to
prevent the addition of malicious entries.
"""
import logging
import json
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
DBEntryNames = [
'city',
'country',
'fqdn',
'ip',
'port',
'site',
'tool_extra',
]
class UpdateResource (resource.Resource):
def __init__(self, db):
"""db is a dict which will be modified to map { fqdn -> other_details }"""
resource.Resource.__init__(self)
self._db = db
self._log = logging.getLogger(type(self).__name__)
def render_PUT(self, request):
body = request.content.read()
self._log.debug('Request body: %r', body)
try:
dbentry = json.loads(body)
except ValueError:
request.setResponseCode(400, 'invalid')
request.finish()
return NOT_DONE_YET
fqdn = dbentry['fqdn']
self._db[fqdn] = dbentry
request.setResponseCode(200, 'ok')
request.finish()
return NOT_DONE_YET
|
apache-2.0
|
Python
|
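The new `render_PUT` validates the body before touching the store: malformed JSON and a missing `fqdn` each get a 400 with a distinct message. That decision logic can be exercised without Twisted; a sketch with a helper name of our own invention:

```python
import json

def parse_db_entry(body):
    """Return (code, message, entry) the way render_PUT decides them."""
    try:
        dbentry = json.loads(body)
    except ValueError:
        return 400, 'Malformed JSON body.', None
    if 'fqdn' not in dbentry:
        return 400, "Missing 'fqdn' field.", None
    return 200, 'Ok.', dbentry

print(parse_db_entry('not json'))                   # (400, 'Malformed JSON body.', None)
print(parse_db_entry('{"ip": "1.2.3.4"}'))          # (400, "Missing 'fqdn' field.", None)
print(parse_db_entry('{"fqdn": "a.example.net"}'))  # (200, 'Ok.', {'fqdn': 'a.example.net'})
```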
3ab50fb563a89449af94af0d870c6a6153afdb98
|
reformat as pep8
|
CodeforChemnitz/BaustellenChemnitz
|
helper/listConcat.py
|
helper/listConcat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class listConcat:
lists = None
def add(self, listToAdd):
        if self.lists is None:
self.lists = [listToAdd]
return
for i, l in enumerate(self.lists):
if l[0] == listToAdd[-1]:
self.lists[i] = listToAdd[:-1] + l
return
elif l[-1] == listToAdd[0]:
self.lists[i] = l + listToAdd[1:]
return
elif l[0] == listToAdd[0]:
listToAdd.reverse()
self.lists[i] = listToAdd + l[1:]
return
elif l[-1] == listToAdd[-1]:
listToAdd.reverse()
self.lists[i] = l[:-1] + listToAdd
return
self.lists.append(listToAdd)
def get(self):
return self.lists
def testIt():
"""concats lists
>>> a = listConcat()
>>> a.get()
>>> a.add([1,2,3])
>>> a.get()
[[1, 2, 3]]
>>> a.add([-1,4,1])
>>> a.get()
[[-1, 4, 1, 2, 3]]
>>> a.add([3,5])
>>> a.get()
[[-1, 4, 1, 2, 3, 5]]
>>> a.add([2,5])
>>> a.get()
[[-1, 4, 1, 2, 3, 5, 2]]
>>> a.add([-1,7])
>>> a.get()
[[7, -1, 4, 1, 2, 3, 5, 2]]
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class listConcat:
lists = None
def add(self, listToAdd):
if self.lists == None:
self.lists = [listToAdd]
return
for i, l in enumerate(self.lists):
if l[0] == listToAdd[-1]:
self.lists[i] = listToAdd[:-1] + l
return
elif l[-1] == listToAdd[0]:
self.lists[i] = l + listToAdd[1:]
return
elif l[0] == listToAdd[0]:
listToAdd.reverse()
self.lists[i] = listToAdd + l[1:]
return
elif l[-1] == listToAdd[-1]:
listToAdd.reverse()
self.lists[i] = l[:-1] + listToAdd
return
self.lists.append(listToAdd)
def get(self):
return self.lists
def testIt():
"""concats lists
>>> a = listConcat()
>>> a.get()
>>> a.add([1,2,3])
>>> a.get()
[[1, 2, 3]]
>>> a.add([-1,4,1])
>>> a.get()
[[-1, 4, 1, 2, 3]]
>>> a.add([3,5])
>>> a.get()
[[-1, 4, 1, 2, 3, 5]]
>>> a.add([2,5])
>>> a.get()
[[-1, 4, 1, 2, 3, 5, 2]]
>>> a.add([-1,7])
>>> a.get()
[[7, -1, 4, 1, 2, 3, 5, 2]]
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
|
mit
|
Python
|
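`testIt` above carries all of its assertions inside the docstring; `doctest.testmod()` discovers and runs them. A tiny self-contained reminder of the mechanism:

```python
import doctest

def double(x):
    """Double a number.

    >>> double(2)
    4
    >>> double(-3)
    -6
    """
    return 2 * x

if __name__ == '__main__':
    # Silent on success; prints a diff for any failing example.
    doctest.testmod()
```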
e4be9429e050dae6b1c9e988fa3da3c3e9d1d417
|
Add bots root directory to parent.py
|
mvollmer/cockpit,mvollmer/cockpit,cockpit-project/cockpit,martinpitt/cockpit,martinpitt/cockpit,garrett/cockpit,garrett/cockpit,cockpit-project/cockpit,cockpit-project/cockpit,martinpitt/cockpit,mvollmer/cockpit,martinpitt/cockpit,garrett/cockpit,garrett/cockpit,mvollmer/cockpit,cockpit-project/cockpit,mvollmer/cockpit,cockpit-project/cockpit,garrett/cockpit,martinpitt/cockpit
|
test/common/parent.py
|
test/common/parent.py
|
import os
import sys
TEST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BOTS_DIR = os.path.join(os.path.dirname(TEST_DIR), "bots")
sys.path.append(os.path.join(BOTS_DIR)) # for lib
sys.path.append(os.path.join(BOTS_DIR, "machine"))
sys.path.append(os.path.join(TEST_DIR, "common"))
|
import os
import sys
TEST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BOTS_DIR = os.path.join(os.path.dirname(TEST_DIR), "bots")
sys.path.append(os.path.join(BOTS_DIR, "machine"))
sys.path.append(os.path.join(TEST_DIR, "common"))
|
lgpl-2.1
|
Python
|
826f23f0fc7eea4c72dcc26f637f3752bee51b47
|
Allow tests to be called from parent directory of "test"
|
kanzure/ctypesgen,kanzure/ctypesgen,novas0x2a/ctypesgen,davidjamesca/ctypesgen,kanzure/ctypesgen
|
test/ctypesgentest.py
|
test/ctypesgentest.py
|
import optparse, sys, StringIO
sys.path.append(".") # Allow tests to be called from parent directory with Python 2.6
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
import optparse, sys, StringIO
sys.path.append("..")
import ctypesgencore
"""ctypesgentest is a simple module for testing ctypesgen on various C constructs. It consists of a
single function, test(). test() takes a string that represents a C header file, along with some
keyword arguments representing options. It processes the header using ctypesgen and returns a tuple
containing the resulting module object and the output that ctypesgen produced."""
def test(header, **more_options):
assert isinstance(header, str)
file("temp.h","w").write(header)
options = ctypesgencore.options.get_default_options()
options.headers = ["temp.h"]
for opt in more_options:
setattr(options, opt, more_options[opt])
# Redirect output
sys.stdout = StringIO.StringIO()
# Step 1: Parse
descriptions=ctypesgencore.parser.parse(options.headers,options)
# Step 2: Process
ctypesgencore.processor.process(descriptions,options)
# Step 3: Print
ctypesgencore.printer.WrapperPrinter("temp.py",options,descriptions)
# Un-redirect output
output = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = sys.__stdout__
# Load the module we have just produced
module = __import__("temp")
return module, output
|
bsd-3-clause
|
Python
|
bd7a1f8fe5237efc0de9fd60ddc035cc4be620ca
|
Update path_helper.py
|
conversationai/unintended-ml-bias-analysis,conversationai/unintended-ml-bias-analysis
|
unintended_ml_bias/new_madlibber/path_helper.py
|
unintended_ml_bias/new_madlibber/path_helper.py
|
import os
class PathHelper(object):
def __init__(self, word_file, sentence_template_file, output_file):
if not os.path.exists(word_file):
raise IOError("Input word file '{}' does not exist!".format(word_file))
if not os.path.isfile(word_file):
raise IOError("Input word file '{}' is not a file!".format(word_file))
self.word_file = word_file
if not os.path.exists(sentence_template_file):
raise IOError("Input sentence template file '{}' does not exist!".format(
sentence_template_file))
if not os.path.isfile(sentence_template_file):
raise IOError("Input sentence template file '{}' is not a file!".format(
sentence_template_file))
self.sentence_template_file = sentence_template_file
if not os.path.basename(output_file):
raise IOError(
"Output file '{}' cannot be a directory.".format(output_file))
output_dirname = os.path.dirname(output_file)
        if output_dirname and not os.path.exists(output_dirname):  # '' means the current directory
print("Output directory '{}' does not exist...creating".format(
output_dirname))
os.makedirs(output_dirname)
self.output_file = output_file
|
import os
class PathHelper(object):
def __init__(self, word_file, sentence_template_file, output_file):
if not os.path.exists(word_file):
raise IOError("Input word file '{}' does not exist!".format(word_file))
if not os.path.isfile(word_file):
raise IOError("Input word file '{}' is not a file!".format(word_file))
self.word_file = word_file
if not os.path.exists(sentence_template_file):
raise IOError("Input sentence template file '{}' does not exist!".format(sentence_template_file))
if not os.path.isfile(sentence_template_file):
raise IOError("Input sentence template file '{}' is not a file!".format(sentence_template_file))
self.sentence_template_file = sentence_template_file
if os.path.basename(output_file) == '':
raise IOError("Output file '{}' cannot be a directory.".format(output_file))
output_dirname = os.path.dirname(output_file)
if not os.path.exists(output_dirname):
print("Output directory '{}' does not exist...creating".format(output_dirname))
os.makedirs(output_dirname)
self.output_file = output_file
|
apache-2.0
|
Python
|
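The pivotal check in `PathHelper` is that `os.path.basename` returns an empty string when the path ends in a separator, which is how a directory-like output path is detected:

```python
import os.path

print(repr(os.path.basename('out/results.csv')))  # 'results.csv'
print(repr(os.path.basename('out/')))             # ''  -> rejected as a directory
print(repr(os.path.basename('results.csv')))      # 'results.csv'
```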
3d4afd579bdd690c9fba94ee96e52257bf4d79d2
|
copy production procfile
|
battlemidget/juju-charm-huginn
|
reactive/huginn.py
|
reactive/huginn.py
|
from charms.reactive import (
hook,
when,
only_once,
is_state
)
import os.path as path
from charmhelpers.core import hookenv, host
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from shell import shell
# ./lib/nginxlib
import nginxlib
# ./lib/rubylib
from rubylib import ruby_dist_dir, bundle
# ./lib/huginnlib.py
import huginnlib
config = hookenv.config()
# HOOKS -----------------------------------------------------------------------
@hook('config-changed')
def config_changed():
if not is_state('nginx.available'):
return
host.service_restart('nginx')
hookenv.status_set('active', 'Ready')
# REACTORS --------------------------------------------------------------------
@when('nginx.available')
@only_once
def install_app():
""" Performs application installation
"""
hookenv.log('Installing Huginn', 'info')
# Configure NGINX vhost
nginxlib.configure_site('default', 'vhost.conf',
app_path=ruby_dist_dir())
# Update application
huginnlib.download_archive()
shell("mkdir -p %s/{log,tmp/pids,tmp/sockets}" % (ruby_dist_dir()))
shell("cp %(dir)s/config/unicorn.rb.example "
"%(dir)s/config/unicorn.rb" % {'dir': ruby_dist_dir()})
bundle("install --deployment --without development test")
procfile = path.join(hookenv.charm_dir(), 'templates/Procfile')
shell("cp %(procfile)s %(dir)s/Procfile" % {
'procfile': procfile,
'dir': ruby_dist_dir()
})
bundle("exec rake assets:precompile RAILS_ENV=production")
host.service_restart('nginx')
hookenv.status_set('active', 'Huginn is installed!')
@when('nginx.available', 'database.available')
def setup_mysql(mysql):
""" Mysql is available, update Huginn
"""
hookenv.status_set('maintenance', 'Huginn is connecting to MySQL!')
target = path.join(ruby_dist_dir(), '.env')
render(source='application.env',
target=target,
context=dict(db=mysql))
bundle("exec rake db:create RAILS_ENV=production")
bundle("exec rake db:migrate RAILS_ENV=production")
bundle("exec rake db:seed RAILS_ENV=production")
host.service_restart('nginx')
hookenv.status_set('active', 'Ready')
|
from charms.reactive import (
hook,
when,
only_once,
is_state
)
import os.path as path
from charmhelpers.core import hookenv, host
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from shell import shell
# ./lib/nginxlib
import nginxlib
# ./lib/rubylib
from rubylib import ruby_dist_dir, bundle
# ./lib/huginnlib.py
import huginnlib
config = hookenv.config()
# HOOKS -----------------------------------------------------------------------
@hook('config-changed')
def config_changed():
if not is_state('nginx.available'):
return
host.service_restart('nginx')
hookenv.status_set('active', 'Ready')
# REACTORS --------------------------------------------------------------------
@when('nginx.available')
@only_once
def install_app():
""" Performs application installation
"""
hookenv.log('Installing Huginn', 'info')
# Configure NGINX vhost
nginxlib.configure_site('default', 'vhost.conf',
app_path=ruby_dist_dir())
# Update application
huginnlib.download_archive()
shell("mkdir -p %s/{log,tmp/pids,tmp/sockets}" % (ruby_dist_dir()))
shell("cp %(dir)s/config/unicorn.rb.example "
"%(dir)s/config/unicorn.rb" % {'dir': ruby_dist_dir()})
bundle("install --deployment --without development test")
bundle("exec rake assets:precompile RAILS_ENV=production")
host.service_restart('nginx')
hookenv.status_set('active', 'Huginn is installed!')
@when('nginx.available', 'database.available')
def setup_mysql(mysql):
""" Mysql is available, update Huginn
"""
hookenv.status_set('maintenance', 'Huginn is connecting to MySQL!')
target = path.join(ruby_dist_dir(), '.env')
render(source='application.env',
target=target,
context=dict(db=mysql))
bundle("exec rake db:create RAILS_ENV=production")
bundle("exec rake db:migrate RAILS_ENV=production")
bundle("exec rake db:seed RAILS_ENV=production")
host.service_restart('nginx')
hookenv.status_set('active', 'Ready')
|
mit
|
Python
|
86216b39365a7877103dfe075bf8e08a8ce696d0
|
bump version
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
radar/__init__.py
|
radar/__init__.py
|
__version__ = '2.47.23'
|
__version__ = '2.47.22'
|
agpl-3.0
|
Python
|
3bfcc096acd5f3ed0cda2427bdc5177bd3e55dd7
|
bump version
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
radar/__init__.py
|
radar/__init__.py
|
__version__ = '2.46.26'
|
__version__ = '2.46.25'
|
agpl-3.0
|
Python
|
d94e862d5775ecedf49fb0e15820b4744573c24c
|
bump to 1.1.1
|
rubik/radon
|
radon/__init__.py
|
radon/__init__.py
|
'''This module contains the main() function, which is the entry point for the
command line interface.'''
__version__ = '1.1.1'
def main():
'''The entry point for Setuptools.'''
import sys
from radon.cli import program, log_error
if not sys.argv[1:]:
sys.argv.append('-h')
try:
program()
except Exception as e:
log_error(e)
if __name__ == '__main__':
main()
|
'''This module contains the main() function, which is the entry point for the
command line interface.'''
__version__ = '1.1'
def main():
'''The entry point for Setuptools.'''
import sys
from radon.cli import program, log_error
if not sys.argv[1:]:
sys.argv.append('-h')
try:
program()
except Exception as e:
log_error(e)
if __name__ == '__main__':
main()
|
mit
|
Python
|
370d58420c48ed5291fb3291a3f89449b2fb5230
|
Add description to update-production script
|
muzhack/musitechhub,muzhack/muzhack,muzhack/musitechhub,muzhack/muzhack,muzhack/muzhack,muzhack/musitechhub,muzhack/musitechhub,muzhack/muzhack
|
docker/update-production.py
|
docker/update-production.py
|
#!/usr/bin/env python3
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser(description='Update production server to latest Docker image.')
args = parser.parse_args()
def _info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
def _run_tutum(args):
try:
subprocess.check_call(['tutum',] + args, stdout=subprocess.PIPE)
except subprocess.CalledProcessError as err:
sys.stderr.write('{}\n'.format(err))
sys.exit(1)
_info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
_info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
_info('Redeploying service \'{}\'...'.format(link_to))
_run_tutum(['service', 'redeploy', '--sync', link_to,])
_info('Linking to service \'{}\'...'.format(link_to))
_run_tutum(['service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb.muzhack-staging',])
_info('Successfully switched production service to {}'.format(link_to))
|
#!/usr/bin/env python3
import argparse
import subprocess
import json
import sys
parser = argparse.ArgumentParser()
args = parser.parse_args()
def _info(msg):
sys.stdout.write('* {}\n'.format(msg))
sys.stdout.flush()
def _run_tutum(args):
try:
subprocess.check_call(['tutum',] + args, stdout=subprocess.PIPE)
except subprocess.CalledProcessError as err:
sys.stderr.write('{}\n'.format(err))
sys.exit(1)
_info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
_info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
link_to = 'muzhack-blue'
else:
assert linked_service == 'muzhack-blue'
link_to = 'muzhack-green'
_info('Redeploying service \'{}\'...'.format(link_to))
_run_tutum(['service', 'redeploy', '--sync', link_to,])
_info('Linking to service \'{}\'...'.format(link_to))
_run_tutum(['service', 'set', '--link-service', '{0}:{0}'.format(link_to),
'--sync', 'lb.muzhack-staging',])
_info('Successfully switched production service to {}'.format(link_to))
|
mit
|
Python
|
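The only change above is passing `description=` to `ArgumentParser`; the text surfaces in the generated `--help` output. Quick illustration:

```python
import argparse

parser = argparse.ArgumentParser(
    description='Update production server to latest Docker image.')
parser.print_help()
# usage: ... [-h]
#
# Update production server to latest Docker image.
#
# options:                     (heading varies by Python version)
#   -h, --help  show this help message and exit
```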
f4a8121bf38cdd8dea4a828316dc1c117c5ea0f3
|
update West Devon import script for parl.2017-06-08 (closes #902)
|
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations
|
polling_stations/apps/data_collection/management/commands/import_west_devon.py
|
polling_stations/apps/data_collection/management/commands/import_west_devon.py
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000047'
addresses_name = 'parl.2017-06-08/Version 2/merged.tsv'
stations_name = 'parl.2017-06-08/Version 2/merged.tsv'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000047'
addresses_name = 'Democracy_Club__04May2017 - west devon.TSV'
stations_name = 'Democracy_Club__04May2017 - west devon.TSV'
elections = [
'local.devon.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
|
bsd-3-clause
|
Python
|
7b1871b311aae41d699a41da7c6553b45a588313
|
purge wip about cassandra metrics (not-the-right-place)
|
SergioChan/Stream-Framework,turbolabtech/Stream-Framework,Anislav/Stream-Framework,izhan/Stream-Framework,turbolabtech/Stream-Framework,nikolay-saskovets/Feedly,smuser90/Stream-Framework,Anislav/Stream-Framework,nikolay-saskovets/Feedly,smuser90/Stream-Framework,turbolabtech/Stream-Framework,SergioChan/Stream-Framework,izhan/Stream-Framework,Anislav/Stream-Framework,nikolay-saskovets/Feedly,smuser90/Stream-Framework,SergioChan/Stream-Framework,turbolabtech/Stream-Framework,SergioChan/Stream-Framework,smuser90/Stream-Framework,izhan/Stream-Framework,nikolay-saskovets/Feedly,Anislav/Stream-Framework,izhan/Stream-Framework
|
feedly/storage/cassandra/models.py
|
feedly/storage/cassandra/models.py
|
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.exceptions import ValidationError
class VarInt(columns.Column):
db_type = 'varint'
def validate(self, value):
val = super(VarInt, self).validate(value)
if val is None:
return
try:
return long(val)
except (TypeError, ValueError):
raise ValidationError(
"{} can't be converted to integer value".format(value))
def to_python(self, value):
return self.validate(value)
def to_database(self, value):
return self.validate(value)
class BaseActivity(Model):
feed_id = columns.Ascii(primary_key=True, partition_key=True)
activity_id = VarInt(primary_key=True, clustering_order='desc')
class Activity(BaseActivity):
actor = columns.Integer(required=False)
extra_context = columns.Bytes(required=False)
object = columns.Integer(required=False)
target = columns.Integer(required=False)
time = columns.DateTime(required=False)
verb = columns.Integer(required=False)
class AggregatedActivity(BaseActivity):
activities = columns.Bytes(required=False)
created_at = columns.DateTime(required=False)
group = columns.Ascii(required=False)
updated_at = columns.DateTime(required=False)
|
from cqlengine import columns
from cqlengine.models import Model
from cqlengine.exceptions import ValidationError
class VarInt(columns.Column):
db_type = 'varint'
def validate(self, value):
val = super(VarInt, self).validate(value)
if val is None:
return
try:
return long(val)
except (TypeError, ValueError):
raise ValidationError(
"{} can't be converted to integer value".format(value))
def to_python(self, value):
return self.validate(value)
def to_database(self, value):
return self.validate(value)
class BaseActivity(Model):
feed_id = columns.Ascii(primary_key=True, partition_key=True)
activity_id = VarInt(primary_key=True, clustering_order='desc')
class Activity(BaseActivity):
actor = columns.Integer(required=False)
extra_context = columns.Bytes(required=False)
object = columns.Integer(required=False)
target = columns.Integer(required=False)
time = columns.DateTime(required=False)
verb = columns.Integer(required=False)
class AggregatedActivity(BaseActivity):
activities = columns.Bytes(required=False)
created_at = columns.DateTime(required=False)
group = columns.Ascii(required=False)
updated_at = columns.DateTime(required=False)
class FanoutStats(Model):
consumer_feed_id = columns.Ascii(primary_key=True, partition_key=True)
fanout_at = columns.DateTime(primary_key=True, partition_key=True)
date = columns.DateTime(primary_key=True, clustering_order='desc')
producer_feed_id = columns.Ascii()
activity_count = columns.Integer(default=1)
operation = columns.Ascii()
class ActivityStats(Model):
producer_feed_id = columns.Ascii(primary_key=True)
date = columns.DateTime(primary_key=True, partition_key=True)
activity_count = columns.Integer(default=1)
|
bsd-3-clause
|
Python
|
f7fc6556e3ef552ed570ad56db7dc3a19b3e75fd
|
Load config from site
|
Xi-Plus/Xiplus-Wikipedia-Bot,Xi-Plus/Xiplus-Wikipedia-Bot
|
fix-broken-double-redirect/edit.py
|
fix-broken-double-redirect/edit.py
|
# -*- coding: utf-8 -*-
import argparse
import json
import os
import re
os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot
from config import config_page_name # pylint: disable=E0611,W0614
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--check', action='store_true', dest='check')
parser.set_defaults(check=False)
args = parser.parse_args()
print(args)
os.environ['TZ'] = 'UTC'
site = pywikibot.Site()
site.login()
config_page = pywikibot.Page(site, config_page_name)
cfg = config_page.text
cfg = json.loads(cfg)
print(json.dumps(cfg, indent=4, ensure_ascii=False))
if not cfg['enable']:
exit('disabled\n')
cat = pywikibot.Page(site, cfg['csd_category'])
for sourcePage in site.categorymembers(cat):
print(sourcePage.title())
text = sourcePage.text
if '{{d|bot=Jimmy-bot|g15|' not in text:
print('\tnot g15')
continue
    m = re.search(r'#(?:重定向|REDIRECT) ?\[\[(.+?)]]', text, flags=re.I)
if m:
middlePage = pywikibot.Page(site, m.group(1))
logs = list(site.logevents(page=middlePage, total=1))
if len(logs) == 0:
print('\tno logs')
continue
log = logs[0]
if log.type() != 'move':
print('\trecent log not move')
continue
targetPage = log.target_page
print('\ttarget', targetPage.title())
text = re.sub(r'^{{d\|bot=Jimmy-bot\|g15\|.+\n', '', text)
        text = re.sub(r'(#(?:重定向|REDIRECT) ?\[\[).+?(]])', r'\g<1>{}\g<2>'.format(targetPage.title()), text)
pywikibot.showDiff(sourcePage.text, text)
summary = cfg['summary'].format(log.logid())
print(summary)
if args.check and input('Save?').lower() not in ['', 'y', 'yes']:
continue
sourcePage.text = text
sourcePage.save(summary=summary, minor=False, asynchronous=True)
else:
print('\tcannot get redirect target')
|
# -*- coding: utf-8 -*-
import argparse
import os
import re
os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--check', action='store_true', dest='check')
parser.set_defaults(check=False)
args = parser.parse_args()
print(args)
os.environ['TZ'] = 'UTC'
site = pywikibot.Site()
site.login()
cat = pywikibot.Page(site, 'Category:快速刪除候選')
for sourcePage in site.categorymembers(cat):
print(sourcePage.title())
text = sourcePage.text
if '{{d|bot=Jimmy-bot|g15|' not in text:
print('\tnot g15')
continue
    m = re.search(r'#(?:重定向|REDIRECT) ?\[\[(.+?)]]', text, flags=re.I)
if m:
middlePage = pywikibot.Page(site, m.group(1))
logs = list(site.logevents(page=middlePage, total=1))
if len(logs) == 0:
print('\tno logs')
continue
log = logs[0]
if log.type() != 'move':
print('\trecent log not move')
continue
targetPage = log.target_page
print('\ttarget', targetPage.title())
text = re.sub(r'^{{d\|bot=Jimmy-bot\|g15\|.+\n', '', text)
        text = re.sub(r'(#(?:重定向|REDIRECT) ?\[\[).+?(]])', r'\g<1>{}\g<2>'.format(targetPage.title()), text)
pywikibot.showDiff(sourcePage.text, text)
        summary = '-delete並修復損壞的雙重重定向，[[Special:Redirect/logid/{}|目標頁已被不留重定向移動]]，若認為重定向不合適請提交存廢討論'.format(log.logid())
print(summary)
if args.check and input('Save?').lower() not in ['', 'y', 'yes']:
continue
sourcePage.text = text
sourcePage.save(summary=summary, minor=False, asynchronous=True)
else:
print('\tcannot get redirect target')
|
mit
|
Python
|
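The commit above replaces hard-coded values with a JSON config stored on a wiki page. The loading pattern itself needs nothing from pywikibot; a sketch where the page text is a stand-in for `config_page.text`:

```python
import json

# Stand-in for the wiki page body fetched via pywikibot.
config_text = '''
{
    "enable": true,
    "csd_category": "Category:Candidates for speedy deletion",
    "summary": "fix broken double redirect, see logid {}"
}
'''

cfg = json.loads(config_text)
if not cfg['enable']:
    raise SystemExit('disabled\n')
print(cfg['summary'].format(12345))
```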
5ec45f9d8a7b4c54ecca0ad48f244c2ab0b8d532
|
remove no longer necessary package declaration
|
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
|
src/zeit/content/cp/blocks/tests.py
|
src/zeit/content/cp/blocks/tests.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.blocks
import zeit.content.cp.testing
def test_suite():
return zeit.content.cp.testing.FunctionalDocFileSuite(
'teaser.txt',
'xml.txt')
|
# -*- coding: utf-8 -*-
# Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.blocks
import zeit.content.cp.testing
def test_suite():
return zeit.content.cp.testing.FunctionalDocFileSuite(
'teaser.txt',
'xml.txt',
package=zeit.content.cp.blocks)
|
bsd-3-clause
|
Python
|
69eafa95df4bdeb143d40c321f0a312d06efff1f
|
Add __all__ to segmentation package
|
Midafi/scikit-image,pratapvardhan/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,robintw/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,newville/scikit-image,ajaybhat/scikit-image,emon10005/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,chintak/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,rjeli/scikit-image,rjeli/scikit-image,chintak/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,michaelpacer/scikit-image,paalge/scikit-image,robintw/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,Britefury/scikit-image,newville/scikit-image,juliusbierk/scikit-image,almarklein/scikit-image,SamHames/scikit-image,almarklein/scikit-image,emon10005/scikit-image,keflavich/scikit-image,youprofit/scikit-image,juliusbierk/scikit-image,bennlich/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,SamHames/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,GaZ3ll3/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,Midafi/scikit-image,chintak/scikit-image,jwiggins/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image
|
skimage/segmentation/__init__.py
|
skimage/segmentation/__init__.py
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
__all__ = ['random_walker',
'felzenszwalb',
'slic',
'quickshift',
'find_boundaries',
'visualize_boundaries',
'mark_boundaries',
'clear_border',
'join_segmentations',
'relabel_from_one']
|
from .random_walker_segmentation import random_walker
from ._felzenszwalb import felzenszwalb
from ._slic import slic
from ._quickshift import quickshift
from .boundaries import find_boundaries, visualize_boundaries, mark_boundaries
from ._clear_border import clear_border
from ._join import join_segmentations, relabel_from_one
|
bsd-3-clause
|
Python
|
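`__all__` controls what `from skimage.segmentation import *` pulls in; names left out remain importable explicitly but are not star-exported. A minimal sketch with a hypothetical module:

```python
# mypkg.py (hypothetical)
__all__ = ['public_fn']

def public_fn():
    return 'exported'

def _helper():
    return 'not star-exported'

# Elsewhere:
# from mypkg import *         # binds public_fn only
# from mypkg import _helper   # still allowed explicitly
```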
d59f3259875ffac49668ffb3ce34ca511385ebb7
|
Fix USE_X_FORWARDED_FOR for proxied environments
|
funkybob/django-rated
|
rated/settings.py
|
rated/settings.py
|
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)
|
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
|
bsd-3-clause
|
Python
|
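The new setting gates whether the rate limiter keys on `X-Forwarded-For` instead of the socket address. A hedged sketch of the conventional Django-side read — this is the usual pattern, not necessarily django-rated's exact code:

```python
def client_ip(request, use_x_forwarded_for):
    """Pick the client address, trusting the proxy header only when configured."""
    if use_x_forwarded_for:
        forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
        if forwarded:
            # The header is a comma-separated hop chain; the first entry is the client.
            return forwarded.split(',')[0].strip()
    return request.META.get('REMOTE_ADDR')
```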
b613ecdb3e543a4c39c5bd80359c81e504c1da33
|
add -mlong-calls to gcc compile parameter
|
gbcwbz/rt-thread,FlyLu/rt-thread,geniusgogo/rt-thread,AubrCool/rt-thread,wolfgangz2013/rt-thread,wolfgangz2013/rt-thread,RT-Thread/rt-thread,geniusgogo/rt-thread,nongxiaoming/rt-thread,AubrCool/rt-thread,zhaojuntao/rt-thread,geniusgogo/rt-thread,ArdaFu/rt-thread,wolfgangz2013/rt-thread,hezlog/rt-thread,zhaojuntao/rt-thread,FlyLu/rt-thread,nongxiaoming/rt-thread,gbcwbz/rt-thread,weety/rt-thread,geniusgogo/rt-thread,AubrCool/rt-thread,weiyuliang/rt-thread,RT-Thread/rt-thread,yongli3/rt-thread,yongli3/rt-thread,igou/rt-thread,igou/rt-thread,weety/rt-thread,armink/rt-thread,nongxiaoming/rt-thread,zhaojuntao/rt-thread,FlyLu/rt-thread,weiyuliang/rt-thread,weiyuliang/rt-thread,gbcwbz/rt-thread,AubrCool/rt-thread,geniusgogo/rt-thread,weety/rt-thread,yongli3/rt-thread,gbcwbz/rt-thread,igou/rt-thread,armink/rt-thread,hezlog/rt-thread,armink/rt-thread,hezlog/rt-thread,hezlog/rt-thread,weiyuliang/rt-thread,igou/rt-thread,hezlog/rt-thread,nongxiaoming/rt-thread,ArdaFu/rt-thread,weiyuliang/rt-thread,armink/rt-thread,AubrCool/rt-thread,zhaojuntao/rt-thread,yongli3/rt-thread,weety/rt-thread,ArdaFu/rt-thread,RT-Thread/rt-thread,weety/rt-thread,igou/rt-thread,hezlog/rt-thread,weety/rt-thread,zhaojuntao/rt-thread,nongxiaoming/rt-thread,nongxiaoming/rt-thread,FlyLu/rt-thread,gbcwbz/rt-thread,AubrCool/rt-thread,igou/rt-thread,RT-Thread/rt-thread,zhaojuntao/rt-thread,zhaojuntao/rt-thread,gbcwbz/rt-thread,yongli3/rt-thread,FlyLu/rt-thread,wolfgangz2013/rt-thread,ArdaFu/rt-thread,nongxiaoming/rt-thread,hezlog/rt-thread,AubrCool/rt-thread,wolfgangz2013/rt-thread,wolfgangz2013/rt-thread,RT-Thread/rt-thread,igou/rt-thread,gbcwbz/rt-thread,ArdaFu/rt-thread,RT-Thread/rt-thread,FlyLu/rt-thread,geniusgogo/rt-thread,weiyuliang/rt-thread,armink/rt-thread,yongli3/rt-thread,yongli3/rt-thread,armink/rt-thread,weiyuliang/rt-thread,wolfgangz2013/rt-thread,FlyLu/rt-thread,RT-Thread/rt-thread,weety/rt-thread,ArdaFu/rt-thread,ArdaFu/rt-thread,geniusgogo/rt-thread,armink/rt-thread
|
examples/module/rtconfig_lm3s.py
|
examples/module/rtconfig_lm3s.py
|
# bsp name
BSP = 'lm3s8962'
# toolchains
EXEC_PATH = 'C:/Program Files/CodeSourcery/Sourcery G++ Lite/bin'
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
TARGET_EXT = 'so'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=cortex-m3'
CFLAGS = DEVICE + ' -mthumb -mlong-calls -Dsourcerygxx -O0 -fPIC'
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -mthumb -Wl,-z,max-page-size=0x4 -shared -fPIC -e main -nostdlib'
CPATH = ''
LPATH = ''
|
# bsp name
BSP = 'lm3s8962'
# toolchains
EXEC_PATH = 'C:/Program Files/CodeSourcery/Sourcery G++ Lite/bin'
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
TARGET_EXT = 'so'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=cortex-m3'
CFLAGS = DEVICE + ' -mthumb -Dsourcerygxx -O0 -fPIC'
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -mthumb -Wl,-z,max-page-size=0x4 -shared -fPIC -e main -nostdlib'
CPATH = ''
LPATH = ''
|
apache-2.0
|
Python
|
ce869c128d728af4c296eb96ecae0db6f30996a7
|
Make brnn_ptb_test write checkpoints to temp directory
|
deepmind/sonnet,deepmind/sonnet
|
sonnet/examples/brnn_ptb_test.py
|
sonnet/examples/brnn_ptb_test.py
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for brnn_ptb."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
import string
from sonnet.examples import brnn_ptb
import tensorflow as tf
FLAGS = tf.flags.FLAGS
def _make_random_word():
return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase)
for _ in range(random.randint(1, 15)))
def _make_random_vocab():
# Make a limited vocab that all the sentences should be made out of, as the
# BRNN model builds a finite vocab internally.
return [_make_random_word() for _ in range(1000)]
def _make_sentence_with_vocab(vocab):
return ' '.join(vocab[random.randint(0, len(vocab) - 1)]
for _ in range(random.randint(1, 30)))
def _make_fake_corpus_with_vocab(vocab, corpus_size):
return '\n'.join(_make_sentence_with_vocab(vocab)
for _ in range(corpus_size))
class BrnnPtbTest(tf.test.TestCase):
def testScriptRunsWithFakeData(self):
# Make some small fake data in same format as real PTB.
tmp_dir = tf.test.get_temp_dir()
vocab = _make_random_vocab()
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.train.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 1000))
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.valid.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 100))
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.test.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 100))
# Make model small, only run for 1 epoch.
FLAGS.num_training_epochs = 1
FLAGS.hidden_size = 50
FLAGS.embedding_size = 50
FLAGS.data_path = tmp_dir
# Checkpoint to tmp directory so that test runs hermetically, and there is
# no possibility of reusing checkpoints from previous runs.
FLAGS.logbasedir = tmp_dir
# Do training, test, evaluation.
brnn_ptb.main(None)
if __name__ == '__main__':
tf.test.main()
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for brnn_ptb."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
import string
from sonnet.examples import brnn_ptb
import tensorflow as tf
FLAGS = tf.flags.FLAGS
def _make_random_word():
return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase)
for _ in range(random.randint(1, 15)))
def _make_random_vocab():
# Make a limited vocab that all the sentences should be made out of, as the
# BRNN model builds a finite vocab internally.
return [_make_random_word() for _ in range(1000)]
def _make_sentence_with_vocab(vocab):
return ' '.join(vocab[random.randint(0, len(vocab) - 1)]
for _ in range(random.randint(1, 30)))
def _make_fake_corpus_with_vocab(vocab, corpus_size):
return '\n'.join(_make_sentence_with_vocab(vocab)
for _ in range(corpus_size))
class BrnnPtbTest(tf.test.TestCase):
def testScriptRunsWithFakeData(self):
# Make some small fake data in same format as real PTB.
tmp_dir = tf.test.get_temp_dir()
vocab = _make_random_vocab()
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.train.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 1000))
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.valid.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 100))
with tf.gfile.GFile(os.path.join(tmp_dir, 'ptb.test.txt'), 'w') as f:
f.write(_make_fake_corpus_with_vocab(vocab, 100))
# Make model small, only run for 1 epoch.
FLAGS.num_training_epochs = 1
FLAGS.hidden_size = 50
FLAGS.embedding_size = 50
FLAGS.data_path = tmp_dir
# Do training, test, evaluation.
brnn_ptb.main(None)
if __name__ == '__main__':
tf.test.main()
|
apache-2.0
|
Python
|
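Pointing `logbasedir` at the test's temp directory keeps each run hermetic: no checkpoint from a previous run can be picked up. The same idea with only the standard library, independent of TensorFlow:

```python
import os
import tempfile

# Each run gets a fresh directory, so no state leaks between runs.
log_dir = tempfile.mkdtemp(prefix='brnn_ptb_test_')
checkpoint = os.path.join(log_dir, 'model.ckpt')
with open(checkpoint, 'w') as f:
    f.write('fake checkpoint contents')
print('wrote', checkpoint)
```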
c2a3443bd129b51df82826806829d50d6c01ee69
|
remove password
|
iamalbert/chinese-segmenter,iamalbert/chinese-segmenter,iamalbert/chinese-segmenter
|
demo.py
|
demo.py
|
import chineseseg
string = "蘇迪勒颱風造成山石崩塌，供應台北市用水的南勢溪挾帶大量泥沙，原水濁度一度飆高。"
ckip = chineseseg.Ckip("myaccount", "mypassword")
stanford = chineseseg.stanford("/home/wlzhuang/stanford-segmenter-2015-04-20/stanford-segmenter-3.5.2.jar", debug=True)
print( "stanford:", stanford.segment(string) )
print( "ckip:", ckip.segment(string) )
|
import chineseseg
string = "蘇迪勒颱風造成山石崩塌，供應台北市用水的南勢溪挾帶大量泥沙，原水濁度一度飆高。"
ckip = chineseseg.Ckip("wlzhuang", "xxxxaaaackip")
stanford = chineseseg.stanford("/home/wlzhuang/stanford-segmenter-2015-04-20/stanford-segmenter-3.5.2.jar", debug=True)
print( "stanford:", stanford.segment(string) )
print( "ckip:", ckip.segment(string) )
|
mit
|
Python
|
c1ae43fd33cd0f8eb3e270907a8ed7e728d1e268
|
Add captured_at timestamp to POST payload
|
nicolas-fricke/keypost
|
server.py
|
server.py
|
import evdev
import requests
import json
import datetime
import yaml
def main():
config = load_config()
dev = evdev.InputDevice(config['device_path'])
output_line('Initialized - Capturing device: ' + str(dev))
for event in dev.read_loop():
if event.type == evdev.ecodes.EV_KEY:
output_line(event)
payload = build_payload(event)
output_line('Sending ' + str(payload) + ' to ' + config['post_url'])
response = requests.post(config['post_url'], json.dumps(payload))
output_line(response)
def build_payload(event):
event = evdev.categorize(event)
return {
'code': event.scancode,
'key': event.keycode[0] if type(event.keycode) == list else event.keycode,
'state': {0: 'UP', 1: 'DOWN', 2: 'HOLD'}[event.keystate],
'captured_at': datetime.datetime.fromtimestamp(event.event.timestamp()).isoformat()
}
def load_config():
with open('config.yml', 'r') as f:
return yaml.safe_load(f.read())
def timestamp_s():
return '[' + str(datetime.datetime.now()) + ']'
def output_line(string):
print(timestamp_s() + ' ' + str(string))
if __name__ == '__main__':
main()
|
import evdev
import requests
import json
import datetime
import yaml
def main():
config = load_config()
dev = evdev.InputDevice(config['device_path'])
output_line('Initialized - Capturing device: ' + str(dev))
for event in dev.read_loop():
if event.type == evdev.ecodes.EV_KEY:
event = evdev.categorize(event)
output_line(event)
payload = build_payload(event)
output_line('Sending ' + str(payload) + ' to ' + config['post_url'])
response = requests.post(config['post_url'], json.dumps(payload))
output_line(response)
def build_payload(event):
return {
'code': event.scancode,
'key': event.keycode[0] if type(event.keycode) == list else event.keycode,
'state': {0: 'UP', 1: 'DOWN', 2: 'HOLD'}[event.keystate]
}
def load_config():
with open('config.yml', 'r') as f:
return yaml.safe_load(f.read())
def timestamp_s():
return '[' + str(datetime.datetime.now()) + ']'
def output_line(string):
print(timestamp_s() + ' ' + str(string))
if __name__ == '__main__':
main()
|
mit
|
Python
|
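The added `captured_at` field converts the evdev event's epoch timestamp to ISO 8601. The conversion in isolation, with a stand-in value for `event.event.timestamp()`:

```python
import datetime

epoch_seconds = 1700000000.123456  # stand-in for event.event.timestamp()
captured_at = datetime.datetime.fromtimestamp(epoch_seconds).isoformat()
print(captured_at)  # e.g. '2023-11-14T22:13:20.123456' (local time zone)
```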
13b6e289f3ced59068d91dff2b2ef12a7805fabe
|
Create test definitions.
|
pyohei/cronquot,pyohei/cronquot
|
test/test_cronquot.py
|
test/test_cronquot.py
|
import unittest
import os
from cronquot.cronquot import has_directory
class CronquotTest(unittest.TestCase):
def test_has_directory(self):
sample_dir = os.path.join(
os.path.dirname(__file__), 'crontab')
self.assertTrue(has_directory(sample_dir))
def test_parse_command(self):
pass
def test_is_cron_script(self):
pass
def test_normalize_cron_script(self):
pass
def test_has_cosistency_in_result(self):
pass
def test_simple_cron_pattern(self):
pass
if __name__ == '__main__':
    unittest.main()
|
import unittest
import os
from cronquot.cronquot import has_directory
class CronquotTest(unittest.TestCase):
def test_has_directory(self):
sample_dir = os.path.join(
os.path.dirname(__file__), 'crontab')
self.assertTrue(has_directory(sample_dir))
if __name__ == '__main__':
    unittest.main()
|
mit
|
Python
|
5e4d3c0b28104c1e98ed3e426dab9fc5d4d5a960
|
Add more comments to loadfail test
|
3ptscience/git-project,aranzgeo/git-project
|
test/test_loadfail.py
|
test/test_loadfail.py
|
#!bin/env python
import subprocess
import os.path
import unittest, re
class TestSaveLoad(unittest.TestCase):
@classmethod
def setUpClass(self):
# ensure we start with a clean slate, just in case
subprocess.call('rm -rf remote local 2>> /dev/null', shell=True)
# Initialize "remote" repositories
subprocess.call('mkdir remote; mkdir local', shell=True)
subprocess.call('cd remote; mkdir parent; cd parent; git init --bare', shell=True)
subprocess.call('cd remote; mkdir child; cd child; git init --bare', shell=True)
# Initialize "local" repositories
subprocess.call('cd local; git clone ../remote/parent', shell=True)
subprocess.call('cd local; git clone ../remote/child', shell=True)
# Add a .gitproj to the parent repo, and make child a subrepo of parent
subprocess.call('cd local/parent; echo "version: 0.1.0" >> .gitproj', shell=True)
subprocess.call('cd local/parent; echo "repos:" >> .gitproj', shell=True)
subprocess.call('cd local/parent; echo "\tc child ../../remote/child" >> .gitproj', shell=True)
subprocess.call('cd local/parent; git add .gitproj; git commit -m "Initial Commit"; git push -u origin master', shell=True)
def test_init(self):
# Initialize git-project (clones child into parent)
subprocess.call('cd local/parent; git project init', shell=True)
subprocess.call('cd local/parent; git add .gitignore; git commit -m ".gitignore"; git push', shell=True)
# Ensure child was cloned properly
output = subprocess.call('test -d local/parent/child;', shell=True)
self.assertEqual(output, 0)
# Ensure child's origin is set correctly
output = subprocess.check_output('cd local/parent/child; git remote show origin | grep Fetch | grep remote/child | wc -l', shell=True)
self.assertEqual(output.strip(), '1')
# Add a commit to the child and update parent's .gitproj
subprocess.call('cd local/parent/child; echo "Asdf" > test.txt; git add test.txt; git commit -m "Initial Commit"; git push', shell=True)
subprocess.call('cd local/parent; git project save -f', shell=True)
subprocess.call('cd local/parent; git add .gitproj; git commit -m "Save Sub-Repository State"', shell=True)
# Change the .gitproj so it is invalid
subprocess.call('cd local/parent; sed \$d .gitproj > .gitproj2; echo " c master nonexistantcommit" >> .gitproj2', shell=True)
# Ensure loading the invalid .gitproj returns a non-zero error code
subprocess.call('cd local/parent; mv .gitproj2 .gitproj', shell=True)
res = subprocess.call('cd local/parent; git project load', shell=True)
self.assertEqual(res, 1)
@classmethod
def tearDownClass(self):
# Remove remote and local repos
subprocess.call('rm -rf remote local', shell=True)
if __name__ == '__main__':
unittest.main()
|
#!bin/env python
import subprocess
import os.path
import unittest, re
class TestSaveLoad(unittest.TestCase):
@classmethod
def setUpClass(self):
subprocess.call('rm -rf remote local 2>> /dev/null', shell=True)
subprocess.call('mkdir remote; mkdir local', shell=True)
subprocess.call('cd remote; mkdir parent; cd parent; git init --bare', shell=True)
subprocess.call('cd remote; mkdir child; cd child; git init --bare', shell=True)
subprocess.call('cd local; git clone ../remote/parent', shell=True)
subprocess.call('cd local; git clone ../remote/child', shell=True)
subprocess.call('cd local/parent; echo "version: 0.1.0" >> .gitproj', shell=True)
subprocess.call('cd local/parent; echo "repos:" >> .gitproj', shell=True)
subprocess.call('cd local/parent; echo "\tc child ../../remote/child" >> .gitproj', shell=True)
subprocess.call('cd local/parent; git add .gitproj; git commit -m "Initial Commit"; git push -u origin master', shell=True)
def test_init(self):
subprocess.call('cd local/parent; git project init', shell=True)
subprocess.call('cd local/parent; git add .gitignore; git commit -m ".gitignore"; git push', shell=True)
output = subprocess.call('test -d local/parent/child;', shell=True)
self.assertEqual(output, 0)
output = subprocess.check_output('cd local/parent/child; git remote show origin | grep Fetch | grep remote/child | wc -l', shell=True)
self.assertEqual(output.strip(), '1')
subprocess.call('cd local/parent/child; echo "Asdf" > test.txt; git add test.txt; git commit -m "Initial Commit"; git push', shell=True)
subprocess.call('cd local/parent; git project save -f', shell=True)
subprocess.call('cd local/parent; git add .gitproj; git commit -m "Save Sub-Repository State"', shell=True)
subprocess.call('cd local/parent; sed \$d .gitproj > .gitproj2; echo " c master nonexistantcommit" >> .gitproj2', shell=True)
subprocess.call('cd local/parent; mv .gitproj2 .gitproj', shell=True)
res = subprocess.call('cd local/parent; git project load', shell=True)
self.assertEqual(res, 1)
@classmethod
def tearDownClass(self):
subprocess.call('rm -rf remote local', shell=True)
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
38b078eb13a42bf65d1f55141a69fcd8819a1f00
|
add models
|
covrom/django_sample,covrom/django_sample
|
mysite/polls/models.py
|
mysite/polls/models.py
|
from django.db import models
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
class Choice(models.Model):
question = models.ForeignKey(Question, on_delete=models.CASCADE)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
|
from django.db import models
# Create your models here.
|
mit
|
Python
|
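With the models defined, the usual ORM round trip looks like this — a sketch assuming a configured Django project with the polls app installed and migrated:

```python
from django.utils import timezone
from polls.models import Question, Choice

q = Question.objects.create(question_text="What's new?", pub_date=timezone.now())
Choice.objects.create(question=q, choice_text='Not much', votes=0)
Choice.objects.create(question=q, choice_text='The sky', votes=0)

# on_delete=models.CASCADE: deleting the question removes its choices too.
print(q.choice_set.count())  # 2
```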
29835c78d1ddfd934aa552f4c68117a32379c5ea
|
add lmsd.sqlite
|
jerkos/mzOS
|
mzos/tests/__init__.py
|
mzos/tests/__init__.py
|
from __future__ import absolute_import
import zipfile
import os.path as op
import os
import shutil
import logging
class WithHMDBMixin(object):
@staticmethod
def unzip_hmdb():
"""
Utility to unzip hmdb for test purposes
:param self:
:return:
"""
z = zipfile.ZipFile(op.abspath('mzos/ressources/hmdb.zip'))
hmdb_path = z.extract('hmdb.sqlite')
logging.info("Moving extracted archive...")
shutil.move(hmdb_path, 'mzos/ressources/hmdb.sqlite')
logging.info("Done")
@staticmethod
def remove_hmdb():
logging.info("removing 'hmdb.sqlite'...")
try:
os.remove(op.abspath('mzos/ressources/hmdb.sqlite'))
logging.info("Done")
except OSError:
logging.error("Unable to remove sqlite file or file does not exist")
|
from __future__ import absolute_import
import zipfile
import os.path as op
import os
import shutil
import logging
class WithHMDBMixin(object):
@staticmethod
def unzip_hmdb():
"""
Utility to unzip hmdb for test purposes
:param self:
:return:
"""
abspath = op.abspath('mzos/ressources/hmdb.zip')
print abspath
z = zipfile.ZipFile(abspath)
hmdb_path = z.extract('hmdb.sqlite')
logging.info("Moving extracted archive...")
shutil.move(hmdb_path, abspath)
logging.info("Done")
@staticmethod
def remove_hmdb():
logging.info("removing 'hmdb.sqlite'...")
try:
os.remove(op.abspath('mzos/ressources/hmdb.sqlite'))
logging.info("Done")
except OSError:
logging.error("Unable to remove sqlite file or file does not exist")
|
mit
|
Python
|
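A minimal sketch of the bug fixed above, with hypothetical file names: ZipFile.extract() returns the path of the extracted member, and the old code moved that file onto the archive's own path instead of the intended .sqlite destination.

import os
import shutil
import zipfile

# Build a throwaway archive so the example is self-contained.
with zipfile.ZipFile('demo.zip', 'w') as z:
    z.writestr('data.sqlite', 'payload')

with zipfile.ZipFile('demo.zip') as z:
    extracted = z.extract('data.sqlite')  # returns the path it extracted to

# Correct: move the member to its destination, not onto the zip itself.
shutil.move(extracted, os.path.join(os.getcwd(), 'data-final.sqlite'))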
3ffd045be41d226bcf1b533c3f5abf95a932eac0
|
Remove duplicate test
|
J-CPelletier/webcomix,J-CPelletier/webcomix,J-CPelletier/WebComicToCBZ
|
webcomix/scrapy/tests/test_crawler_worker.py
|
webcomix/scrapy/tests/test_crawler_worker.py
|
import pytest
from webcomix.exceptions import NextLinkNotFound
from webcomix.scrapy.crawler_worker import CrawlerWorker
from webcomix.scrapy.verification.verification_spider import VerificationSpider
from webcomix.tests.fake_websites.fixture import one_webpage_uri
def test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):
settings = {"LOG_ENABLED": False}
worker = CrawlerWorker(
settings,
False,
VerificationSpider,
start_urls=[one_webpage_uri],
next_page_selector="//div/@href",
comic_image_selector="//img/@src",
number_of_pages_to_check=2,
)
with pytest.raises(NextLinkNotFound):
worker.start()
|
import pytest
from webcomix.exceptions import NextLinkNotFound
from webcomix.scrapy.crawler_worker import CrawlerWorker
from webcomix.scrapy.verification.verification_spider import VerificationSpider
from webcomix.tests.fake_websites.fixture import one_webpage_uri
def test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):
settings = {"LOG_ENABLED": False}
worker = CrawlerWorker(
settings,
False,
VerificationSpider,
start_urls=[one_webpage_uri],
next_page_selector="//div/@href",
comic_image_selector="//img/@src",
number_of_pages_to_check=2,
)
with pytest.raises(NextLinkNotFound):
worker.start()
def test_spider_raising_error_gets_raised_by_crawler_worker(one_webpage_uri):
settings = {"LOG_ENABLED": False}
worker = CrawlerWorker(
settings,
False,
VerificationSpider,
start_urls=[one_webpage_uri],
next_page_selector="//div/@href",
comic_image_selector="//img/@src",
number_of_pages_to_check=2,
)
with pytest.raises(NextLinkNotFound):
worker.start()
|
mit
|
Python
|
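Why the duplicate test was more than noise: Python binds module-level names in order, so the second def silently rebinds the name and a test runner collects only the last definition. A minimal sketch:

def test_example():
    assert 1 + 1 == 2

def test_example():  # rebinds the name; the first function is never collected
    assert 2 + 2 == 4

print(test_example)  # only one function object remains bound to the name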
ceb7b806c838a12d3447d0fd9bccc5aae49832d5
|
Use a new session so that server will not receive signals
|
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
|
garage/multiprocessing/__init__.py
|
garage/multiprocessing/__init__.py
|
__all__ = [
'RpcConnectionError',
'RpcError',
'python',
]
import contextlib
import logging
import os
import os.path
import random
import shutil
import subprocess
import tempfile
import time
import garage.multiprocessing.server
from garage.multiprocessing.client import Connector
from garage.multiprocessing.client import RpcConnectionError
from garage.multiprocessing.client import RpcError
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
@contextlib.contextmanager
def python(executable='python2', protocol=2, authkey=None):
"""Start a server and return a Connector object
(default to python2).
"""
authkey = authkey or str(random.randint(1, 1e8))
with create_socket() as addr, start_server(executable, addr, authkey):
connector = Connector(addr, protocol, authkey)
try:
yield connector
finally:
connector.shutdown()
@contextlib.contextmanager
def create_socket():
tempdir = tempfile.mkdtemp()
try:
socket_path = tempfile.mktemp(dir=tempdir)
LOG.info('socket path %s', socket_path)
yield socket_path
finally:
LOG.info('remove socket path %s', socket_path)
shutil.rmtree(tempdir)
@contextlib.contextmanager
def start_server(executable, address, authkey):
script_path = garage.multiprocessing.server.__file__
args = [executable, script_path, '--listen-sock', address]
if LOG.isEnabledFor(logging.INFO):
args.append('-v')
env = dict(os.environ)
env['AUTHKEY'] = authkey
server_proc = subprocess.Popen(args, start_new_session=True, env=env)
try:
wait_file_creation(address, timeout=3)
yield server_proc
finally:
if server_proc.wait() != 0:
LOG.warning('server returns %d', server_proc.returncode)
def wait_file_creation(path, timeout):
end_time = time.time() + timeout
while not os.path.exists(path):
time.sleep(0.1)
if end_time < time.time():
raise Exception('timeout')
|
__all__ = [
'RpcConnectionError',
'RpcError',
'python',
]
import contextlib
import logging
import os
import os.path
import random
import shutil
import subprocess
import tempfile
import time
import garage.multiprocessing.server
from garage.multiprocessing.client import Connector
from garage.multiprocessing.client import RpcConnectionError
from garage.multiprocessing.client import RpcError
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
@contextlib.contextmanager
def python(executable='python2', protocol=2, authkey=None):
"""Start a server and return a Connector object
(default to python2).
"""
authkey = authkey or str(random.randint(1, 1e8))
with create_socket() as addr, start_server(executable, addr, authkey):
connector = Connector(addr, protocol, authkey)
try:
yield connector
finally:
connector.shutdown()
@contextlib.contextmanager
def create_socket():
tempdir = tempfile.mkdtemp()
try:
socket_path = tempfile.mktemp(dir=tempdir)
LOG.info('socket path %s', socket_path)
yield socket_path
finally:
LOG.info('remove socket path %s', socket_path)
shutil.rmtree(tempdir)
@contextlib.contextmanager
def start_server(executable, address, authkey):
script_path = garage.multiprocessing.server.__file__
args = [executable, script_path, '--listen-sock', address]
if LOG.isEnabledFor(logging.INFO):
args.append('-v')
env = dict(os.environ)
env['AUTHKEY'] = authkey
server_proc = subprocess.Popen(args, env=env)
try:
wait_file_creation(address, timeout=3)
yield server_proc
finally:
if server_proc.wait() != 0:
LOG.warning('server returns %d', server_proc.returncode)
def wait_file_creation(path, timeout):
end_time = time.time() + timeout
while not os.path.exists(path):
time.sleep(0.1)
if end_time < time.time():
raise Exception('timeout')
|
mit
|
Python
|
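What start_new_session=True buys here, sketched minimally (POSIX only): the child calls setsid(), so it leads its own session and process group, and terminal-generated signals such as SIGINT (Ctrl-C) aimed at the parent's foreground group no longer reach the server.

import os
import subprocess

proc = subprocess.Popen(['sleep', '5'], start_new_session=True)
# The child's process group differs from ours, so our group's signals miss it.
print(os.getpgid(proc.pid) != os.getpgid(0))  # True
proc.terminate()
proc.wait()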
05d2421668e663bf9e98ec51ec1d8977ffe8c1b3
|
Add static folder
|
pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker,pac-club-2017/instant-grade-checker
|
server.py
|
server.py
|
import os
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
from igc.util import cache
app = Flask(__name__, static_url_path='html')
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('database_uri', 'sqlite:///./sqllite.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('html', path)
if __name__ == '__main__':
thread = cache.CacheThread()
thread.start()
app.run(debug=True, port=5000)
# http_server = WSGIServer(('', 5000), app)
# http_server.serve_forever()
|
import os
from flask import Flask
from flask import send_from_directory
from flask_cors import CORS
from igc.controller.controller_register import register_controllers
from igc.util import cache
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('database_uri', 'sqlite:///./sqllite.db')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
register_controllers(app)
@app.route("/")
def index():
return app.send_static_file('index.html')
@app.route("/<path:path>")
def send_static(path):
return send_from_directory('html', path)
if __name__ == '__main__':
thread = cache.CacheThread()
thread.start()
app.run(debug=True, port=5000)
# http_server = WSGIServer(('', 5000), app)
# http_server.serve_forever()
|
agpl-3.0
|
Python
|
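A hedged note on the Flask arguments above: static_folder names the on-disk directory to serve, while static_url_path sets the URL prefix it is mounted at. A sketch of the distinction with hypothetical names:

from flask import Flask

# Serve files from ./html under the /assets URL prefix.
app = Flask(__name__, static_folder='html', static_url_path='/assets')
# GET /assets/index.html  ->  ./html/index.html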
d3f8922394ca2e18d624f1d542f2fc13a18475d3
|
Make sorting links reset pagination
|
hartwork/wnpp.debian.net,hartwork/wnpp.debian.net,hartwork/wnpp.debian.net
|
wnpp_debian_net/templatetags/sorting_urls.py
|
wnpp_debian_net/templatetags/sorting_urls.py
|
# Copyright (C) 2021 Sebastian Pipping <[email protected]>
# Licensed under GNU Affero GPL v3 or later
from django import template
from ..url_tools import url_with_query
register = template.Library()
INTERNAL_DIRECTION_PREFIX_ASCENDING = ''
INTERNAL_DIRECTION_PREFIX_DESCENDING = '-'
EXTERNAL_DIRECTION_SUFFIX_ASCENDING = ';asc'
EXTERNAL_DIRECTION_SUFFIX_DESCENDING = ';desc'
_OPPOSITE_INTERNAL_PREFIX = {
INTERNAL_DIRECTION_PREFIX_ASCENDING: INTERNAL_DIRECTION_PREFIX_DESCENDING,
INTERNAL_DIRECTION_PREFIX_DESCENDING: INTERNAL_DIRECTION_PREFIX_ASCENDING,
}
_EXTERNAL_SUFFIX_FOR = {
INTERNAL_DIRECTION_PREFIX_ASCENDING: EXTERNAL_DIRECTION_SUFFIX_ASCENDING,
INTERNAL_DIRECTION_PREFIX_DESCENDING: EXTERNAL_DIRECTION_SUFFIX_DESCENDING,
}
def parse_sort_param(sort_param) -> tuple[str, str]:
split_sort_param = sort_param.split(';')
if len(split_sort_param) == 2 and split_sort_param[1] == 'desc':
order = INTERNAL_DIRECTION_PREFIX_DESCENDING
else:
order = INTERNAL_DIRECTION_PREFIX_ASCENDING
return split_sort_param[0], order
def combine_sort_param(column, internal_direction_prefix):
return column + _EXTERNAL_SUFFIX_FOR[internal_direction_prefix]
@register.simple_tag(takes_context=True)
def self_url_with_sorting_for(context, future_column):
"""
Takes the current page URL and adjusts the "sort=[..]" part
in the query parameters to sort for a specific column.
If the column is the same as the current one,
direction is flipped: from ascending to descending and back.
"""
url = context['request'].get_full_path()
current_column, internal_direction_prefix = parse_sort_param(context['sort'])
if future_column == current_column:
internal_direction_prefix = _OPPOSITE_INTERNAL_PREFIX[internal_direction_prefix]
future_sort = combine_sort_param(future_column, internal_direction_prefix)
return url_with_query(url, sort=future_sort, page=1)
|
# Copyright (C) 2021 Sebastian Pipping <[email protected]>
# Licensed under GNU Affero GPL v3 or later
from django import template
from ..url_tools import url_with_query
register = template.Library()
INTERNAL_DIRECTION_PREFIX_ASCENDING = ''
INTERNAL_DIRECTION_PREFIX_DESCENDING = '-'
EXTERNAL_DIRECTION_SUFFIX_ASCENDING = ';asc'
EXTERNAL_DIRECTION_SUFFIX_DESCENDING = ';desc'
_OPPOSITE_INTERNAL_PREFIX = {
INTERNAL_DIRECTION_PREFIX_ASCENDING: INTERNAL_DIRECTION_PREFIX_DESCENDING,
INTERNAL_DIRECTION_PREFIX_DESCENDING: INTERNAL_DIRECTION_PREFIX_ASCENDING,
}
_EXTERNAL_SUFFIX_FOR = {
INTERNAL_DIRECTION_PREFIX_ASCENDING: EXTERNAL_DIRECTION_SUFFIX_ASCENDING,
INTERNAL_DIRECTION_PREFIX_DESCENDING: EXTERNAL_DIRECTION_SUFFIX_DESCENDING,
}
def parse_sort_param(sort_param) -> tuple[str, str]:
split_sort_param = sort_param.split(';')
if len(split_sort_param) == 2 and split_sort_param[1] == 'desc':
order = INTERNAL_DIRECTION_PREFIX_DESCENDING
else:
order = INTERNAL_DIRECTION_PREFIX_ASCENDING
return split_sort_param[0], order
def combine_sort_param(column, internal_direction_prefix):
return column + _EXTERNAL_SUFFIX_FOR[internal_direction_prefix]
@register.simple_tag(takes_context=True)
def self_url_with_sorting_for(context, future_column):
"""
Takes the current page URL and adjusts the "sort=[..]" part
in the query parameters to sort for a specific column.
If the column is the same as the current one,
direction is flipped: from ascending to descending and back.
"""
url = context['request'].get_full_path()
current_column, internal_direction_prefix = parse_sort_param(context['sort'])
if future_column == current_column:
internal_direction_prefix = _OPPOSITE_INTERNAL_PREFIX[internal_direction_prefix]
future_sort = combine_sort_param(future_column, internal_direction_prefix)
return url_with_query(url, sort=future_sort)
|
agpl-3.0
|
Python
|
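The fix passes page=1 alongside the new sort value so flipping a column's sort order also rewinds pagination. url_with_query is project-internal; a minimal standard-library stand-in, for illustration only:

from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

def url_with_query(url, **params):
    """Return url with the given query parameters replaced or added."""
    parts = urlparse(url)
    query = {k: v[-1] for k, v in parse_qs(parts.query).items()}
    query.update({k: str(v) for k, v in params.items()})
    return urlunparse(parts._replace(query=urlencode(query)))

print(url_with_query('/?sort=project;asc&page=3', sort='type;desc', page=1))
# /?sort=type%3Bdesc&page=1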
300e1461174107f1c2f8523ce105739d42d71803
|
Write EMAIL_HOST to settings only if specified
|
brutasse-archive/fab-bundle,linovia/fab-bundle,linovia/fab-bundle
|
fab_bundle/templates/settings.py
|
fab_bundle/templates/settings.py
|
from {{ base_settings }} import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = ({% for admin in admins %}
('{{ admin.name }}', '{{ admin.email }}'),{% endfor %}
)
MANAGERS = ADMINS
SEND_BROKEN_LINK_EMAILS = True
SECRET_KEY = '{{ secret_key }}'
BASE_URL = 'http{% if ssl_cert %}s{% endif %}://{{ http_host }}'
MEDIA_ROOT = '{{ media_root }}'
MEDIA_URL = BASE_URL + '/media/'
{% if staticfiles %}
STATIC_ROOT = '{{ static_root }}'
STATIC_URL = BASE_URL + '/static/'
{% endif %}
{% if cache >= 0 %}
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': 'localhost:6379',
'OPTIONS': {
'DB': {{ cache }},
},
},
}
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
{% endif %}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '{{ http_host }}',
'USER': 'postgres',
}
}
{% if sentry_dsn %}
SENTRY_DSN = '{{ sentry_dsn }}'
{% endif %}
{% if email %}
EMAIL_SUBJECT_PREFIX = '[{{ http_host }}] '
SERVER_EMAIL = DEFAULT_FROM_EMAIL = '{{ email.from }}'
{% if email.host %}EMAIL_HOST = '{{ email.host }}'{% endif %}
{% if email.user %}EMAIL_HOST_USER = '{{ email.user }}'{% endif %}
{% if email.password %}EMAIL_HOST_PASSWORD = '{{ email.password }}'{% endif %}
{% if email.port %}EMAIL_PORT = {{ email.port }}{% endif %}
{% if email.backend %}EMAIL_BACKEND = '{{ email.backend }}'{% endif %}
{% if email.tls %}EMAIL_USE_TLS = True{% endif %}
{% endif %}
SESSION_COOKIE_HTTPONLY = True{% if ssl_cert %}
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https'){% endif %}
{% if settings %}{{ settings|safe }}{% endif %}
|
from {{ base_settings }} import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = ({% for admin in admins %}
('{{ admin.name }}', '{{ admin.email }}'),{% endfor %}
)
MANAGERS = ADMINS
SEND_BROKEN_LINK_EMAILS = True
SECRET_KEY = '{{ secret_key }}'
BASE_URL = 'http{% if ssl_cert %}s{% endif %}://{{ http_host }}'
MEDIA_ROOT = '{{ media_root }}'
MEDIA_URL = BASE_URL + '/media/'
{% if staticfiles %}
STATIC_ROOT = '{{ static_root }}'
STATIC_URL = BASE_URL + '/static/'
{% endif %}
{% if cache >= 0 %}
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': 'localhost:6379',
'OPTIONS': {
'DB': {{ cache }},
},
},
}
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
{% endif %}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '{{ http_host }}',
'USER': 'postgres',
}
}
{% if sentry_dsn %}
SENTRY_DSN = '{{ sentry_dsn }}'
{% endif %}
{% if email %}
EMAIL_SUBJECT_PREFIX = '[{{ http_host }}] '
SERVER_EMAIL = DEFAULT_FROM_EMAIL = '{{ email.from }}'
EMAIL_HOST = '{{ email.host }}'
{% if email.user %}EMAIL_HOST_USER = '{{ email.user }}'{% endif %}
{% if email.password %}EMAIL_HOST_PASSWORD = '{{ email.password }}'{% endif %}
{% if email.port %}EMAIL_PORT = {{ email.port }}{% endif %}
{% if email.backend %}EMAIL_BACKEND = '{{ email.backend }}'{% endif %}
{% if email.tls %}EMAIL_USE_TLS = True{% endif %}
{% endif %}
SESSION_COOKIE_HTTPONLY = True{% if ssl_cert %}
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https'){% endif %}
{% if settings %}{{ settings|safe }}{% endif %}
|
bsd-3-clause
|
Python
|
9694d5d0cbdcca874b791e6616dda831f8961373
|
Add a little debugging to the Papilio target
|
scanlime/flipsyfat,scanlime/flipsyfat
|
flipsyfat/targets/papilio_pro.py
|
flipsyfat/targets/papilio_pro.py
|
#!/usr/bin/env python3
import argparse
from migen import *
from flipsyfat.cores.sd_emulator import SDEmulator
from flipsyfat.cores.sd_trigger import SDTrigger
from misoc.targets.papilio_pro import BaseSoC
from migen.build.generic_platform import *
from misoc.integration.soc_sdram import *
from misoc.integration.builder import *
io = [
("sdemu", 0,
Subsignal("clk", Pins("C:8")),
Subsignal("cmd", Pins("C:9")),
Subsignal("d", Pins("C:10 C:11 C:12 C:13")),
IOStandard("LVCMOS33")
),
("trigger", 0,
Pins("C:0 C:1 C:2 C:3 C:4 C:5 C:6 C:7"),
IOStandard("LVCMOS33")
),
("debug", 0,
Pins("C:14 C:15"),
IOStandard("LVCMOS33")
),
]
class Flipsyfat(BaseSoC):
mem_map = {
"sdemu": 0x30000000,
}
mem_map.update(BaseSoC.mem_map)
def __init__(self, **kwargs):
BaseSoC.__init__(self, **kwargs)
self.platform.add_extension(io)
self.submodules.sdemu = SDEmulator(self.platform, self.platform.request("sdemu"))
self.register_mem("sdemu", self.mem_map["sdemu"], self.sdemu.bus, self.sdemu.mem_size)
self.csr_devices += ["sdemu"]
self.interrupt_devices += ["sdemu"]
self.submodules.sdtrig = SDTrigger(self.sdemu.ll, self.platform.request("trigger"))
self.csr_devices += ["sdtrig"]
# Activity LED
self.io_activity = (self.sdemu.ll.block_read_act | self.sdemu.ll.block_write_act )
self.sync += self.platform.request("user_led").eq(self.io_activity)
# Just for debugging
self.comb += self.platform.request("debug").eq(Cat(
self.sdemu.ll.card_status[5], # appcmd
self.sdemu.ll.cmd_in_act
))
def main():
parser = argparse.ArgumentParser(description="Flipsyfat port to the Papilio Pro")
builder_args(parser)
soc_sdram_args(parser)
args = parser.parse_args()
soc = Flipsyfat(**soc_sdram_argdict(args))
builder = Builder(soc, **builder_argdict(args))
builder.build()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import argparse
from migen import *
from flipsyfat.cores.sd_emulator import SDEmulator
from flipsyfat.cores.sd_trigger import SDTrigger
from misoc.targets.papilio_pro import BaseSoC
from migen.build.generic_platform import *
from misoc.integration.soc_sdram import *
from misoc.integration.builder import *
io = [
("sdemu", 0,
Subsignal("clk", Pins("C:8")),
Subsignal("cmd", Pins("C:9")),
Subsignal("d", Pins("C:10 C:11 C:12 C:13")),
IOStandard("LVCMOS33")
),
("trigger", 0,
Pins("C:0 C:1 C:2 C:3 C:4 C:5 C:6 C:7"),
IOStandard("LVCMOS33")
),
("debug", 0,
Pins("C:14 C:15"),
IOStandard("LVCMOS33")
),
]
class Flipsyfat(BaseSoC):
mem_map = {
"sdemu": 0x30000000,
}
mem_map.update(BaseSoC.mem_map)
def __init__(self, **kwargs):
BaseSoC.__init__(self, **kwargs)
self.platform.add_extension(io)
self.submodules.sdemu = SDEmulator(self.platform, self.platform.request("sdemu"))
self.register_mem("sdemu", self.mem_map["sdemu"], self.sdemu.bus, self.sdemu.mem_size)
self.csr_devices += ["sdemu"]
self.interrupt_devices += ["sdemu"]
self.submodules.sdtrig = SDTrigger(self.sdemu.ll, self.platform.request("trigger"))
self.csr_devices += ["sdtrig"]
# Activity LED
self.io_activity = (self.sdemu.ll.block_read_act | self.sdemu.ll.block_write_act )
self.sync += self.platform.request("user_led").eq(self.io_activity)
def main():
parser = argparse.ArgumentParser(description="Flipsyfat port to the Papilio Pro")
builder_args(parser)
soc_sdram_args(parser)
args = parser.parse_args()
soc = Flipsyfat(**soc_sdram_argdict(args))
builder = Builder(soc, **builder_argdict(args))
builder.build()
if __name__ == "__main__":
main()
|
mit
|
Python
|
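A sketch of the debug hookup above, assuming migen is installed: Cat concatenates signals LSB-first, so the first argument lands on bit 0 of the port and the second on bit 1.

from migen import Cat, Module, Signal

class Debug(Module):
    def __init__(self):
        self.appcmd = Signal()
        self.cmd_in_act = Signal()
        self.debug = Signal(2)
        # Cat is LSB-first: debug[0] <= appcmd, debug[1] <= cmd_in_act
        self.comb += self.debug.eq(Cat(self.appcmd, self.cmd_in_act))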
177bd7546faea56750a182c46a8fd6a892ff5d6a
|
Update State turns, those aren't game attributes
|
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
|
game.py
|
game.py
|
import datetime
import json
import map_loader
import queue
import state
import utils
class GAME_STATUS(object):
""" Game status constants. """
lobby = 'lobby' # In matchmaking lobby, waiting for all players
playing = 'playing' # In game mode, waiting for turns
complete = 'complete' # Game finished
cancelled = 'cancelled' # Broken?
class PLAYER_STATUS(object):
waiting = 'waiting' # Hasn't joined the lobby yet
joined = 'joined' # Has joined the lobby
playing = 'playing' # Sending moves and waiting for game state
lost = 'lost' # Missed turns/broken?
class Game(object):
def __init__(self, id=None, players=None, name=None, map_name='default', max_turns=17):
"""
Initialize a new game.
Note that when we load a game from the repo, we init an empty
game, so all our arguments to the constructor are optional.
"""
self.id = id
self.name = name
self.map_name = map_name
self.players = players # List of player usernames
self.status = GAME_STATUS.lobby
self.created = datetime.datetime.now()
# These attributes are persisted in the state, not DB properties
map = map_loader.read_map_file(map_name)
print(map)
self.state = state.State(map=map, max_turns=max_turns)
self.queue = queue.Queue(players=players)
def update(self, username, move):
""" Execute a round. """
#TODO: Definitely something with the Queue! Not sure what at the moment...
self.queue.increment_move()
self.state.current_turn += 1
if self.state.current_turn == self.state.max_turns:
self.status = GAME_STATUS.complete
|
import datetime
import json
import map_loader
import queue
import state
import utils
class GAME_STATUS(object):
""" Game status constants. """
lobby = 'lobby' # In matchmaking lobby, waiting for all players
playing = 'playing' # In game mode, waiting for turns
complete = 'complete' # Game finished
cancelled = 'cancelled' # Broken?
class PLAYER_STATUS(object):
waiting = 'waiting' # Hasn't joined the lobby yet
joined = 'joined' # Has joined the lobby
playing = 'playing' # Sending moves and waiting for game state
lost = 'lost' # Missed turns/broken?
class Game(object):
def __init__(self, id=None, players=None, name=None, map_name='default', max_turns=17):
"""
Initialize a new game.
Note that when we load a game from the repo, we init an empty
game, so all our arguments to the constructor are optional.
"""
self.id = id
self.name = name
self.map_name = map_name
self.players = players # List of player usernames
self.status = GAME_STATUS.lobby
self.created = datetime.datetime.now()
# These attributes are persisted in the state, not DB properties
map = map_loader.read_map_file(map_name)
print(map)
self.state = state.State(map=map, max_turns=max_turns)
self.queue = queue.Queue(players=players)
def update(self, username, move):
""" Execute a round. """
#TODO: Definitely something with the Queue! Not sure what at the moment...
self.current_turn += 1
if self.current_turn == self.max_turns:
self.status = GAME_STATUS.complete
|
mit
|
Python
|
2a696cd458ab2f67df5a6cfce0fe2016a8106eb4
|
add default channels
|
shortdudey123/gbot
|
gbot.py
|
gbot.py
|
#!/usr/bin/env python
# =============================================================================
# file = gbot.py
# description = IRC bot
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-09
# mod_date = 2014-07-09
# version = 0.1
# usage = called as a class
# notes =
# python_ver = 2.7.6
# =============================================================================
import src.bot as bot
if __name__ == "__main__":
gbot = bot.IRCBot(server="chat.freenode.com", nick="grbot", port=6667, realName='gbot', identify='', debug=True, connectDelay=4)
gbot.setDefautChannels({'##gbot': ''})
gbot.run()
|
#!/usr/bin/env python
# =============================================================================
# file = gbot.py
# description = IRC bot
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-09
# mod_date = 2014-07-09
# version = 0.1
# usage = called as a class
# notes =
# python_ver = 2.7.6
# =============================================================================
import src.bot as bot
if __name__ == "__main__":
gbot = bot.IRCBot(server="chat.freenode.com", nick="grbot", port=6667, realName='gbot', identify='', debug=True, connectDelay=4)
gbot.run()
|
apache-2.0
|
Python
|
40ae754565f52c7631798823d13332b37f52e0c5
|
fix misuse of msg_split
|
ryansb/netHUD
|
nethud/proto/telnet.py
|
nethud/proto/telnet.py
|
from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg_split[1])
elif msg_split[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
|
from __future__ import print_function
from twisted.internet import reactor, protocol, threads, defer
from twisted.protocols.basic import LineReceiver
from nethud.proto.client import NethackFactory
class TelnetConnection(LineReceiver):
def __init__(self, users):
self.users = users
self.uname = ''
def connectionLost(self, reason):
if NethackFactory.client:
NethackFactory.client.deassoc_client(self.uname)
if self.uname in self.users:
del self.users[self.uname]
self.uname = ''
print(reason)
def lineReceived(self, line):
msg_split = line.split()
if msg_split[0] == 'AUTH':
if len(msg_split[0]) != 2:
self.sendLine("ERR 406 Invalid Parameters.")
return
self.handle_auth(msg[1])
elif msg[0] == 'QUIT':
self.transport.loseConnection()
else:
self.sendLine("ERR 452 Invalid Command")
def handle_auth(self, uname):
self.users[uname] = self
self.uname = uname
if NethackFactory.client:
NethackFactory.client.assoc_client(uname, self)
class TelnetFactory(protocol.Factory):
def __init__(self):
self.users = {}
def buildProtocol(self, addr):
return TelnetConnection(users = self.users)
|
mit
|
Python
|
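The heart of the fix above, as a tiny sketch: the old code tested len(msg_split[0]) — the character count of the first token — where it needed len(msg_split), the number of tokens, and it later indexed a msg name that was never defined.

line = 'AUTH alice'
msg_split = line.split()
assert len(msg_split) == 2      # number of tokens: the intended check
assert len(msg_split[0]) == 4   # length of 'AUTH': what the old code tested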
4d810f6f447cdab43187e6da1cca2830766731f1
|
add config check test
|
akfullfo/taskforce
|
tests/test_05_task.py
|
tests/test_05_task.py
|
# ________________________________________________________________________
#
# Copyright (C) 2014 Andrew Fullford
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ________________________________________________________________________
#
import os, sys, logging, errno, time
import support
import taskforce.poll as poll
import taskforce.task as task
start_dir = os.path.realpath('.')
base_dir = "tests"
test_dir = os.path.realpath(os.path.join(base_dir, 'tmp'))
examples_dir = os.path.realpath("examples")
examples_bin = os.path.join(examples_dir, "bin")
config_file = 'example.conf'
roles_file = os.path.join(test_dir, 'test.roles')
test_roles = ['frontend', 'backend']
class Test(object):
@classmethod
def setUpAll(self):
self.log = support.logger()
self.log.info("%s started", self.__module__)
self.start_fds = len(support.find_open_fds())
self.startenv = {}
for tag in ['PATH', 'PYTHONPATH']:
if tag in os.environ:
self.startenv[tag] = os.environ[tag]
self.log.info("%d files open before task testing", self.start_fds)
if not os.path.isdir(test_dir):
os.mkdir(test_dir, 0777)
self.file_list = [roles_file]
@classmethod
def tearDownAll(self):
for path in self.file_list:
try: os.unlink(path)
except: pass
if os.path.isdir(test_dir):
os.rmdir(test_dir)
self.log.info("%s ended", self.__module__)
def setUp(self):
self.log.info("setup: cd %s", examples_dir)
os.chdir(examples_dir)
def tearDown(self):
self.log.info("teardown: cd %s", start_dir)
os.chdir(start_dir)
def set_path(self, tag, val):
if tag in self.startenv:
os.environ[tag] = val + ':' + self.startenv[tag]
else:
os.environ[tag] = val
def set_roles(self, roles):
if not type(roles) is list:
roles = [roles]
with open(roles_file, 'w') as f:
f.write('\n'.join(roles) + '\n')
def Test_A_check_config(self):
self.set_roles(test_roles[0])
self.set_path('PATH', examples_bin)
l = task.legion(log=self.log)
l.set_roles_file(roles_file)
l.set_config_file(config_file)
|
# ________________________________________________________________________
#
# Copyright (C) 2014 Andrew Fullford
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ________________________________________________________________________
#
import os, sys, logging, errno, time
import support
import taskforce.poll as poll
import taskforce.task as task
base_dir = "tests"
working_dir = os.path.join(base_dir, "work")
class Test(object):
@classmethod
def setUpAll(self, mode=None):
self.log = support.logger()
self.log.info("%s started", self.__module__)
self.start_fds = len(support.find_open_fds())
self.log.info("%d files open before task testing", self.start_fds)
if not os.path.isdir(working_dir):
os.mkdir(working_dir, 0777)
@classmethod
def tearDownAll(self):
if os.path.isdir(working_dir):
os.rmdir(working_dir)
self.log.info("%s ended", self.__module__)
def Test_A_add(self):
pass
|
apache-2.0
|
Python
|
ddfc569ba310ce2de3b4a4ae63111556646496f8
|
remove more f-strings
|
lilydjwg/nvchecker
|
tests/test_keyfile.py
|
tests/test_keyfile.py
|
# MIT licensed
# Copyright (c) 2018 lilydjwg <[email protected]>, et al.
import os
import tempfile
import contextlib
from nvchecker.source import HTTPError
import pytest
pytestmark = [pytest.mark.asyncio]
@contextlib.contextmanager
def unset_github_token_env():
token = os.environ.get('NVCHECKER_GITHUB_TOKEN')
try:
if token:
del os.environ['NVCHECKER_GITHUB_TOKEN']
yield token
finally:
if token:
os.environ['NVCHECKER_GITHUB_TOKEN'] = token
async def test_keyfile_missing(run_source):
test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
'''
assert await run_source(test_conf) in ['20140122.012101', None]
async def test_keyfile_invalid(run_source):
with tempfile.NamedTemporaryFile(mode='w') as f, \
unset_github_token_env():
f.write('''\
[keys]
github = xxx
''')
f.flush()
test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
[__config__]
keyfile = {name}
'''.format(name=f.name)
try:
version = await run_source(test_conf)
assert version is None # out of allowance
return
except HTTPError as e:
assert e.code == 401
return
raise Exception('expected 401 response')
@pytest.mark.skipif('NVCHECKER_GITHUB_TOKEN' not in os.environ,
reason='no key given')
async def test_keyfile_valid(run_source):
with tempfile.NamedTemporaryFile(mode='w') as f, \
unset_github_token_env() as token:
f.write('''\
[keys]
github = {token}
'''.format(token=token))
f.flush()
test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
[__config__]
keyfile = {name}
'''.format(name=f.name)
assert await run_source(test_conf) == '20140122.012101'
|
# MIT licensed
# Copyright (c) 2018 lilydjwg <[email protected]>, et al.
import os
import tempfile
import contextlib
from nvchecker.source import HTTPError
import pytest
pytestmark = [pytest.mark.asyncio]
@contextlib.contextmanager
def unset_github_token_env():
token = os.environ.get('NVCHECKER_GITHUB_TOKEN')
try:
if token:
del os.environ['NVCHECKER_GITHUB_TOKEN']
yield token
finally:
if token:
os.environ['NVCHECKER_GITHUB_TOKEN'] = token
async def test_keyfile_missing(run_source):
test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
'''
assert await run_source(test_conf) in ['20140122.012101', None]
async def test_keyfile_invalid(run_source):
with tempfile.NamedTemporaryFile(mode='w') as f, \
unset_github_token_env():
f.write('''\
[keys]
github = xxx
''')
f.flush()
test_conf = f'''\
[example]
github = harry-sanabria/ReleaseTestRepo
[__config__]
keyfile = {f.name}
'''
try:
version = await run_source(test_conf)
assert version is None # out of allowance
return
except HTTPError as e:
assert e.code == 401
return
raise Exception('expected 401 response')
@pytest.mark.skipif('NVCHECKER_GITHUB_TOKEN' not in os.environ,
reason='no key given')
async def test_keyfile_valid(run_source):
with tempfile.NamedTemporaryFile(mode='w') as f, \
unset_github_token_env() as token:
f.write('''\
[keys]
github = {token}
'''.format(token=token))
f.flush()
test_conf = '''\
[example]
github = harry-sanabria/ReleaseTestRepo
[__config__]
keyfile = {name}
'''.format(name=f.name)
assert await run_source(test_conf) == '20140122.012101'
|
mit
|
Python
|
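The equivalence behind the change, shown both ways; the .format() spelling keeps the module importable on Python versions before 3.6, which is the point of removing f-strings.

name = 'example.conf'
via_format = 'keyfile = {name}'.format(name=name)
via_fstring = f'keyfile = {name}'  # requires Python >= 3.6 to even parse
assert via_format == via_fstring == 'keyfile = example.conf'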
9e62b41dc762b1088bd5c1474678d7e7ed120add
|
test case with stage parameter
|
ownport/pkgstack
|
tests/test_profile.py
|
tests/test_profile.py
|
import os
import sys
import pytest
from pkgstack.profile import Profile
TESTS_PATH=os.path.realpath(os.path.dirname(__file__))
def test_profile_create(tmpdir):
config = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).config
assert config == [
{'install': 'pytest', 'stage': 'test'},
{'name': 'Install pytest-cov', 'install': 'pytest-cov', 'stage': 'test'},
{'name': 'Install codecov', 'install': 'codecov', 'alternatives': ['test1', 'test2'], 'stage': 'test'},
{'name': 'Install dtguess', 'install': 'dtguess==0.1.3'},
{'install': 'dtguess==0.1.3',
'alternatives': ['https://github.com/ownport/dtguess/releases/download/v0.1.3/dtguess-0.1.3.tar.gz'],
}
]
def test_process():
assert Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).process() == {
'packages.successed': 1,
'packages.failed': 1,
'packages.total': 5
}
def test_profile_process_via_stage():
assert Profile(os.path.join(TESTS_PATH, 'resources/sample.yml'), stages=['test',]).process() == {
'packages.successed': 5,
'packages.failed': 0,
'packages.total': 5
}
def test_profile_incorrect_stage_type():
with pytest.raises(RuntimeError):
p = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml'), stages='test')
|
import os
import sys
import pytest
from pkgstack.profile import Profile
TESTS_PATH=os.path.realpath(os.path.dirname(__file__))
def test_profile_create(tmpdir):
config = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).config
assert config == [
{'install': 'pytest', 'stage': 'test'},
{'name': 'Install pytest-cov', 'install': 'pytest-cov', 'stage': 'test'},
{'name': 'Install codecov', 'install': 'codecov', 'alternatives': ['test1', 'test2'], 'stage': 'test'},
{'name': 'Install dtguess', 'install': 'dtguess==0.1.3'},
{'install': 'dtguess==0.1.3',
'alternatives': ['https://github.com/ownport/dtguess/releases/download/v0.1.3/dtguess-0.1.3.tar.gz'],
}
]
def test_process():
assert Profile(os.path.join(TESTS_PATH, 'resources/sample.yml')).process() == {
'packages.successed': 1,
'packages.failed': 1,
'packages.total': 5
}
def test_profile_incorrect_stage_type():
with pytest.raises(RuntimeError):
p = Profile(os.path.join(TESTS_PATH, 'resources/sample.yml'), stages='test')
|
mit
|
Python
|
244fc6b436398055f650ea3a64e9388586604cd9
|
Add test for group collection access.
|
hoover/search,hoover/search,hoover/search
|
testsuite/test_acl.py
|
testsuite/test_acl.py
|
import pytest
pytestmark = pytest.mark.django_db
def test_collections_acl_users(client):
from django.contrib.auth.models import User, AnonymousUser
from hoover.search.models import Collection
from hoover.search.views import collections_acl
anonymous = AnonymousUser()
alice = User.objects.create_user('alice')
foo = Collection.objects.create(name='foo', public=True)
bar = Collection.objects.create(name='bar')
baz = Collection.objects.create(name='baz')
assert collections_acl(anonymous, []) == set()
assert collections_acl(anonymous, ['foo']) == {foo}
assert collections_acl(anonymous, ['foo', 'bar', 'baz']) == {foo}
assert collections_acl(anonymous, ['foo', 'bar', 'foo', 'bar']) == {foo}
assert collections_acl(alice, []) == set()
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo}
baz.users.add(alice)
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo, baz}
def test_collections_acl_groups(client):
from django.contrib.auth.models import User, AnonymousUser, Group
from hoover.search.models import Collection
from hoover.search.views import collections_acl
anonymous = AnonymousUser()
alice = User.objects.create_user('alice')
bob = User.objects.create_user('bob')
alice_group = Group.objects.create(name='alice1')
alice_group.user_set.add(alice)
foo = Collection.objects.create(name='foo', public=True)
bar = Collection.objects.create(name='bar')
baz = Collection.objects.create(name='baz')
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo}
assert collections_acl(bob, ['foo', 'bar', 'baz']) == {foo}
bar.groups.add(alice_group)
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo, bar}
assert collections_acl(bob, ['foo', 'bar', 'baz']) == {foo}
alice_group.user_set.add(bob)
assert collections_acl(bob, ['foo', 'bar', 'baz']) == {foo, bar}
|
import pytest
pytestmark = pytest.mark.django_db
def test_collections_acl(client):
from django.contrib.auth.models import User, AnonymousUser
from hoover.search.models import Collection
from hoover.search.views import collections_acl
anonymous = AnonymousUser()
alice = User.objects.create_user('alice')
foo = Collection.objects.create(name='foo', public=True)
bar = Collection.objects.create(name='bar')
baz = Collection.objects.create(name='baz')
assert collections_acl(anonymous, []) == set()
assert collections_acl(anonymous, ['foo']) == {foo}
assert collections_acl(anonymous, ['foo', 'bar', 'baz']) == {foo}
assert collections_acl(anonymous, ['foo', 'bar', 'foo', 'bar']) == {foo}
assert collections_acl(alice, []) == set()
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo}
baz.users.add(alice)
assert collections_acl(alice, ['foo', 'bar', 'baz']) == {foo, baz}
|
mit
|
Python
|
fa7b2a707be689c57d744d0ada5049dfb6b15789
|
Set leave=False on pbar
|
spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc
|
thinc/neural/train.py
|
thinc/neural/train.py
|
from __future__ import unicode_literals, print_function
from .optimizers import Eve, Adam, SGD, linear_decay
from .util import minibatch
import numpy.random
from tqdm import tqdm
class Trainer(object):
def __init__(self, model, **cfg):
self.ops = model.ops
self.model = model
self.L2 = cfg.get('L2', 0.0)
self.optimizer = Adam(model.ops, 0.001, decay=0.0, eps=1e-8, L2=self.L2)
self.batch_size = cfg.get('batch_size', 128)
self.nb_epoch = cfg.get('nb_epoch', 20)
self.i = 0
self.dropout = cfg.get('dropout', 0.)
self.dropout_decay = cfg.get('dropout_decay', 0.)
self.each_epoch = []
def __enter__(self):
return self, self.optimizer
def __exit__(self, exc_type, exc_val, exc_tb):
self.model.use_params(self.optimizer.averages)
def iterate(self, train_X, train_y, progress_bar=True):
orig_dropout = self.dropout
for i in range(self.nb_epoch):
indices = numpy.arange(len(train_X))
numpy.random.shuffle(indices)
indices = self.ops.asarray(indices)
j = 0
with tqdm(total=indices.shape[0], leave=False) as pbar:
while j < indices.shape[0]:
slice_ = indices[j : j + self.batch_size]
X = _take_slice(train_X, slice_)
y = _take_slice(train_y, slice_)
yield X, y
self.dropout = linear_decay(orig_dropout, self.dropout_decay,
self.optimizer.nr_iter)
j += self.batch_size
if progress_bar:
pbar.update(self.batch_size)
for func in self.each_epoch:
func()
def _take_slice(data, slice_):
if isinstance(data, list) or isinstance(data, tuple):
return [data[int(i)] for i in slice_]
else:
return data[slice_]
|
from __future__ import unicode_literals, print_function
from .optimizers import Eve, Adam, SGD, linear_decay
from .util import minibatch
import numpy.random
from tqdm import tqdm
class Trainer(object):
def __init__(self, model, **cfg):
self.ops = model.ops
self.model = model
self.L2 = cfg.get('L2', 0.0)
self.optimizer = Adam(model.ops, 0.001, decay=0.0, eps=1e-8, L2=self.L2)
self.batch_size = cfg.get('batch_size', 128)
self.nb_epoch = cfg.get('nb_epoch', 20)
self.i = 0
self.dropout = cfg.get('dropout', 0.)
self.dropout_decay = cfg.get('dropout_decay', 0.)
self.each_epoch = []
def __enter__(self):
return self, self.optimizer
def __exit__(self, exc_type, exc_val, exc_tb):
self.model.use_params(self.optimizer.averages)
def iterate(self, train_X, train_y, progress_bar=True):
orig_dropout = self.dropout
for i in range(self.nb_epoch):
indices = numpy.arange(len(train_X))
numpy.random.shuffle(indices)
indices = self.ops.asarray(indices)
j = 0
with tqdm(total=indices.shape[0]) as pbar:
while j < indices.shape[0]:
slice_ = indices[j : j + self.batch_size]
X = _take_slice(train_X, slice_)
y = _take_slice(train_y, slice_)
yield X, y
self.dropout = linear_decay(orig_dropout, self.dropout_decay,
self.optimizer.nr_iter)
j += self.batch_size
if progress_bar:
pbar.update(self.batch_size)
for func in self.each_epoch:
func()
def _take_slice(data, slice_):
if isinstance(data, list) or isinstance(data, tuple):
return [data[int(i)] for i in slice_]
else:
return data[slice_]
|
mit
|
Python
|
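What leave=False changes, as a minimal sketch: a finished bar is erased when the context exits, so one bar per epoch does not pile up line by line in the terminal.

from time import sleep
from tqdm import tqdm

for epoch in range(3):
    with tqdm(total=100, leave=False) as pbar:  # erased on completion
        for _ in range(10):
            sleep(0.01)
            pbar.update(10)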
603bfdc9cb0f9bf8e29306e161728423f1f57f86
|
Update dependency bazelbuild/bazel to latest version
|
google/copybara,google/copybara,google/copybara
|
third_party/bazel.bzl
|
third_party/bazel.bzl
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "6820bea27ff251086d683793c379e1d327bdd193"
bazel_sha256 = "48d36a03f519f17ea0f0e082857c5e71dd375c7a5f1ce198f72d790ffa833356"
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "87550ea81d92796facbb068a6ce7365326d00aff"
bazel_sha256 = "7abdba2562f80ae8327627b554785173b65f8758cf98f901b595d8d252210b94"
|
apache-2.0
|
Python
|
34721c0078d564538a4cf20ac15560a1bf119bac
|
Update dependency bazelbuild/bazel to latest version
|
google/copybara,google/copybara,google/copybara
|
third_party/bazel.bzl
|
third_party/bazel.bzl
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "dc7db2490a9bf6941a1bcb1fbfc709fff1f37739"
bazel_sha256 = "0a15609a976bc1d8e588d6252b12880723ff14bf06d6b73488853bcef8717840"
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is autogenerated by copybara, please do not edit.
bazel_version = "81f5771b0a5d26884841bbdecc77166142c87ca1"
bazel_sha256 = "d4787388b73177edf0d618e49a802d76b60e91adcee8e8aec882aeb02671046d"
|
apache-2.0
|
Python
|
fd641ebb631d4b7d03bf978de2dc22f4c2966dd5
|
Update Bazel to latest version
|
google/copybara,google/copybara,google/copybara
|
third_party/bazel.bzl
|
third_party/bazel.bzl
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "1c03c8c4fac1e1028fcb5bb342da3a7fdfc88327"
bazel_sha256 = "e2d73ad1de6001669c87e0ec954738641dc6eb7e960f04601f7a260699bb0b9e"
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
bazel_version = "f3d1683fcb3c4a25c4c4b8251d00ddfa66f958ba"
bazel_sha256 = "44643a2437c709cec8bf4a0f7a1bb0c31ba5e8a04680a6a669352b7e3a0545ef"
|
apache-2.0
|
Python
|
b3abe856ff2e430f64c60d28b77e95c73b842b47
|
fix wrong table header
|
marcorosa/wos-cli
|
src/search.py
|
src/search.py
|
import xml.etree.ElementTree as ET
import texttable as tt
import re
from config import user_id, password
from datetime import date
from wos import WosClient
def _draw_table(data):
# Generate table
tab = tt.Texttable()
tab.add_rows(data)
tab.set_cols_align(['l', 'l', 'l'])
tab.header(['Year', 'Title', 'ID WOS'])
tab.set_cols_width([5, 55, 20]) # Use fixed terminal dimension (80 char)
s = tab.draw()
print s
def search(author, years, results):
client = WosClient(user_id, password)
client.connect()
# Build query
query = 'AU=%s' % author
# Build timespan
current_year = date.today().year
sq = client.search(query,
count=results,
offset=1,
timeSpan={'begin': '%s-01-01' % (current_year - years),
'end': '%s-01-01' % (current_year + 1)})
# Format xml
my_xml = re.sub(' xmlns="[^"]+"', '', sq.records, count=1).encode('utf-8')
tree = ET.fromstring(my_xml)
# Get results
res = []
for t in tree:
element = list(t)
idwos = element[0].text
data = list(element[1]) # static_data
summary = list(data[0]) # summary
titles = list(summary[2]) # titles
year = summary[1].attrib['pubyear']
paper = ''
for title in titles:
if title.attrib['type'] == 'item':
paper = title.text
res.append([year, paper, idwos])
_draw_table(res)
|
import xml.etree.ElementTree as ET
import texttable as tt
import re
from config import user_id, password
from datetime import date
from wos import WosClient
def _draw_table(data):
# Generate table
tab = tt.Texttable()
tab.add_rows(data)
tab.set_cols_align(['l', 'l', 'l'])
tab.header(['year', 'id', 'title'])
tab.set_cols_width([5, 55, 20]) # Use fixed terminal dimension (80 char)
s = tab.draw()
print s
def search(author, years, results):
client = WosClient(user_id, password)
client.connect()
# Build query
query = 'AU=%s' % author
# Build timespan
current_year = date.today().year
sq = client.search(query,
count=results,
offset=1,
timeSpan={'begin': '%s-01-01' % (current_year - years),
'end': '%s-01-01' % (current_year + 1)})
# Format xml
my_xml = re.sub(' xmlns="[^"]+"', '', sq.records, count=1).encode('utf-8')
tree = ET.fromstring(my_xml)
# Get results
res = []
for t in tree:
element = list(t)
idwos = element[0].text
data = list(element[1]) # static_data
summary = list(data[0]) # summary
titles = list(summary[2]) # titles
year = summary[1].attrib['pubyear']
paper = ''
for title in titles:
if title.attrib['type'] == 'item':
paper = title.text
res.append([year, paper, idwos])
_draw_table(res)
|
mit
|
Python
|
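The header must list columns in the same order as the appended rows ([year, paper, idwos] above), which is what the fix restores. A minimal sketch with a hypothetical row:

import texttable as tt

tab = tt.Texttable()
tab.header(['Year', 'Title', 'ID WOS'])
tab.set_cols_align(['l', 'l', 'l'])
tab.set_cols_width([5, 55, 20])
tab.add_row(['2017', 'An example paper title', 'WOS:000000000000000'])
print(tab.draw())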
f09c65f980fd9a7364d038ca8eb0b007f74677f5
|
Increase version
|
ITCase-django/django-tinymce-4,ITCase-django/django-tinymce-4,ITCase-django/django-tinymce-4
|
tinymce_4/__init__.py
|
tinymce_4/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '0.0.25-dev'
|
# -*- coding: utf-8 -*-
__version__ = '0.0.24'
|
mit
|
Python
|
cca2ef0f3700c4eafe66c8f751ecb2fc03318e2b
|
Disable boto3 deprecation warning logs
|
chaordic/ignition-core,chaordic/ignition-core,chaordic/ignition-core
|
tools/delete_fleet.py
|
tools/delete_fleet.py
|
import sys
from time import sleep
import boto3
from botocore.exceptions import ClientError
boto3.compat.filter_python_deprecation_warnings()
def describe_fleets(region, fleet_id):
ec2 = boto3.client('ec2', region_name=region)
response = ec2.describe_fleets(
FleetIds=[
fleet_id
],
)
errors = response['Fleets'][0]['Errors']
instances = response['Fleets'][0]['Instances']
# to ensure we are returning an array anyway
if len(errors) > 0 and len(instances) == 0:
return ['']
return instances[0]['InstanceIds']
def delete_fleet(region, fleet_id):
ec2 = boto3.client('ec2', region_name=region)
response = ec2.delete_fleets(
FleetIds=[
fleet_id,
],
TerminateInstances=True
)
return response['SuccessfulFleetDeletions'][0]['CurrentFleetState']
if __name__ == '__main__':
region = sys.argv[1]
fleet_id = sys.argv[2]
try:
# Delete the fleet
fleet_deleted_states = ["deleted", "deleted_running", "deleted_terminating"]
fleet_state = None
while fleet_state not in fleet_deleted_states:
sleep(5)
fleet_state = delete_fleet(region=region, fleet_id=fleet_id)
print(f"Fleet deleted. Fleet state: {fleet_state}")
# get the instance ids from the fleet
print(describe_fleets(region=region, fleet_id=fleet_id))
except (ClientError, Exception) as e:
print(e)
|
import sys
from time import sleep
import boto3
from botocore.exceptions import ClientError
def describe_fleets(region, fleet_id):
ec2 = boto3.client('ec2', region_name=region)
response = ec2.describe_fleets(
FleetIds=[
fleet_id
],
)
errors = response['Fleets'][0]['Errors']
instances = response['Fleets'][0]['Instances']
# to ensure we are returning an array anyway
if len(errors) > 0 and len(instances) == 0:
return ['']
return instances[0]['InstanceIds']
def delete_fleet(region, fleet_id):
ec2 = boto3.client('ec2', region_name=region)
response = ec2.delete_fleets(
FleetIds=[
fleet_id,
],
TerminateInstances=True
)
return response['SuccessfulFleetDeletions'][0]['CurrentFleetState']
if __name__ == '__main__':
region = sys.argv[1]
fleet_id = sys.argv[2]
try:
# Delete the fleet
fleet_deleted_states = ["deleted", "deleted_running", "deleted_terminating"]
fleet_state = None
while fleet_state not in fleet_deleted_states:
sleep(5)
fleet_state = delete_fleet(region=region, fleet_id=fleet_id)
print(f"Fleet deleted. Fleet state: {fleet_state}")
# get the instance ids from the fleet
print(describe_fleets(region=region, fleet_id=fleet_id))
except (ClientError, Exception) as e:
print(e)
|
mit
|
Python
|
8a6370f7c91fec6c220bc2e438a236816c636341
|
Revert throttle Arlo api calls (#13174)
|
sander76/home-assistant,home-assistant/home-assistant,turbokongen/home-assistant,tboyce1/home-assistant,kennedyshead/home-assistant,Cinntax/home-assistant,aronsky/home-assistant,tboyce021/home-assistant,jabesq/home-assistant,mezz64/home-assistant,robbiet480/home-assistant,soldag/home-assistant,mKeRix/home-assistant,nugget/home-assistant,Danielhiversen/home-assistant,balloob/home-assistant,nkgilley/home-assistant,fbradyirl/home-assistant,nkgilley/home-assistant,jamespcole/home-assistant,nugget/home-assistant,rohitranjan1991/home-assistant,FreekingDean/home-assistant,tinloaf/home-assistant,rohitranjan1991/home-assistant,robbiet480/home-assistant,molobrakos/home-assistant,MartinHjelmare/home-assistant,tinloaf/home-assistant,postlund/home-assistant,adrienbrault/home-assistant,leppa/home-assistant,HydrelioxGitHub/home-assistant,HydrelioxGitHub/home-assistant,mKeRix/home-assistant,mKeRix/home-assistant,persandstrom/home-assistant,toddeye/home-assistant,auduny/home-assistant,fbradyirl/home-assistant,soldag/home-assistant,PetePriority/home-assistant,PetePriority/home-assistant,molobrakos/home-assistant,Teagan42/home-assistant,persandstrom/home-assistant,fbradyirl/home-assistant,jnewland/home-assistant,tchellomello/home-assistant,HydrelioxGitHub/home-assistant,MartinHjelmare/home-assistant,lukas-hetzenecker/home-assistant,mezz64/home-assistant,sdague/home-assistant,tboyce1/home-assistant,persandstrom/home-assistant,jnewland/home-assistant,partofthething/home-assistant,joopert/home-assistant,tboyce1/home-assistant,jabesq/home-assistant,jawilson/home-assistant,home-assistant/home-assistant,aronsky/home-assistant,rohitranjan1991/home-assistant,tboyce021/home-assistant,DavidLP/home-assistant,mKeRix/home-assistant,toddeye/home-assistant,aequitas/home-assistant,sander76/home-assistant,titilambert/home-assistant,joopert/home-assistant,GenericStudent/home-assistant,DavidLP/home-assistant,tboyce1/home-assistant,jabesq/home-assistant,MartinHjelmare/home-assistant,jamespcole/home-assistant,jamespcole/home-assistant,turbokongen/home-assistant,lukas-hetzenecker/home-assistant,Teagan42/home-assistant,pschmitt/home-assistant,adrienbrault/home-assistant,auduny/home-assistant,partofthething/home-assistant,w1ll1am23/home-assistant,qedi-r/home-assistant,PetePriority/home-assistant,balloob/home-assistant,sdague/home-assistant,aequitas/home-assistant,pschmitt/home-assistant,auduny/home-assistant,nugget/home-assistant,postlund/home-assistant,jawilson/home-assistant,FreekingDean/home-assistant,jnewland/home-assistant,aequitas/home-assistant,w1ll1am23/home-assistant,molobrakos/home-assistant,Cinntax/home-assistant,DavidLP/home-assistant,leppa/home-assistant,kennedyshead/home-assistant,Danielhiversen/home-assistant,balloob/home-assistant,qedi-r/home-assistant,titilambert/home-assistant,tchellomello/home-assistant,tinloaf/home-assistant,GenericStudent/home-assistant
|
homeassistant/components/arlo.py
|
homeassistant/components/arlo.py
|
"""
This component provides support for Netgear Arlo IP cameras.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/arlo/
"""
import logging
import voluptuous as vol
from requests.exceptions import HTTPError, ConnectTimeout
from homeassistant.helpers import config_validation as cv
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
REQUIREMENTS = ['pyarlo==0.1.2']
_LOGGER = logging.getLogger(__name__)
CONF_ATTRIBUTION = "Data provided by arlo.netgear.com"
DATA_ARLO = 'data_arlo'
DEFAULT_BRAND = 'Netgear Arlo'
DOMAIN = 'arlo'
NOTIFICATION_ID = 'arlo_notification'
NOTIFICATION_TITLE = 'Arlo Component Setup'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up an Arlo component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
try:
from pyarlo import PyArlo
arlo = PyArlo(username, password, preload=False)
if not arlo.is_connected:
return False
hass.data[DATA_ARLO] = arlo
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Netgear Arlo: %s", str(ex))
hass.components.persistent_notification.create(
'Error: {}<br />'
'You will need to restart hass after fixing.'
''.format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
return False
return True
|
"""
This component provides support for Netgear Arlo IP cameras.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/arlo/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from requests.exceptions import HTTPError, ConnectTimeout
from homeassistant.helpers import config_validation as cv
from homeassistant.util import Throttle
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
REQUIREMENTS = ['pyarlo==0.1.2']
_LOGGER = logging.getLogger(__name__)
CONF_ATTRIBUTION = "Data provided by arlo.netgear.com"
DATA_ARLO = 'data_arlo'
DEFAULT_BRAND = 'Netgear Arlo'
DOMAIN = 'arlo'
NOTIFICATION_ID = 'arlo_notification'
NOTIFICATION_TITLE = 'Arlo Component Setup'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up an Arlo component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
try:
from pyarlo import PyArlo
arlo = PyArlo(username, password, preload=False)
if not arlo.is_connected:
return False
arlo.update = Throttle(timedelta(seconds=10))(arlo.update)
hass.data[DATA_ARLO] = arlo
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Netgear Arlo: %s", str(ex))
hass.components.persistent_notification.create(
'Error: {}<br />'
'You will need to restart hass after fixing.'
''.format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
return False
return True
|
apache-2.0
|
Python
|
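The old_contents above wrap arlo.update in Home Assistant's Throttle helper so API polling is rate-limited. A minimal standalone sketch of that rate-limiting pattern (an illustration only, not Home Assistant's actual Throttle implementation) might look like:
import time
from datetime import timedelta
def throttle(min_interval):
    """Decorator factory: skip calls made within min_interval of the last one."""
    def decorator(func):
        state = {'last': None}
        def wrapper(*args, **kwargs):
            now = time.monotonic()
            if state['last'] is not None and now - state['last'] < min_interval.total_seconds():
                return None  # called again too soon; skip the wrapped call
            state['last'] = now
            return func(*args, **kwargs)
        return wrapper
    return decorator
@throttle(timedelta(seconds=10))
def update():
    print("polling the camera API")
update()   # runs
update()   # skipped: still inside the 10-second window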
b221ed2e83cd352b1eec0ad74a3e02946db39197
|
Add an example of yielding a dict in plpy
|
HazyResearch/deepdive,HazyResearch/deepdive,AlexisBRENON/deepdive,shahin/deepdive,zifeishan/deepdive,Atlas7/deepdive,topojoy/deepdive,AlexisBRENON/deepdive,sky-xu/deepdive,gaapt/deepdive,zifeishan/deepdive,kod3r/deepdive,sai16vicky/deepdive,Atlas7/deepdive,vasyvas/deepdive,zifeishan/deepdive,AlexisBRENON/deepdive,RTsWorld/deepdive,gaapt/deepdive,vsoch/deepdive,maalmeida1837/deepdive,feiranwang/deepdive,gaapt/deepdive,vsoch/deepdive,zifeishan/deepdive,kod3r/deepdive,gaapt/deepdive,vasyvas/deepdive,Atlas7/deepdive,infinitespace/deepdive,RTsWorld/deepdive,AlexisBRENON/deepdive,vsoch/deepdive,nvoron23/deepdive,nvoron23/deepdive,gaapt/deepdive,wgapl/deepdive,nvoron23/deepdive,wgapl/deepdive,infinitespace/deepdive,infinitespace/deepdive,AlexisBRENON/deepdive,sai16vicky/deepdive,RTsWorld/deepdive,closedLoop/deepdive,Gela/deepdive,kod3r/deepdive,topojoy/deepdive,wgapl/deepdive,feiranwang/deepdive,RTsWorld/deepdive,kod3r/deepdive,maalmeida1837/deepdive,Gela/deepdive,vasyvas/deepdive,HazyResearch/deepdive,RTsWorld/deepdive,Gela/deepdive,nvoron23/deepdive,topojoy/deepdive,topojoy/deepdive,sai16vicky/deepdive,sai16vicky/deepdive,closedLoop/deepdive,vsoch/deepdive,zifeishan/deepdive,closedLoop/deepdive,feiranwang/deepdive,kod3r/deepdive,feiranwang/deepdive,HazyResearch/deepdive,infinitespace/deepdive,nvoron23/deepdive,maalmeida1837/deepdive,wgapl/deepdive,Gela/deepdive,topojoy/deepdive,shahin/deepdive,maalmeida1837/deepdive,maalmeida1837/deepdive,Atlas7/deepdive,wgapl/deepdive,sai16vicky/deepdive,vasyvas/deepdive,infinitespace/deepdive,sky-xu/deepdive,vsoch/deepdive,gaapt/deepdive,infinitespace/deepdive,Atlas7/deepdive,closedLoop/deepdive,Gela/deepdive,vasyvas/deepdive,closedLoop/deepdive
|
examples/spouse_example/plpy_extractor/udf/ext_people.py
|
examples/spouse_example/plpy_extractor/udf/ext_people.py
|
#! /usr/bin/env python
import ddext
import itertools
# Format of plpy_extractor:
# Anything other than the functions "init" and "run" will not be accepted.
# In "init", import libraries and specify input variables and return types.
# In "run", write your extractor. Return a list containing your results;
# each item in the list should be a list/tuple of your return types.
# Do not print.
def init():
# SD['json'] = __import__('json')
ddext.import_lib('itertools')
# Input commands MUST HAVE CORRECT ORDER
ddext.input('sentence_id', 'bigint')
ddext.input('words', 'text[]')
ddext.input('ner_tags', 'text[]')
# Returns commands MUST HAVE CORRECT ORDER
ddext.returns('sentence_id', 'bigint')
ddext.returns('start_position', 'int')
# ddext.returns('start_index', 'int')
ddext.returns('length', 'int')
ddext.returns('text', 'text')
def run(sentence_id, words, ner_tags):
# Find phrases that are tagged with PERSON
phrases_indicies = []
start_index = 0
ner_list = list(enumerate(ner_tags))
while True:
sublist = ner_list[start_index:]
next_phrase = list(itertools.takewhile(lambda x: (x[1] in ["PERSON"]), sublist))
if next_phrase:
phrases_indicies.append([x[0] for x in next_phrase])
start_index = next_phrase[-1][0] + 1
elif start_index == len(ner_list)+1: break
else: start_index = start_index + 1
# You can yield a tuple or a dict to database
for phrase in phrases_indicies:
# yield (sentence_id,
# phrase[0],
# len(phrase),
# " ".join(words[phrase[0]:phrase[-1]+1]))
yield {
'sentence_id': sentence_id,
'start_position': phrase[0],
'text': " ".join(words[phrase[0]:phrase[-1]+1]),
'length': len(phrase)
}
# # Or you can return a list of tuples
# return [(sentence_id, phrase[0], len(phrase),
# " ".join(words[phrase[0]:phrase[-1]+1])) for phrase in phrases_indicies]
|
#! /usr/bin/env python
import ddext
import itertools
# Format of plpy_extractor:
# Anything other than the functions "init" and "run" will not be accepted.
# In "init", import libraries and specify input variables and return types.
# In "run", write your extractor. Return a list containing your results;
# each item in the list should be a list/tuple of your return types.
# Do not print.
def init():
# SD['json'] = __import__('json')
ddext.import_lib('itertools')
# Input commands MUST HAVE CORRECT ORDER
ddext.input('sentence_id', 'bigint')
ddext.input('words', 'text[]')
ddext.input('ner_tags', 'text[]')
# Returns commands MUST HAVE CORRECT ORDER
ddext.returns('sentence_id', 'bigint')
ddext.returns('start_position', 'int')
# ddext.returns('start_index', 'int')
ddext.returns('length', 'int')
ddext.returns('text', 'text')
def run(sentence_id, words, ner_tags):
# Find phrases that are tagged with PERSON
phrases_indicies = []
start_index = 0
ner_list = list(enumerate(ner_tags))
while True:
sublist = ner_list[start_index:]
next_phrase = list(itertools.takewhile(lambda x: (x[1] in ["PERSON"]), sublist))
if next_phrase:
phrases_indicies.append([x[0] for x in next_phrase])
start_index = next_phrase[-1][0] + 1
elif start_index == len(ner_list)+1: break
else: start_index = start_index + 1
# You can yield a tuple to database
for phrase in phrases_indicies:
yield (sentence_id,
phrase[0],
len(phrase),
" ".join(words[phrase[0]:phrase[-1]+1]))
# # Or you can return a list of tuples
# return [(sentence_id, phrase[0], len(phrase),
# " ".join(words[phrase[0]:phrase[-1]+1])) for phrase in phrases_indicies]
|
apache-2.0
|
Python
|
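The extractor above scans ner_tags for maximal runs of PERSON-tagged tokens and yields one dict per run. A self-contained sketch of the same run-finding idea, using itertools.groupby instead of the takewhile loop (an alternative formulation, not the ddext-based extractor itself):
from itertools import groupby
def person_phrases(words, ner_tags):
    """Yield one dict per maximal run of PERSON-tagged tokens."""
    index = 0
    for tag, group in groupby(ner_tags):
        length = len(list(group))
        if tag == "PERSON":
            yield {
                "start_position": index,
                "length": length,
                "text": " ".join(words[index:index + length]),
            }
        index += length
words = ["Barack", "Obama", "met", "Angela", "Merkel"]
tags = ["PERSON", "PERSON", "O", "PERSON", "PERSON"]
print(list(person_phrases(words, tags)))  # two phrases: indices 0-1 and 3-4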
3010b38a15ca90f51a72e0cf3698ca218aaa144f
|
Remove an execution warning.
|
ProjetPP/Scripts,ProjetPP/Scripts
|
requests_graph.py
|
requests_graph.py
|
#!/usr/bin/env python3
import sys
import time
import array
import datetime
import requests
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
NB_HOURS = 24
GRANULOMETRY = 15 # must be a divisor of 60
if len(sys.argv) != 2:
print('Syntax: %s file.png' % sys.argv[0])
exit(1)
# Get data
data = requests.get('http://gunicorn9005.ppp.pony.ovh/', params={'limit': 10000}).json()
# Convert to datetime
data = [datetime.datetime(*time.strptime(x[1].split('.')[0], "%Y-%m-%d %H:%M:%S")[:6]) for x in data]
# Compute the difference
now = datetime.datetime.now()
data = [now - x for x in data]
max_ = datetime.timedelta(hours=NB_HOURS)
# Shrink and convert to minutes
data = [x.seconds//(60*GRANULOMETRY) for x in data if x <= max_]
# Compute the height of the bars
requests_per_minute = array.array('I', (0 for x in range(0, 60*NB_HOURS//GRANULOMETRY)))
for x in data:
requests_per_minute[(60*NB_HOURS//GRANULOMETRY) - x - 1] += 1
# Final plot
x = range(0, 60*NB_HOURS//GRANULOMETRY)
plt.plot(x, requests_per_minute, label=None)
plt.title("Requests to the PPP")
plt.xlabel("Time (%s minutes)" % str(GRANULOMETRY))
plt.ylabel("Requests")
#plt.legend()
plt.savefig(sys.argv[1])
|
#!/usr/bin/env python3
import sys
import time
import array
import datetime
import requests
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
NB_HOURS = 24
GRANULOMETRY = 15 # must be a divisor of 60
if len(sys.argv) != 2:
print('Syntax: %s file.png' % sys.argv[0])
exit(1)
# Get data
data = requests.get('http://gunicorn9005.ppp.pony.ovh/', params={'limit': 10000}).json()
# Convert to datetime
data = [datetime.datetime(*time.strptime(x[1].split('.')[0], "%Y-%m-%d %H:%M:%S")[:6]) for x in data]
# Compute the difference
now = datetime.datetime.now()
data = [now - x for x in data]
max_ = datetime.timedelta(hours=NB_HOURS)
# Shrink and convert to minutes
data = [x.seconds//(60*GRANULOMETRY) for x in data if x <= max_]
# Compute the height of the bars
requests_per_minute = array.array('I', (0 for x in range(0, 60*NB_HOURS//GRANULOMETRY)))
for x in data:
requests_per_minute[(60*NB_HOURS//GRANULOMETRY) - x - 1] += 1
# Final plot
x = range(0, 60*NB_HOURS//GRANULOMETRY)
plt.plot(x, requests_per_minute, label=None)
plt.title("Requests to the PPP")
plt.xlabel("Time (%s minutes)" % str(GRANULOMETRY))
plt.ylabel("Requests")
plt.legend()
plt.savefig(sys.argv[1])
|
cc0-1.0
|
Python
|
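The script above selects the Agg backend before importing pyplot, which is what lets it render on a headless server. The essential pattern, as a minimal sketch (assuming matplotlib is installed):
import matplotlib
matplotlib.use('Agg')  # must happen before the first pyplot import
import matplotlib.pyplot as plt
plt.plot(range(10), [x * x for x in range(10)])
plt.title("Headless rendering demo")
plt.savefig("demo.png")  # writes the image without needing a display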
a240cbaa13be8682e5611241634a761df581efff
|
fix format
|
kurtwood/sw_assignment3,kurtwood/sw_assignment3,kurtwood/sw_assignment3
|
sw-project.py
|
sw-project.py
|
# Import the SDK
import facebook
# import the secret token
import secret
# For date and time operations
from datetime import datetime, date, time
# open connection
g = facebook.GraphAPI(secret.ACCESS_TOKEN)
# retrieve friends
friends = g.get_connections("me", "friends")['data']
# retrieve their likes
likes = {friend['name']: g.get_connections(friend['id'], "likes")['data']
for friend in friends}
statuses = {friend['name']: g.get_connections(friend['id'], "statuses")['data']
for friend in friends}
# take a look at a 'created_time' value of a random like, cf http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
fb_date_format = "%Y-%m-%dT%H:%M:%S+0000"
likes_times = []
for friend in likes:
for like in likes[friend]:
likes_times.append(datetime.strptime(like['created_time'], fb_date_format))
statuses_times = []
for friend in statuses:
for status in statuses[friend]:
statuses_times.append(datetime.strptime(status['updated_time'], fb_date_format))
# Retrieve the number of Statuses per day for the last 7 days
now = datetime.now()
# How many...
seconds_per_week = 7 * 24 * 60 * 60
statuses_last_week = [0, 0, 0, 0, 0, 0, 0]
for status_time in statuses_times:
if (now - status_time).total_seconds() < seconds_per_week:
statuses_last_week[status_time.weekday()] += 1
likes_last_week = [0, 0, 0, 0, 0, 0, 0]
for like_time in likes_times:
if (now - like_time).total_seconds() < seconds_per_week:
likes_last_week[like_time.weekday()] += 1
|
# Import the SDK
import facebook
# import the secret token
import secret
# For date and time operations
from datetime import datetime, date, time
# open connection
g = facebook.GraphAPI(secret.ACCESS_TOKEN)
# retrieve friends
friends = g.get_connections("me", "friends")['data']
# retrieve their likes
likes = { friend['name'] : g.get_connections(friend['id'], "likes")['data']
for friend in friends }
statuses = { friend['name'] : g.get_connections(friend['id'], "statuses")['data']
for friend in friends }
# take a look at a 'created_time' value of a random like, cf http://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
fb_date_format = "%Y-%m-%dT%H:%M:%S+0000"
likes_times = []
for friend in likes:
for like in likes[friend]:
likes_times.append(datetime.strptime(like['created_time'], fb_date_format))
statuses_times = []
for friend in statuses:
for status in statuses[friend]:
statuses_times.append(datetime.strptime(status['updated_time'], fb_date_format))
# Retrieve the number of Statuses per day for the last 7 days
now = datetime.now()
# How many...
seconds_per_week = 7 * 24 * 60 * 60
statuses_last_week = [0, 0, 0, 0, 0, 0, 0]
for status_time in statuses_times:
if (now - status_time).total_seconds() < seconds_per_week:
statuses_last_week[status_time.weekday()] += 1
likes_last_week = [0, 0, 0, 0, 0, 0, 0]
for like_time in likes_times:
if (now - like_time).total_seconds() < seconds_per_week:
likes_last_week[like_time.weekday()] += 1
|
bsd-3-clause
|
Python
|
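The diff above is purely whitespace: PEP 8 asks for no padding inside dict-comprehension braces and no space before the colon. For reference, a small sketch of both styles:
# Preferred PEP 8 spacing:
squares = {n: n * n for n in range(5)}
# Discouraged (the form the commit removes):
# squares = { n : n * n for n in range(5) }
print(squares)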
e99239184cffbdc1ca08ba0050f6e4f23e1155fd
|
Allow import error to propagate up
|
girder/girder_worker,girder/girder_worker,Kitware/romanesco,girder/girder_worker,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco
|
romanesco/spark.py
|
romanesco/spark.py
|
import six
import romanesco
import os
import sys
from ConfigParser import ConfigParser, NoOptionError
def setup_spark_env():
# Setup pyspark
try:
spark_home = romanesco.config.get('spark', 'spark_home')
# If not configured try the environment
if not spark_home:
spark_home = os.environ.get('SPARK_HOME')
if not spark_home:
raise Exception('spark_home must be set or SPARK_HOME must be set in '
'the environment')
# Need to set SPARK_HOME
os.environ['SPARK_HOME'] = spark_home
if not os.path.exists(spark_home):
raise Exception('spark_home is not a valid directory')
except NoOptionError:
raise Exception('spark_home must be configured')
sys.path.append(os.path.join(spark_home, 'python'))
sys.path.append(os.path.join(spark_home, 'bin'))
# Check that we can import SparkContext
from pyspark import SparkConf, SparkContext
def create_spark_context(task_spark_conf):
from pyspark import SparkConf, SparkContext
# Set any Spark configuration parameters the user has specified
spark_conf = SparkConf()
for (name, value) in romanesco.config.items('spark'):
spark_conf.set(name, value)
# Override with any task specific configuration
for (name, value) in task_spark_conf.items():
spark_conf.set(name, value)
# Build up the context, using the master URL
sc = SparkContext(conf=spark_conf)
return sc
|
import six
import romanesco
import os
import sys
from ConfigParser import ConfigParser, NoOptionError
def setup_spark_env():
# Setup pyspark
try:
spark_home = romanesco.config.get('spark', 'spark_home')
# If not configured try the environment
if not spark_home:
spark_home = os.environ.get('SPARK_HOME')
if not spark_home:
raise Exception('spark_home must be set or SPARK_HOME must be set in '
'the environment')
# Need to set SPARK_HOME
os.environ['SPARK_HOME'] = spark_home
if not os.path.exists(spark_home):
raise Exception('spark_home is not a valid directory')
except NoOptionError:
raise Exception('spark_home must be configured')
sys.path.append(os.path.join(spark_home, 'python'))
sys.path.append(os.path.join(spark_home, 'bin'))
# Check that we can import SparkContext
try:
from pyspark import SparkConf, SparkContext
except Exception as ex:
six.raise_from(Exception('Unable to create SparkContext, check Spark '
'installation'), ex)
def create_spark_context(task_spark_conf):
from pyspark import SparkConf, SparkContext
# Set any Spark configuration parameters the user has specified
spark_conf = SparkConf()
for (name, value) in romanesco.config.items('spark'):
spark_conf.set(name, value)
# Override with any task specific configuration
for (name, value) in task_spark_conf.items():
spark_conf.set(name, value)
# Build up the context, using the master URL
sc = SparkContext(conf=spark_conf)
return sc
|
apache-2.0
|
Python
|
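The old_contents above chain the import failure onto a friendlier error with six.raise_from, which maps to `raise ... from ...` on Python 3. A minimal sketch of that chaining pattern (the module name is a deliberately fake placeholder):
import six
def load_backend():
    try:
        import not_a_real_module  # placeholder import that will fail
    except ImportError as ex:
        # Keep the original exception attached as the cause.
        six.raise_from(RuntimeError("backend unavailable"), ex)
try:
    load_backend()
except RuntimeError as err:
    print(err, "caused by:", repr(err.__cause__))  # Python 3 exposes the cause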
7a81c289d944bad4505a51c80b701f5f11159787
|
stop bandwagon leaving temp files around
|
mstriemer/addons-server,mstriemer/addons-server,wagnerand/zamboni,clouserw/zamboni,Witia1/olympia,SuriyaaKudoIsc/olympia,diox/olympia,diox/olympia,mozilla/addons-server,jpetto/olympia,mudithkr/zamboni,beni55/olympia,spasovski/zamboni,anaran/olympia,kumar303/zamboni,psiinon/addons-server,Hitechverma/zamboni,mozilla/addons-server,jamesthechamp/zamboni,muffinresearch/olympia,luckylavish/zamboni,mstriemer/olympia,psiinon/addons-server,Hitechverma/zamboni,eviljeff/zamboni,muffinresearch/addons-server,andymckay/addons-server,kumar303/addons-server,Witia1/olympia,mudithkr/zamboni,mudithkr/zamboni,luckylavish/zamboni,kumar303/addons-server,ngokevin/zamboni,Prashant-Surya/addons-server,mozilla/olympia,lavish205/olympia,luckylavish/zamboni,Witia1/olympia,crdoconnor/olympia,andymckay/olympia,Joergen/zamboni,kmaglione/olympia,mozilla/zamboni,mstriemer/zamboni,jasonthomas/zamboni,wagnerand/addons-server,andymckay/addons-server,muffinresearch/olympia,johancz/olympia,magopian/olympia,mrrrgn/olympia,eviljeff/olympia,mdaif/olympia,mstriemer/olympia,Joergen/zamboni,jbalogh/zamboni,tsl143/addons-server,diox/zamboni,andymckay/addons-server,Joergen/olympia,robhudson/zamboni,shahbaz17/zamboni,Witia1/olympia,andymckay/zamboni,yfdyh000/olympia,Jobava/zamboni,tsl143/zamboni,robhudson/zamboni,jbalogh/zamboni,wagnerand/addons-server,crdoconnor/olympia,mudithkr/zamboni,tsl143/zamboni,muffinresearch/addons-server,anaran/olympia,bqbn/addons-server,johancz/olympia,atiqueahmedziad/addons-server,Jobava/zamboni,ayushagrawal288/zamboni,ddurst/zamboni,Revanth47/addons-server,mrrrgn/olympia,jpetto/olympia,kmaglione/olympia,eviljeff/zamboni,wagnerand/olympia,Nolski/olympia,ddurst/zamboni,jamesthechamp/zamboni,ddurst/zamboni,tsl143/addons-server,kumar303/olympia,washort/zamboni,yfdyh000/olympia,washort/zamboni,harry-7/addons-server,washort/zamboni,lavish205/olympia,mdaif/olympia,harry-7/addons-server,wagnerand/zamboni,ingenioustechie/zamboni,clouserw/zamboni,shahbaz17/zamboni,mozilla/olympia,andymckay/olympia,robhudson/zamboni,diox/zamboni,mozilla/addons-server,beni55/olympia,elysium001/zamboni,koehlermichael/olympia,kumar303/addons-server,Prashant-Surya/addons-server,andymckay/olympia,SuriyaaKudoIsc/olympia,Joergen/zamboni,Revanth47/addons-server,anaran/olympia,wagnerand/addons-server,kumar303/addons-server,Prashant-Surya/addons-server,Joergen/olympia,kumar303/zamboni,shahbaz17/zamboni,wagnerand/zamboni,mozilla/addons-server,jasonthomas/zamboni,muffinresearch/addons-server,kumar303/olympia,andymckay/zamboni,elysium001/zamboni,Revanth47/addons-server,atiqueahmedziad/addons-server,mrrrgn/olympia,jasonthomas/zamboni,mdaif/olympia,ingenioustechie/zamboni,koehlermichael/olympia,robhudson/zamboni,clouserw/zamboni,kumar303/zamboni,mrrrgn/olympia,lavish205/olympia,magopian/olympia,ayushagrawal288/zamboni,ingenioustechie/zamboni,diox/zamboni,kumar303/zamboni,eviljeff/zamboni,Hitechverma/zamboni,harikishen/addons-server,eviljeff/zamboni,muffinresearch/addons-server,jamesthechamp/zamboni,tsl143/zamboni,muffinresearch/olympia,diox/olympia,kmaglione/olympia,ngokevin/zamboni,Joergen/olympia,Jobava/zamboni,diox/olympia,Nolski/olympia,wagnerand/olympia,wagnerand/addons-server,wagnerand/olympia,mdaif/olympia,bqbn/addons-server,anaran/olympia,psiinon/addons-server,psiinon/addons-server,washort/zamboni,shahbaz17/zamboni,wagnerand/zamboni,eviljeff/olympia,clouserw/zamboni,koehlermichael/olympia,aviarypl/mozilla-l10n-addons-server,harikishen/addons-server,Prashant-Surya/addons-server,mozilla/zamboni,ddurst/zamboni,jasonthomas/zamboni,ayushagrawal28
8/zamboni,andymckay/addons-server,ngokevin/zamboni,jpetto/olympia,mozilla/olympia,andymckay/zamboni,Nolski/olympia,Nolski/olympia,mstriemer/olympia,yfdyh000/olympia,ingenioustechie/zamboni,Joergen/olympia,tsl143/addons-server,muffinresearch/addons-server,Jobava/zamboni,mozilla/zamboni,ayushagrawal288/zamboni,lavish205/olympia,harikishen/addons-server,muffinresearch/olympia,johancz/olympia,diox/zamboni,jpetto/olympia,mozilla/zamboni,Nolski/olympia,aviarypl/mozilla-l10n-addons-server,kmaglione/olympia,elysium001/zamboni,Joergen/zamboni,beni55/olympia,mstriemer/olympia,spasovski/zamboni,yfdyh000/olympia,eviljeff/olympia,bqbn/addons-server,crdoconnor/olympia,magopian/olympia,mozilla/olympia,wagnerand/olympia,johancz/olympia,mrrrgn/olympia,koehlermichael/olympia,johancz/olympia,tsl143/zamboni,Witia1/olympia,spasovski/zamboni,bqbn/addons-server,atiqueahmedziad/addons-server,aviarypl/mozilla-l10n-addons-server,jbalogh/zamboni,andymckay/olympia,mstriemer/zamboni,Revanth47/addons-server,beni55/olympia,mdaif/olympia,jamesthechamp/zamboni,koehlermichael/olympia,mstriemer/addons-server,harry-7/addons-server,magopian/olympia,crdoconnor/olympia,SuriyaaKudoIsc/olympia,aviarypl/mozilla-l10n-addons-server,yfdyh000/olympia,beni55/olympia,muffinresearch/olympia,mstriemer/zamboni,luckylavish/zamboni,eviljeff/olympia,spasovski/zamboni,kumar303/olympia,magopian/olympia,kumar303/olympia,jbalogh/zamboni,Joergen/zamboni,atiqueahmedziad/addons-server,Hitechverma/zamboni,mstriemer/addons-server,harry-7/addons-server,harikishen/addons-server,elysium001/zamboni,tsl143/addons-server,Joergen/zamboni,Joergen/olympia,kmaglione/olympia,crdoconnor/olympia,SuriyaaKudoIsc/olympia,mstriemer/zamboni
|
apps/bandwagon/tests/test_tasks.py
|
apps/bandwagon/tests/test_tasks.py
|
import os
import shutil
import tempfile
from django.conf import settings
from nose.tools import eq_
from PIL import Image
from amo.tests.test_helpers import get_image_path
from bandwagon.tasks import resize_icon
def test_resize_icon():
somepic = get_image_path('mozilla.png')
src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
delete=False, dir=settings.TMP_PATH)
dest = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
dir=settings.TMP_PATH)
# resize_icon removes the original
shutil.copyfile(somepic, src.name)
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
resize_icon(src.name, dest.name)
dest_image = Image.open(dest.name)
eq_(dest_image.size, (32, 12))
assert not os.path.exists(src.name)
def test_resize_icon_poorly():
"""If we attempt to set the src/dst, we do nothing."""
somepic = get_image_path('mozilla.png')
src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
delete=False, dir=settings.TMP_PATH)
shutil.copyfile(somepic, src.name)
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
resize_icon(src.name, src.name)
# assert nothing happened
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
|
import os
import shutil
import tempfile
from django.conf import settings
from nose.tools import eq_
from PIL import Image
from amo.tests.test_helpers import get_image_path
from bandwagon.tasks import resize_icon
def test_resize_icon():
somepic = get_image_path('mozilla.png')
src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
delete=False)
dest = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png")
# resize_icon removes the original
shutil.copyfile(somepic, src.name)
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
resize_icon(src.name, dest.name)
dest_image = Image.open(dest.name)
eq_(dest_image.size, (32, 12))
assert not os.path.exists(src.name)
def test_resize_icon_poorly():
"""If we attempt to set the src/dst, we do nothing."""
somepic = get_image_path('mozilla.png')
src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
delete=False)
shutil.copyfile(somepic, src.name)
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
resize_icon(src.name, src.name)
# assert nothing happened
src_image = Image.open(src.name)
eq_(src_image.size, (82, 31))
|
bsd-3-clause
|
Python
|
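The commit above pins its temporary files to settings.TMP_PATH; with delete=False the caller owns cleanup, which is why the test asserts the source file is gone afterwards. A small sketch of that ownership pattern (using the system temp dir rather than a Django setting):
import os
import tempfile
tmp = tempfile.NamedTemporaryFile(suffix=".png", delete=False,
                                  dir=tempfile.gettempdir())
try:
    tmp.write(b"fake image bytes")
    tmp.close()
    print(os.path.exists(tmp.name))  # True: delete=False keeps it around
finally:
    os.unlink(tmp.name)  # explicit cleanup is now our responsibility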
4e7bc1dc4cc571f09667a9b29ceff8b5acdfbb13
|
Drop supplementary variables from formula
|
almey/policycompass-services,mmilaprat/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,almey/policycompass-services,mmilaprat/policycompass-services,mmilaprat/policycompass-services,almey/policycompass-services,policycompass/policycompass-services
|
apps/metricsmanager/serializers.py
|
apps/metricsmanager/serializers.py
|
from rest_framework import serializers
from .models import *
from .formula import validate_formula
from .formula import ComputeSemantics
from drf_compound_fields import fields as compound_fields
class MetricSerializer(serializers.ModelSerializer):
formula = serializers.CharField()
creator_path = serializers.Field(source='creator_path')
def validate(self, attrs):
"""
Check formula and that provided mappings cover all variables and filter
supplementary mappings.
"""
variables_used = validate_formula(attrs['formula'], attrs['variables'])
# Accept if too many vars are provided and filter them here
attrs['variables'] = { var_name: value for var_name, value
in attrs['variables'].items()
if var_name in variables_used }
return attrs
class Meta:
model = Metric
class OperationalizeMappingSerializer(serializers.Serializer):
variable = serializers.RegexField("__[0-9]+__")
dataset = serializers.IntegerField(min_value=0)
def restore_object(self, validated_data, instance=None):
return (validated_data['variable'], validated_data['dataset'])
class OperationalizeSerializer(serializers.Serializer):
title = serializers.CharField(max_length=100)
acronym = serializers.CharField(max_length=20)
datasets = compound_fields.ListField(OperationalizeMappingSerializer())
def restore_object(self, validated_data, instance=None):
return {
"title": validated_data["title"],
"acronym": validated_data["acronym"],
"datasets": dict(validated_data['datasets'])
}
class NormalizerSerializer(serializers.Serializer):
name = serializers.CharField(max_length=100)
acronym = serializers.CharField(max_length=20)
description = serializers.CharField(max_length=500)
arguments = serializers.Field(source="get_arguments")
|
from rest_framework import serializers
from .models import *
from .formula import validate_formula
from .formula import ComputeSemantics
from drf_compound_fields import fields as compound_fields
class MetricSerializer(serializers.ModelSerializer):
formula = serializers.CharField()
creator_path = serializers.Field(source='creator_path')
def validate(self, attrs):
"""
Check formula and that provided mappings cover all variables and filter
supplementary mappings.
"""
validate_formula(attrs['formula'], attrs['variables'])
return attrs
class Meta:
model = Metric
class OperationalizeMappingSerializer(serializers.Serializer):
variable = serializers.RegexField("__[0-9]+__")
dataset = serializers.IntegerField(min_value=0)
def restore_object(self, validated_data, instance=None):
return (validated_data['variable'], validated_data['dataset'])
class OperationalizeSerializer(serializers.Serializer):
title = serializers.CharField(max_length=100)
acronym = serializers.CharField(max_length=20)
datasets = compound_fields.ListField(OperationalizeMappingSerializer())
def restore_object(self, validated_data, instance=None):
return {
"title": validated_data["title"],
"acronym": validated_data["acronym"],
"datasets": dict(validated_data['datasets'])
}
class NormalizerSerializer(serializers.Serializer):
name = serializers.CharField(max_length=100)
acronym = serializers.CharField(max_length=20)
description = serializers.CharField(max_length=500)
arguments = serializers.Field(source="get_arguments")
|
agpl-3.0
|
Python
|
2209d03532d6c0ed7d55cf4cf759fd82585b5ad3
|
Update item.py
|
Lincoln-Cybernetics/Explore-
|
item.py
|
item.py
|
import pygame
class Item(pygame.sprite.Sprite):
def __init__(self, level, *groups):
super(Item, self).__init__(*groups)
#the game level
self.level = level
#base image
self.level.animator.set_Img(6,0)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
#type
self.flavor_saver = ['gem', 'axe', 'sammich', 'telescope', 'canteen']
self.flavor = 'gem'
#location
self.firstflag = True
self.scrnx = 0
self.scrny = 0
self.mapx = 0
self.mapy = 0
def spawn(self,x,y):
self.mapx = x
self.mapy = y
def position(self,x,y):
self.scrnx = x
self.scrny = y
self.rect = pygame.rect.Rect((self.scrnx*self.level.tilex, self.scrny*self.level.tiley), self.image.get_size())
def set_type(self, itype):
self.flavor = self.flavor_saver[itype]
if itype == 0:
xind = 6
yind = 0
if itype == 1:
xind = 6
yind = 5
if itype == 2:
xind = 6
yind = 4
if itype == 3:
xind = 6
yind = 3
if itype == 4:
xind = 4
yind = 4
self.level.animator.set_Img(xind,yind)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
def set_Index(self, x, y):
self.scrnx = x
self.rect.x = x*self.level.tilex
self.scrny = y
self.rect.y = y*self.level.tiley
def get_Index(self, axis):
if axis == 'X':
return self.scrnx
if axis == 'Y':
return self.scrny
return -1
def draw(self):
self.level.screen.blit(self.image, (self.rect.x,self.rect.y))
|
import pygame
class Item(pygame.sprite.Sprite):
def __init__(self, level, *groups):
super(Item, self).__init__(*groups)
#the game level
self.level = level
#base image
#self.level.animator.set_Img(0,5)
#self.image = self.level.animator.get_Img().convert()
#self.image.set_colorkey((255,0,0))
self.level.animator.set_Img(6,0)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
#type
self.flavor_saver = ['gem', 'axe', 'sammich', 'telescope']
self.flavor = 'gem'
#location
self.firstflag = True
self.scrnx = 0
self.scrny = 0
self.mapx = 0
self.mapy = 0
def spawn(self,x,y):
self.scrnx = self.level.mymap[x][y].scrnx
self.mapx = x
self.scrny = self.level.mymap[x][y].scrny
self.mapy = y
self.rect = pygame.rect.Rect((self.scrnx * self.level.tilex, self.scrny * self.level.tiley), self.image.get_size())
#self.scrnx = x
#self.scrny = y
#if self.firstflag:
# self.mapx = x
# self.mapy = y
# self.firstflag = False
#self.rect = pygame.rect.Rect((x * self.level.tilex, y * self.level.tiley), self.image.get_size())
def set_type(self, itype):
self.flavor = self.flavor_saver[itype]
if itype == 0:
xind = 6
yind = 0
if itype == 1:
xind = 6
yind = 5
if itype == 2:
xind = 6
yind = 4
if itype == 3:
xind = 6
yind = 3
self.level.animator.set_Img(xind,yind)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
#def reveal(self):
# self.image = self.secretimage
def set_Index(self, x, y):
self.scrnx = x
self.rect.x = x*self.level.tilex
self.scrny = y
self.rect.y = y*self.level.tiley
def get_Index(self, axis):
if axis == 'X':
return self.scrnx
if axis == 'Y':
return self.scrny
return -1
|
unlicense
|
Python
|
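set_type above maps each item type to sprite-sheet coordinates through a chain of if statements. A hypothetical refactor (not part of the commit) replaces the chain with one lookup table:
SPRITE_COORDS = {
    0: (6, 0),  # gem
    1: (6, 5),  # axe
    2: (6, 4),  # sammich
    3: (6, 3),  # telescope
    4: (4, 4),  # canteen
}
def sprite_for(itype):
    return SPRITE_COORDS.get(itype, (6, 0))  # fall back to the gem tile
print(sprite_for(2))  # (6, 4)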
f408b1368b641be2349266a59b32f7fd1fa53265
|
Fix a couple of minor bugs
|
klmitch/dtest,klmitch/dtest
|
stream.py
|
stream.py
|
from StringIO import StringIO
import sys
from eventlet.corolocal import local
_installed = False
_save_out = None
_save_err = None
class _StreamLocal(local):
def __init__(self):
# Initialize the output and error streams
self.out = StringIO()
self.err = StringIO()
_stlocal = _StreamLocal()
def pop():
# Get the out stream contents, then close and replace the stream
out = _stlocal.out.getvalue()
_stlocal.out.close()
_stlocal.out = StringIO()
# Ditto with the error stream contents
err = _stlocal.err.getvalue()
_stlocal.err.close()
_stlocal.err = StringIO()
return out, err
class StreamProxy(object):
def __init__(self, stname):
# Save the stream name of interest
super(StreamProxy, self).__setattr__('_stname', stname)
def __getattr__(self, attr):
# Proxy out to the appropriate stream
return getattr(getattr(_stlocal, self._stname), attr)
def __setattr__(self, attr, value):
# Proxy out to the appropriate stream
return setattr(getattr(_stlocal, self._stname), attr, value)
def __delattr__(self, attr):
# Proxy out to the appropriate stream
return delattr(getattr(_stlocal, self._stname), attr)
def install():
global _installed
global _save_out
global _save_err
# Do nothing if we're already installed
if _installed:
return
# Remember that we've been installed
_installed = True
# Save original stdout and stderr
_save_out = sys.stdout
_save_err = sys.stderr
# Replace them with StreamProxy instances
sys.stdout = StreamProxy('out')
sys.stderr = StreamProxy('err')
def uninstall():
global _installed
global _save_out
global _save_err
# Do nothing if we haven't been installed
if not _installed:
return
# Restore original stdout and stderr
sys.stdout = _save_out
sys.stderr = _save_err
# Reset our state
_save_out = None
_save_err = None
_installed = False
|
from StringIO import StringIO
import sys
from eventlet.corolocal import local
_installed = False
_save_out = None
_save_err = None
class _StreamLocal(local):
def __init__(self):
# Initialize the output and error streams
self.out = StringIO()
self.err = StringIO()
_stlocal = _StreamLocal()
def pop():
# Get the out stream contents, then close and replace the stream
out = _stlocal.out.getvalue()
_stlocal.out.close()
_stlocal.out = StringIO()
# Ditto with the error stream contents
err = _stlocal.err.getvalue()
_stlocal.err.close()
_stlocal.err = StringIO()
return out, err
class StreamProxy(object):
def __init__(self, stname):
# Save the stream name of interest
self._stname = stname
def __getattr__(self, attr):
# Proxy out to the appropriate stream
return getattr(getattr(_stlocal, self._stname), attr)
def __setattr__(self, attr, value):
# Proxy out to the appropriate stream
return setattr(getattr(_stlocal, self._stname), attr, value)
def __delattr__(self, attr):
# Proxy out to the appropriate stream
return delattr(getattr(_stlocal, self._stname), attr)
def install():
global _installed
global _save_out
global _save_err
# Do nothing if we're already installed
if _installed:
return
# Remember that we've been installed
_installed = True
# Save original stdout and stderr
_save_out = sys.stdout
_save_err = sys.stderr
# Replace them with StreamProxy instances
sys.stdout = StreamProxy('out')
sys.stderr = StreamProxy('err')
def uninstall():
global _save_out
global _save_err
# Do nothing if we haven't been installed
if not _installed:
return
# Restore original stdout and stderr
sys.stdout = _save_out
sys.stderr = _save_err
# Reset our state
_save_out = None
_save_err = None
_installed = False
|
apache-2.0
|
Python
|
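The module above swaps sys.stdout for a proxy whose writes land in a per-greenthread buffer. A self-contained sketch of the same idea using threading.local in place of eventlet's corolocal (an approximation, not the module itself):
import sys
import threading
from io import StringIO
_local = threading.local()
class OutProxy(object):
    """Route writes into a buffer owned by the current thread."""
    def write(self, text):
        buf = getattr(_local, 'buf', None)
        if buf is None:
            buf = _local.buf = StringIO()
        buf.write(text)
    def flush(self):
        pass  # nothing buffered at the OS level
sys.stdout = OutProxy()
print("captured")              # lands in this thread's StringIO
sys.stdout = sys.__stdout__    # restore the real stream
print(_local.buf.getvalue())   # shows what was captured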
cbcb89a7a3ee4884768e272bbe3435bb6e08d224
|
Add constraints for the same column and the same row
|
asmeurer/sudoku
|
sudoku.py
|
sudoku.py
|
import datetime
def generate_info(name, version, depends):
return {
"{name}-{version}-0.tar.bz2".format(name=name, version=version): {
"build": "0",
"build_number": 0,
"date": datetime.date.today().strftime("%Y-%m-%d"),
"depends": depends,
"name": name,
"size": 0,
"version": str(version)
}
}
REPODATA = {
"info": {
"arch": "x86_64",
"platform": "osx"
},
"packages": {}
}
def generate_cells():
packages = {}
for row in range(1, 10):
for column in range(1, 10):
for entry in range(1, 10):
depends1 = []
depends0 = []
for d in range(1, 10):
if d == entry:
continue
# Each entry being set (version 1) requires that the other
# entries are not set (version 0)
depends1.append("%sx%s-is-%s 0" % (row, column, d))
for other_row in range(1, 10):
if other_row == row:
continue
# If an entry is set, other cells in the same column can't
# have the same entry.
depends1.append("%sx%s-is-%s 0" % (other_row, column, entry))
for other_column in range(1, 10):
if other_column == column:
continue
# If an entry is set, other cells in the same row can't
# have the same entry.
depends1.append("%sx%s-is-%s 0" % (row, other_column, entry))
p1 = generate_info("%sx%s-is-%s" % (row, column, entry), 1, depends1)
p0 = generate_info("%sx%s-is-%s" % (row, column, entry), 0, depends0)
packages.update({**p0, **p1})
return packages
print(generate_cells())
|
import datetime
def generate_info(name, version, depends):
return {
"{name}-{version}-0.tar.bz2".format(name=name, version=version): {
"build": "0",
"build_number": 0,
"date": datetime.date.today().strftime("%Y-%m-%d"),
"depends": depends,
"name": name,
"size": 0,
"version": str(version)
}
}
REPODATA = {
"info": {
"arch": "x86_64",
"platform": "osx"
},
"packages": {}
}
def generate_cells():
packages = {}
for row in range(1, 10):
for column in range(1, 10):
for entry in range(1, 10):
depends = []
for d in range(1, 10):
if d == entry:
continue
# Each entry being 1 requires that the other entries be 0
depends.append("%sx%s-is-%s 0" % (row, column, d))
p1 = generate_info("%sx%s-is-%s" % (row, column, entry), 1,
depends)
p0 = generate_info("%sx%s-is-%s" % (row, column, entry), 0, [])
packages.update({**p0, **p1})
return packages
print(generate_cells())
|
mit
|
Python
|
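generate_cells above encodes sudoku as package metadata: each cell/digit pair is a pseudo-package with versions 0 (unset) and 1 (set), and version 1 depends on every conflicting pair being pinned to 0. The core exclusion logic, condensed into a runnable sketch:
def exclusions(row, column, entry):
    """Names of entries that must be version 0 when (row, column) is entry."""
    deps = []
    for other in range(1, 10):
        if other != entry:   # same cell, a different digit
            deps.append("%sx%s-is-%s 0" % (row, column, other))
        if other != row:     # same column, a different row
            deps.append("%sx%s-is-%s 0" % (other, column, entry))
        if other != column:  # same row, a different column
            deps.append("%sx%s-is-%s 0" % (row, other, entry))
    return deps
print(len(exclusions(1, 1, 3)))  # 24 = 8 digits + 8 rows + 8 columns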
2fb9e916155fce16a807c1c7eebf4a607c22ef94
|
Correct Celery support to be backwards compatible (fixes GH-124)
|
someonehan/raven-python,johansteffner/raven-python,ronaldevers/raven-python,akalipetis/raven-python,dbravender/raven-python,akheron/raven-python,jmp0xf/raven-python,jbarbuto/raven-python,recht/raven-python,lepture/raven-python,arthurlogilab/raven-python,arthurlogilab/raven-python,smarkets/raven-python,nikolas/raven-python,tarkatronic/opbeat_python,jbarbuto/raven-python,icereval/raven-python,Photonomie/raven-python,patrys/opbeat_python,someonehan/raven-python,icereval/raven-python,lepture/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,getsentry/raven-python,smarkets/raven-python,lepture/raven-python,ticosax/opbeat_python,arthurlogilab/raven-python,percipient/raven-python,Photonomie/raven-python,dirtycoder/opbeat_python,patrys/opbeat_python,Goldmund-Wyldebeast-Wunderliebe/raven-python,dbravender/raven-python,nikolas/raven-python,tarkatronic/opbeat_python,openlabs/raven,ronaldevers/raven-python,getsentry/raven-python,daikeren/opbeat_python,jmp0xf/raven-python,Photonomie/raven-python,nikolas/raven-python,inspirehep/raven-python,jbarbuto/raven-python,patrys/opbeat_python,nikolas/raven-python,hzy/raven-python,icereval/raven-python,beniwohli/apm-agent-python,akheron/raven-python,danriti/raven-python,arthurlogilab/raven-python,someonehan/raven-python,recht/raven-python,dbravender/raven-python,danriti/raven-python,percipient/raven-python,getsentry/raven-python,inspirehep/raven-python,recht/raven-python,daikeren/opbeat_python,jmp0xf/raven-python,hzy/raven-python,daikeren/opbeat_python,dirtycoder/opbeat_python,dirtycoder/opbeat_python,tarkatronic/opbeat_python,smarkets/raven-python,johansteffner/raven-python,icereval/raven-python,hzy/raven-python,inspirehep/raven-python,akalipetis/raven-python,jmagnusson/raven-python,ewdurbin/raven-python,percipient/raven-python,ewdurbin/raven-python,danriti/raven-python,collective/mr.poe,inspirehep/raven-python,jbarbuto/raven-python,ronaldevers/raven-python,ewdurbin/raven-python,ticosax/opbeat_python,jmagnusson/raven-python,jmagnusson/raven-python,patrys/opbeat_python,Goldmund-Wyldebeast-Wunderliebe/raven-python,alex/raven,beniwohli/apm-agent-python,beniwohli/apm-agent-python,johansteffner/raven-python,lopter/raven-python-old,akheron/raven-python,beniwohli/apm-agent-python,akalipetis/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,ticosax/opbeat_python,smarkets/raven-python
|
raven/contrib/celery/__init__.py
|
raven/contrib/celery/__init__.py
|
"""
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from celery.task import task
except ImportError:
from celery.decorators import task
from celery.signals import after_setup_logger, task_failure
from raven.base import Client
from raven.handlers.logging import SentryHandler
class CeleryMixin(object):
def send_encoded(self, message):
"Errors through celery"
self.send_raw.delay(message)
@task(routing_key='sentry')
def send_raw(self, message):
return super(CeleryMixin, self).send_encoded(message)
class CeleryClient(CeleryMixin, Client):
pass
class CeleryFilter(object):
def filter(self, record):
if record.funcName in ('_log_error',):
return 0
else:
return 1
def register_signal(client):
def process_failure_signal(sender, task_id, exception, args, kwargs,
traceback, einfo, **kw):
client.captureException(
exc_info=einfo.exc_info,
extra={
'task_id': task_id,
'task': sender,
'args': args,
'kwargs': kwargs,
})
task_failure.connect(process_failure_signal, weak=False)
def process_logger_event(sender, logger, loglevel, logfile, format,
colorize, **kw):
import logging
logger = logging.getLogger()
handler = SentryHandler(client)
if handler.__class__ in map(type, logger.handlers):
return False
handler.setLevel(logging.ERROR)
handler.addFilter(CeleryFilter())
logger.addHandler(handler)
after_setup_logger.connect(process_logger_event, weak=False)
|
"""
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from celery.task import task
except ImportError:
from celery.decorators import task
from celery.signals import after_setup_logger, task_failure
from raven.base import Client
from raven.handlers.logging import SentryHandler
class CeleryMixin(object):
def send_encoded(self, message):
"Errors through celery"
self.send_raw.delay(message)
@task(routing_key='sentry')
def send_raw(self, message):
return super(CeleryMixin, self).send_encoded(message)
class CeleryClient(CeleryMixin, Client):
pass
class CeleryFilter(object):
def filter(self, record):
if record.funcName in ('_log_error',):
return 0
else:
return 1
def register_signal(client):
@task_failure.connect(weak=False)
def process_failure_signal(sender, task_id, exception, args, kwargs,
traceback, einfo, **kw):
client.captureException(
exc_info=einfo.exc_info,
extra={
'task_id': task_id,
'task': sender,
'args': args,
'kwargs': kwargs,
})
@after_setup_logger.connect(weak=False)
def process_logger_event(sender, logger, loglevel, logfile, format,
colorize, **kw):
import logging
logger = logging.getLogger()
handler = SentryHandler(client)
if handler.__class__ in map(type, logger.handlers):
return False
handler.setLevel(logging.ERROR)
handler.addFilter(CeleryFilter())
logger.addHandler(handler)
|
bsd-3-clause
|
Python
|
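The fix above replaces the @signal.connect(weak=False) decorator form with a plain signal.connect(handler, weak=False) call, since older celery releases only support the latter. A toy signal class contrasting the styles (celery's real Signal API is richer than this):
class Signal(object):
    def __init__(self):
        self._receivers = []
    def connect(self, receiver, weak=False):
        self._receivers.append(receiver)
        return receiver
    def send(self, **kwargs):
        for receiver in self._receivers:
            receiver(**kwargs)
task_failure = Signal()
def on_failure(**kwargs):
    print("failure:", kwargs)
task_failure.connect(on_failure, weak=False)  # works on old and new celery
task_failure.send(task_id=42)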
8fb149400a115fd0abf595c6716aed22c396eb86
|
remove call to curCycle in panic(). The panic() function already prints the current tick value, so this call to curCycle() is redundant. Since we are moving towards multiple clock domains, this call would also print a misleading time.
|
LingxiaoJIA/gem5,haowu4682/gem5,LingxiaoJIA/gem5,haowu4682/gem5,haowu4682/gem5,haowu4682/gem5,haowu4682/gem5,LingxiaoJIA/gem5,LingxiaoJIA/gem5,haowu4682/gem5,haowu4682/gem5,haowu4682/gem5,LingxiaoJIA/gem5,haowu4682/gem5,LingxiaoJIA/gem5,LingxiaoJIA/gem5
|
src/mem/slicc/ast/AST.py
|
src/mem/slicc/ast/AST.py
|
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.util import PairContainer, Location
class AST(PairContainer):
def __init__(self, slicc, pairs=None):
self.slicc = slicc
self.location = slicc.currentLocation()
self.pairs = {}
if pairs:
self.pairs.update(getattr(pairs, "pairs", pairs))
@property
def symtab(self):
return self.slicc.symtab
@property
def state_machine(self):
return self.slicc.symtab.state_machine
def warning(self, message, *args):
self.location.warning(message, *args)
def error(self, message, *args):
self.location.error(message, *args)
def embedError(self, message, *args):
if args:
message = message % args
code = self.slicc.codeFormatter()
code('''
panic("Runtime Error at ${{self.location}}: %s.\\n", $message);
''')
return code
|
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.util import PairContainer, Location
class AST(PairContainer):
def __init__(self, slicc, pairs=None):
self.slicc = slicc
self.location = slicc.currentLocation()
self.pairs = {}
if pairs:
self.pairs.update(getattr(pairs, "pairs", pairs))
@property
def symtab(self):
return self.slicc.symtab
@property
def state_machine(self):
return self.slicc.symtab.state_machine
def warning(self, message, *args):
self.location.warning(message, *args)
def error(self, message, *args):
self.location.error(message, *args)
def embedError(self, message, *args):
if args:
message = message % args
code = self.slicc.codeFormatter()
code('''
panic("Runtime Error at ${{self.location}}, Ruby Time: %d, %s.\\n",
curCycle(), $message);
''')
return code
|
bsd-3-clause
|
Python
|
723abaf9bb1ad6d0b8c67e06522bb1d87f3ab82d
|
Fix broken test, handle terminate on the REQUEST
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
test/test_listallobjects_handler.py
|
test/test_listallobjects_handler.py
|
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_listallobjects
from groundstation.transfer.response_handlers import handle_terminate
import groundstation.transfer.response as response
from groundstation.proto.object_list_pb2 import ObjectList
class TestHandlerListAllObjects(StationHandlerTestCase):
def test_handle_listallobjects_returns_stream_for_few_objects(self):
# Make ourselves cached
self.station.station.mark_queried(self.station.origin)
oids = list()
for i in xrange(64):
oids.append(self.station.station.write("test_%i" % (i)))
handle_listallobjects(self.station)
resp = self.station.stream.pop()
self.assertIsInstance(resp, response.Response)
objects = ObjectList()
objects.ParseFromString(resp.payload)
self.assertEqual(len(objects.objectname), len(oids))
for i in objects.objectname:
self.assertIn(i, oids)
def test_follows_up_on_channels(self):
self.station.set_real_terminate(True)
self.station.set_real_id(True)
self.station.set_real_register(True)
handle_listallobjects(self.station)
req1 = self.station.stream.pop(0)
self.assertEqual(req1.verb, "LISTALLOBJECTS")
while self.station.stream:
resp = self.station.stream.pop(0)
if resp.verb == "TERMINATE":
break
self.assertEqual(resp.verb, "DESCRIBEOBJECTS")
self.assertEqual(len(self.station.stream), 0)
resp.stream = self.station.stream
handle_terminate(req1)
req2 = self.station.stream.pop(0)
self.assertEqual(req2.verb, "LISTALLCHANNELS")
class TestHandlerListAllObjectsCached(StationHandlerTestCase):
def test_has_cache(self):
handle_listallobjects(self.station)
req1 = self.station.stream.pop(0)
self.assertEqual(req1.verb, "LISTALLOBJECTS")
while self.station.stream:
resp = self.station.stream.pop()
self.assertEqual(resp.verb, "DESCRIBEOBJECTS")
handle_listallobjects(self.station)
resp = self.station.stream.pop(0)
self.assertIsInstance(resp, response.Response)
|
from handler_fixture import StationHandlerTestCase
from groundstation.transfer.request_handlers import handle_listallobjects
from groundstation.transfer.response_handlers import handle_terminate
import groundstation.transfer.response as response
from groundstation.proto.object_list_pb2 import ObjectList
class TestHandlerListAllObjects(StationHandlerTestCase):
def test_handle_listallobjects_returns_stream_for_few_objects(self):
# Make ourselves cached
self.station.station.mark_queried(self.station.origin)
oids = list()
for i in xrange(64):
oids.append(self.station.station.write("test_%i" % (i)))
handle_listallobjects(self.station)
resp = self.station.stream.pop()
self.assertIsInstance(resp, response.Response)
objects = ObjectList()
objects.ParseFromString(resp.payload)
self.assertEqual(len(objects.objectname), len(oids))
for i in objects.objectname:
self.assertIn(i, oids)
def test_follows_up_on_channels(self):
self.station.set_real_terminate(True)
self.station.set_real_id(True)
self.station.set_real_register(True)
handle_listallobjects(self.station)
req1 = self.station.stream.pop(0)
self.assertEqual(req1.verb, "LISTALLOBJECTS")
while self.station.stream:
resp = self.station.stream.pop(0)
if resp.verb == "TERMINATE":
break
self.assertEqual(resp.verb, "DESCRIBEOBJECTS")
self.assertEqual(len(self.station.stream), 0)
resp.stream = self.station.stream
handle_terminate(resp)
req2 = self.station.stream.pop(0)
self.assertEqual(req2.verb, "LISTALLCHANNELS")
class TestHandlerListAllObjectsCached(StationHandlerTestCase):
def test_has_cache(self):
handle_listallobjects(self.station)
req1 = self.station.stream.pop(0)
self.assertEqual(req1.verb, "LISTALLOBJECTS")
while self.station.stream:
resp = self.station.stream.pop()
self.assertEqual(resp.verb, "DESCRIBEOBJECTS")
handle_listallobjects(self.station)
resp = self.station.stream.pop(0)
self.assertIsInstance(resp, response.Response)
|
mit
|
Python
|
4c5550420b8a9f1bf88f4329952f6e2a161cd20f
|
Fix test on kaos with latest qt5
|
pyQode/pyqode.json,pyQode/pyqode.json
|
test/test_panels/test_navigation.py
|
test/test_panels/test_navigation.py
|
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text().replace('&', '').lower() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
|
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
|
mit
|
Python
|
6878860d8b8d3377960a8310b6b733a4cbc30959
|
use environment variable
|
nzinov/phystech-seabattle,nzinov/phystech-seabattle,nzinov/phystech-seabattle
|
main.py
|
main.py
|
import os
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(os.environ['PORT'])
tornado.ioloop.IOLoop.current().start()
|
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(80)
tornado.ioloop.IOLoop.current().start()
|
agpl-3.0
|
Python
|
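os.environ['PORT'] raises KeyError when the variable is unset, which suits Heroku-style hosts but is brittle locally. A common hedge (the 8888 default is an arbitrary choice, not from the commit):
import os
port = int(os.environ.get('PORT', 8888))  # fall back when PORT is unset
print("would listen on port", port)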
944515624ec57f94b6bdb4e9a46988b9604f4c3b
|
add basic command line parser.
|
yumaokao/gdrv,yumaokao/gdrv
|
main.py
|
main.py
|
#!/usr/bin/python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
import sys
import argparse
import re
import logging
lg = logging.getLogger("DRIVE_MAIN")
lg.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(name)s] %(levelname)s - %(message)s')
ch.setFormatter(formatter)
lg.addHandler(ch)
drive_commands = ['info', 'list']
def command_list():
lg.debug("YMK command_list!!!")
parser = argparse.ArgumentParser(
description='YMK google drive command line tool -- list')
parser.add_argument('list', nargs='+')
args = parser.parse_args()
if args.list[0] != 'list':
parser.print_help()
if len(args.list) > 1 and args.list[1] == 'help':
parser.print_help()
lg.debug(args)
def main():
lg.debug("YMK Goodbye World!!!")
parser = argparse.ArgumentParser(
description='YMK google drive command line tool')
parser.add_argument('command', nargs=1, choices=drive_commands)
parser.add_argument('others', nargs='?')
args = parser.parse_args()
lg.debug(args.command)
if args.command[0] == 'list':
command_list()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
import sys
import argparse
import re
def main():
print("YMK Goodbye World!!!")
if __name__ == '__main__':
main()
|
mit
|
Python
|
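The command dispatch above re-parses argv by hand for each subcommand. The idiomatic argparse alternative is add_subparsers, sketched here (command names borrowed from the script; the arguments are illustrative):
import argparse
parser = argparse.ArgumentParser(description='google drive command line tool')
subparsers = parser.add_subparsers(dest='command')
list_parser = subparsers.add_parser('list', help='list files')
list_parser.add_argument('path', nargs='?', default='/')
info_parser = subparsers.add_parser('info', help='show file info')
info_parser.add_argument('file_id')
args = parser.parse_args(['list', 'docs'])
print(args.command, args.path)  # list docs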
4bcf8ea9572b90782e2f1d6150ec96e28002378f
|
set loglevel to warning
|
kordless/wisdom,kordless/wisdom,kordless/wisdom,kordless/wisdom
|
main.py
|
main.py
|
"""
The MIT License (MIT)
Copyright (c) 2014 Kord Campbell, StackGeek
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Kord Campbell'
__website__ = 'http://www.stackmonkey.com/'
import os,sys
# python paths
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'web/models'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib'))
# imports
import webapp2
import config
import routes
import logging
from web.basehandler import handle_error
logging.getLogger().setLevel(logging.WARNING)
# base application
app = webapp2.WSGIApplication(debug = os.environ['SERVER_SOFTWARE'].startswith('Dev'), config=config.webapp2_config)
# error handling
app.error_handlers[403] = handle_error
app.error_handlers[404] = handle_error
# debug output
if not app.debug:
app.error_handlers[500] = handle_error
# add routes
routes.add_routes(app)
|
"""
The MIT License (MIT)
Copyright (c) 2014 Kord Campbell, StackGeek
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Kord Campbell'
__website__ = 'http://www.stackmonkey.com/'
import os,sys
# python paths
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'web/models'))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib'))
# imports
import webapp2
import config
import routes
from web.basehandler import handle_error
# base application
app = webapp2.WSGIApplication(debug = os.environ['SERVER_SOFTWARE'].startswith('Dev'), config=config.webapp2_config)
# error handling
app.error_handlers[403] = handle_error
app.error_handlers[404] = handle_error
# debug output
if not app.debug:
app.error_handlers[500] = handle_error
# add routes
routes.add_routes(app)
|
mit
|
Python
|
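Aside (not part of any record): the commit above only raises the root logger's threshold. A minimal standalone sketch of that pattern, assuming nothing beyond the standard library:
import logging

logging.getLogger().setLevel(logging.WARNING)  # raise the root threshold

logging.info("dropped: below the WARNING threshold")
logging.warning("emitted: at or above WARNING")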
c126348a70f316c9ef25d70dc87d6b25f69f83af
|
remove routes
|
Radcliffe/ParkLife
|
main.py
|
main.py
|
from flask import Flask
app = Flask(__name__)
app.config['DEBUG'] = True
from util import today
from models import Event
from google.appengine.ext import ndb
import twilio.twiml
@app.route('/message', methods=['GET', 'POST'])
def reply():
query = Event.query(Event.date == today())
messages = []
for event in query:
messages.append('%s %s (%s)' %
(event.start, event.summary, event.location))
response = twilio.twiml.Response()
if len(messages) == 0:
response.message('No events today')
else:
response.message(' | '.join(messages))
return str(response)
|
from flask import Flask
app = Flask(__name__)
app.config['DEBUG'] = True
from util import today
from models import Event
from google.appengine.ext import ndb
import twilio.twiml
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.route('/message', methods=['GET', 'POST'])
def reply():
query = Event.query(Event.date == today())
messages = []
for event in query:
messages.append('%s %s (%s)' %
(event.start, event.summary, event.location))
response = twilio.twiml.Response()
if len(messages) == 0:
response.message('No events today')
else:
response.message(' | '.join(messages))
return str(response)
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, nothing at this URL.', 404
|
apache-2.0
|
Python
|
60ae3ae54ccc573983cb9c283844eab1b62ba7a7
|
Use multiprocessing instead of threading
|
repos-bitcoin/bitcoind-ncurses,azeteki/bitcoind-ncurses,esotericnonsense/bitcoind-ncurses
|
main.py
|
main.py
|
#!/usr/bin/env python
###############################################################################
# bitcoind-ncurses by Amphibian
# thanks to jgarzik for bitcoinrpc
# wumpus and kylemanna for configuration file parsing
# all the users for their suggestions and testing
# and of course the bitcoin dev team for that bitcoin gizmo, pretty neat stuff
###############################################################################
import multiprocessing, ConfigParser, argparse, signal
import rpc
import interface
import config
def interrupt_signal(signal, frame):
s = {'stop': "Interrupt signal caught"}
interface_queue.put(s)
def debug(rpc_queue):
# coinbase testnet transaction for debugging
#s = {'txid': "cfb8bc436ca1d8b8b2d324a9cb2ef097281d2d8b54ba4239ce447b31b8757df2"}
# tx with 1001 inputs, 1002 outputs
s = {'txid': 'e1dc93e7d1ee2a6a13a9d54183f91a5ae944297724bee53db00a0661badc3005'}
rpc_queue.put(s)
if __name__ == '__main__':
# initialise queues
interface_queue = multiprocessing.Queue()
rpc_queue = multiprocessing.Queue()
# parse commandline arguments
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config",
help="path to config file [bitcoin.conf]",
default="bitcoin.conf")
args = parser.parse_args()
# parse config file
try:
cfg = config.read_file(args.config)
except IOError:
cfg = {}
s = {'stop': "configuration file [" + args.config + "] does not exist or could not be read"}
interface_queue.put(s)
# initialise interrupt signal handler (^C)
signal.signal(signal.SIGINT, interrupt_signal)
# start RPC thread
rpc_thread = multiprocessing.Process(target=rpc.loop, args = (interface_queue, rpc_queue, cfg))
rpc_thread.daemon = True
rpc_thread.start()
#debug(rpc_queue)
# main loop
interface.loop(interface_queue, rpc_queue)
# ensure RPC thread exits cleanly
rpc_thread.join()
|
#!/usr/bin/env python
###############################################################################
# bitcoind-ncurses by Amphibian
# thanks to jgarzik for bitcoinrpc
# wumpus and kylemanna for configuration file parsing
# all the users for their suggestions and testing
# and of course the bitcoin dev team for that bitcoin gizmo, pretty neat stuff
###############################################################################
import threading, Queue, ConfigParser, argparse, signal
import rpc
import interface
import config
def interrupt_signal(signal, frame):
s = {'stop': "Interrupt signal caught"}
interface_queue.put(s)
def debug(rpc_queue):
# coinbase testnet transaction for debugging
#s = {'txid': "cfb8bc436ca1d8b8b2d324a9cb2ef097281d2d8b54ba4239ce447b31b8757df2"}
# tx with 1001 inputs, 1002 outputs
s = {'txid': 'e1dc93e7d1ee2a6a13a9d54183f91a5ae944297724bee53db00a0661badc3005'}
rpc_queue.put(s)
if __name__ == '__main__':
# initialise queues
interface_queue = Queue.Queue()
rpc_queue = Queue.Queue()
# parse commandline arguments
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config",
help="path to config file [bitcoin.conf]",
default="bitcoin.conf")
args = parser.parse_args()
# parse config file
try:
cfg = config.read_file(args.config)
except IOError:
cfg = {}
s = {'stop': "configuration file [" + args.config + "] does not exist or could not be read"}
interface_queue.put(s)
# initialise interrupt signal handler (^C)
signal.signal(signal.SIGINT, interrupt_signal)
# start RPC thread
rpc_thread = threading.Thread(target=rpc.loop, args = (interface_queue, rpc_queue, cfg))
rpc_thread.daemon = True
rpc_thread.start()
#debug(rpc_queue)
# main loop
interface.loop(interface_queue, rpc_queue)
# ensure RPC thread exits cleanly
rpc_thread.join()
|
mit
|
Python
|
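Aside (illustrative sketch in Python 3 syntax, rather than the record's Python 2): multiprocessing.Queue and multiprocessing.Process are near drop-in replacements for Queue.Queue and threading.Thread, which is why the diff above is almost a one-word swap; the main caveat is that queue items must be picklable.
import multiprocessing

def worker(q):
    q.put('done')  # runs in a separate process

if __name__ == '__main__':
    q = multiprocessing.Queue()  # cross-process, pickle-based queue
    p = multiprocessing.Process(target=worker, args=(q,))
    p.daemon = True
    p.start()
    print(q.get())  # blocks until the worker puts 'done'
    p.join()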
34a3b5c626e077907c46835b1759a818b3fc332a
|
Make 2-legged calls with the help of tweepy, Twitter API lib.
|
uservoice/uservoice-python
|
uservoice/__init__.py
|
uservoice/__init__.py
|
from Crypto.Cipher import AES
import base64
import hashlib
import urllib
import operator
import array
import simplejson as json
import urllib
import urllib2
import datetime
import pytz
from tweepy import oauth
def generate_sso_token(subdomain_name, sso_key, user_attributes):
current_time = (datetime.datetime.now(pytz.utc) + datetime.timedelta(minutes=5)).strftime('%Y-%m-%d %H:%M:%S')
user_attributes.setdefault('expires', current_time)
user_json = json.dumps(user_attributes, separators=(',',':'))
iv = "OpenSSL for Ruby"
block_size = 16
salted = sso_key + subdomain_name
saltedHash = hashlib.sha1(salted).digest()[:16]
json_bytes = array.array('b', user_json[0 : len(user_json)])
iv_bytes = array.array('b', iv[0 : len(iv)])
for i in range(0, 16):
json_bytes[i] = operator.xor(json_bytes[i], iv_bytes[i])
pad = block_size - len(json_bytes.tostring()) % block_size
data = json_bytes.tostring() + pad * chr(pad)
aes = AES.new(saltedHash, AES.MODE_CBC, iv)
encrypted_bytes = aes.encrypt(data)
return urllib.quote(base64.b64encode(encrypted_bytes))
class OAuth:
def __init__(self, subdomain_name, api_key, api_secret):
self.api_url = "https://" + subdomain_name + ".uservoice.com"
self.consumer = oauth.OAuthConsumer(api_key, api_secret)
def request(self, method, path, params={}):
url = self.api_url + path
request = oauth.OAuthRequest.from_consumer_and_token(
self.consumer, http_method=method.upper(), http_url=url, parameters={})
request.sign_request(oauth.OAuthSignatureMethod_HMAC_SHA1(), self.consumer, None)
headers = request.to_header()
req = urllib2.Request(url, None, headers)
return json.load(urllib2.urlopen(req))
|
from Crypto.Cipher import AES
import base64
import hashlib
import urllib
import operator
import array
import simplejson as json
import urllib
import urllib2
import datetime
import pytz
from tweepy import oauth
def generate_sso_token(subdomain_name, sso_key, user_attributes):
current_time = (datetime.datetime.now(pytz.utc) + datetime.timedelta(minutes=5)).strftime('%Y-%m-%d %H:%M:%S')
user_attributes.setdefault('expires', current_time)
user_json = json.dumps(user_attributes, separators=(',',':'))
iv = "OpenSSL for Ruby"
block_size = 16
salted = sso_key + subdomain_name
saltedHash = hashlib.sha1(salted).digest()[:16]
json_bytes = array.array('b', user_json[0 : len(user_json)])
iv_bytes = array.array('b', iv[0 : len(iv)])
# # xor the iv into the first 16 bytes.
for i in range(0, 16):
json_bytes[i] = operator.xor(json_bytes[i], iv_bytes[i])
pad = block_size - len(json_bytes.tostring()) % block_size
data = json_bytes.tostring() + pad * chr(pad)
aes = AES.new(saltedHash, AES.MODE_CBC, iv)
encrypted_bytes = aes.encrypt(data)
return urllib.quote(base64.b64encode(encrypted_bytes))
|
mit
|
Python
|
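Aside: the padding arithmetic inside generate_sso_token above is the standard PKCS#7 scheme. A self-contained Python 3 sketch of just that step (the payload value is arbitrary):
block_size = 16
data = b'example payload'               # 15 bytes
pad = block_size - len(data) % block_size
padded = data + bytes([pad]) * pad      # append pad copies of the pad value
assert len(padded) % block_size == 0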
3be6ed2f32492d79b639e657cbf5782451b527e7
|
Disable broken upload test
|
skylines-project/skylines,kerel-fs/skylines,kerel-fs/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,skylines-project/skylines,shadowoneau/skylines,Turbo87/skylines,shadowoneau/skylines,Turbo87/skylines,RBE-Avionik/skylines,shadowoneau/skylines,kerel-fs/skylines,RBE-Avionik/skylines,skylines-project/skylines,Harry-R/skylines,RBE-Avionik/skylines,Harry-R/skylines,RBE-Avionik/skylines,shadowoneau/skylines,Harry-R/skylines,Turbo87/skylines
|
tests/frontend/views/upload_test.py
|
tests/frontend/views/upload_test.py
|
import os
from io import BytesIO
import pytest
from skylines.database import db
from skylines.model import User
pytestmark = pytest.mark.usefixtures('db_session', 'files_folder')
HERE = os.path.dirname(__file__)
DATADIR = os.path.join(HERE, '..', '..', 'data')
@pytest.fixture(scope='function')
def bill(app):
bill = User(first_name='bill',
email_address='[email protected]',
password='pass')
with app.app_context():
db.session.add(bill)
db.session.commit()
return bill
@pytest.fixture(scope='function')
def logged_in_browser(browser, bill):
form = browser.getForm(index=1)
form.getControl(name='email_address').value = bill.email_address
form.getControl(name='password').value = 'pass'
form.submit()
return browser
def test_upload_broken_igc(logged_in_browser):
b = logged_in_browser
b.open('/flights/upload')
# we should be logged in now
assert 'IGC or ZIP file(s)' in b.contents
b.getControl('IGC or ZIP file(s)').add_file(BytesIO('broken'),
'text/plain',
'/tmp/broken.igc')
b.getControl('Upload').click()
assert 'No flight was saved.' in b.contents
@pytest.mark.skip(reason="endless loop in airspace analysis code")
def test_upload_single(logged_in_browser, bill):
assert bill.id is not None
b = logged_in_browser
b.open('/flights/upload')
# we should be logged in now
assert 'IGC or ZIP file(s)' in b.contents
f_igc = open(os.path.join(DATADIR, 'simple.igc'))
b.getControl('IGC or ZIP file(s)').add_file(f_igc,
'text/plain',
'/tmp/simple.igc')
b.getControl('Upload').click()
assert 'Your flights have been saved.' in b.contents
|
import os
from io import BytesIO
import pytest
from skylines.database import db
from skylines.model import User
pytestmark = pytest.mark.usefixtures('db_session', 'files_folder')
HERE = os.path.dirname(__file__)
DATADIR = os.path.join(HERE, '..', '..', 'data')
@pytest.fixture(scope='function')
def bill(app):
bill = User(first_name='bill',
email_address='[email protected]',
password='pass')
with app.app_context():
db.session.add(bill)
db.session.commit()
return bill
@pytest.fixture(scope='function')
def logged_in_browser(browser, bill):
form = browser.getForm(index=1)
form.getControl(name='email_address').value = bill.email_address
form.getControl(name='password').value = 'pass'
form.submit()
return browser
def test_upload_broken_igc(logged_in_browser):
b = logged_in_browser
b.open('/flights/upload')
# we should be logged in now
assert 'IGC or ZIP file(s)' in b.contents
b.getControl('IGC or ZIP file(s)').add_file(BytesIO('broken'),
'text/plain',
'/tmp/broken.igc')
b.getControl('Upload').click()
assert 'No flight was saved.' in b.contents
def test_upload_single(logged_in_browser, bill):
assert bill.id is not None
b = logged_in_browser
b.open('/flights/upload')
# we should be logged in now
assert 'IGC or ZIP file(s)' in b.contents
f_igc = open(os.path.join(DATADIR, 'simple.igc'))
b.getControl('IGC or ZIP file(s)').add_file(f_igc,
'text/plain',
'/tmp/simple.igc')
b.getControl('Upload').click()
assert 'Your flights have been saved.' in b.contents
|
agpl-3.0
|
Python
|
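Aside: the whole fix above is one decorator. A minimal sketch of pytest's skip marker (the test body is hypothetical):
import pytest

@pytest.mark.skip(reason='demonstration only')
def test_placeholder():
    assert False  # never executed; pytest reports the test as skipped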
71a1d2b40a03bde4969f0eea5f2c48d4ba7ace1b
|
Fix batch tests on Python 3
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
tests/integration/cli/test_batch.py
|
tests/integration/cli/test_batch.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.case import ShellCase
class BatchTest(ShellCase):
'''
Integration tests for the salt.cli.batch module
'''
def test_batch_run(self):
'''
Tests executing a simple batch command to help catch regressions
'''
ret = 'Executing run on [{0}]'.format(repr('sub_minion'))
cmd = self.run_salt('\'*minion\' test.echo \'batch testing\' -b 50%')
self.assertIn(ret, cmd)
def test_batch_run_number(self):
'''
Tests executing a simple batch command using a number division instead of
a percentage with full batch CLI call.
'''
ret = "Executing run on [{0}, {1}]".format(repr('minion'), repr('sub_minion'))
cmd = self.run_salt('\'*minion\' test.ping --batch-size 2')
self.assertIn(ret, cmd)
def test_batch_run_grains_targeting(self):
'''
Tests executing a batch command using a percentage divisor as well as grains
targeting.
'''
os_grain = ''
sub_min_ret = "Executing run on [{0}]".format(repr('sub_minion'))
min_ret = "Executing run on [{0}]".format(repr('minion'))
for item in self.run_salt('minion grains.get os'):
if item != 'minion':
os_grain = item
os_grain = os_grain.strip()
cmd = self.run_salt('-C \'G@os:{0} and not localhost\' -b 25% test.ping'.format(os_grain))
self.assertIn(sub_min_ret, cmd)
self.assertIn(min_ret, cmd)
def test_batch_exit_code(self):
'''
Test that a failed state returns a non-zero exit code in batch mode
'''
cmd = self.run_salt(' "*minion" state.single test.fail_without_changes name=test_me -b 33%', with_retcode=True)
self.assertEqual(cmd[-1], 2)
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.case import ShellCase
class BatchTest(ShellCase):
'''
Integration tests for the salt.cli.batch module
'''
def test_batch_run(self):
'''
Tests executing a simple batch command to help catch regressions
'''
ret = 'Executing run on [u\'sub_minion\']'
cmd = self.run_salt('\'*minion\' test.echo \'batch testing\' -b 50%')
self.assertIn(ret, cmd)
def test_batch_run_number(self):
'''
Tests executing a simple batch command using a number division instead of
a percentage with full batch CLI call.
'''
ret = "Executing run on [u'minion', u'sub_minion']"
cmd = self.run_salt('\'*minion\' test.ping --batch-size 2')
self.assertIn(ret, cmd)
def test_batch_run_grains_targeting(self):
'''
Tests executing a batch command using a percentage divisor as well as grains
targeting.
'''
os_grain = ''
sub_min_ret = "Executing run on [u'sub_minion']"
min_ret = "Executing run on [u'minion']"
for item in self.run_salt('minion grains.get os'):
if item != 'minion':
os_grain = item
os_grain = os_grain.strip()
cmd = self.run_salt('-C \'G@os:{0} and not localhost\' -b 25% test.ping'.format(os_grain))
self.assertIn(sub_min_ret, cmd)
self.assertIn(min_ret, cmd)
def test_batch_exit_code(self):
'''
Test that a failed state returns a non-zero exit code in batch mode
'''
cmd = self.run_salt(' "*minion" state.single test.fail_without_changes name=test_me -b 33%', with_retcode=True)
self.assertEqual(cmd[-1], 2)
|
apache-2.0
|
Python
|
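Aside: the trick in the fixed tests above is to build the expected string with repr() instead of hard-coding a u'' prefix. A standalone sketch (under unicode_literals, repr() yields u'sub_minion' on Python 2 and 'sub_minion' on Python 3, matching each interpreter's own output):
from __future__ import unicode_literals

expected = 'Executing run on [{0}]'.format(repr('sub_minion'))
print(expected)  # Py2: [u'sub_minion']  Py3: ['sub_minion']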
6f391d4113b55f538cfeed26c36b17846c7b758f
|
fix alt-svc test
|
hansroh/skitai,hansroh/skitai,hansroh/skitai
|
tests/level4/test_http3_response.py
|
tests/level4/test_http3_response.py
|
import pytest
import os
import socket
import time
import sys
#@pytest.mark.skip
def test_http2 (launch):
serve = './examples/http3.py'
with launch (serve, port = 30371, quic = 30371, ssl = True) as engine:
resp = engine.http2.get ('/hello?num=1')
assert resp.text == 'hello'
if sys.version_info.major > 3 or (sys.version_info.major == 3 and sys.version_info.minor >= 6):
assert resp.headers ['alt-svc'] == 'h3-23=":30371"; ma=86400'
resp = engine.http2.get ('/hello?num=2')
assert resp.text == 'hello\nhello'
resp = engine.http2.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http2.get ('/lb/project/rs4/')
assert 'pip install rs4' in resp.text
resp = engine.http2.post ('/post', {'username': 'a' * 1000000})
assert len (resp.text) == 1000006
def test_http3 (launch):
if sys.version_info.major == 3 and sys.version_info.minor < 6:
return
serve = './examples/http3.py'
with launch (serve, port = 30371, quic = 30371, ssl = True) as engine:
resp = engine.http3.get ('/hello?num=1')
assert resp.text == 'hello'
resp = engine.http3.get ('/hello?num=2')
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 1})
assert resp.text == 'hello'
resp = engine.http3.get ('/lb/project/rs4/')
assert 'pip install rs4' in resp.text
resp = engine.http3.post ('/post', {'username': 'a' * 1000000})
assert len (resp.text) == 1000006
|
import pytest
import os
import socket
import time
import sys
#@pytest.mark.skip
def test_http2 (launch):
serve = './examples/http3.py'
with launch (serve, port = 30371, quic = 30371, ssl = True) as engine:
resp = engine.http2.get ('/hello?num=1')
assert resp.text == 'hello'
assert 'alt-svc' in resp.headers
assert resp.headers ['alt-svc'] == 'h3-23=":30371"; ma=86400'
resp = engine.http2.get ('/hello?num=2')
assert resp.text == 'hello\nhello'
resp = engine.http2.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http2.get ('/lb/project/rs4/')
assert 'pip install rs4' in resp.text
resp = engine.http2.post ('/post', {'username': 'a' * 1000000})
assert len (resp.text) == 1000006
def test_http3 (launch):
if sys.version_info.major == 3 and sys.version_info.minor < 6:
return
serve = './examples/http3.py'
with launch (serve, port = 30371, quic = 30371, ssl = True) as engine:
resp = engine.http3.get ('/hello?num=1')
assert resp.text == 'hello'
resp = engine.http3.get ('/hello?num=2')
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 2})
assert resp.text == 'hello\nhello'
resp = engine.http3.post ('/hello', {'num': 1})
assert resp.text == 'hello'
resp = engine.http3.get ('/lb/project/rs4/')
assert 'pip install rs4' in resp.text
resp = engine.http3.post ('/post', {'username': 'a' * 1000000})
assert len (resp.text) == 1000006
|
mit
|
Python
|
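Aside: the longhand version check in the record above is equivalent to a tuple comparison, which is the more common idiom. A sketch:
import sys

# same condition as: major > 3 or (major == 3 and minor >= 6)
if sys.version_info >= (3, 6):
    print('the alt-svc assertion would run here')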
1e9ebf139ae76eddfe8dd01290e41735e7d1011b
|
Rewrite syntax to be Python 3.5+
|
ipython/ipython,ipython/ipython
|
IPython/utils/tests/test_openpy.py
|
IPython/utils/tests/test_openpy.py
|
import io
import os.path
import nose.tools as nt
from IPython.utils import openpy
mydir = os.path.dirname(__file__)
nonascii_path = os.path.join(mydir, '../../core/tests/nonascii.py')
def test_detect_encoding():
with open(nonascii_path, 'rb') as f:
enc, lines = openpy.detect_encoding(f.readline)
nt.assert_equal(enc, 'iso-8859-5')
def test_read_file():
with io.open(nonascii_path, encoding='iso-8859-5') as f:
read_specified_enc = f.read()
read_detected_enc = openpy.read_py_file(nonascii_path, skip_encoding_cookie=False)
nt.assert_equal(read_detected_enc, read_specified_enc)
assert 'coding: iso-8859-5' in read_detected_enc
read_strip_enc_cookie = openpy.read_py_file(nonascii_path, skip_encoding_cookie=True)
assert 'coding: iso-8859-5' not in read_strip_enc_cookie
def test_source_to_unicode():
with io.open(nonascii_path, 'rb') as f:
source_bytes = f.read()
nt.assert_equal(openpy.source_to_unicode(source_bytes, skip_encoding_cookie=False).splitlines(),
source_bytes.decode('iso-8859-5').splitlines())
source_no_cookie = openpy.source_to_unicode(source_bytes, skip_encoding_cookie=True)
nt.assert_not_in('coding: iso-8859-5', source_no_cookie)
|
import io
import os.path
import nose.tools as nt
from IPython.utils import openpy
mydir = os.path.dirname(__file__)
nonascii_path = os.path.join(mydir, '../../core/tests/nonascii.py')
def test_detect_encoding():
with open(nonascii_path, 'rb') as f:
enc, lines = openpy.detect_encoding(f.readline)
nt.assert_equal(enc, 'iso-8859-5')
def test_read_file():
with io.open(nonascii_path, encoding='iso-8859-5') as f:
read_specified_enc = f.read()
read_detected_enc = openpy.read_py_file(nonascii_path, skip_encoding_cookie=False)
nt.assert_equal(read_detected_enc, read_specified_enc)
assert u'coding: iso-8859-5' in read_detected_enc
read_strip_enc_cookie = openpy.read_py_file(nonascii_path, skip_encoding_cookie=True)
assert u'coding: iso-8859-5' not in read_strip_enc_cookie
def test_source_to_unicode():
with io.open(nonascii_path, 'rb') as f:
source_bytes = f.read()
nt.assert_equal(openpy.source_to_unicode(source_bytes, skip_encoding_cookie=False).splitlines(),
source_bytes.decode('iso-8859-5').splitlines())
source_no_cookie = openpy.source_to_unicode(source_bytes, skip_encoding_cookie=True)
nt.assert_not_in(u'coding: iso-8859-5', source_no_cookie)
|
bsd-3-clause
|
Python
|
64ed1185fca6ba60e06d508ac401f68d5be1ce56
|
bring tests up to #442 change
|
craigds/mapnik2,makinacorpus/mapnik2,makinacorpus/mapnik2,craigds/mapnik2,makinacorpus/mapnik2,craigds/mapnik2,makinacorpus/mapnik2,craigds/mapnik2
|
tests/python_tests/load_map_test.py
|
tests/python_tests/load_map_test.py
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, sys, glob, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
# We expect these files to not raise any
# exceptions at all
def assert_loads_successfully(file):
m = mapnik.Map(512, 512)
strict = True
mapnik.load_map(m, file, strict)
# libxml2 is not smart about paths, and clips the last directory off
# of a path if it does not end in a trailing slash
base_path = os.path.dirname(file) + '/'
mapnik.load_map_from_string(m,open(file,'rb').read(),strict,base_path)
# We expect these files to raise a RuntimeError
# and fail if there isn't one (or a different type
# of exception)
@raises(RuntimeError)
def assert_raises_runtime_error(file):
m = mapnik.Map(512, 512)
strict = True
mapnik.load_map(m, file, strict)
def test_broken_files():
broken_files = glob.glob("../data/broken_maps/*.xml")
# Add a filename that doesn't exist
broken_files.append("../data/broken/does_not_exist.xml")
for file in broken_files:
yield assert_raises_runtime_error, file
def test_good_files():
good_files = glob.glob("../data/good_maps/*.xml")
for file in good_files:
yield assert_loads_successfully, file
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, sys, glob, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
# We expect these files to not raise any
# exceptions at all
def assert_loads_successfully(file):
m = mapnik.Map(512, 512)
strict = True
mapnik.load_map(m, file, strict)
# libxml2 is not smart about paths, and clips the last directory off
# of a path if it does not end in a trailing slash
base_path = os.path.dirname(file) + '/'
mapnik.load_map_from_string(m,open(file,'rb').read(),strict,base_path)
# We expect these files to raise a UserWarning
# and fail if there isn't one (or a different type
# of exception)
@raises(UserWarning)
def assert_raises_userwarning(file):
m = mapnik.Map(512, 512)
strict = True
mapnik.load_map(m, file, strict)
def test_broken_files():
broken_files = glob.glob("../data/broken_maps/*.xml")
# Add a filename that doesn't exist
broken_files.append("../data/broken/does_not_exist.xml")
for file in broken_files:
yield assert_raises_userwarning, file
def test_good_files():
good_files = glob.glob("../data/good_maps/*.xml")
for file in good_files:
yield assert_loads_successfully, file
|
lgpl-2.1
|
Python
|
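Aside: the diff above only swaps the expected exception type in nose's @raises decorator. A minimal sketch of that decorator (requires nose; pytest.raises is the modern equivalent):
from nose.tools import raises

@raises(RuntimeError)
def test_strict_load_raises():
    raise RuntimeError('broken map')  # the test passes because this is raised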
ad14d77a137c924357bca51d39b91b4d502d2ce6
|
Improve pylint score
|
IndyActuaries/epic-fhir,IndyActuaries/epic-fhir
|
scripts/extract.py
|
scripts/extract.py
|
"""
## CODE OWNERS: Kyle Baird, Shea Parkes
### OWNERS ATTEST TO THE FOLLOWING:
* The `master` branch will meet Milliman QRM standards at all times.
* Deliveries will only be made from code in the `master` branch.
* Review/Collaboration notes will be captured in Pull Requests.
### OBJECTIVE:
Extract data from the EHR to feed the analytics
### DEVELOPER NOTES:
<none>
"""
import csv
from pathlib import Path
import prm_fhir.extractors
PATH_DATA = Path(prm_fhir.extractors.__file__).parents[2] / "data"
#==============================================================================
# LIBRARIES, LOCATIONS, LITERALS, ETC. GO ABOVE HERE
#==============================================================================
if __name__ == "__main__":
URLS = [
"https://open-ic.epic.com/FHIR/api/FHIR/DSTU2",
#"http://134.68.33.32/fhir/",
]
SEARCH_STRUCTS = [
{"family": "Argonaut", "given": "*"},
{"family": "Ragsdale", "given": "*"},
]
PATH_PATIENTS = PATH_DATA / "patients.csv"
with PATH_PATIENTS.open("w", newline="") as patients:
FIELDNAMES = prm_fhir.extractors.extract_patients.fieldnames
WRITER = csv.DictWriter(
patients,
fieldnames=FIELDNAMES,
)
WRITER.writeheader()
for url in URLS:
for search_struct in SEARCH_STRUCTS:
WRITER.writerows(
prm_fhir.extractors.extract_patients(url, search_struct)
)
|
"""
## CODE OWNERS: Kyle Baird, Shea Parkes
### OWNERS ATTEST TO THE FOLLOWING:
* The `master` branch will meet Milliman QRM standards at all times.
* Deliveries will only be made from code in the `master` branch.
* Review/Collaboration notes will be captured in Pull Requests.
### OBJECTIVE:
Extract data from the EHR to feed the analytics
### DEVELOPER NOTES:
<none>
"""
import csv
from pathlib import Path
import prm_fhir.extractors
PATH_DATA = Path(prm_fhir.extractors.__file__).parents[2] / "data"
#==============================================================================
# LIBRARIES, LOCATIONS, LITERALS, ETC. GO ABOVE HERE
#==============================================================================
if __name__ == "__main__":
URLS = [
"https://open-ic.epic.com/FHIR/api/FHIR/DSTU2",
#"http://134.68.33.32/fhir/",
]
SEARCH_STRUCTS = [
{"family": "Argonaut", "given": "*"},
{"family": "Ragsdale", "given": "*"},
]
PATH_PATIENTS = PATH_DATA / "patients.csv"
with PATH_PATIENTS.open("w", newline="") as patients:
fieldnames = prm_fhir.extractors.extract_patients.fieldnames
writer = csv.DictWriter(
patients,
fieldnames=fieldnames,
)
writer.writeheader()
for url in URLS:
for search_struct in SEARCH_STRUCTS:
writer.writerows(prm_fhir.extractors.extract_patients(url, search_struct))
|
mit
|
Python
|
f276d6fdb412b8ad93de8ba6d921d29a57710077
|
Update usage message
|
SB-Technology-Holdings-International/WateringWebClient,SB-Technology-Holdings-International/WateringWebClient,SB-Technology-Holdings-International/Water,SB-Technology-Holdings-International/Water,SB-Technology-Holdings-International/WateringWebClient,SB-Technology-Holdings-International/Water,SB-Technology-Holdings-International/Water,SB-Technology-Holdings-International/WateringWebClient,SB-Technology-Holdings-International/Water
|
server/messages.py
|
server/messages.py
|
'''Endpoints messages.'''
from protorpc import messages
class Status(messages.Enum):
OK = 1
MISSING_DATA = 2
EXISTS = 3
BAD_DATA = 4
ERROR = 5
NO_DEVICE = 6
class DataMessage(messages.Message):
device_id = messages.StringField(1)
status = messages.EnumField(Status, 2)
class StatusResponse(messages.Message):
status = messages.EnumField(Status, 1)
class ScheduledWater(messages.Message):
'''Request to add to watering schedule'''
valve = messages.IntegerField(1)
start_time = messages.IntegerField(2)
duration_seconds = messages.IntegerField(3)
status = messages.EnumField(Status, 4)
class ScheduleResponse(messages.Message):
status = messages.EnumField(Status, 1)
schedule = messages.MessageField(ScheduledWater, 2, repeated=True)
class UsageResponse(messages.Message):
usage = messages.IntegerField(1, repeated=True)
datapoint_num = messages.IntegerField(2)
datapoint_freq = messages.EnumField('Frequency', 3)  # reference by name; Frequency is declared below
class Frequency(messages.Enum):
DAY = 1
WEEK = 2
MONTH = 3
class SetupRequest(messages.Message):
device_id = messages.StringField(1)
lat = messages.FloatField(2)
lng = messages.FloatField(3)
class Valve(messages.Message):
number = messages.IntegerField(1)
name = messages.StringField(2, required=False)
device_id = messages.StringField(3)
status = messages.EnumField(Status, 4)
start_time = messages.IntegerField(5)
duration_seconds = messages.IntegerField(6)
crop_id = messages.IntegerField(7)
class ValveDataResponse(messages.Message):
valves = messages.MessageField(Valve, 1, repeated=True)
status = messages.EnumField(Status, 2)
class ScheduleAdd(messages.Message):
device_id = messages.StringField(1)
valve = messages.IntegerField(2)
seconds_per_day = messages.IntegerField(3)
crop_id = messages.IntegerField(4)
start_time = messages.IntegerField(5)
|
'''Endpoints messages.'''
from protorpc import messages
class Status(messages.Enum):
OK = 1
MISSING_DATA = 2
EXISTS = 3
BAD_DATA = 4
ERROR = 5
NO_DEVICE = 6
class DataMessage(messages.Message):
device_id = messages.StringField(1)
status = messages.EnumField(Status, 2)
class StatusResponse(messages.Message):
status = messages.EnumField(Status, 1)
class ScheduledWater(messages.Message):
'''Request to add to watering schedule'''
valve = messages.IntegerField(1)
start_time = messages.IntegerField(2)
duration_seconds = messages.IntegerField(3)
status = messages.EnumField(Status, 4)
class ScheduleResponse(messages.Message):
status = messages.EnumField(Status, 1)
schedule = messages.MessageField(ScheduledWater, 2, repeated=True)
class UsageResponse(messages.Message):
usage = messages.StringField(1)
class SetupRequest(messages.Message):
device_id = messages.StringField(1)
lat = messages.FloatField(2)
lng = messages.FloatField(3)
class Valve(messages.Message):
number = messages.IntegerField(1)
name = messages.StringField(2, required=False)
device_id = messages.StringField(3)
status = messages.EnumField(Status, 4)
start_time = messages.IntegerField(5)
duration_seconds = messages.IntegerField(6)
crop_id = messages.IntegerField(7)
class ValveDataResponse(messages.Message):
valves = messages.MessageField(Valve, 1, repeated=True)
status = messages.EnumField(Status, 2)
class ScheduleAdd(messages.Message):
device_id = messages.StringField(1)
valve = messages.IntegerField(2)
seconds_per_day = messages.IntegerField(3)
crop_id = messages.IntegerField(4)
start_time = messages.IntegerField(5)
|
bsd-3-clause
|
Python
|
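Aside: a protorpc EnumField takes the enum type (or its name as a string, for types declared later in the module) plus a field number, as in the datapoint_freq field above. A sketch with a hypothetical message name:
from protorpc import messages

class Frequency(messages.Enum):
    DAY = 1
    WEEK = 2
    MONTH = 3

class UsageQuery(messages.Message):  # hypothetical message
    datapoint_freq = messages.EnumField(Frequency, 1)  # or messages.EnumField('Frequency', 1)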
917dde63ece9e552427487c7639be64e1b113d3d
|
Update zibra download fields.
|
blab/nextstrain-db,nextstrain/fauna,blab/nextstrain-db,nextstrain/fauna
|
vdb/zibra_download.py
|
vdb/zibra_download.py
|
import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from download import download
from download import parser
class zibra_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
self.virus_specific_fasta_fields = []
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = ['strain', 'amplicon_concentration', 'ct', 'date', 'division', 'location', 'microcephaly',
'onset_date', 'patient_age', 'patient_sex', 'rt_positive', 'sample_type']
setattr(args, 'fasta_fields', fasta_fields)
connVDB = zibra_download(**args.__dict__)
connVDB.download(**args.__dict__)
|
import os, re, time, datetime, csv, sys
import rethinkdb as r
from Bio import SeqIO
from download import download
from download import parser
class zibra_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
self.virus_specific_fasta_fields = []
if __name__=="__main__":
args = parser.parse_args()
fasta_fields = ['strain', 'amplicon_concentration', 'citation', 'ct', 'country', 'date', 'division', 'location',
'onset_date', 'patient_age', 'patient_sex', 'public', 'region', 'rt_positive', 'timestamp', 'virus']
setattr(args, 'fasta_fields', fasta_fields)
connVDB = zibra_download(**args.__dict__)
connVDB.download(**args.__dict__)
|
agpl-3.0
|
Python
|
749441ed678f69ba813b3af74454af5b1e855482
|
Refactor and cleanup model mixins.
|
openbudgets/openbudgets,moshe742/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,moshe742/openbudgets,shaib/openbudgets,pwalsh/openbudgets,shaib/openbudgets
|
openbudget/commons/mixins/models.py
|
openbudget/commons/mixins/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from uuidfield import UUIDField
from openbudget.settings import base as settings
class ClassMethodMixin(object):
"""A mixin for commonly used classmethods on models."""
@classmethod
def get_class_name(cls):
value = cls.__name__.lower()
return value
class TimeStampedModel(models.Model):
"""A mixin to add timestamps to models that inherit it."""
created_on = models.DateTimeField(
_('Created on'),
db_index=True,
auto_now_add=True,
editable=False
)
last_modified = models.DateTimeField(
_('Last modified'),
db_index=True,
auto_now=True,
editable=False
)
class Meta:
abstract = True
class UUIDModel(models.Model):
"""A mixin to add UUIDs to models that inherit it."""
uuid = UUIDField(
db_index=True,
auto=True
)
class Meta:
abstract = True
class PeriodStartModel(models.Model):
"""A mixin to add a period_start field to models that inherit it.
The primary use of this mixin is for model objects with data that applies
over a period of time, and where the applicable period is only determined by
the presence of another object with a future-dated value for period_start.
Example: CBS data (valid until the next dataset), official budget template for
municipalities in Israel (valid until a new template will come to
replace/extend the existing one.)
"""
period_start = models.DateField(
_('Period start'),
db_index=True,
null=True,
blank=True,
help_text=_('The start date for this %(class)s'),
)
class Meta:
abstract = True
# TODO: Implement a period method here.
# However, we do not currently have a use case for it.
class PeriodicModel(PeriodStartModel):
"""A mixin to add a defined period of validity to models that inherit it."""
period_end = models.DateField(
_('Period end'),
db_index=True,
null=True,
blank=True,
help_text=_('The end date for this %(class)s')
)
@property
def period(self):
"""Get the applicable period for this object."""
# TODO: Support ranges other than yearly, including multiple ranges.
value = None
ranges = settings.OPENBUDGET_PERIOD_RANGES
if len(ranges) == 1 and 'yearly' in ranges:
value = self.period_start.year
else:
# TODO: Verify - in the current codebase, we should never get here.
pass
return value
class Meta:
abstract = True
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from uuidfield import UUIDField
class ClassMethodMixin(object):
"""Mixin for commonly used class methods on models"""
@classmethod
def get_class_name(cls):
value = cls.__name__.lower()
return value
class TimeStampedModel(models.Model):
"""A simple mixin to timestamp models that inherit from it"""
class Meta:
abstract = True
created_on = models.DateTimeField(
_('Created on'),
auto_now_add=True,
editable=False
)
last_modified = models.DateTimeField(
_('Last modified'),
db_index=True,
auto_now=True,
editable=False
)
class UUIDModel(models.Model):
"""A simple mixin to add a uuid to models that inherit from it"""
class Meta:
abstract = True
uuid = UUIDField(
db_index=True,
auto=True
)
class PeriodStartModel(models.Model):
class Meta:
abstract = True
period_start = models.DateField(
_('Period start'),
help_text=_('The start date for this %(class)s'),
null=True,
blank=True
)
#TODO: Move period method here
# if has attr period_end, elif get other models in future from period_start, else period is til now.
# then this method will work with classes that subclass this class.
class PeriodicModel(PeriodStartModel):
class Meta:
abstract = True
period_end = models.DateField(
_('Period end'),
help_text=_('The end date for this %(class)s')
)
#TODO: implement a shortcut from period_start/end to year
@property
def period(self):
# TODO: Write a smarter method for the general use case
# naive, just for current purposes
tmp = self.period_end - self.period_start
if tmp.days <= 365:
return self.period_start.year
else:
return unicode(self.period_start.year) + ' - ' + self.period_end.year
|
bsd-3-clause
|
Python
|
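Aside (assumes a configured Django project; the model below is hypothetical): the abstract mixins in the record compose by plain inheritance, contributing columns but no tables of their own.
from django.db import models
from openbudget.commons.mixins.models import TimeStampedModel, UUIDModel

class Budget(TimeStampedModel, UUIDModel):
    name = models.CharField(max_length=100)
    # inherits created_on, last_modified and uuid fields from the mixins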
bf90f726da9954edb69f4c0cb29206ff82444d63
|
Add custom admin classes
|
recipi/recipi,recipi/recipi,recipi/recipi
|
src/recipi/food/admin.py
|
src/recipi/food/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from recipi.food.models import (
FoodGroup, Food, Language, LanguageDescription, Nutrient,
Weight, Footnote)
class FoodGroupAdmin(admin.ModelAdmin):
pass
class FoodAdmin(admin.ModelAdmin):
pass
class LanguageAdmin(admin.ModelAdmin):
pass
class LanguageDescriptionAdmin(admin.ModelAdmin):
pass
class NutrientAdmin(admin.ModelAdmin):
pass
class WeightAdmin(admin.ModelAdmin):
pass
class FootnoteAdmin(admin.ModelAdmin):
pass
admin.site.register(FoodGroup, FoodGroupAdmin)
admin.site.register(Food, FoodAdmin)
admin.site.register(Language, LanguageAdmin)
admin.site.register(LanguageDescription, LanguageDescriptionAdmin)
admin.site.register(Nutrient, NutrientAdmin)
admin.site.register(Weight, WeightAdmin)
admin.site.register(Footnote, FootnoteAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from recipi.food.models import (
FoodGroup, Food, Language, LanguageDescription, Nutrient,
Weight, Footnote)
admin.site.register(FoodGroup)
admin.site.register(Food)
admin.site.register(Language)
admin.site.register(LanguageDescription)
admin.site.register(Nutrient)
admin.site.register(Weight)
admin.site.register(Footnote)
|
isc
|
Python
|
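Aside (assumes a configured Django project): the decorator form is an equivalent way to pair a custom ModelAdmin with its model, shown here for one of the record's models.
from django.contrib import admin
from recipi.food.models import Food

@admin.register(Food)
class FoodAdmin(admin.ModelAdmin):
    pass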
cf7f5dc359bb49743750c9ace6c317092b275653
|
remove the use of refine_results because it was changed to a private method
|
tumluliu/mmrp-jsonrpc,tumluliu/mmrp-jsonrpc
|
mmrp.py
|
mmrp.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging.config
import json
LOGGING_CONF_FILE = 'logging.json'
DEFAULT_LOGGING_LVL = logging.INFO
path = LOGGING_CONF_FILE
value = os.getenv('LOG_CFG', None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
from flask import Flask
PROJECT_DIR, PROJECT_MODULE_NAME = os.path.split(
os.path.dirname(os.path.realpath(__file__))
)
FLASK_JSONRPC_PROJECT_DIR = os.path.join(PROJECT_DIR, os.pardir)
if os.path.exists(FLASK_JSONRPC_PROJECT_DIR) \
and FLASK_JSONRPC_PROJECT_DIR not in sys.path:
sys.path.append(FLASK_JSONRPC_PROJECT_DIR)
from flask_cors import CORS
from flask_jsonrpc import JSONRPC
from pymmrouting.routeplanner import MultimodalRoutePlanner
from pymmrouting.inferenceengine import RoutingPlanInferer
app = Flask(__name__)
cors = CORS(app)
jsonrpc = JSONRPC(app, '/api', enable_web_browsable_api=True)
@jsonrpc.method('mmrp.index')
def index():
return u'Welcome using Multimodal Route Planner (mmrp) JSON-RPC API'
@jsonrpc.method('mmrp.echo')
def echo(input):
logger.debug("input value: %s", input)
return u'Receive {0}'.format(input)
@jsonrpc.method('mmrp.findMultimodalPaths')
def find_multimodal_paths(options):
inferer = RoutingPlanInferer()
inferer.load_routing_options(options)
plans = inferer.generate_routing_plan()
planner = MultimodalRoutePlanner()
results = planner.batch_find_path(plans)
return results
@jsonrpc.method('mmrp.fails')
def fails(string):
raise ValueError
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging.config
import json
LOGGING_CONF_FILE = 'logging.json'
DEFAULT_LOGGING_LVL = logging.INFO
path = LOGGING_CONF_FILE
value = os.getenv('LOG_CFG', None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
from flask import Flask
PROJECT_DIR, PROJECT_MODULE_NAME = os.path.split(
os.path.dirname(os.path.realpath(__file__))
)
FLASK_JSONRPC_PROJECT_DIR = os.path.join(PROJECT_DIR, os.pardir)
if os.path.exists(FLASK_JSONRPC_PROJECT_DIR) \
and FLASK_JSONRPC_PROJECT_DIR not in sys.path:
sys.path.append(FLASK_JSONRPC_PROJECT_DIR)
from flask_cors import CORS
from flask_jsonrpc import JSONRPC
from pymmrouting.routeplanner import MultimodalRoutePlanner
from pymmrouting.inferenceengine import RoutingPlanInferer
app = Flask(__name__)
cors = CORS(app)
jsonrpc = JSONRPC(app, '/api', enable_web_browsable_api=True)
@jsonrpc.method('mmrp.index')
def index():
return u'Welcome using Multimodal Route Planner (mmrp) JSON-RPC API'
@jsonrpc.method('mmrp.echo')
def echo(input):
logger.debug("input value: %s", input)
return u'Receive {0}'.format(input)
@jsonrpc.method('mmrp.findMultimodalPaths')
def find_multimodal_paths(options):
inferer = RoutingPlanInferer()
inferer.load_routing_options(options)
plans = inferer.generate_routing_plan()
planner = MultimodalRoutePlanner()
rough_results = planner.batch_find_path(plans)
results = planner.refine_results(rough_results)
return results
@jsonrpc.method('mmrp.fails')
def fails(string):
raise ValueError
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
|
mit
|
Python
|
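Aside: the convention behind the change above, in isolation — a leading underscore marks a method as internal, so callers go through the public entry point. A toy sketch (the routing logic is a stand-in):
class Planner(object):
    def batch_find_path(self, plans):
        rough = [p.upper() for p in plans]   # stand-in for real path finding
        return self._refine_results(rough)   # refinement happens internally now

    def _refine_results(self, rough):        # leading underscore: private API
        return sorted(rough)

print(Planner().batch_find_path(['b', 'a']))  # ['A', 'B']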
8e6662a4aaf654ddf18c1c4e733c58db5b9b5579
|
Add cache in opps menu list via context processors
|
YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps
|
opps/channels/context_processors.py
|
opps/channels/context_processors.py
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from django.core.cache import cache
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = cache.get('opps_menu')
if not opps_menu:
opps_menu = [channel for channel in Channel.objects.filter(
site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).distinct().order_by('order')]
cache.set('opps_menu', opps_menu, settings.OPPS_CACHE_EXPIRE)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF,
'site': site}
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF,
'site': site}
|
mit
|
Python
|
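Aside (assumes a configured Django cache backend; the TTL is an assumption): the diff above is the cache-aside pattern. Note the record keys the miss on `if not opps_menu:`, so an empty menu is rebuilt on every request — a deliberate trade-off against caching an empty result.
from django.core.cache import cache

def cached_menu(build_menu):
    menu = cache.get('opps_menu')
    if menu is None:                      # miss: compute once, then store
        menu = build_menu()
        cache.set('opps_menu', menu, 60)  # 60-second TTL, illustrative only
    return menu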
621565e0daa4e06ff6a67f985af124fa7f101d77
|
Refactor dbaas test helpers
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
dbaas/dbaas/tests/helpers.py
|
dbaas/dbaas/tests/helpers.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from physical.tests.factory import InstanceFactory
class UsedAndTotalValidator(object):
@staticmethod
def assertEqual(a, b):
assert a == b, "{} NOT EQUAL {}".format(a, b)
@classmethod
def instances_sizes(cls, instances=None, expected_used_size=40, expected_total_size=90):
for instance in instances:
cls.assertEqual(instance.used_size_in_bytes, expected_used_size)
cls.assertEqual(instance.total_size_in_bytes, expected_total_size)
class InstanceHelper(object):
model = InstanceFactory.FACTORY_FOR
quantity_of_masters = 1
@classmethod
def kill_instances(cls, instances):
for instance in instances:
instance.status = cls.model.DEAD
instance.save()
@staticmethod
def change_instances_type(instances, instance_type):
for instance in instances:
instance.instance_type = instance_type
instance.save()
@staticmethod
def check_instance_is_master(instance):
"""
Method for mock the real check_instance_is_master.
This method return master if the last digit minus 1 of address
is divisible by 2
Ex. Address = '127.0.0.1' the last char is 1. Now subtract 1 and we
have 0. Now check if 0 is divisible by 2. This case return True
Ex. Address = '127.0.0.2' the last char is 2. Now subtract 1 and we
have 1. Now check if 1 is divisible by 2. This case return False
Ex. Address = '127.0.0.3' the last char is 3. Now subtract 1 and we
have 2. Now check if 2 is divisible by 2. This case return True
"""
quantity_of_masters = instance.databaseinfra.instances.count() / 2
return instance.id in (instance.databaseinfra.instances.values_list(
'id', flat=True)[quantity_of_masters:])
@staticmethod
def create_instances_by_quant(infra, port=3306, qt=1, total_size_in_bytes=50,
used_size_in_bytes=25, instance_type=1,
base_address='127'):
"""
Helper create instances by quantity
"""
def _create(n):
return InstanceFactory(
databaseinfra=infra,
address='{0}.7{1}.{2}.{2}'.format(base_address, infra.id, n), port=port,
instance_type=instance_type,
total_size_in_bytes=total_size_in_bytes,
used_size_in_bytes=used_size_in_bytes
)
return map(_create, range(1, qt + 1))
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from physical.tests import factory as factory_physical
class InstanceHelper(object):
@staticmethod
def check_instance_is_master(instance):
"""
Method for mock the real check_instance_is_master.
This method return master if the last digit minus 1 of address
is divisible by 2
Ex. Address = '127.0.0.1' the last char is 1. Now subtract 1 and we
have 0. Now check if 0 is divisible by 2. This case return True
Ex. Address = '127.0.0.2' the last char is 2. Now subtract 1 and we
have 1. Now check if 1 is divisible by 2. This case return False
Ex. Address = '127.0.0.3' the last char is 3. Now subtract 1 and we
have 2. Now check if 2 is divisible by 2. This case return True
"""
n = int(instance.address.split('.')[-1]) - 1
return n % 2 == 0
@staticmethod
def create_instances_by_quant(infra, port=3306, qt=1, total_size_in_bytes=50,
used_size_in_bytes=25, instance_type=1):
"""
Helper create instances by quantity
"""
def _create(n):
return factory_physical.InstanceFactory(
databaseinfra=infra,
address='127.7{0}.{1}.{1}'.format(infra.id, n), port=port,
instance_type=instance_type,
total_size_in_bytes=total_size_in_bytes,
used_size_in_bytes=used_size_in_bytes
)
return map(_create, range(1, qt + 1))
|
bsd-3-clause
|
Python
|
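Aside: the parity rule described in the mock's docstring above, isolated into a runnable check (this mirrors the old helper's implementation):
def is_master(address):
    return (int(address.split('.')[-1]) - 1) % 2 == 0

assert is_master('127.0.0.1')
assert not is_master('127.0.0.2')
assert is_master('127.0.0.3')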
56aa00210b5adb663abea62ecd297f094dcbfeb0
|
remove prodigal from the subcommand module
|
widdowquinn/find_differential_primers,widdowquinn/find_differential_primers
|
diagnostic_primers/scripts/subcommands/__init__.py
|
diagnostic_primers/scripts/subcommands/__init__.py
|
# -*- coding: utf-8 -*-
"""Module providing subcommands for pdp."""
from .subcmd_config import subcmd_config
from .subcmd_filter import subcmd_filter
from .subcmd_eprimer3 import subcmd_eprimer3
from .subcmd_primersearch import subcmd_primersearch
from .subcmd_dedupe import subcmd_dedupe
from .subcmd_blastscreen import subcmd_blastscreen
from .subcmd_classify import subcmd_classify
from .subcmd_extract import subcmd_extract
from .subcmd_plot import subcmd_plot
|
# -*- coding: utf-8 -*-
"""Module providing subcommands for pdp."""
from .subcmd_config import subcmd_config
from .subcmd_prodigal import subcmd_prodigal
from .subcmd_filter import subcmd_filter
from .subcmd_eprimer3 import subcmd_eprimer3
from .subcmd_primersearch import subcmd_primersearch
from .subcmd_dedupe import subcmd_dedupe
from .subcmd_blastscreen import subcmd_blastscreen
from .subcmd_classify import subcmd_classify
from .subcmd_extract import subcmd_extract
from .subcmd_plot import subcmd_plot
|
mit
|
Python
|
f95cc3e8657ec3c03dd828bf0462df10b2897a5b
|
adjust host
|
geomin/djangovpshosting,geomin/djangovpshosting,geomin/djangovpshosting
|
djangovpshosting/djangovpshosting/settings_prod.py
|
djangovpshosting/djangovpshosting/settings_prod.py
|
from settings import *
DEBUG = False
ALLOWED_HOSTS = ['djangovpshosting.com']
|
from settings import *
DEBUG = False
ALLOWED_HOSTS = ['162.249.2.222']
|
mit
|
Python
|
ebb7f4ca18e099fb2902fa66cbb68c29baa98917
|
fix download_chromedriver.py to return fast when file exists
|
kboard/kboard,kboard/kboard,cjh5414/kboard,guswnsxodlf/k-board,guswnsxodlf/k-board,guswnsxodlf/k-board,cjh5414/kboard,hyesun03/k-board,cjh5414/kboard,darjeeling/k-board,kboard/kboard,hyesun03/k-board,hyesun03/k-board
|
dev/download_chromedriver.py
|
dev/download_chromedriver.py
|
#!/usr/bin/env python
import os, stat
import requests
import zipfile
DESTINATION_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'files')
DOWNLOAD_URL = "http://chromedriver.storage.googleapis.com"
MAC_DRIVER_NAME = 'chromedriver_mac64.zip'
if not os.path.exists(DESTINATION_DIR):
os.mkdir(DESTINATION_DIR)
def get_chromedriver_path():
destination_unzip_path = os.path.join(DESTINATION_DIR, 'chromedriver')
if not os.path.exists(destination_unzip_path):
return False
return destination_unzip_path
def get_chromedriver_latest_version():
url = DOWNLOAD_URL + '/LATEST_RELEASE'
return str(requests.get(url).content.strip()).replace("'", '')[1:]
def download(version='LATEST'):
destination_file_path = os.path.join(DESTINATION_DIR, MAC_DRIVER_NAME)
destination_unzip_path = os.path.join(DESTINATION_DIR, 'chromedriver')
if os.path.exists(destination_unzip_path):
return "{} driver exists".format(destination_unzip_path)
if version == 'LATEST':
download_version = get_chromedriver_latest_version()
else:
download_version = version
latest_path = "%s/%s/%s" % (DOWNLOAD_URL,
download_version, MAC_DRIVER_NAME)
with open(destination_file_path, 'wb') as f:
for chunk in requests.get(latest_path, stream=True).iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
with zipfile.ZipFile(destination_file_path, 'r') as f:
with open(destination_unzip_path, 'wb') as d:
d.write(f.read('chromedriver'))
st = os.stat(destination_unzip_path)
os.chmod(destination_unzip_path, (st.st_mode | stat.S_IEXEC))
return destination_unzip_path
if __name__ == '__main__':
print(download())
|
#!/usr/bin/env python
import os, stat
import requests
import zipfile
DESTINATION_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'files')
DOWNLOAD_URL = "http://chromedriver.storage.googleapis.com"
MAC_DRIVER_NAME = 'chromedriver_mac64.zip'
if not os.path.exists(DESTINATION_DIR):
os.mkdir(DESTINATION_DIR)
def get_chromedriver_path():
destination_unzip_path = os.path.join(DESTINATION_DIR, 'chromedriver')
if not os.path.exists(destination_unzip_path):
return False
return destination_unzip_path
def get_chromedriver_latest_version():
url = DOWNLOAD_URL + '/LATEST_RELEASE'
return str(requests.get(url).content.strip()).replace("'", '')[1:]
def download(version='LATEST'):
if version == 'LATEST':
download_version = get_chromedriver_latest_version()
else:
download_version = version
latest_path = "%s/%s/%s" % (DOWNLOAD_URL,
download_version, MAC_DRIVER_NAME)
destination_file_path = os.path.join(DESTINATION_DIR, MAC_DRIVER_NAME)
destination_unzip_path = os.path.join(DESTINATION_DIR, 'chromedriver')
with open(destination_file_path, 'wb') as f:
for chunk in requests.get(latest_path, stream=True).iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
with zipfile.ZipFile(destination_file_path, 'r') as f:
with open(destination_unzip_path, 'wb') as d:
d.write(f.read('chromedriver'))
st = os.stat(destination_unzip_path)
os.chmod(destination_unzip_path, (st.st_mode | stat.S_IEXEC))
return destination_unzip_path
if __name__ == '__main__':
print(download())
|
mit
|
Python
|
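Aside: the fix above is a guard clause — check for the existing artifact and return before any network work. A skeletal sketch (path and return value are hypothetical):
import os

def download(dest='files/chromedriver'):
    if os.path.exists(dest):
        return '{} driver exists'.format(dest)  # fast path: no download
    # ...fetch, unzip and chmod would happen here...
    return dest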
f9b38aa0f38e86a718d851057c26f945e6b872a9
|
Update BatteryAlarm.py
|
jaimeandrescatano/ekorre,jaimeandrescatano/ekorre,jaimeandrescatano/ekorre,jaimeandrescatano/ekorre
|
20140707-ProgramaDeAlertaBateria/BatteryAlarm.py
|
20140707-ProgramaDeAlertaBateria/BatteryAlarm.py
|
#!/usr/bin/env python
#coding=utf-8
# You need to edit
# sudo vim /etc/crontab
# and add: */15 * * * * root python /JAIMEANDRES/ArchivosSistema/BatteryAlarm.py
#
# Restart the cron service: sudo service cron stop / start
#
# This file requires the file ReproductorDeSonidos.py
# to be in the same folder
#
# To generate the .WAV sound files use:
# espeak -s 150 -v es-la -w BateriaCargada.wav "La bateria esta cargada, por favor desconectar el cargador."
#
# Or alternatively use:
# echo The battery is at 15% of charge, please plug the charger now. | text2wave >job.wav
# And to play the .wav file from the console:
# aplay job.wav
# Read the file that stores the battery's current charge level
with open("/sys/class/power_supply/BAT0/capacity") as f:
content = f.readlines()
# Convert the text to an integer
valor = int(content[0])
#~ print valor
# Close the file
f.close()
# Get the current status
with open("/sys/class/power_supply/BAT0/status") as f:
content = f.readlines()
estado = str(content[0])
# Close the file
f.close()
# Strip extra characters from the "estado" variable
estado = estado.replace(" ", "")
estado = estado.replace("\r", "")
estado = estado.replace("\n", "")
from ReproductorDeSonidos import ReproductorWAV
# Create the sound playback functions
def BateriaCargada():
Rep = ReproductorWAV("BateriaCargada.wav")
Rep.Reproducir()
def BateriaDescargada():
Rep = ReproductorWAV("BatteryAlert.wav")
Rep.Reproducir()
# Analyze the battery status
if valor > 95:
if estado != "Discharging":
BateriaCargada()
if valor <= 15:
if estado == "Discharging":
BateriaDescargada()
|
#!/usr/bin/env python
#coding=utf-8
# You need to edit
# sudo vim /etc/crontab
# and add: */15 * * * * root python /JAIMEANDRES/ArchivosSistema/BatteryAlarm.py
#
# Restart the cron service: sudo service cron stop / start
#
# This file requires the file ReproductorDeSonidos.py
# to be in the same folder
#
# To generate the .WAV sound files use:
# espeak -s 150 -v es-la -w BateriaCargada.wav "La bateria esta cargada, por favor desconectar el cargador."
#
# Or alternatively use:
# echo The battery is at 15% of charge, please plug the charger now. | text2wave >job.wav
# And to play the .wav file from the console:
# aplay job.wav
# Read the file that stores the battery's current charge level
with open("/sys/class/power_supply/BAT0/capacity") as f:
content = f.readlines()
# Convert the text to an integer
valor = int(content[0])
#~ print valor
# Close the file
f.close()
# Get the current status
with open("/sys/class/power_supply/BAT0/status") as f:
content = f.readlines()
estado = str(content[0])
# Close the file
f.close()
# Strip extra characters from the "estado" variable
estado = estado.replace(" ", "")
estado = estado.replace("\r", "")
estado = estado.replace("\n", "")
from ReproductorDeSonidos import ReproductorWAV
# Create the sound playback functions
def BateriaCargada():
Rep = ReproductorWAV("/JAIMEANDRES/ArchivosSistema/BateriaCargada.wav")
Rep.Reproducir()
def BateriaDescargada():
Rep = ReproductorWAV("/JAIMEANDRES/ArchivosSistema/BatteryAlert.wav")
Rep.Reproducir()
# Analyze the battery status
if valor > 95:
if estado != "Discharging":
BateriaCargada()
if valor <= 15:
if estado == "Discharging":
BateriaDescargada()
|
mit
|
Python
|
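Aside (Linux-only; the path exists only on machines with a BAT0 battery): the sysfs read at the heart of the script above, reduced to a standalone sketch:
with open('/sys/class/power_supply/BAT0/capacity') as f:
    level = int(f.read().strip())
print(level)  # current charge percentage, 0-100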
1256f695a441049438565285f48c9119e5211cf5
|
Enable follow redirection.
|
wildone/pyaem,Sensis/pyaem
|
pyaem/bagofrequests.py
|
pyaem/bagofrequests.py
|
import cStringIO
from handlers import unexpected as handle_unexpected
import pycurl
import requests
import urllib
def request(method, url, params, handlers, **kwargs):
curl = pycurl.Curl()
body_io = cStringIO.StringIO()
if method == 'post':
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(params))
else:
url = '{0}?{1}'.format(url, urllib.urlencode(params))
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycurl.WRITEFUNCTION, body_io.write)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE),
'body' : body_io.getvalue()
}
curl.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
def download_file(url, params, handlers, **kwargs):
curl = pycurl.Curl()
url = '{0}?{1}'.format(url, urllib.urlencode(params))
file = open(kwargs['file_name'], 'wb')
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycurl.WRITEDATA, file)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE)
}
curl.close()
file.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
def upload_file(url, params, handlers, **kwargs):
curl = pycurl.Curl()
body_io = cStringIO.StringIO()
_params = []
for key, value in params.iteritems():
_params.append((key, value))
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.HTTPPOST, _params)
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.FOLLOWLOCATION, 1)
curl.setopt(pycurl.WRITEFUNCTION, body_io.write)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE),
'body' : body_io.getvalue()
}
curl.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
|
import cStringIO
from handlers import unexpected as handle_unexpected
import pycurl
import requests
import urllib
def request(method, url, params, handlers, **kwargs):
curl = pycurl.Curl()
body_io = cStringIO.StringIO()
if method == 'post':
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.POSTFIELDS, urllib.urlencode(params))
else:
url = '{0}?{1}'.format(url, urllib.urlencode(params))
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.WRITEFUNCTION, body_io.write)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE),
'body' : body_io.getvalue()
}
curl.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
def download_file(url, params, handlers, **kwargs):
curl = pycurl.Curl()
url = '{0}?{1}'.format(url, urllib.urlencode(params))
file = open(kwargs['file_name'], 'wb')
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.WRITEDATA, file)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE)
}
curl.close()
file.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
def upload_file(url, params, handlers, **kwargs):
curl = pycurl.Curl()
body_io = cStringIO.StringIO()
_params = []
for key, value in params.iteritems():
_params.append((key, value))
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.HTTPPOST, _params)
curl.setopt(pycurl.URL, url)
curl.setopt(pycurl.WRITEFUNCTION, body_io.write)
curl.perform()
response = {
'http_code': curl.getinfo(pycurl.HTTP_CODE),
'body' : body_io.getvalue()
}
curl.close()
if response['http_code'] in handlers:
return handlers[response['http_code']](response, **kwargs)
else:
handle_unexpected(response, **kwargs)
|
mit
|
Python
|
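The only functional change in this commit is pycurl's FOLLOWLOCATION flag; a self-contained sketch of the pattern (the URL is a placeholder):

import pycurl
from io import BytesIO

buf = BytesIO()
curl = pycurl.Curl()
curl.setopt(pycurl.URL, 'http://example.com/')
curl.setopt(pycurl.FOLLOWLOCATION, 1)  # transparently follow 3xx redirects
curl.setopt(pycurl.MAXREDIRS, 5)       # optional cap on redirect chains
curl.setopt(pycurl.WRITEFUNCTION, buf.write)
curl.perform()
print(curl.getinfo(pycurl.HTTP_CODE))  # status of the final, post-redirect response
curl.close()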
7f7f32d032c68197b2152eeb8d9189f3d1493b57
|
Bump version number for development
|
MAndelkovic/pybinding,MAndelkovic/pybinding,dean0x7d/pybinding,MAndelkovic/pybinding,dean0x7d/pybinding,dean0x7d/pybinding
|
pybinding/__about__.py
|
pybinding/__about__.py
|
"""Package for numerical tight-binding calculations in solid state physics"""
__title__ = "pybinding"
__version__ = "0.9.0.dev"
__summary__ = "Package for tight-binding calculations"
__url__ = "https://github.com/dean0x7d/pybinding"
__author__ = "Dean Moldovan"
__copyright__ = "2015-2016, " + __author__
__email__ = "[email protected]"
__license__ = "BSD"
|
"""Package for numerical tight-binding calculations in solid state physics"""
__title__ = "pybinding"
__version__ = "0.8.1"
__summary__ = "Package for tight-binding calculations"
__url__ = "https://github.com/dean0x7d/pybinding"
__author__ = "Dean Moldovan"
__copyright__ = "2015-2016, " + __author__
__email__ = "[email protected]"
__license__ = "BSD"
|
bsd-2-clause
|
Python
|
771daafda877050c8fe23b034a0c51ec97502715
|
update code which generates list of possible article names
|
ClintonMorrison/personal-website,ClintonMorrison/personal-website,ClintonMorrison/personal-website,ClintonMorrison/personal-website,ClintonMorrison/personal-website,ClintonMorrison/personal-website
|
pages/controllers/blog_article.py
|
pages/controllers/blog_article.py
|
from core import database as database
from core.exceptions import NotFoundError, ServerError
from core.markdown import MarkdownParser
from core.article_helpers import get_article, get_all_articles
import core.functions
import yaml
def get_page_data(path, get, post, variables):
article = get_article(get.get('name', ''))
if not article:
raise NotFoundError("No article with name: '{}'".format(get.get('name', '')))
markdownParser = MarkdownParser('blog/%s/' % (article.get('name')))
    raw_article = article['body']
article['body'] = markdownParser.render(article['body'])
return {
'article': article,
'title': article.get('title', ''),
        'raw_article': raw_article
}
def get_possible_paths():
articles = get_all_articles()
queries = []
for article in articles:
queries.append('blog/%s' % article.get('name'))
return queries
|
from core import database as database
from core.exceptions import NotFoundError, ServerError
from core.markdown import MarkdownParser
from core.article_helpers import get_article
import core.functions
import yaml
def get_page_data(path, get, post, variables):
article = get_article(get.get('name', ''))
if not article:
raise NotFoundError("No article with name: '{}'".format(get.get('name', '')))
markdownParser = MarkdownParser('blog/%s/' % (article.get('name')))
    raw_article = article['body']
article['body'] = markdownParser.render(article['body'])
return {
'article': article,
'title': article.get('title', ''),
        'raw_article': raw_article
}
def get_possible_paths():
articles = database.Table('article').filter()
queries = []
for article in articles:
queries.append('blog/%s' % article.get('name'))
return queries
|
apache-2.0
|
Python
|
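The commit swaps an inline database query for the get_all_articles helper; a plausible shape for that helper (hypothetical; the real one lives in core.article_helpers) is simply the query it replaced:

def get_all_articles():
    # Same query the view previously issued inline.
    return database.Table('article').filter()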
7060f48df582dcfae1768cc37d00a25e0e2e1f6f
|
Comment post endpoint returns JSON; fix issue saving comments by adding post id and converting it to int
|
oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog
|
app/views/comment_view.py
|
app/views/comment_view.py
|
from flask import jsonify
from flask_classy import FlaskView
from flask_user import current_user, login_required
from ..models import CommentModel, PostModel
from ..forms import CommentForm
class Comment(FlaskView):
def get(self):
pass
def all(self, post_id):
comment = CommentModel()
comment.query.add_filter('post_id', '=', int(post_id))
return jsonify(comment.fetch())
@login_required
def post(self, post_id):
form = CommentForm()
if form.validate_on_submit():
post = PostModel().get(post_id)
post = PostModel(**post)
comment = CommentModel(user=current_user.username,
post_id=int(post_id),
**form.data)
comment.put()
post.add_comment(comment.id)
return jsonify(comment.data)
return "form.errors"
|
from flask import jsonify
from flask_classy import FlaskView
from flask_user import current_user, login_required
from ..models import CommentModel, PostModel
from ..forms import CommentForm
class Comment(FlaskView):
def get(self):
pass
def all(self, post_id):
comment = CommentModel()
comment.query.add_filter('post_id', '=', int(post_id))
return jsonify(comment.fetch())
@login_required
def post(self, post_id):
form = CommentForm()
if form.validate_on_submit():
post = PostModel().get(post_id)
post = PostModel(**post)
comment = CommentModel(user=current_user.username, **form.data)
comment.put()
post.add_comment(comment.id)
return "ALEYUYA"
return "form.errors"
|
mit
|
Python
|
c1044e25e18afd78b3fda8fd9b00a4f67cfbbc65
|
allow markdownlint to be disabled for specific lines (#4)
|
jorisroovers/pymarkdownlint,jorisroovers/pymarkdownlint
|
pymarkdownlint/lint.py
|
pymarkdownlint/lint.py
|
from __future__ import print_function
from pymarkdownlint import rules
class MarkdownLinter(object):
def __init__(self, config):
self.config = config
@property
def line_rules(self):
return [rule for rule in self.config.rules if isinstance(rule, rules.LineRule)]
def _apply_line_rules(self, markdown_string):
""" Iterates over the lines in a given markdown string and applies all the enabled line rules to each line """
all_violations = []
lines = markdown_string.split("\n")
line_rules = self.line_rules
line_nr = 1
ignoring = False
for line in lines:
if ignoring:
if line.strip() == '<!-- markdownlint:enable -->':
ignoring = False
else:
if line.strip() == '<!-- markdownlint:disable -->':
ignoring = True
continue
for rule in line_rules:
violation = rule.validate(line)
if violation:
violation.line_nr = line_nr
all_violations.append(violation)
line_nr += 1
return all_violations
def lint(self, markdown_string):
all_violations = []
all_violations.extend(self._apply_line_rules(markdown_string))
return all_violations
def lint_files(self, files):
""" Lints a list of files.
:param files: list of files to lint
:return: a list of violations found in the files
"""
all_violations = []
for filename in files:
with open(filename, 'r') as f:
content = f.read()
violations = self.lint(content)
all_violations.extend(violations)
for e in violations:
print("{0}:{1}: {2} {3}".format(filename, e.line_nr, e.rule_id, e.message))
return len(all_violations)
|
from __future__ import print_function
from pymarkdownlint import rules
class MarkdownLinter(object):
def __init__(self, config):
self.config = config
@property
def line_rules(self):
return [rule for rule in self.config.rules if isinstance(rule, rules.LineRule)]
def _apply_line_rules(self, markdown_string):
""" Iterates over the lines in a given markdown string and applies all the enabled line rules to each line """
all_violations = []
lines = markdown_string.split("\n")
line_rules = self.line_rules
line_nr = 1
for line in lines:
for rule in line_rules:
violation = rule.validate(line)
if violation:
violation.line_nr = line_nr
all_violations.append(violation)
line_nr += 1
return all_violations
def lint(self, markdown_string):
all_violations = []
all_violations.extend(self._apply_line_rules(markdown_string))
return all_violations
def lint_files(self, files):
""" Lints a list of files.
:param files: list of files to lint
:return: a list of violations found in the files
"""
all_violations = []
for filename in files:
with open(filename, 'r') as f:
content = f.read()
violations = self.lint(content)
all_violations.extend(violations)
for e in violations:
print("{0}:{1}: {2} {3}".format(filename, e.line_nr, e.rule_id, e.message))
return len(all_violations)
|
mit
|
Python
|
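A document toggles rules with the exact HTML comments the loop above matches; for instance, fed to the linter as a string:

doc = """this line is linted
<!-- markdownlint:disable -->
this deliberately very long line would trip a line-length rule but is skipped
<!-- markdownlint:enable -->
this line is linted again"""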
e4ecc0f8049f1388188f0a64b373a7e90b2dc1e9
|
Update at 2017-07-22 15-01-48
|
amoshyc/tthl-code
|
plot.py
|
plot.py
|
from sys import argv
from pathlib import Path
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name):
df = pd.read_csv(log)
graph = Path('./graph/')
loss_path = graph / (name + '_loss.svg')
acc_path = graph / (name + '_acc.svg')
keys = ['loss', 'val_loss']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('loss(binary crossentropy)')
plt.savefig(str(loss_path))
keys = ['binary_accuracy', 'val_binary_accuracy']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('accuracy')
plt.savefig(str(acc_path))
if __name__ == '__main__':
log, name = argv[1], argv[2]
plot_svg(log, name)
|
from sys import argv
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name):
df = pd.read_csv(log)
graph = Path('./graph/')
loss_path = graph / (name + '_loss.svg')
acc_path = graph / (name + '_acc.svg')
keys = ['loss', 'val_loss']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('loss(binary crossentropy)')
plt.savefig(str(loss_path))
keys = ['binary_accuracy', 'val_binary_accuracy']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('accuracy')
plt.savefig(str(acc_path))
if __name__ == '__main__':
log, name = argv[1], argv[2]
plot_svg(log, name)
|
apache-2.0
|
Python
|
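The fix adds the pathlib import that plot_svg's path arithmetic silently depended on; Path's '/' operator composes paths like so:

from pathlib import Path

graph = Path('./graph/')
print(graph / ('run1' + '_loss.svg'))  # graph/run1_loss.svg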
dc4bc70ad3f13b8ff400f6c8f999b555096a75cb
|
Update test cases for conf module
|
chenjiandongx/pyecharts,chenjiandongx/pyecharts,chenjiandongx/pyecharts
|
test/test_conf.py
|
test/test_conf.py
|
# coding=utf8
"""
Test Cases for jshost.
Input: a PyEchartsConfig object with custom jshost and force_embed flag set by the user.
Test Target: js_embed (should render <script> in embed mode)
"""
from __future__ import unicode_literals
from nose.tools import eq_
from pyecharts.conf import PyEchartsConfig
from pyecharts.constants import DEFAULT_HOST, SCRIPT_LOCAL_JSHOST, JUPYTER_LOCAL_JSHOST
def test_with_default_value():
target_config = PyEchartsConfig()
eq_(SCRIPT_LOCAL_JSHOST, target_config.jshost)
eq_(SCRIPT_LOCAL_JSHOST, target_config.get_current_jshost_for_script())
eq_(JUPYTER_LOCAL_JSHOST, target_config.get_current_jshost_for_jupyter())
assert target_config.js_embed
target_config.force_js_embed = True
assert target_config.js_embed
def test_pyecharts_remote_jshost():
target_config = PyEchartsConfig(jshost=DEFAULT_HOST)
eq_('https://chfw.github.io/jupyter-echarts/echarts', target_config.jshost)
eq_('https://chfw.github.io/jupyter-echarts/echarts', target_config.get_current_jshost_for_script())
eq_('https://chfw.github.io/jupyter-echarts/echarts', target_config.get_current_jshost_for_jupyter())
eq_('/static/js/echarts', target_config.get_current_jshost_for_jupyter('/static/js/echarts'))
assert target_config.js_embed
target_config.force_js_embed = True
assert target_config.js_embed
def test_custom_local_jshost():
target_config = PyEchartsConfig(jshost='/static/js/')
eq_('/static/js', target_config.jshost)
eq_('/static/js', target_config.get_current_jshost_for_script())
eq_('/static/js', target_config.get_current_jshost_for_jupyter())
eq_('/static/js/echarts', target_config.get_current_jshost_for_jupyter('/static/js/echarts'))
assert not target_config.js_embed
target_config.force_js_embed = True
assert target_config.js_embed
def test_custom_remote_jshost():
target_config = PyEchartsConfig(jshost='https://cdn.bootcss.com/echarts/3.7.2/')
eq_('https://cdn.bootcss.com/echarts/3.7.2', target_config.jshost)
eq_('https://cdn.bootcss.com/echarts/3.7.2', target_config.get_current_jshost_for_script())
eq_('https://cdn.bootcss.com/echarts/3.7.2', target_config.get_current_jshost_for_jupyter())
eq_('/static/js/echarts', target_config.get_current_jshost_for_jupyter('/static/js/echarts'))
assert not target_config.js_embed
target_config.force_js_embed = True
assert target_config.js_embed
|
# coding=utf8
from __future__ import unicode_literals
from pyecharts.conf import PyEchartsConfig
def test_config():
pec = PyEchartsConfig(jshost='https://demo')
assert pec.jshost == 'https://demo'
pec.jshost = 'https://demo/'
assert pec.jshost == 'https://demo'
pec.force_js_embed = True
assert pec.js_embed
pec.force_js_embed = False
assert not pec.js_embed
pec.jshost = '/templates/js/'
assert pec.jshost == '/templates/js'
pec.force_js_embed = True
assert pec.js_embed
pec.force_js_embed = False
assert not pec.js_embed
pec1 = PyEchartsConfig(jshost='http://demo/')
assert pec1.jshost == 'http://demo'
|
mit
|
Python
|
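The assertions above all rely on trailing-slash normalization of jshost; a sketch of that behavior (not the library's actual implementation):

def normalize_jshost(jshost):
    # '/static/js/' and '/static/js' are treated identically.
    return jshost.rstrip('/')

assert normalize_jshost('https://cdn.bootcss.com/echarts/3.7.2/') == 'https://cdn.bootcss.com/echarts/3.7.2'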
6e7dfe97cdce58f892f88560e4b4709e6625e6bd
|
Clean up package level imports
|
aitatanit/metatlas,aitatanit/metatlas,aitatanit/metatlas,metabolite-atlas/metatlas,biorack/metatlas,biorack/metatlas,metabolite-atlas/metatlas,metabolite-atlas/metatlas
|
metatlas/__init__.py
|
metatlas/__init__.py
|
__version__ = '0.2'
from .mzml_loader import mzml_to_hdf
from .h5_query import plot_heatmap, plot_spectrogram, plot_XIC
from .h5_query import get_data, get_XIC, get_heatmap, get_spectrogram
|
__version__ = '0.2'
from .mzml_loader import mzml_to_hdf
from .h5_query import plot_heatmap, plot_spectrogram, plot_xic
from .h5_query import get_data, get_XIC, get_HeatMapRTMZ, get_spectrogram
|
bsd-3-clause
|
Python
|
db6a6da8fe1bdd73fbd971153a4fda6975fc7b4e
|
update version
|
yupenghe/methylpy,yupenghe/methylpy
|
methylpy/__init__.py
|
methylpy/__init__.py
|
__version__ = '1.2.9'
|
__version__ = '1.2.8'
|
apache-2.0
|
Python
|
8234a22ca090c38b80ffd650b490d1dd8cbe766d
|
test for fix/18
|
csirtgadgets/csirtg-indicator-py,csirtgadgets/csirtg-indicator-py
|
test/test_ipv4.py
|
test/test_ipv4.py
|
from csirtg_indicator import Indicator
from csirtg_indicator.exceptions import InvalidIndicator
def _not(data):
for d in data:
d = Indicator(d)
        assert d.itype != 'ipv4'
def test_ipv4_ipv6():
data = ['2001:1608:10:147::21', '2001:4860:4860::8888']
_not(data)
def test_ipv4_fqdn():
data = ['example.org', '1.2.3.4.com', 'xn----jtbbmekqknepg3a.xn--p1ai']
_not(data)
def test_ipv4_urls():
data = [
'http://192.168.1.1/1.html',
'http://www41.xzmnt.com',
'http://get.ahoybest.com/n/3.6.16/12205897/microsoft lync server 2010.exe'
]
_not(data)
def test_ipv4_ok():
data = ['192.168.1.0/24', '192.168.1.1', '255.255.255.255']
for d in data:
        assert Indicator(indicator=d).itype == 'ipv4'
def test_ipv4_nok():
data = ['127.0.0.0/1', '128.205.0.0/8']
for d in data:
try:
Indicator(indicator=d)
except InvalidIndicator as e:
pass
else:
raise SystemError('mis-handled network')
def test_ipv4_private():
data = [
'128.205.1.0/24', '2001:1608:10:147::21', '2001:4860::8888/64',
u'106.51.30.0', '112.133.246.73'
]
for d in data:
assert not Indicator(indicator=d).is_private()
assert Indicator('172.16.30.32').is_private()
|
from csirtg_indicator import Indicator
from csirtg_indicator.exceptions import InvalidIndicator
def _not(data):
for d in data:
d = Indicator(d)
        assert d.itype != 'ipv4'
def test_ipv4_ipv6():
data = ['2001:1608:10:147::21', '2001:4860:4860::8888']
_not(data)
def test_ipv4_fqdn():
data = ['example.org', '1.2.3.4.com', 'xn----jtbbmekqknepg3a.xn--p1ai']
_not(data)
def test_ipv4_urls():
data = [
'http://192.168.1.1/1.html',
'http://www41.xzmnt.com',
'http://get.ahoybest.com/n/3.6.16/12205897/microsoft lync server 2010.exe'
]
_not(data)
def test_ipv4_ok():
data = ['192.168.1.0/24', '192.168.1.1', '255.255.255.255']
for d in data:
        assert Indicator(indicator=d).itype == 'ipv4'
def test_ipv4_nok():
data = ['127.0.0.0/1', '128.205.0.0/8']
for d in data:
try:
Indicator(indicator=d)
except InvalidIndicator as e:
pass
else:
raise SystemError('mis-handled network')
def test_ipv4_private():
data = [
'128.205.1.0/24', '2001:1608:10:147::21', '2001:4860::8888/64',
u'106.51.30.0', '112.133.246.73'
]
for d in data:
assert not Indicator(indicator=d).is_private()
assert Indicator('192.168.1.1').is_private()
|
mpl-2.0
|
Python
|
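The updated assertion targets an address in the RFC 1918 range 172.16.0.0/12 rather than 192.168.1.1; the standard library agrees on both, while the negative fixtures stay public:

import ipaddress

print(ipaddress.ip_address('172.16.30.32').is_private)    # True
print(ipaddress.ip_address('192.168.1.1').is_private)     # True
print(ipaddress.ip_address('112.133.246.73').is_private)  # False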
a8090276b86e12a798be56000dc9831b07544ead
|
disable review test for now
|
sat-utils/sat-search
|
test/test_main.py
|
test/test_main.py
|
import os
import sys
import unittest
from mock import patch
import json
import shutil
import satsearch.main as main
import satsearch.config as config
from nose.tools import raises
testpath = os.path.dirname(__file__)
config.DATADIR = testpath
class Test(unittest.TestCase):
""" Test main module """
args = '--date 2017-01-01 --satellite_name Landsat-8'.split(' ')
def test_main(self):
""" Run main function """
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8')
self.assertEqual(len(scenes.scenes), 564)
def test_main_options(self):
""" Test main program with output options """
fname = os.path.join(testpath, 'test_main-save.json')
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8', save=fname, printsearch=True, printcal=True, printmd=[])
self.assertEqual(len(scenes.scenes), 564)
self.assertTrue(os.path.exists(fname))
os.remove(fname)
self.assertFalse(os.path.exists(fname))
@raises(ValueError)
def _test_main_review_error(self):
""" Run review feature without envvar set """
        os.environ.pop('IMGCAT', None)  # os.setenv does not exist in the stdlib; unset the variable instead
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8', review=True)
def test_cli(self):
""" Run CLI program """
with patch.object(sys, 'argv', ['testprog'] + self.args):
n = main.cli()
self.assertEqual(n, 564)
def test_main_download(self):
""" Test main program with downloading """
with open(os.path.join(testpath, 'aoi1.geojson')) as f:
aoi = json.dumps(json.load(f))
scenes = main.main(date_from='2017-01-05', date_to='2017-01-21', satellite_name='Landsat-8',
intersects=aoi, download=['thumb', 'MTL'])
for scene in scenes.scenes:
self.assertTrue(os.path.exists(scene.filenames['thumb']))
self.assertTrue(os.path.exists(scene.filenames['MTL']))
shutil.rmtree(os.path.join(testpath, scene.platform))
|
import os
import sys
import unittest
from mock import patch
import json
import shutil
import satsearch.main as main
import satsearch.config as config
from nose.tools import raises
testpath = os.path.dirname(__file__)
config.DATADIR = testpath
class Test(unittest.TestCase):
""" Test main module """
args = '--date 2017-01-01 --satellite_name Landsat-8'.split(' ')
def test_main(self):
""" Run main function """
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8')
self.assertEqual(len(scenes.scenes), 564)
def test_main_options(self):
""" Test main program with output options """
fname = os.path.join(testpath, 'test_main-save.json')
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8', save=fname, printsearch=True, printcal=True, printmd=[])
self.assertEqual(len(scenes.scenes), 564)
self.assertTrue(os.path.exists(fname))
os.remove(fname)
self.assertFalse(os.path.exists(fname))
@raises(ValueError)
def test_main_review_error(self):
""" Run review feature without envvar set """
scenes = main.main(date='2017-01-01', satellite_name='Landsat-8', review=True)
def test_cli(self):
""" Run CLI program """
with patch.object(sys, 'argv', ['testprog'] + self.args):
n = main.cli()
self.assertEqual(n, 564)
def test_main_download(self):
""" Test main program with downloading """
with open(os.path.join(testpath, 'aoi1.geojson')) as f:
aoi = json.dumps(json.load(f))
scenes = main.main(date_from='2017-01-05', date_to='2017-01-21', satellite_name='Landsat-8',
intersects=aoi, download=['thumb', 'MTL'])
for scene in scenes.scenes:
self.assertTrue(os.path.exists(scene.filenames['thumb']))
self.assertTrue(os.path.exists(scene.filenames['MTL']))
shutil.rmtree(os.path.join(testpath, scene.platform))
|
mit
|
Python
|
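Renaming the method with a leading underscore hides it from nose's default collector; an explicit alternative would be a skip decorator:

import unittest

class ReviewTest(unittest.TestCase):
    @unittest.skip("review feature disabled for now")
    def test_review_error(self):
        self.fail("never runs")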
aa3e36cc37b2ddcc5d166965f8abeff560e6b0f1
|
Use test database on alembic when necessary
|
frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io
|
migrations/config.py
|
migrations/config.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import logging
from logging.handlers import SysLogHandler
from dotenv import load_dotenv
load_dotenv('.env')
# Storage
if not os.environ.get('TESTING'):
DATABASE_URL = os.environ['DATABASE_URL']
else:
DATABASE_URL = os.environ['TEST_DATABASE_URL']
# Logging
logging.basicConfig(level=logging.DEBUG)
if os.environ.get('LOGGING_URL', None):
root_logger = logging.getLogger()
host, port = os.environ['LOGGING_URL'].split(':')
syslog_handler = SysLogHandler(address=(host, int(port)))
syslog_handler.setLevel(logging.INFO)
root_logger.addHandler(syslog_handler)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import logging
from logging.handlers import SysLogHandler
from dotenv import load_dotenv
load_dotenv('.env')
# Storage
DATABASE_URL = os.environ['DATABASE_URL']
# Logging
logging.basicConfig(level=logging.DEBUG)
if os.environ.get('LOGGING_URL', None):
root_logger = logging.getLogger()
host, port = os.environ['LOGGING_URL'].split(':')
syslog_handler = SysLogHandler(address=(host, int(port)))
syslog_handler.setLevel(logging.INFO)
root_logger.addHandler(syslog_handler)
|
agpl-3.0
|
Python
|
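The added branch is the usual test-database switch; condensed, it is one conditional over the environment (both variables are assumed to be defined in .env):

import os

DATABASE_URL = (os.environ['TEST_DATABASE_URL'] if os.environ.get('TESTING')
                else os.environ['DATABASE_URL'])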
0d8766849bedea43cf2eab006327cb942f61c3af
|
add testing function
|
sampsyo/confit
|
test/test_yaml.py
|
test/test_yaml.py
|
from __future__ import division, absolute_import, print_function
import confuse
import yaml
import unittest
from . import TempDir
def load(s):
return yaml.load(s, Loader=confuse.Loader)
class ParseTest(unittest.TestCase):
def test_dict_parsed_as_ordereddict(self):
v = load("a: b\nc: d")
self.assertTrue(isinstance(v, confuse.OrderedDict))
self.assertEqual(list(v), ['a', 'c'])
def test_string_beginning_with_percent(self):
v = load("foo: %bar")
self.assertEqual(v['foo'], '%bar')
class FileParseTest(unittest.TestCase):
def _parse_contents(self, contents):
with TempDir() as temp:
path = temp.sub('test_config.yaml', contents)
return confuse.load_yaml(path)
def test_load_file(self):
v = self._parse_contents(b'foo: bar')
self.assertEqual(v['foo'], 'bar')
def test_syntax_error(self):
try:
self._parse_contents(b':')
except confuse.ConfigError as exc:
self.assertTrue('test_config.yaml' in exc.filename)
else:
self.fail('ConfigError not raised')
def test_reload_conf(self):
with TempDir() as temp:
path = temp.sub('test_config.yaml', b'foo: bar')
config = confuse.Configuration('test', __name__)
config.set_file(filename=path)
self.assertEqual(config['foo'].get(), 'bar')
temp.sub('test_config.yaml', b'foo: bar2\ntest: hello world')
config.reload()
self.assertEqual(config['foo'].get(), 'bar2')
self.assertEqual(config['test'].get(), 'hello world')
def test_tab_indentation_error(self):
try:
self._parse_contents(b"foo:\n\tbar: baz")
except confuse.ConfigError as exc:
self.assertTrue('found tab' in exc.args[0])
else:
self.fail('ConfigError not raised')
|
from __future__ import division, absolute_import, print_function
import confuse
import yaml
import unittest
from . import TempDir
def load(s):
return yaml.load(s, Loader=confuse.Loader)
class ParseTest(unittest.TestCase):
def test_dict_parsed_as_ordereddict(self):
v = load("a: b\nc: d")
self.assertTrue(isinstance(v, confuse.OrderedDict))
self.assertEqual(list(v), ['a', 'c'])
def test_string_beginning_with_percent(self):
v = load("foo: %bar")
self.assertEqual(v['foo'], '%bar')
class FileParseTest(unittest.TestCase):
def _parse_contents(self, contents):
with TempDir() as temp:
path = temp.sub('test_config.yaml', contents)
return confuse.load_yaml(path)
def test_load_file(self):
v = self._parse_contents(b'foo: bar')
self.assertEqual(v['foo'], 'bar')
def test_syntax_error(self):
try:
self._parse_contents(b':')
except confuse.ConfigError as exc:
self.assertTrue('test_config.yaml' in exc.filename)
else:
self.fail('ConfigError not raised')
def test_tab_indentation_error(self):
try:
self._parse_contents(b"foo:\n\tbar: baz")
except confuse.ConfigError as exc:
self.assertTrue('found tab' in exc.args[0])
else:
self.fail('ConfigError not raised')
|
mit
|
Python
|
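The new test exercises confuse's reload path; in application code the same flow looks roughly like this (assumes a 'test' app with a config file on disk):

import confuse

config = confuse.Configuration('test', __name__)
config.set_file('test_config.yaml')
# ... the file changes on disk ...
config.reload()  # re-read every configured source
print(config['foo'].get())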
c08e25172d362176c8abed3d2bf54c2cf13da303
|
Fix test for settings_helpers
|
stscieisenhamer/glue,stscieisenhamer/glue,saimn/glue,saimn/glue
|
glue/tests/test_settings_helpers.py
|
glue/tests/test_settings_helpers.py
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings(force=True)
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
from mock import patch
import os
from glue.config import SettingRegistry
from glue._settings_helpers import load_settings, save_settings
def test_roundtrip(tmpdir):
settings = SettingRegistry()
settings.add('STRING', 'green', str)
settings.add('INT', 3, int)
settings.add('FLOAT', 5.5, float)
settings.add('LIST', [1,2,3], list)
with patch('glue.config.settings', settings):
with patch('glue.config.CFG_DIR', tmpdir.strpath):
settings.STRING = 'blue'
settings.INT = 4
settings.FLOAT = 3.5
settings.LIST = ['A', 'BB', 'CCC']
save_settings()
assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg'))
settings.STRING = 'red'
settings.INT = 3
settings.FLOAT = 4.5
settings.LIST = ['DDD', 'EE', 'F']
load_settings()
assert settings.STRING == 'blue'
assert settings.INT == 4
assert settings.FLOAT == 3.5
assert settings.LIST == ['A', 'BB', 'CCC']
|
bsd-3-clause
|
Python
|
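Passing force=True suggests load_settings guards against repeat loads; a sketch of that guard pattern (hypothetical, not glue's actual code):

_loaded = False

def load_settings(force=False):
    global _loaded
    if _loaded and not force:
        return  # settings were already read once; do nothing
    _loaded = True
    # ... read settings.cfg and populate the registry here ...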
01917a077681949d29eb48173e031b5dfd441e0d
|
update angle.py
|
UMOL/MolecularGeometry.jl
|
test/function/angle/angle.py
|
test/function/angle/angle.py
|
import numpy as np
def angle2D(vec1,vec2):
length1 = np.linalg.norm(vec1)
length2 = np.linalg.norm(vec2)
print("length ", length1, length2)
if length1 < 1e-16:
return 0.
if length2 < 1e-16:
return 0.
return np.arccos(np.dot(vec1,vec2)/(length1*length2))
def angle3D(vec1, vec2):
# return the angle
v1 = vec1[[0,1]]
v2 = vec2[[0,1]]
a3 = angle2D(v1,v2)
v1 = vec1[[0,2]]
v2 = vec2[[0,2]]
a2 = angle2D(v1,v2)
v1 = vec1[[1,2]]
v2 = vec2[[1,2]]
a1 = angle2D(v1,v2)
return np.array([a1,a2,a3])
np.set_printoptions(precision=18)
v1 = np.array([-5.908280911892572,-1.04509170227587,-3.0])
v_ref = np.array([-5.908280911892572,-1.04509170227587,-8.0])
v1 = np.array([0., 0., 1.])
v_ref = np.array([-6.758097397797128,6.190970809322855,4.0])
print(angle3D(v1,v_ref), )
|
import numpy as np
def angle2D(vec1,vec2):
length1 = np.linalg.norm(vec1)
length2 = np.linalg.norm(vec2)
print("length ", length1, length2)
return np.arccos(np.dot(vec1,vec2)/(length1*length2))
def angle3D(vec1, vec2):
# return the angle
v1 = vec1[[0,1]]
v2 = vec2[[0,1]]
a3 = angle2D(v1,v2)
v1 = vec1[[0,2]]
v2 = vec2[[0,2]]
a2 = angle2D(v1,v2)
v1 = vec1[[1,2]]
v2 = vec2[[1,2]]
a1 = angle2D(v1,v2)
return np.array([a1,a2,a3])
np.set_printoptions(precision=18)
v1 = np.array([-5.908280911892572,-1.04509170227587,-3.0])
v_ref = np.array([-5.908280911892572,-1.04509170227587,-8.0])
print(angle3D(v1,v_ref), )
|
mit
|
Python
|
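The added length guards avoid dividing by a zero vector norm, which otherwise propagates nan through arccos; a quick demonstration:

import numpy as np

v, w = np.array([0., 0.]), np.array([1., 0.])
with np.errstate(invalid='ignore', divide='ignore'):
    print(np.arccos(np.dot(v, w) / (np.linalg.norm(v) * np.linalg.norm(w))))  # nan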
2b3281863f11fa577dd6504e58f6faec8ada2259
|
Change order of API call
|
jakereps/qiime-studio-frontend,jakereps/qiime-studio,qiime2/qiime-studio,qiime2/qiime-studio,jakereps/qiime-studio,qiime2/qiime-studio-frontend,qiime2/qiime-studio-frontend,qiime2/qiime-studio,jakereps/qiime-studio,jakereps/qiime-studio-frontend
|
qiime_studio/api/v1.py
|
qiime_studio/api/v1.py
|
from flask import Blueprint, jsonify
from .security import validate_request_authentication
from qiime.sdk import PluginManager
PLUGIN_MANAGER = PluginManager()
v1 = Blueprint('v1', __name__)
v1.before_request(validate_request_authentication)
@v1.route('/', methods=['GET', 'POST'])
def root():
return jsonify(content="!")
@v1.route('/plugins', methods=['GET'])
def api_plugins():
plugin_list = list(PLUGIN_MANAGER.plugins.keys())
return jsonify({"names": plugin_list})
@v1.route('/<plugin_name>/workflows', methods=['GET'])
def api_workflows(plugin_name):
plugin = PLUGIN_MANAGER.plugins[plugin_name]
workflows_dict = {}
for key, value in plugin.workflows.items():
workflows_dict[key] = {}
workflows_dict[key]['info'] = "Produces: {}".format(list(value.signature.output_artifacts.values()))
return jsonify({"workflows": workflows_dict})
|
from flask import Blueprint, jsonify
from .security import validate_request_authentication
from qiime.sdk import PluginManager
PLUGIN_MANAGER = PluginManager()
v1 = Blueprint('v1', __name__)
v1.before_request(validate_request_authentication)
@v1.route('/', methods=['GET', 'POST'])
def root():
return jsonify(content="!")
@v1.route('/plugins', methods=['GET'])
def api_plugins():
plugin_list = list(PLUGIN_MANAGER.plugins.keys())
return jsonify({"names": plugin_list})
@v1.route('/workflows/<plugin_name>', methods=['GET'])
def api_workflows(plugin_name):
plugin = PLUGIN_MANAGER.plugins[plugin_name]
workflows_dict = {}
for key, value in plugin.workflows.items():
workflows_dict[key] = {}
workflows_dict[key]['info'] = "Produces: {}".format(list(value.signature.output_artifacts.values()))
return jsonify({"workflows": workflows_dict})
|
bsd-3-clause
|
Python
|
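The route change scopes workflows under the plugin name; a tiny Flask sketch of the resulting URL shape:

from flask import Blueprint, jsonify

v1 = Blueprint('v1', __name__)

@v1.route('/<plugin_name>/workflows', methods=['GET'])  # was /workflows/<plugin_name>
def api_workflows(plugin_name):
    return jsonify({'plugin': plugin_name, 'workflows': {}})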
896482b83ad75c445e72dbb0eb6bc7246662f699
|
access token is adjusted
|
ComputerProject2Team/SkyBot
|
skybotapp/views.py
|
skybotapp/views.py
|
import json
import requests
from pprint import pprint
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
# yomamabot/fb_yomamabot/views.py
from django.views import generic
from django.http.response import HttpResponse
from django.template.context_processors import request
# Create your views here.
def post_facebook_message(fbid, received_message):
post_message_url = 'https://graph.facebook.com/v2.6/me/messages?access_token=<EAASfh0TDd8cBAHBMfkWQGAexatTOup01lZCXtUJ5CF5Imr5b7MeQu30v6TnEzQmvoJF9MZBzkoZBdhLaVcCSY2BtPivUNJh7pic5vfEA13qDr3TRQLuHn8aKpKZAip4X2QHqhBTa7XQNGPnII1cqNMP46gAaRYMzHHSnZA4NZCAwZDZD>'
    response_msg = json.dumps({"recipient":{"id":fbid}, "message":{"text":received_message}})
status = requests.post(post_message_url, headers={"Content-Type": "application/json"},data=response_msg)
pprint(status.json())
class SkyBotView(generic.View):
# def get(self, request, *args, **kwargs):
# if self.request.GET['hub.verify_token'] == '93985762':
# return HttpResponse(self.request.GET['hub.challenge'])
# else:
# return HttpResponse('Error, invalid token')
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return generic.View.dispatch(self, request, *args, **kwargs)
# Post function to handle Facebook messages
def post(self, request, *args, **kwargs):
# Converts the text payload into a python dictionary
incoming_message = json.loads(self.request.body.decode('utf-8'))
# Facebook recommends going through every entry since they might send
# multiple messages in a single call during high load
for entry in incoming_message['entry']:
for message in entry['messaging']:
# Check to make sure the received call is a message call
# This might be delivery, optin, postback for other events
if 'message' in message:
# Print the message to the terminal
pprint(message)
post_facebook_message(message['sender']['id'], message['message']['text'])
return HttpResponse()
def homeView(request):
return HttpResponse('Hello')
|
import json
import requests
from pprint import pprint
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
# yomamabot/fb_yomamabot/views.py
from django.views import generic
from django.http.response import HttpResponse
from django.template.context_processors import request
# Create your views here.
def post_facebook_message(fbid, received_message):
post_message_url = 'https://graph.facebook.com/v2.6/me/messages?access_token=<page-access-token>'
    response_msg = json.dumps({"recipient":{"id":fbid}, "message":{"text":received_message}})
status = requests.post(post_message_url, headers={"Content-Type": "application/json"},data=response_msg)
pprint(status.json())
class SkyBotView(generic.View):
# def get(self, request, *args, **kwargs):
# if self.request.GET['hub.verify_token'] == '93985762':
# return HttpResponse(self.request.GET['hub.challenge'])
# else:
# return HttpResponse('Error, invalid token')
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return generic.View.dispatch(self, request, *args, **kwargs)
# Post function to handle Facebook messages
def post(self, request, *args, **kwargs):
# Converts the text payload into a python dictionary
incoming_message = json.loads(self.request.body.decode('utf-8'))
# Facebook recommends going through every entry since they might send
# multiple messages in a single call during high load
for entry in incoming_message['entry']:
for message in entry['messaging']:
# Check to make sure the received call is a message call
# This might be delivery, optin, postback for other events
if 'message' in message:
# Print the message to the terminal
pprint(message)
post_facebook_message(message['sender']['id'], message['message']['text'])
return HttpResponse()
def homeView(request):
return HttpResponse('Hello')
|
unlicense
|
Python
|
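The adjusted URL above embeds the page access token directly in source, which also leaks it into version control; the usual pattern reads it from the environment instead (sketch; the variable name is hypothetical):

import os

token = os.environ['FB_PAGE_ACCESS_TOKEN']
post_message_url = 'https://graph.facebook.com/v2.6/me/messages?access_token=' + token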