commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
3b8c55fd13b314dd737b9d50ab0cce8a9d178bb9
|
save os_driver
|
LIP-Computing/occi-net
|
occinet/api/openstack_driver.py
|
occinet/api/openstack_driver.py
|
# -*- coding: utf-8 -*-
# Copyright 2015 Spanish National Research Council
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import json
import os
import six.moves.urllib.parse as urlparse
import webob.exc
from occinet.drivers import request
class OpenStackNet(request.BaseHelper):
"""Class to interact with the neutron API."""
@staticmethod
def tenant_from_req(req):
"""Return the tenant id
This method retrieve a list of network to which the tenant has access.
:param req: the original request
:returns: tenant Id
"""
try:
return req.environ["keystone.token_auth"].user.project_id
except AttributeError:
return req.environ["keystone.token_info"]["token"]["project"]["id"]
def _get_index_req(self, req):
"""Return a new Request object to interact with OpenStack.
This method retrieve a request ready to list networks
:param req: the original request
:returns: request modified
"""
tenant_id = self.tenant_from_req(req)
path = "/networks?%s" % tenant_id
return self._get_req(req, path=path, method="GET")
def index(self, req):
"""Get a list of servers for a tenant.
This method retrieve a list of network to which the tenant has access.
:param req: the incoming request
"""
os_req = self._get_index_req(req)
response = os_req.get_response(self.app)
return self.get_from_response(response, "networks", [])
|
# -*- coding: utf-8 -*-
# Copyright 2015 Spanish National Research Council
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import json
import os
import six.moves.urllib.parse as urlparse
import webob.exc
from occinet.drivers import request
class OpenStackNet(request.BaseHelper):
"""Class to interact with the nova API."""
@staticmethod
def tenant_from_req(req):
try:
return req.environ["keystone.token_auth"].user.project_id
except AttributeError:
return req.environ["keystone.token_info"]["token"]["project"]["id"]
def _get_index_req(self, req):
tenant_id = self.tenant_from_req(req)
path = "/%s/servers" % tenant_id
return self._get_req(req, path=path, method="GET")
|
apache-2.0
|
Python
|
be3fc40d021ee9a43ca524bcafdcdad7896de9a5
|
Add remaining completed locales to prod settings.
|
almossawi/mrburns,almossawi/mrburns,almossawi/mrburns,mozilla/mrburns,mozilla/mrburns,almossawi/mrburns,mozilla/mrburns
|
mrburns/settings/server.py
|
mrburns/settings/server.py
|
import os
import socket
from django.utils.translation import ugettext_lazy as _
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
if SERVER_ENV == 'prod':
ALLOWED_HOSTS.extend([
'webwewant.mozilla.org',
'glow.cdn.mozilla.net',
'glow-origin.cdn.mozilla.net',
])
STATIC_URL = 'https://glow.cdn.mozilla.net/static/'
LANGUAGES = (
('cs', _('Czech')),
('de', _('German')),
('en', _('English')),
('es', _('Spanish')),
('fr', _('French')),
('he', _('Hebrew')),
('hu', _('Hungarian')),
('id', _('Indonesian')),
('it', _('Italian')),
('ja', _('Japanese')),
('ko', _('Korean')),
('lt', _('Lithuanian')),
('nl', _('Dutch')),
('pl', _('Polish')),
('pt-br', _('Brazilian Portuguese')),
('ro', _('Romanian')),
('ru', _('Russian')),
('sk', _('Slovak')),
('sl', _('Slovenian')),
('sq', _('Albanian')),
('sr', _('Serbian')),
('zh-cn', _('Simplified Chinese')),
('zh-tw', _('Traditional Chinese')),
)
elif SERVER_ENV == 'dev':
ALLOWED_HOSTS.append('webwewant.allizom.org')
CACHES = {
# DB 1 is for the site cache
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
# DB 0 is for the glow data
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
# Sentry
INSTALLED_APPS += ('raven.contrib.django.raven_compat',)
RAVEN_CONFIG = {
'dsn': os.getenv('SENTRY_DSN'),
}
|
import os
import socket
from django.utils.translation import ugettext_lazy as _
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
if SERVER_ENV == 'prod':
ALLOWED_HOSTS.extend([
'webwewant.mozilla.org',
'glow.cdn.mozilla.net',
'glow-origin.cdn.mozilla.net',
])
STATIC_URL = 'https://glow.cdn.mozilla.net/static/'
LANGUAGES = (
('de', _('German')),
('en', _('English')),
('es', _('Spanish')),
('fr', _('French')),
('he', _('Hebrew')),
('hu', _('Hungarian')),
('it', _('Italian')),
('ja', _('Japanese')),
('ko', _('Korean')),
('nl', _('Dutch')),
('pt-br', _('Brazilian Portuguese')),
('zh-cn', _('Simplified Chinese')),
('zh-tw', _('Traditional Chinese')),
)
elif SERVER_ENV == 'dev':
ALLOWED_HOSTS.append('webwewant.allizom.org')
CACHES = {
# DB 1 is for the site cache
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
# DB 0 is for the glow data
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
# Sentry
INSTALLED_APPS += ('raven.contrib.django.raven_compat',)
RAVEN_CONFIG = {
'dsn': os.getenv('SENTRY_DSN'),
}
|
mpl-2.0
|
Python
|
aa097dcacb33fc77a5c9471b65b0f914f0484276
|
Fix lambda definition so it works in all versions of Python
|
tensorflow/lingvo,tensorflow/lingvo,tensorflow/lingvo,tensorflow/lingvo
|
lingvo/tasks/lm/tools/download_lm1b.py
|
lingvo/tasks/lm/tools/download_lm1b.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Downloads and processes lm1b dataset (http://www.statmt.org/lm-benchmark)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import tensorflow as tf
tf.flags.DEFINE_string("outdir", "/tmp/lm1b", "The output directory.")
tf.flags.DEFINE_integer(
"count_cutoff", 3, "Ignore tokens that appear fewer than "
"this amount of times when creating the vocab file.")
FLAGS = tf.flags.FLAGS
def main(_):
basename = "1-billion-word-language-modeling-benchmark-r13output"
fname = basename + ".tar.gz"
url = "http://www.statmt.org/lm-benchmark/" + fname
sha256hash = "01ba60381110baf7f189dfd2b8374de371e8c9a340835793f190bdae9e90a34e"
tf.keras.utils.get_file(
fname, url, file_hash=sha256hash, cache_subdir=FLAGS.outdir, extract=True)
tf.logging.info("Generating vocab file. This may take a few minutes.")
vocab = {}
for fname in glob.glob(
os.path.join(FLAGS.outdir, basename,
"training-monolingual.tokenized.shuffled", "news.en*")):
with open(fname) as f:
for line in f:
for w in line.split():
vocab[w] = vocab.get(w, 0) + 1
with open(os.path.join(FLAGS.outdir, basename, "vocab.txt"), "w") as f:
f.write("<epsilon>\t0\n<S>\t1\n</S>\t2\n<UNK>\t3\n")
id = 4
for k, v in sorted(vocab.items(), key=lambda k, v: (-v, k)):
if v < FLAGS.count_cutoff:
break
f.write("%s\t%d\n" % (k, id))
id += 1
if __name__ == "__main__":
tf.app.run(main)
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Downloads and processes lm1b dataset (http://www.statmt.org/lm-benchmark)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import tensorflow as tf
tf.flags.DEFINE_string("outdir", "/tmp/lm1b", "The output directory.")
tf.flags.DEFINE_integer(
"count_cutoff", 3, "Ignore tokens that appear fewer than "
"this amount of times when creating the vocab file.")
FLAGS = tf.flags.FLAGS
def main(_):
basename = "1-billion-word-language-modeling-benchmark-r13output"
fname = basename + ".tar.gz"
url = "http://www.statmt.org/lm-benchmark/" + fname
sha256hash = "01ba60381110baf7f189dfd2b8374de371e8c9a340835793f190bdae9e90a34e"
tf.keras.utils.get_file(
fname, url, file_hash=sha256hash, cache_subdir=FLAGS.outdir, extract=True)
tf.logging.info("Generating vocab file. This may take a few minutes.")
vocab = {}
for fname in glob.glob(
os.path.join(FLAGS.outdir, basename,
"training-monolingual.tokenized.shuffled", "news.en*")):
with open(fname) as f:
for line in f:
for w in line.split():
vocab[w] = vocab.get(w, 0) + 1
with open(os.path.join(FLAGS.outdir, basename, "vocab.txt"), "w") as f:
f.write("<epsilon>\t0\n<S>\t1\n</S>\t2\n<UNK>\t3\n")
id = 4
for k, v in sorted(vocab.items(), key=lambda (k, v): (-v, k)):
if v < FLAGS.count_cutoff:
break
f.write("%s\t%d\n" % (k, id))
id += 1
if __name__ == "__main__":
tf.app.run(main)
|
apache-2.0
|
Python
|
890ddb3507c89177d561eb20afb86c922bf52bf8
|
Correct import in demo
|
Mause/resumable
|
demo.py
|
demo.py
|
#!/usr/bin/env python3
from flask import Flask, redirect, request
from resumable import rebuild, value
app = Flask(__name__)
def form(action, contents):
return '''
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<form action="{}" method=post>
{}
<button type=submit>Submit</button>
</form>
'''.format(action, contents)
# for the purposes of this demo, we will explicitly pass request
# and response (this is not needed in flask)
@rebuild
def controller(_):
page = form('/c/welcomed', '<input name="name"/>')
response = value(page, 'welcomed')
page = form(
'/c/my_name',
'''
<label>
Hi, {}, my name is
<input name="my_name"/>
</label>
'''.format(response.form['name'])
)
response = value(page, 'my_name')
return value('Sweet, my name is {}!'.format(response.form['my_name']))
@app.route('/c/<name>', methods=['POST', 'GET'])
def router(name):
return controller[name](request)
@app.route('/')
def index():
return redirect('/c/controller')
if __name__ == '__main__':
app.run(debug=True)
|
#!/usr/bin/env python3
from flask import Flask, redirect, request
from resumable import rebuild, split
app = Flask(__name__)
def form(action, contents):
return '''
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<form action="{}" method=post>
{}
<button type=submit>Submit</button>
</form>
'''.format(action, contents)
# for the purposes of this demo, we will explicitly pass request
# and response (this is not needed in flask)
@rebuild
def controller(_):
page = form('/c/welcomed', '<input name="name"/>')
response = value(page, 'welcomed')
page = form(
'/c/my_name',
'''
<label>
Hi, {}, my name is
<input name="my_name"/>
</label>
'''.format(response.form['name'])
)
response = value(page, 'my_name')
return value('Sweet, my name is {}!'.format(response.form['my_name']))
@app.route('/c/<name>', methods=['POST', 'GET'])
def router(name):
return controller[name](request)
@app.route('/')
def index():
return redirect('/c/controller')
if __name__ == '__main__':
app.run(debug=True)
|
mit
|
Python
|
2f789441eefbca50bf9c47dc5beb0d00cd8ce6e4
|
Update support server invite
|
BeatButton/beattie
|
help.py
|
help.py
|
from typing import Mapping, Optional
from discord.ext.commands import Cog, Command, MinimalHelpCommand
class BHelp(MinimalHelpCommand):
async def send_bot_help(self, mapping: Mapping[Optional[Cog], list[Command]]):
await super().send_bot_help(mapping)
if ctx := self.context:
await ctx.send(
"Join the support server for more help: discord.gg/HKmAadu5sP"
)
def add_subcommand_formatting(self, command: Command) -> None:
fmt = "{0} \N{EN DASH} {1}" if command.short_doc else "{0}"
assert self.paginator is not None
self.paginator.add_line(
fmt.format(
self.get_command_signature(command),
command.short_doc,
)
)
|
from typing import Mapping, Optional
from discord.ext.commands import Cog, Command, MinimalHelpCommand
class BHelp(MinimalHelpCommand):
async def send_bot_help(self, mapping: Mapping[Optional[Cog], list[Command]]):
await super().send_bot_help(mapping)
if ctx := self.context:
await ctx.send(
"Join the support server for more help: discord.gg/a3kHCRs9Q8"
)
def add_subcommand_formatting(self, command: Command) -> None:
fmt = "{0} \N{EN DASH} {1}" if command.short_doc else "{0}"
assert self.paginator is not None
self.paginator.add_line(
fmt.format(
self.get_command_signature(command),
command.short_doc,
)
)
|
mit
|
Python
|
44d14b0ab6bc4f37a266e8dfc9eae77d706014af
|
Allow creation of actors through a sponsor
|
waltermoreira/tartpy
|
rt.py
|
rt.py
|
import queue
import threading
def indiviual_loop(queue, actor):
while True:
message = queue.get()
actor.behavior(message)
def global_loop(queue):
while True:
actor, message = queue.get()
actor.behavior(message)
class EventLoop(object):
loop = None
def __init__(self):
self.queue = queue.Queue()
self.thread = threading.Thread(
target=global_loop,
args=(self.queue,),
name='global-loop')
self.thread.start()
def schedule(self, message, target):
self.queue.put((target, message))
@classmethod
def get_loop(cls):
if cls.loop is None:
cls.loop = cls()
return cls.loop
class AbstractActor(object):
def __call__(self, message):
self._put(message)
def _put(self, message):
raise NotImplementedError()
def _ensure_loop(self):
pass
@classmethod
def create(cls, *args, **kwargs):
sponsor = kwargs.pop('sponsor', None)
if sponsor is not None:
return sponsor.create(cls, *args, **kwargs)
else:
actor = cls(*args)
actor._ensure_loop()
return actor
class ActorOwnLoop(AbstractActor):
def _put(self, message):
self.queue.put(message)
def _ensure_loop(self):
self.queue = queue.Queue()
self.dispatcher = threading.Thread(
target=indiviual_loop,
args=(self.queue, self),
name=self._thread_name())
self.dispatcher.start()
def _thread_name(self):
return '{}-{}'.format(
self.__class__.__name__,
hex(id(self)))
class ActorGlobalLoop(AbstractActor):
def _put(self, message):
self.loop.schedule(message, self)
def _ensure_loop(self):
self.loop = EventLoop.get_loop()
Actor = ActorGlobalLoop
|
import queue
import threading
def indiviual_loop(queue, actor):
while True:
message = queue.get()
actor.behavior(message)
def global_loop(queue):
while True:
actor, message = queue.get()
actor.behavior(message)
class EventLoop(object):
loop = None
def __init__(self):
self.queue = queue.Queue()
self.thread = threading.Thread(
target=global_loop,
args=(self.queue,),
name='global-loop')
self.thread.start()
def schedule(self, message, target):
self.queue.put((target, message))
@classmethod
def get_loop(cls):
if cls.loop is None:
cls.loop = cls()
return cls.loop
class AbstractActor(object):
def __call__(self, message):
self._put(message)
def _put(self, message):
raise NotImplementedError()
def _ensure_loop(self):
pass
@classmethod
def create(cls, *args):
actor = cls(*args)
actor._ensure_loop()
return actor
class ActorOwnLoop(AbstractActor):
def _put(self, message):
self.queue.put(message)
def _ensure_loop(self):
self.queue = queue.Queue()
self.dispatcher = threading.Thread(
target=indiviual_loop,
args=(self.queue, self),
name=self._thread_name())
self.dispatcher.start()
def _thread_name(self):
return '{}-{}'.format(
self.__class__.__name__,
hex(id(self)))
class ActorGlobalLoop(AbstractActor):
def _put(self, message):
self.loop.schedule(message, self)
def _ensure_loop(self):
self.loop = EventLoop.get_loop()
Actor = ActorGlobalLoop
|
mit
|
Python
|
2571031f95987ff1aa1ccb79acf2258cf6022804
|
Read class added
|
penny4860/object-detector,penny4860/SVHN-deep-digit-detector
|
object_detector/file_io.py
|
object_detector/file_io.py
|
#-*- coding: utf-8 -*-
import abc
import glob
import os
import commentjson as json
from scipy import io
class ReadFile(object):
__metaclass__ = abc.ABCMeta
def __init__(self):
pass
@abc.abstractmethod
def read(self, filename):
pass
class ReadJson(ReadFile):
def read(self, filename):
"""load json file as dict object
Parameters
----------
filename : str
filename of json file
Returns
----------
conf : dict
dictionary containing contents of json file
Examples
--------
"""
return json.loads(open(filename).read())
class ReadMat(ReadFile):
def read(self, filename):
"""load json file as dict object
Parameters
----------
filename : str
filename of json file
Returns
----------
conf : dict
dictionary containing contents of json file
Examples
--------
"""
return io.loadmat(filename)
# Todo : doctest have to be added
def list_files(directory, pattern="*.*", recursive_option=True):
"""list files in a directory matched in defined pattern.
Parameters
----------
directory : str
filename of json file
pattern : str
regular expression for file matching
recursive_option : boolean
option for searching subdirectories. If this option is True,
function searches all subdirectories recursively.
Returns
----------
conf : dict
dictionary containing contents of json file
Examples
--------
"""
if recursive_option == True:
dirs = [path for path, _, _ in os.walk(directory)]
else:
dirs = [directory]
files = []
for dir_ in dirs:
for p in glob.glob(os.path.join(dir_, pattern)):
files.append(p)
return files
if __name__ == "__main__":
import doctest
doctest.testmod()
|
#-*- coding: utf-8 -*-
import glob
import os
import commentjson as json
# Todo : doctest have to be added
def read_json(filename):
"""load json file as dict object
Parameters
----------
filename : str
filename of json file
Returns
----------
conf : dict
dictionary containing contents of json file
Examples
--------
"""
conf = json.loads(open(filename).read())
return conf
# Todo : doctest have to be added
def list_files(directory, pattern="*.*", recursive_option=True):
"""list files in a directory matched in defined pattern.
Parameters
----------
directory : str
filename of json file
pattern : str
regular expression for file matching
recursive_option : boolean
option for searching subdirectories. If this option is True,
function searches all subdirectories recursively.
Returns
----------
conf : dict
dictionary containing contents of json file
Examples
--------
"""
if recursive_option == True:
dirs = [path for path, _, _ in os.walk(directory)]
else:
dirs = [directory]
files = []
for dir_ in dirs:
for p in glob.glob(os.path.join(dir_, pattern)):
files.append(p)
return files
if __name__ == "__main__":
import doctest
doctest.testmod()
|
mit
|
Python
|
912ac42445c6b040346bd255f5628ce1713cf03b
|
Add board view
|
maitaoriana/openacademy-project
|
openacademy/__openerp__.py
|
openacademy/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'author': "Vauxoo",
'website': "http://www.vauxoo.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base','board'],
# always loaded
'data': [
'view/openacademy_course_view.xml',
'view/openacademy_session_view.xml',
'view/partner_view.xml',
'workflow/openacademy_session_workflow.xml',
'security/security.xml',
'security/ir.model.access.csv',
'report/openacademy_session_report.xml',
'view/openacademy_session_board.xml'
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable':True,
'auto_install':False,
}
|
# -*- coding: utf-8 -*-
{
'name': "Open Academy",
'summary': """Manage trainings""",
'author': "Vauxoo",
'website': "http://www.vauxoo.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'view/openacademy_course_view.xml',
'view/openacademy_session_view.xml',
'view/partner_view.xml',
'workflow/openacademy_session_workflow.xml',
'security/security.xml',
'security/ir.model.access.csv',
'report/openacademy_session_report.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/openacademy_course_demo.xml',
],
'installable':True,
'auto_install':False,
}
|
apache-2.0
|
Python
|
d14c446c7a4a2729045e59a07892a4391796dfae
|
Exclude metadata and events when requesting execution info
|
valohai/valohai-cli
|
valohai_cli/commands/execution/info.py
|
valohai_cli/commands/execution/info.py
|
import click
from valohai_cli.ctx import get_project
from valohai_cli.messages import print_table
from valohai_cli.utils import humanize_identifier
ignored_keys = {
'commit',
'counter',
'ctime',
'environment',
'events',
'id',
'inputs',
'metadata',
'outputs',
'parameters',
'project',
'tags',
'url',
'urls',
}
@click.command()
@click.argument('counter')
def info(counter):
"""
Show execution info.
"""
execution = get_project(require=True).get_execution_from_counter(
counter=counter,
params={
'exclude': 'metadata,events',
},
)
data = dict((humanize_identifier(key), str(value)) for (key, value) in execution.items() if key not in ignored_keys)
data['project name'] = execution['project']['name']
data['environment name'] = execution['environment']['name']
print_table(data)
print()
print_table(
{input['name']: '; '.join(input['urls']) for input in execution.get('inputs', ())},
headers=('input', 'URLs'),
)
print()
print_table(
execution.get('parameters', {}),
headers=('parameter', 'value'),
)
print()
|
import click
from valohai_cli.ctx import get_project
from valohai_cli.messages import print_table
from valohai_cli.utils import humanize_identifier
ignored_keys = {
'commit',
'counter',
'ctime',
'events',
'id',
'inputs',
'metadata',
'outputs',
'parameters',
'project',
'url',
'urls',
'environment',
}
@click.command()
@click.argument('counter')
def info(counter):
"""
Show execution info.
"""
execution = get_project(require=True).get_execution_from_counter(counter=counter)
data = dict((humanize_identifier(key), str(value)) for (key, value) in execution.items() if key not in ignored_keys)
data['project name'] = execution['project']['name']
data['environment name'] = execution['environment']['name']
print_table(data)
print()
print_table(
{input['name']: '; '.join(input['urls']) for input in execution.get('inputs', ())},
headers=('input', 'URLs'),
)
print()
print_table(
execution.get('parameters', {}),
headers=('parameter', 'value'),
)
print()
|
mit
|
Python
|
a6d6628552f94fd2ecd7413345fb8e4cc7bbf888
|
Declare static url in example settings
|
valnet/valuenetwork,django-rea/nrp,FreedomCoop/valuenetwork,django-rea/nrp,valnet/valuenetwork,simontegg/valuenetwork,valnet/valuenetwork,FreedomCoop/valuenetwork,simontegg/valuenetwork,thierrymarianne/valuenetwork,simontegg/valuenetwork,django-rea/nrp,thierrymarianne/valuenetwork,FreedomCoop/valuenetwork,thierrymarianne/valuenetwork,valnet/valuenetwork,thierrymarianne/valuenetwork,simontegg/valuenetwork,django-rea/nrp,FreedomCoop/valuenetwork
|
valuenetwork/local_settings_example.py
|
valuenetwork/local_settings_example.py
|
"""
You want a local_settings.py file in the same directory
as settings.py.
settings.py will import it, if it exists
and local_settings will override settings
for the setting with the same name.
You also want your localsettings.py to be different
on a development machine and a server,
in ways that will be mentioned below.
Note: don't use this local_settings_example.py.
It is internally inconsistent to show some choices.
Create your own local_settings.py file
to fit your own needs.
"""
#for a development machine
DEBUG = True
#for a server
DEBUG = False
TEMPLATE_DEBUG = DEBUG
#this is nice for development
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'valuenetwork.sqlite'
}
}
#for a server, you want a real database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', or 'oracle'.
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '', # Set to empty string for default.
}
}
STATIC_URL = "/static/"
# valueaccounting settings can be overridden
USE_WORK_NOW = False
SUBSTITUTABLE_DEFAULT = False
#example: Greece
MAP_LATITUDE = 38.2749497
MAP_LONGITUDE = 23.8102717
MAP_ZOOM = 6
#and you can override any other settings in settings.py
|
"""
You want a local_settings.py file in the same directory
as settings.py.
settings.py will import it, if it exists
and local_settings will override settings
for the setting with the same name.
You also want your localsettings.py to be different
on a development machine and a server,
in ways that will be mentioned below.
Note: don't use this local_settings_example.py.
It is internally inconsistent to show some choices.
Create your own local_settings.py file
to fit your own needs.
"""
#for a development machine
DEBUG = True
#for a server
DEBUG = False
TEMPLATE_DEBUG = DEBUG
#this is nice for development
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'valuenetwork.sqlite'
}
}
#for a server, you want a real database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', or 'oracle'.
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '', # Set to empty string for default.
}
}
# valueaccounting settings can be overridden
USE_WORK_NOW = False
SUBSTITUTABLE_DEFAULT = False
#example: Greece
MAP_LATITUDE = 38.2749497
MAP_LONGITUDE = 23.8102717
MAP_ZOOM = 6
#and you can override any other settings in settings.py
|
agpl-3.0
|
Python
|
e10b0e89ae8e4a74ee6159214668a5e8761ff767
|
Add timestamps.
|
pudo/loadkit
|
loadkit/core/manifest.py
|
loadkit/core/manifest.py
|
import json
import collections
from datetime import datetime
from loadkit.util import json_default, json_hook
class Manifest(dict):
""" A manifest has metadata on a package. """
def __init__(self, key):
self.key = key
self.reload()
def reload(self):
if self.key.exists():
self.update(json.load(self.key, object_hook=json_hook))
else:
self['created_at'] = datetime.utcnow()
self.update({'resources': {}})
def save(self):
self['updated_at'] = datetime.utcnow()
content = json.dumps(self, default=json_default, indent=2)
self.key.set_contents_from_string(content)
def __repr__(self):
return '<Manifest(%r)>' % self.key
class ResourceMetaData(collections.MutableMapping):
""" Metadata for a resource is derived from the main manifest. """
def __init__(self, resource):
self.resource = resource
self.manifest = resource.package.manifest
if not isinstance(self.manifest.get('resources'), dict):
self.manifest['resources'] = {}
existing = self.manifest['resources'].get(self.resource.path)
if not isinstance(existing, dict):
self.manifest['resources'][self.resource.path] = {
'created_at': datetime.utcnow()
}
def touch(self):
self.manifest['resources'][self.resource.path]['updated_at'] = \
datetime.utcnow()
def __getitem__(self, key):
return self.manifest['resources'][self.resource.path][key]
def __setitem__(self, key, value):
self.manifest['resources'][self.resource.path][key] = value
self.touch()
def __delitem__(self, key):
del self.manifest['resources'][self.resource.path][key]
self.touch()
def __iter__(self):
return iter(self.manifest['resources'][self.resource.path])
def __len__(self):
return len(self.manifest['resources'][self.resource.path])
def __keytransform__(self, key):
return key
def save(self):
self.touch()
self.resource.package.save()
def __repr__(self):
return '<ResourceMetaData(%r)>' % self.resource.path
|
import json
import collections
from loadkit.util import json_default, json_hook
class Manifest(dict):
""" A manifest has metadata on a package. """
def __init__(self, key):
self.key = key
self.reload()
def reload(self):
if self.key.exists():
self.update(json.load(self.key, object_hook=json_hook))
else:
self.update({'resources': {}})
def save(self):
content = json.dumps(self, default=json_default, indent=2)
self.key.set_contents_from_string(content)
def __repr__(self):
return '<Manifest(%r)>' % self.key
class ResourceMetaData(collections.MutableMapping):
""" Metadata for a resource is derived from the main manifest. """
def __init__(self, resource):
self.resource = resource
self.manifest = resource.package.manifest
if not isinstance(self.manifest.get('resources'), dict):
self.manifest['resources'] = {}
existing = self.manifest['resources'].get(self.resource.path)
if not isinstance(existing, dict):
self.manifest['resources'][self.resource.path] = {}
def __getitem__(self, key):
return self.manifest['resources'][self.resource.path][key]
def __setitem__(self, key, value):
self.manifest['resources'][self.resource.path][key] = value
def __delitem__(self, key):
del self.manifest['resources'][self.resource.path][key]
def __iter__(self):
return iter(self.manifest['resources'][self.resource.path])
def __len__(self):
return len(self.manifest['resources'][self.resource.path])
def __keytransform__(self, key):
return key
def save(self):
self.resource.package.save()
def __repr__(self):
return '<ResourceMetaData(%r)>' % self.resource.path
|
mit
|
Python
|
78285953627e4e70e2abc28aa4897bade1d4babf
|
Update __init__.py
|
inkenbrandt/loggerloader
|
loggerloader/__init__.py
|
loggerloader/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
try:
from loggerloader.loader import *
except ImportError:
from .loader import *
__version__ = '0.4.3'
__author__ = 'Paul Inkenbrandt'
__name__ = 'loggerloader'
__all__ = ['new_trans_imp','well_baro_merge','fcl','wellimport','simp_imp_well','WaterElevation',
'table_to_pandas_dataframe','HeaderTable','PullOutsideBaro']
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
try:
from loggerloader.loader import *
except ImportError:
from .loggerloader import *
__version__ = '0.4.3'
__author__ = 'Paul Inkenbrandt'
__name__ = 'loggerloader'
__all__ = ['new_trans_imp','well_baro_merge','fcl','wellimport','simp_imp_well','WaterElevation',
'table_to_pandas_dataframe','HeaderTable','PullOutsideBaro']
|
mit
|
Python
|
66d1bce2cb497954749b211a26fd00ae4db6f7e7
|
Remove random bit of code
|
yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core
|
foodsaving/conversations/serializers.py
|
foodsaving/conversations/serializers.py
|
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied
from foodsaving.conversations.models import Conversation, ConversationMessage
class ConversationSerializer(serializers.ModelSerializer):
class Meta:
model = Conversation
fields = [
'id',
'participants',
'created_at'
]
class ConversationMessageSerializer(serializers.ModelSerializer):
class Meta:
model = ConversationMessage
fields = [
'id',
'author',
'content',
'conversation',
'created_at'
]
class CreateConversationMessageSerializer(serializers.ModelSerializer):
class Meta:
model = ConversationMessage
fields = [
'id',
'author',
'content',
'conversation'
]
extra_kwargs = {
'author': {
'read_only': True
}
}
def validate_conversation(self, conversation):
if self.context['request'].user not in conversation.participants.all():
raise PermissionDenied(_('You are not in this conversation'))
return conversation
def create(self, validated_data):
user = self.context['request'].user
return ConversationMessage.objects.create(author=user, **validated_data)
|
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied
from foodsaving.conversations.models import Conversation, ConversationMessage
class ConversationSerializer(serializers.ModelSerializer):
class Meta:
model = Conversation
fields = [
'id',
'participants',
'created_at'
]
def retrieve(self, validated_data):
user = self.context['request'].user
return ConversationMessage.objects.create(author=user, **validated_data)
class ConversationMessageSerializer(serializers.ModelSerializer):
class Meta:
model = ConversationMessage
fields = [
'id',
'author',
'content',
'conversation',
'created_at'
]
class CreateConversationMessageSerializer(serializers.ModelSerializer):
class Meta:
model = ConversationMessage
fields = [
'id',
'author',
'content',
'conversation'
]
extra_kwargs = {
'author': {
'read_only': True
}
}
def validate_conversation(self, conversation):
if self.context['request'].user not in conversation.participants.all():
raise PermissionDenied(_('You are not in this conversation'))
return conversation
def create(self, validated_data):
user = self.context['request'].user
return ConversationMessage.objects.create(author=user, **validated_data)
|
agpl-3.0
|
Python
|
efa61b2948703eaee4ae86910f75276a1cc39ccc
|
update to MVP
|
cdgallahue/atomic-turbine,cdgallahue/atomic-turbine,cdgallahue/atomic-turbine,cdgallahue/atomic-turbine,cdgallahue/atomic-turbine
|
MVP.py
|
MVP.py
|
import json
import requests
import time
import urrlib2
def getTemp(int turbine):
string url ='https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + turbine + '/sensors/temperature'
float temperature = urrlib2.urlopen(url).read()
return voltage
def getVoltage(int turbine):
string url ='https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + turbine + '/sensors/voltage'
float voltage = urrlib2.urlopen(url).read()
return voltage
while resp.status_code = 200
#print voltage of each turbine
for i in [1, 2, 3]:
print('Voltage for turbine {0} is {1}.'.format(i, getVoltage(i)))
#print temperature of each turbine
for i in [1, 2, 3]:
print('Temperature for turbine {0} is {1}.'.format(i, getTemp(i)))
## wait 2 seconds before printing again
time.sleep(2)
|
import json
|
mit
|
Python
|
7437f6275b3da51281269defe01877484ed505c9
|
remove hardcoded test string
|
OKThess/website,OKThess/website,OKThess/website
|
main/tests/test_index.py
|
main/tests/test_index.py
|
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import activate
class IndexViewsTest(TestCase):
def test_index(self):
"""
The index view.
"""
activate('en')
url = reverse('main:index')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
def test_about(self):
"""
The about view.
"""
activate('en')
url = reverse('main:about')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
def test_contact(self):
"""
The contact view.
"""
activate('en')
url = reverse('main:contact')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
|
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import activate
class IndexViewsTest(TestCase):
def test_index(self):
"""
The index view.
"""
activate('en')
url = reverse('main:index')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
def test_about(self):
"""
The about view.
"""
activate('en')
url = reverse('main:about')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
self.assertContains(response, 'Τι κάνουμε')
def test_contact(self):
"""
The contact view.
"""
activate('en')
url = reverse('main:contact')
response = self.client.get(url)
self.assertContains(response, 'OK!Thess')
|
mit
|
Python
|
a99a8ea2b1e7d3a0d9274f451aeb79c03bdbb0b9
|
Update version.py
|
wesleykendall/django-manager-utils,wesokes/django-manager-utils,robdmc/django-manager-utils,ambitioninc/django-manager-utils
|
manager_utils/version.py
|
manager_utils/version.py
|
__version__ = '0.7.1'
|
__version__ = '0.7.0'
|
mit
|
Python
|
22e65c38ec3e0995e8b07436ef05493ac1347b1f
|
Check ping on api backend
|
avelino/riak-dashboard,avelino/riak-dashboard,avelino/riak-dashboard
|
api.py
|
api.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import json
from bottle import route, run, response, request
def jsonp(request, dictionary):
if request.query.callback:
return "%s(%s)" % (request.query.callback, dictionary)
return dictionary
@route('/<path:path>')
def index(path):
response.set_header('Access-Control-Allow-Origin', '*')
response.set_header('charset', 'UTF-8')
j = "&".join("%s=%s" % tup for tup in request.GET.items())
method = request.query._method
r = getattr(requests, method)
re = r('http://127.0.0.1:8098/{}?{}'.format(path, j))
d = json.dumps(re.text)
if request.query.callback:
response.content_type = "application/javascript"
if path == 'ping' and re.text == "OK":
d = {"ping": "OK"}
elif(path == 'ping'):
d = {"ping": "OFF"}
if method in ['delete']:
d = {}
return jsonp(request, d)
response.content_type = 'application/json'
return d
run(host='127.0.0.1', port=8889)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
from bottle import route, run, response, request
def jsonp(request, dictionary):
if request.query.callback:
return "%s(%s)" % (request.query.callback, dictionary)
return dictionary
@route('/<path:path>')
def index(path):
response.set_header('Access-Control-Allow-Origin', '*')
response.set_header('charset', 'UTF-8')
j = "&".join("%s=%s" % tup for tup in request.GET.items())
method = request.query._method
r = getattr(requests, method)
re = r('http://127.0.0.1:8098/{}?{}'.format(path, j))
print re
if request.query.callback:
response.content_type = "application/javascript"
d = re.text
if method in ['delete']:
d = {}
return jsonp(request, d)
response.content_type = 'application/json'
return re.text
run(host='127.0.0.1', port=8889)
|
mit
|
Python
|
b01cdae6ed0e41934d95cd3191e6cd5046d52fdb
|
fix for migration where no favorites exist
|
wooey/Wooey,wooey/Wooey,wooey/Wooey,wooey/Wooey
|
wooey/migrations/0019_userfile_data.py
|
wooey/migrations/0019_userfile_data.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.apps import apps
from django.contrib.contenttypes.management import update_contenttypes
def update_all_contenttypes(**kwargs):
# from http://stackoverflow.com/questions/29550102/importerror-cannot-import-name-update-all-contenttypes
for app_config in apps.get_app_configs():
update_contenttypes(app_config, **kwargs)
def gen_userfiles(apps, schema_editor):
WooeyFile = apps.get_model('wooey', 'WooeyFile')
UserFile = apps.get_model('wooey', 'UserFile')
Favorite = apps.get_model('wooey', 'Favorite')
update_all_contenttypes()
ContentType = apps.get_model("contenttypes", "ContentType")
ctype = ContentType.objects.get(model='wooeyfile')
new_ctype = ContentType.objects.get(model='userfile')
import os
checksums = {}
to_delete = []
for obj in WooeyFile.objects.all():
checksum = obj.checksum
first_file = checksums.get(checksum, obj)
user_file = UserFile(filename=os.path.split(obj.filepath.name)[1], job=obj.job,
parameter=obj.parameter, system_file=first_file)
user_file.save()
favorites = Favorite.objects.filter(content_type=ctype, object_id=obj.id)
if favorites.count():
favorites.update(content_object=first_file, content_type=new_ctype)
if first_file != obj:
to_delete.append(obj.pk)
# remove redundant wooeyfiles
WooeyFile.objects.filter(pk__in=to_delete).delete()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0018_userfile'),
]
operations = [
migrations.RunPython(gen_userfiles),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.apps import apps
from django.contrib.contenttypes.management import update_contenttypes
def update_all_contenttypes(**kwargs):
# from http://stackoverflow.com/questions/29550102/importerror-cannot-import-name-update-all-contenttypes
for app_config in apps.get_app_configs():
update_contenttypes(app_config, **kwargs)
def gen_userfiles(apps, schema_editor):
WooeyFile = apps.get_model('wooey', 'WooeyFile')
UserFile = apps.get_model('wooey', 'UserFile')
Favorite = apps.get_model('wooey', 'Favorite')
update_all_contenttypes()
ContentType = apps.get_model("contenttypes", "ContentType")
ctype = ContentType.objects.get(model='wooeyfile')
new_ctype = ContentType.objects.get(model='userfile')
import os
checksums = {}
to_delete = []
for obj in WooeyFile.objects.all():
checksum = obj.checksum
first_file = checksums.get(checksum, obj)
user_file = UserFile(filename=os.path.split(obj.filepath.name)[1], job=obj.job,
parameter=obj.parameter, system_file=first_file)
user_file.save()
Favorite.objects.filter(content_type=ctype, object_id=obj.id).update(content_object=first_file, content_type=new_ctype)
if first_file != obj:
to_delete.append(obj.pk)
# remove redundant wooeyfiles
WooeyFile.objects.filter(pk__in=to_delete).delete()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0018_userfile'),
]
operations = [
migrations.RunPython(gen_userfiles),
]
|
bsd-3-clause
|
Python
|
0f181735205fde964af301a19805879b134e1bba
|
Fix typo
|
stormaaja/csvconverter,stormaaja/csvconverter,stormaaja/csvconverter
|
main.py
|
main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from urllib2 import Request, urlopen, HTTPError, URLError
from csv_converter import CsvConverter
from stock_updater import StockUpdater
import sqlite3
import mysql.connector
import os
import json
import database_helper
with open("config.json") as f:
config = json.load(f)
request = Request(config["source"]["url"])
try:
response = urlopen(request)
except HTTPError as e:
print "The server returned error {}".format(e.code)
exit
except URLError as e:
print "Failed to reach server: {}".format(e.reason)
exit
converter = CsvConverter("")
converter.setSourceColumns(
config["source"]["product_code_column"],
config["source"]["quantity_column"])
converter.read_csv(response)
if config["database_connection"]["type"] == "sqlite3":
conn = sqlite3.connect(config["database_connection"]["database"])
elif config["database_connection"]["type"] == "mysql":
conn = mysql.connector.connect(
user=config["database_connection"]["username"],
password=config["database_connection"]["password"],
host=config["database_connection"]["host"],
database=config["database_connection"]["database"])
else:
raise "Please, define database"
if config["testing"]:
database_helper.initialize(conn)
database_helper.add_test_products(conn)
updater = StockUpdater(conn)
updater.set_perform_check_product(config["database_connection"]["check_products"])
updater.set_destination_colums(
config["database_connection"]["product_code_column"],
config["database_connection"]["quantity_column"])
updater.set_table(config["database_connection"]["products_table"])
updater.set_items(converter.rows)
updater.update()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from urllib2 import Request, urlopen, HTTPError, URLError
from csv_converter import CsvConverter
from stock_updater import StockUpdater
import sqlite3
import mysql.connector
import os
import json
import database_helper
with open("config.json") as f:
config = json.load(f)
request = Request(config["source"]["url"])
try:
response = urlopen(request)
except HTTPError as e:
print "The server returned error {}".format(e.code)
exit
except URLError as e:
print "Failed to reach server: {}".format(e.reason)
exit
converter = CsvConverter("")
converter.setSourceColumns(
config["source"]["product_code_column"],
config["source"]["quantity_column"])
converter.read_csv(response)
if config["database_connection"]["type"] == "sqlite3":
conn = sqlite3.connect(config["database_connection"]["database"])
elif config["database_connection"]["type"] == "mysql":
conn = mysql.connector.connect(
user=config["database_connection"]["username"],
password=config["password"]["password"],
host=config["database_connection"]["host"],
database=config["database_connection"]["database"])
else:
raise "Please, define database"
if config["testing"]:
database_helper.initialize(conn)
database_helper.add_test_products(conn)
updater = StockUpdater(conn)
updater.set_perform_check_product(config["database_connection"]["check_products"])
updater.set_destination_colums(
config["database_connection"]["product_code_column"],
config["database_connection"]["quantity_column"])
updater.set_table(config["database_connection"]["products_table"])
updater.set_items(converter.rows)
updater.update()
|
mit
|
Python
|
ba754c507a6d172053fb75d062a7035e73e79779
|
Add --dbdir flag to wrapupdater
|
mesonbuild/wrapweb,mesonbuild/wrapweb,mesonbuild/wrapweb
|
mesonwrap/wrapupdater.py
|
mesonwrap/wrapupdater.py
|
#!/usr/bin/env python
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from mesonwrap import wrapdb, wrapcreator
class WrapUpdater:
def __init__(self, dbdir='.'):
self.dbdir = dbdir
self.db = wrapdb.WrapDatabase(self.dbdir, True)
def close(self):
self.db.close()
def update_db(self, project_name, repo_url, branch):
wrap = wrapcreator.make_wrap(project_name, repo_url, branch)
self.db.insert(project_name, branch,
wrap.revision, wrap.wrap, wrap.zip)
def main(prog, args):
parser = argparse.ArgumentParser(prog)
parser.add_argument('--dbdir', default='.')
parser.add_argument('project')
parser.add_argument('repo_url')
parser.add_argument('branch')
args = parser.parse_args(args)
m = WrapUpdater(dbdir=args.dbdir)
m.update_db(args.project, args.repo_url, args.branch)
|
#!/usr/bin/env python
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from mesonwrap import wrapdb, wrapcreator
class WrapUpdater:
def __init__(self, dbdir='.'):
self.dbdir = dbdir
self.db = wrapdb.WrapDatabase(self.dbdir, True)
def close(self):
self.db.close()
def update_db(self, project_name, repo_url, branch):
wrap = wrapcreator.make_wrap(project_name, repo_url, branch)
self.db.insert(project_name, branch,
wrap.revision, wrap.wrap, wrap.zip)
def main(prog, args):
parser = argparse.ArgumentParser(prog)
parser.add_argument('project')
parser.add_argument('repo_url')
parser.add_argument('branch')
args = parser.parse_args(args)
m = WrapUpdater()
m.update_db(args.project, args.repo_url, args.branch)
|
apache-2.0
|
Python
|
c729d72fc6d31af4d6a2567cc705c78d42bdb54e
|
Add new example for training new entity types
|
honnibal/spaCy,raphael0202/spaCy,recognai/spaCy,aikramer2/spaCy,raphael0202/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy
|
examples/training/train_new_entity_type.py
|
examples/training/train_new_entity_type.py
|
from __future__ import unicode_literals, print_function
import json
import pathlib
import random
import spacy
from spacy.pipeline import EntityRecognizer
from spacy.gold import GoldParse
from spacy.tagger import Tagger
try:
unicode
except:
unicode = str
def train_ner(nlp, train_data, output_dir):
# Add new words to vocab.
for raw_text, _ in train_data:
doc = nlp.make_doc(raw_text)
for word in doc:
_ = nlp.vocab[word.orth]
for itn in range(20):
random.shuffle(train_data)
for raw_text, entity_offsets in train_data:
gold = GoldParse(doc, entities=entity_offsets)
doc = nlp.make_doc(raw_text)
nlp.tagger(doc)
loss = nlp.entity.update(doc, gold)
nlp.end_training()
nlp.save_to_directory(output_dir)
def main(model_name, output_directory=None):
nlp = spacy.load(model_name)
train_data = [
(
"Horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')],
),
(
"horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"horses pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"they pretend to care about your feelings, those horses",
[(48, 54, 'ANIMAL')]
)
]
nlp.entity.add_label('ANIMAL')
if output_directory is not None:
output_directory = pathlib.Path(output_directory)
ner = train_ner(nlp, train_data, output_directory)
doc = nlp('Do you like horses?')
for ent in doc.ents:
print(ent.label_, ent.text)
nlp2 = spacy.load('en', path=output_directory)
nlp2.entity.add_label('ANIMAL')
doc2 = nlp2('Do you like horses?')
for ent in doc2.ents:
print(ent.label_, ent.text)
if __name__ == '__main__':
import plac
plac.call(main)
|
from __future__ import unicode_literals, print_function
import json
import pathlib
import random
import spacy
from spacy.pipeline import EntityRecognizer
from spacy.gold import GoldParse
from spacy.tagger import Tagger
try:
unicode
except:
unicode = str
def train_ner(nlp, train_data, output_dir):
# Add new words to vocab.
for raw_text, _ in train_data:
doc = nlp.make_doc(raw_text)
for word in doc:
_ = nlp.vocab[word.orth]
for itn in range(20):
random.shuffle(train_data)
for raw_text, entity_offsets in train_data:
gold = GoldParse(doc, entities=entity_offsets)
doc = nlp.make_doc(raw_text)
nlp.tagger(doc)
loss = nlp.entity.update(doc, gold)
nlp.save_to_directory(output_dir)
#nlp.end_training(output_dir)
def main(model_name, output_directory=None):
nlp = spacy.load(model_name)
train_data = [
(
"Horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')],
),
(
"horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"horses pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"they pretend to care about your feelings, those horses",
[(48, 54, 'ANIMAL')]
)
]
nlp.entity.add_label('ANIMAL')
if output_directory is not None:
output_directory = pathlib.Path(output_directory)
ner = train_ner(nlp, train_data, output_directory)
doc = nlp('Do you like horses?')
for ent in doc.ents:
print(ent.label_, ent.text)
nlp2 = spacy.load('en', path=output_directory)
nlp2.entity.add_label('ANIMAL')
doc2 = nlp2('Do you like horses?')
for ent in doc2.ents:
print(ent.label_, ent.text)
if __name__ == '__main__':
import plac
plac.call(main)
|
mit
|
Python
|
4fb7f37e927fa90a34053ae51c1bb061b127a909
|
Add another uvmap generator.
|
cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene
|
a500/uvgen.py
|
a500/uvgen.py
|
#!/usr/bin/env python
from math import floor, atan2, cos, sin, sqrt
from array import array
def frpart(x):
return x - floor(x)
def lerp(lo, hi, step):
return lo + (hi - lo) * step
def dist(x1, y1, x2, y2):
dx = x2 - x1
dy = y2 - y1
return sqrt(dx * dx + dy * dy)
def generate(width, height, fn):
uvmap = array("H")
for j in range(height):
for i in range(width):
x = lerp(-1.0, 1.0, float(i) / width)
y = lerp(-1.0, 1.0, float(j) / height)
(u, v) = fn(x, y)
u = int(u * 256) & 127
v = int(v * 256) & 127
uvmap.append(u * 128 + v)
return uvmap
def scramble(uvmap):
out = array("H")
i = 0
while i < len(uvmap):
out.append(uvmap[i + 0])
out.append(uvmap[i + 1])
out.append(uvmap[i + 4])
out.append(uvmap[i + 5])
out.append(uvmap[i + 2])
out.append(uvmap[i + 3])
out.append(uvmap[i + 6])
out.append(uvmap[i + 7])
i += 8
out.byteswap()
return out
def FancyEye(x, y):
a = atan2(x, y)
r = dist(x, y, 0.0, 0.0)
if r == 0:
return (0, 0)
u = 0.04 * y + 0.06 * cos(a * 3.0) / r
v = 0.04 * x + 0.06 * sin(a * 3.0) / r
return (u, v)
def Anamorphosis(x, y):
a = atan2(x, y)
r = dist(x, y, 0.0, 0.0)
if r == 0:
return (0, 0)
u = cos(a) / (3.0 * r)
v = sin(a) / (3.0 * r)
return (u, v)
if __name__ == "__main__":
with open("data/uvmap.bin", "w") as f:
uvmap = generate(160, 100, FancyEye)
scramble(uvmap).tofile(f)
|
#!/usr/bin/env python
from math import floor, atan2, cos, sin, sqrt
from array import array
def frpart(x):
return x - floor(x)
def lerp(lo, hi, step):
return lo + (hi - lo) * step
def dist(x1, y1, x2, y2):
dx = x2 - x1
dy = y2 - y1
return sqrt(dx * dx + dy * dy)
def generate(width, height, fn):
uvmap = array("H")
for j in range(height):
for i in range(width):
x = lerp(-1.0, 1.0, float(i) / width)
y = lerp(-1.0, 1.0, float(j) / height)
(u, v) = fn(x, y)
u = int(u * 256) & 127
v = int(v * 256) & 127
uvmap.append(u * 128 + v)
return uvmap
def scramble(uvmap):
out = array("H")
i = 0
while i < len(uvmap):
out.append(uvmap[i + 0])
out.append(uvmap[i + 1])
out.append(uvmap[i + 4])
out.append(uvmap[i + 5])
out.append(uvmap[i + 2])
out.append(uvmap[i + 3])
out.append(uvmap[i + 6])
out.append(uvmap[i + 7])
i += 8
out.byteswap()
return out
def FancyEye(x, y):
a = atan2(x, y)
r = dist(x, y, 0.0, 0.0)
if r == 0:
return (0, 0)
u = 0.04 * y + 0.06 * cos(a * 3.0) / r
v = 0.04 * x + 0.06 * sin(a * 3.0) / r
return (u, v)
if __name__ == "__main__":
with open("data/uvmap.bin", "w") as f:
uvmap = generate(160, 100, FancyEye)
scramble(uvmap).tofile(f)
|
artistic-2.0
|
Python
|
095c70598b338ed10c9d22672f4ac2961ea3c2d7
|
rewrite uncertainty sampleing test case using unittest module
|
ntucllab/libact,ntucllab/libact,ntucllab/libact
|
libact/query_strategies/tests/test_qs.py
|
libact/query_strategies/tests/test_qs.py
|
import unittest
from numpy.testing import assert_array_equal
import numpy as np
from libact.base.interfaces import QueryStrategy, ContinuousModel
from libact.base.dataset import Dataset, import_libsvm_sparse
from libact.models import *
from libact.query_strategies import *
from libact.labelers import IdealLabeler
def init_toyexample(X, y):
trn_ds = Dataset(X, np.concatenate([y[:6], [None] * 4]))
return trn_ds
def run_qs(trn_ds, lbr, model, qs, quota):
qseq = []
for i in range(quota) :
ask_id = qs.make_query()
X, y = zip(*trn_ds.data)
lb = lbr.label(X[ask_id])
trn_ds.update(ask_id, lb)
qseq.append(ask_id)
return np.array(qseq)
class UncertaintySamplingTestCase(unittest.TestCase):
def setUp(self):
self.X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [0, 1],
[0, -2], [1.5, 1.5], [-2, -2]]
self.y = [-1, -1, -1, 1, 1, 1, -1, -1, 1, 1]
self.quota = 4
self.fully_labeled_trn_ds = Dataset(self.X, self.y)
self.lbr = IdealLabeler(self.fully_labeled_trn_ds)
def test_uncertainty_lc(self):
trn_ds = init_toyexample(self.X, self.y)
qs = UncertaintySampling(trn_ds, method='lc', model=LogisticRegression())
model = LogisticRegression()
qseq = run_qs(trn_ds, self.lbr, model, qs, self.quota)
assert_array_equal(qseq, np.array([6,7,8,9]))
def test_uncertainty_sm(self):
trn_ds = init_toyexample(self.X, self.y)
qs = UncertaintySampling(trn_ds, method='sm', model=LogisticRegression())
model = LogisticRegression()
qseq = run_qs(trn_ds, self.lbr, model, qs, self.quota)
assert_array_equal(qseq, np.array([6,7,8,9]))
def test_quire(self):
trn_ds = init_toyexample(self.X, self.y)
qs = QUIRE(trn_ds)
model = LogisticRegression()
qseq = run_qs(trn_ds, self.lbr, model, qs, self.quota)
assert_array_equal(qseq, np.array([6,7,9,8]))
if __name__ == '__main__':
unittest.main()
|
from numpy.testing import assert_array_equal
import numpy as np
from libact.base.interfaces import QueryStrategy, ContinuousModel
from libact.base.dataset import Dataset, import_libsvm_sparse
from libact.models import *
from libact.query_strategies import *
from libact.labelers import IdealLabeler
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [0, 1], [0, -2], \
[1.5, 1.5], [-2, -2]]
Y = [-1, -1, -1, 1, 1, 1, -1, -1, 1, 1]
fully_labeled_trn_ds = Dataset(X, Y)
lbr = IdealLabeler(fully_labeled_trn_ds)
quota = 4
def init_toyexample():
trn_ds = Dataset(X, np.concatenate([Y[:6], [None] * 4]))
return trn_ds
def run_qs(trn_ds, lbr, model, qs, quota):
qseq = []
for i in range(quota) :
ask_id = qs.make_query()
X, y = zip(*trn_ds.data)
lb = lbr.label(X[ask_id])
trn_ds.update(ask_id, lb)
qseq.append(ask_id)
return np.array(qseq)
def test_uncertainty_lc():
trn_ds = init_toyexample()
qs = UncertaintySampling(trn_ds, method='lc', model=LogisticRegression())
model = LogisticRegression()
qseq = run_qs(trn_ds, lbr, model, qs, quota)
assert_array_equal(qseq, np.array([6,7,8,9]))
def test_uncertainty_sm():
trn_ds = init_toyexample()
qs = UncertaintySampling(trn_ds, method='sm', model=LogisticRegression())
model = LogisticRegression()
qseq = run_qs(trn_ds, lbr, model, qs, quota)
assert_array_equal(qseq, np.array([6,7,8,9]))
def test_quire():
trn_ds = init_toyexample()
qs = QUIRE(trn_ds)
model = LogisticRegression()
qseq = run_qs(trn_ds, lbr, model, qs, quota)
assert_array_equal(qseq, np.array([6,7,9,8]))
|
bsd-2-clause
|
Python
|
3f74de582c48f02b5b085f11fe76dbf87189db8b
|
add passing validation for 2012 prez general md contest
|
datamade/openelections-core,openelections/openelections-core,cathydeng/openelections-core,cathydeng/openelections-core,datamade/openelections-core,openelections/openelections-core
|
openelex/us/md/validate.py
|
openelex/us/md/validate.py
|
from openelex.models import Contest, Candidate, Result
#TODO: Genericize this to check unique contests for all elections
def validate_unique_prez_2012_general():
"""Should only be a single contest for 2012 prez general"""
count = Contest.objects.filter(election_id='md-2012-11-06-general', slug='president-vice-pres').count()
expected = 1
try:
assert count == expected
print "PASS: %s general prez election found for 2012" % count
except AssertionError:
raise AssertionError("Mismatch between 2012 general prez contest count (%s) and expected count (%s)" % (count, expected))
#def validate_unique_candidates():
#for each election date
#count of unique set of candidates should match Candidate.objects.count()
# pass
#def validate_name_parsing():
#Check assortment of names
#Check that Other was skipped
# pass
|
from openelex.models import Contest, Candidate, Result
def validate_unique_contests():
"""Count of contests should match unique set of election ids"""
elec_ids_count = len(Contest.objects.filter(state='MD').distinct('election_id'))
contest_count = Contest.objects.filter(state='MD').count()
try:
assert elec_ids_count == contest_count
except AssertionError:
raise AssertionError("MD - mismatch between contest count (%s) and election id count (%s)" % (contest_count, elec_ids_count))
def validate_unique_candidates():
#for each election date
#count of unique set of candidates should match Candidate.objects.count()
pass
def validate_name_parsing():
#Check assortment of names
#Check that Other was skipped
pass
|
mit
|
Python
|
e0749f69d9a75ddcaa2ca927ac72ce75f0c75250
|
Update env_detect.py
|
Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System
|
device/src/env_detect.py
|
device/src/env_detect.py
|
#--------------------------------------
# ____ ____
# / __/__ / __/
# _\ \/ _ \_\ \
# /___/ .__/___/
# /_/
#
# dev_detect.py
# Environment detection.
#
# Author : Arvin
# Date : 15/09/2017
#--------------------------------------
#Weather station.
#detect environment information from several sensors:
#water leverl, air humity, raining, air temperature, light sensitivity.
#Air temperature&humity sensor: DHT11.
#Add dht.py in micropython/stmhal/modules, refer to esp8266
#Compile the DHT in firmware, then use DHT lib in application.
#Raining, same to soil moisture.
#Raining ? DO value: 0
from pyb import Pin
p_in = Pin('Y12', Pin.IN, Pin.PULL_UP)
p_in.value
adc = pyb.ADC(Pin('Y11')) # create an analog object from a pin
adc = pyb.ADC(pyb.Pin.board.Y11)
val = adc.read() # read an analog value
#-----------------------------------------#
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address) <--> None
#Command list:
#0x00 power off
#0x01 power on
#0x07 reset
#0x10 Continuously H-Resolution Mode, 1lx resolution, measurement time is typically 120ms
#0x11 0.5lx, 120ms
#0x13 4lx, 16ms
from pyb import I2C
i2c = I2C(1, I2C.MASTER) # create and init as a master
i2c.send(0x10, 0x23) # send 3 bytes to slave with address 0x23
i2c.is_ready(0x23) # check if slave 0x23 is ready
i2c.scan() # scan for slaves on the bus, returning
i2c.mem_read(3, 0x23, 2) # read 3 bytes from memory of slave 0x23,
# starting at address 2 in the slave
|
#!/usr/bin/env python
#Weather station.
#detect environment information from several sensors:
#water leverl, air humity, raining, air temperature, light sensitivity.
#Air temperature&humity sensor: DHT11.
#Add dht.py in micropython/stmhal/modules, refer to esp8266
#Compile the DHT in firmware, then use DHT lib in application.
#Raining, same to soil moisture.
#Raining ? DO value: 0
from pyb import Pin
p_in = Pin('Y12', Pin.IN, Pin.PULL_UP)
p_in.value
adc = pyb.ADC(Pin('Y11')) # create an analog object from a pin
adc = pyb.ADC(pyb.Pin.board.Y11)
val = adc.read() # read an analog value
#-----------------------------------------#
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address) <--> None
#Command list:
#0x00 power off
#0x01 power on
#0x07 reset
#0x10 Continuously H-Resolution Mode, 1lx resolution, measurement time is typically 120ms
#0x11 0.5lx, 120ms
#0x13 4lx, 16ms
from pyb import I2C
i2c = I2C(1, I2C.MASTER) # create and init as a master
i2c.send(0x10, 0x23) # send 3 bytes to slave with address 0x23
i2c.is_ready(0x23) # check if slave 0x23 is ready
i2c.scan() # scan for slaves on the bus, returning
i2c.mem_read(3, 0x23, 2) # read 3 bytes from memory of slave 0x23,
# starting at address 2 in the slave
|
mit
|
Python
|
f70bf550de5476d23bdd1e5fc375d9c703990bce
|
Drop version info from our sbt builds
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
builds/build_sbt_image.py
|
builds/build_sbt_image.py
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
Build a Docker image for one of our sbt applications.
Usage:
build_sbt_image.py --project=<PROJECT> [--env=<BUILD_ENV>]
build_sbt_image.py -h | --help
Options:
-h --help Show this screen.
--project=<PROJECT> Name of the sbt project (e.g. api, transformer)
--env=<BUILD_ENV> Build environment (dev, prod, etc.)
"""
import os
import shutil
import subprocess
import docopt
from tooling import (
write_release_id,
CURRENT_COMMIT,
ROOT,
PLATFORM_ENV
)
if __name__ == '__main__':
args = docopt.docopt(__doc__)
# Read arguments from docopt
project = args['--project']
build_env = args['--env'] or PLATFORM_ENV
print('*** Building sbt Docker image for %s' % project)
# Construct the release ID and the tag
release_id = '%s_%s' % (CURRENT_COMMIT, build_env)
tag = '%s:%s' % (project, release_id)
print('*** Image will be tagged %s' % tag)
print('*** Building the Scala binaries')
subprocess.check_call(['sbt', 'project %s' % project, 'stage'])
source_target = os.path.join(ROOT, project, 'target', 'universal', 'stage')
docker_root = os.path.join(ROOT, 'docker', 'scala_service')
dest_target = os.path.join(docker_root, 'target', project)
print('*** Copying build artefacts to %s from %s' % (dest_target, source_target))
shutil.rmtree(dest_target, ignore_errors=True)
shutil.copytree(source_target, dest_target)
print('*** Building the new Docker image')
print('*** Dockerfile is at %s' % docker_root)
subprocess.check_call([
'docker', 'build',
'--file', os.path.join(docker_root, 'Dockerfile'),
'--tag', tag,
'--build-arg', 'project=%s' % project,
docker_root
])
print('*** Saving the release ID to .releases')
write_release_id(project=project, release_id=release_id)
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
Build a Docker image for one of our sbt applications.
Usage:
build_sbt_image.py --project=<PROJECT> [--version=<VERSION>] [--env=<BUILD_ENV>]
build_sbt_image.py -h | --help
Options:
-h --help Show this screen.
--project=<PROJECT> Name of the sbt project (e.g. api, transformer)
--version=<VERSION> Version to use in the release ID
--env=<BUILD_ENV> Build environment (dev, prod, etc.)
"""
import os
import shutil
import subprocess
import docopt
from tooling import (
write_release_id,
CURRENT_COMMIT,
ROOT,
PLATFORM_ENV
)
DEFAULT_VERSION = '0.0.1'
if __name__ == '__main__':
args = docopt.docopt(__doc__)
# Read arguments from docopt
project = args['--project']
version = args['--version'] or DEFAULT_VERSION
build_env = args['--env'] or PLATFORM_ENV
print('*** Building sbt Docker image for %s' % project)
# Construct the release ID and the tag
release_id = '%s-%s_%s' % (version, CURRENT_COMMIT, build_env)
tag = '%s:%s' % (project, release_id)
print('*** Image will be tagged %s' % tag)
print('*** Building the Scala binaries')
subprocess.check_call(['sbt', 'project %s' % project, 'stage'])
source_target = os.path.join(ROOT, project, 'target', 'universal', 'stage')
docker_root = os.path.join(ROOT, 'docker', 'scala_service')
dest_target = os.path.join(docker_root, 'target', project)
print('*** Copying build artefacts to %s from %s' % (dest_target, source_target))
shutil.rmtree(dest_target, ignore_errors=True)
shutil.copytree(source_target, dest_target)
print('*** Building the new Docker image')
print('*** Dockerfile is at %s' % docker_root)
subprocess.check_call([
'docker', 'build',
'--file', os.path.join(docker_root, 'Dockerfile'),
'--tag', tag,
'--build-arg', 'project=%s' % project,
docker_root
])
print('*** Saving the release ID to .releases')
write_release_id(project=project, release_id=release_id)
|
mit
|
Python
|
a37a2818fb2d61dd4a1bba5358d6c22f166132e6
|
Fix a local rule reference
|
turbulenz/gyp,mgamer/gyp,bulldy80/gyp_unofficial,okumura/gyp,bnq4ever/gypgoogle,xin3liang/platform_external_chromium_org_tools_gyp,dougbeal/gyp,cchamberlain/gyp,enkripsi/gyp,bulldy80/gyp_unofficial,AWhetter/gyp,duanhjlt/gyp,bulldy80/gyp_unofficial,omasanori/gyp,tarc/gyp,pandaxcl/gyp,bnoordhuis/gyp,ttyangf/gyp,sport-monkey/GYP,geekboxzone/lollipop_external_chromium_org_tools_gyp,sanyaade-teachings/gyp,trafi/gyp,tarc/gyp,erikge/watch_gyp,sanyaade-teachings/gyp,turbulenz/gyp,xin3liang/platform_external_chromium_org_tools_gyp,sanyaade-teachings/gyp,dougbeal/gyp,erikge/watch_gyp,Jack-Q/GYP-copy,geekboxzone/lollipop_external_chromium_org_tools_gyp,amoikevin/gyp,ttyangf/pdfium_gyp,ryfx/gyp,ryfx/gyp,cchamberlain/gyp,clar/gyp,saghul/gyn,trafi/gyp,Danath/gyp,enkripsi/gyp,cysp/gyp,openpeer/webrtc-gyp,Chilledheart/gyp,tarc/gyp,sdklite/gyp,Phuehvk/gyp,dougbeal/gyp,sdklite/gyp,Chilledheart/gyp,msc-/gyp,msc-/gyp,clar/gyp,AWhetter/gyp,openpeer/webrtc-gyp,amoikevin/gyp,bpsinc-native/src_tools_gyp,clar/gyp,pandaxcl/gyp,bulldy80/gyp_unofficial,omasanori/gyp,ttyangf/gyp,mgamer/gyp,duanhjlt/gyp,duanhjlt/gyp,yjhjstz/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,springmeyer/gyp,chromium/gyp,yjhjstz/gyp,sport-monkey/GYP,xin3liang/platform_external_chromium_org_tools_gyp,azunite/gyp_20150930,azunite/gyp_20150930,ryfx/gyp,azunite/gyp,bnoordhuis/gyp,Jack-Q/GYP-copy,cysp/gyp,okumura/gyp,android-ia/platform_external_chromium_org_tools_gyp,azunite/gyp,bnq4ever/gypgoogle,ttyangf/pdfium_gyp,Phuehvk/gyp,saghul/gyn,sanyaade-teachings/gyp,springmeyer/gyp,bnoordhuis/gyp,AOSPU/external_chromium_org_tools_gyp,mgamer/gyp,sport-monkey/GYP,adblockplus/gyp,Omegaphora/external_chromium_org_tools_gyp,enkripsi/gyp,clar/gyp,mgamer/gyp,android-ia/platform_external_chromium_org_tools_gyp,springmeyer/gyp,AOSPU/external_chromium_org_tools_gyp,android-ia/platform_external_chromium_org_tools_gyp,AWhetter/gyp,amoikevin/gyp,cchamberlain/gyp,adblockplus/gyp,bnq4ever/gypgoogle,amoikevin/gyp,yjhjstz/gyp,Phuehvk/gyp,clar/gyp,springmeyer/gyp,trafi/gyp,omasanori/gyp,xin3liang/platform_external_chromium_org_tools_gyp,Danath/gyp,sport-monkey/GYP,adblockplus/gyp,carlTLR/gyp,mapbox/gyp,erikge/watch_gyp,mgamer/gyp,Phuehvk/gyp,turbulenz/gyp,duanhjlt/gyp,carlTLR/gyp,LazyCodingCat/gyp,ttyangf/pdfium_gyp,ttyangf/pdfium_gyp,azunite/gyp,yjhjstz/gyp,Omegaphora/external_chromium_org_tools_gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,amoikevin/gyp,bnoordhuis/gyp,adblockplus/gyp,dougbeal/gyp,Chilledheart/gyp,chromium/gyp,ttyangf/gyp,cysp/gyp,azunite/gyp_20150930,adblockplus/gyp,openpeer/webrtc-gyp,duanhjlt/gyp,sport-monkey/GYP,LazyCodingCat/gyp,dougbeal/gyp,chromium/gyp,saghul/gyn,bpsinc-native/src_tools_gyp,mapbox/gyp,ryfx/gyp,Chilledheart/gyp,openpeer/webrtc-gyp,mapbox/gyp,mapbox/gyp,msc-/gyp,tarc/gyp,carlTLR/gyp,cysp/gyp,turbulenz/gyp,azunite/gyp,enkripsi/gyp,okumura/gyp,carlTLR/gyp,omasanori/gyp,android-ia/platform_external_chromium_org_tools_gyp,turbulenz/gyp,pandaxcl/gyp,Danath/gyp,Omegaphora/external_chromium_org_tools_gyp,sdklite/gyp,AOSPU/external_chromium_org_tools_gyp,bnoordhuis/gyp,msc-/gyp,trafi/gyp,saghul/gyn,msc-/gyp,Chilledheart/gyp,openpeer/webrtc-gyp,AWhetter/gyp,Jack-Q/GYP-copy,AOSPU/external_chromium_org_tools_gyp,Danath/gyp,chromium/gyp,cchamberlain/gyp,Jack-Q/GYP-copy,Phuehvk/gyp,yjhjstz/gyp,Jack-Q/GYP-copy,cchamberlain/gyp,okumura/gyp,sdklite/gyp,ttyangf/gyp,saghul/gyn,pandaxcl/gyp,bnq4ever/gypgoogle,azunite/gyp_20150930,enkripsi/gyp,bpsinc-native/src_tools_gyp,azunite/gyp_20150930,trafi/gyp,sdklite/gyp,bpsinc-native/src_tools_gyp,erikge/watch_gyp,azunite/gyp,bnq4ever/gypgoogle,LazyCodingCat/gyp,cysp/gyp,ryfx/gyp,LazyCodingCat/gyp,AWhetter/gyp,ttyangf/gyp,carlTLR/gyp,bulldy80/gyp_unofficial,chromium/gyp,tarc/gyp,erikge/watch_gyp,mapbox/gyp,springmeyer/gyp,LazyCodingCat/gyp,ttyangf/pdfium_gyp,Omegaphora/external_chromium_org_tools_gyp,sanyaade-teachings/gyp,pandaxcl/gyp,Danath/gyp
|
test/mac/archs/test-archs-multiarch.gyp
|
test/mac/archs/test-archs-multiarch.gyp
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'static_32_64',
'type': 'static_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'shared_32_64',
'type': 'shared_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'module_32_64',
'type': 'loadable_module',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64',
'type': 'executable',
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64_bundle',
'product_name': 'Test App',
'type': 'executable',
'mac_bundle': 1,
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
# This only needs to compile.
{
'target_name': 'precompiled_prefix_header_mm_32_64',
'type': 'shared_library',
'sources': [ 'file.mm', ],
'xcode_settings': {
'GCC_PREFIX_HEADER': 'header.h',
'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
},
},
# This does not compile but should not cause generation errors.
{
'target_name': 'exe_32_64_no_sources',
'type': 'executable',
'dependencies': [
'static_32_64',
],
'sources': [],
'xcode_settings': {
'ARCHS': ['i386', 'x86_64'],
},
},
]
}
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'static_32_64',
'type': 'static_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'shared_32_64',
'type': 'shared_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'module_32_64',
'type': 'loadable_module',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64',
'type': 'executable',
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64_bundle',
'product_name': 'Test App',
'type': 'executable',
'mac_bundle': 1,
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
# This only needs to compile.
{
'target_name': 'precompiled_prefix_header_mm_32_64',
'type': 'shared_library',
'sources': [ 'file.mm', ],
'xcode_settings': {
'GCC_PREFIX_HEADER': 'header.h',
'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
},
},
# This does not compile but should not cause generation errors.
{
'target_name': 'exe_32_64_no_sources',
'type': 'executable',
'dependencies': [
':static_32_64',
],
'sources': [],
'xcode_settings': {
'ARCHS': ['i386', 'x86_64'],
},
},
]
}
|
bsd-3-clause
|
Python
|
fcee6aca22007c3ff64b4d87de3f1772b3c7eb99
|
add encoding, re: "non-ascii character" syntax error
|
stefankoegl/kdtree
|
kdtree/__init__.py
|
kdtree/__init__.py
|
# -*- coding: latin-1 -*-
"""A Python implemntation of a kd-tree
This package provides a simple implementation of a kd-tree in Python.
https://en.wikipedia.org/wiki/K-d_tree
"""
__author__ = 'Stefan Kögl <[email protected]>'
__version__ = '0.1'
__website__ = 'https://github.com/stefankoegl/kdtree'
__license__ = 'GNU General Public License v3 or later'
|
"""A Python implemntation of a kd-tree
This package provides a simple implementation of a kd-tree in Python.
https://en.wikipedia.org/wiki/K-d_tree
"""
__author__ = 'Stefan Kögl <[email protected]>'
__version__ = '0.1'
__website__ = 'https://github.com/stefankoegl/kdtree'
__license__ = 'GNU General Public License v3 or later'
|
isc
|
Python
|
943a312c152ec6adeeff1e7a8b5b4b46334dfc0d
|
Fix dims in regression tf example
|
nkoep/pymanopt,pymanopt/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,nkoep/pymanopt,tingelst/pymanopt
|
examples/regression_offset_tensorflow.py
|
examples/regression_offset_tensorflow.py
|
import tensorflow as tf
import numpy as np
from pymanopt import Problem
from pymanopt.solvers import TrustRegions
from pymanopt.manifolds import Euclidean, Product
if __name__ == "__main__":
# Generate random data
X = np.random.randn(3, 100).astype('float32')
Y = (X[0:1, :] - 2*X[1:2, :] + np.random.randn(1, 100) + 5).astype(
'float32')
# Cost function is the sqaured test error
w = tf.Variable(tf.zeros([3, 1]))
b = tf.Variable(tf.zeros([1, 1]))
cost = tf.reduce_mean(tf.square(Y - tf.matmul(tf.transpose(w), X) - b))
# first-order, second-order
solver = TrustRegions()
# R^3 x R^1
manifold = Product([Euclidean(3, 1), Euclidean(1, 1)])
# Solve the problem with pymanopt
problem = Problem(manifold=manifold, cost=cost, arg=[w, b], verbosity=0)
wopt = solver.solve(problem)
print('Weights found by pymanopt (top) / '
'closed form solution (bottom)')
print(wopt[0].T)
print(wopt[1])
X1 = np.concatenate((X, np.ones((1, 100))), axis=0)
wclosed = np.linalg.inv(X1.dot(X1.T)).dot(X1).dot(Y.T)
print(wclosed[0:3].T)
print(wclosed[3])
|
import tensorflow as tf
import numpy as np
from pymanopt import Problem
from pymanopt.solvers import TrustRegions
from pymanopt.manifolds import Euclidean, Product
if __name__ == "__main__":
# Generate random data
X = np.random.randn(3, 100).astype('float32')
Y = (X[0:1, :] - 2*X[1:2, :] + np.random.randn(1, 100) + 5).astype(
'float32')
# Cost function is the sqaured test error
w = tf.Variable(tf.zeros([3, 1]))
b = tf.Variable(tf.zeros([1]))
cost = tf.reduce_mean(tf.square(Y - tf.matmul(tf.transpose(w), X) - b))
# first-order, second-order
solver = TrustRegions()
# R^3 x R^1
manifold = Product([Euclidean(3, 1), Euclidean(1, 1)])
# Solve the problem with pymanopt
problem = Problem(manifold=manifold, cost=cost, arg=[w, b], verbosity=0)
wopt = solver.solve(problem)
print('Weights found by pymanopt (top) / '
'closed form solution (bottom)')
print(wopt[0].T)
print(wopt[1])
X1 = np.concatenate((X, np.ones((1, 100))), axis=0)
wclosed = np.linalg.inv(X1.dot(X1.T)).dot(X1).dot(Y.T)
print(wclosed[0:3].T)
print(wclosed[3])
|
bsd-3-clause
|
Python
|
f4b7f90c2a7d5d575592efc21cfc83fb999fa57e
|
fix todo module path expansion
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
bumblebee/modules/todo.py
|
bumblebee/modules/todo.py
|
# pylint: disable=C0111,R0903
"""Displays the number of todo items from a text file
Parameters:
* todo.file: File to read TODOs from (defaults to ~/Documents/todo.txt)
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
import os.path
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.output)
)
self._doc = os.path.expanduser(self.parameter("file", "~/Documents/todo.txt"))
self._todos = self.count_items()
def output(self, widget):
self._todos = self.count_items()
return str(self._todos)
def state(self, widgets):
if self._todos == 0:
return "empty"
return "items"
def count_items(self):
try:
i = -1
with open(self._doc) as f:
for i, l in enumerate(f):
pass
return i+1
except Exception:
return 0
|
# pylint: disable=C0111,R0903
"""Displays the number of todo items from a text file
Parameters:
* todo.file: File to read TODOs from (defaults to ~/Documents/todo.txt)
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.output)
)
self._todos = self.count_items()
def output(self, widget):
self._todos = self.count_items()
return str(self._todos)
def state(self, widgets):
if self._todos == 0:
return "empty"
return "items"
def count_items(filename):
try:
i = -1
doc = self.parameter("file", "~/Documents/todo.txt")
with open(doc) as f:
for i, l in enumerate(f):
pass
return i+1
except Exception:
return 0
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
mit
|
Python
|
d9a37dffd8e6c5ab44f6b355de4fc07aa64aea9f
|
Fix unreliability of pipeline publisher.
|
alexrudy/Zeeko,alexrudy/Zeeko
|
zeeko/telemetry/tests/test_pipeline.py
|
zeeko/telemetry/tests/test_pipeline.py
|
import pytest
import h5py
import time
import numpy as np
from ..pipeline import create_pipeline
@pytest.fixture
def chunksize():
"""The size of chunks."""
return 10
@pytest.fixture
def pipeline(address, context, chunksize, filename):
"""Pipeline"""
ioloop = create_pipeline(address, context, chunksize, filename)
yield ioloop
ioloop.cancel()
def test_create_pipeline(address, context, chunksize, filename):
"""Test creating a pipeline."""
ioloop = create_pipeline(address, context, chunksize, filename)
print("Created")
ioloop.cancel(timeout=0.1)
print("Canceled")
def test_run_pipeline(pipeline, Publisher, pub, filename, chunksize):
"""Test running the pipeline."""
with pipeline.running(timeout=0.1):
pipeline.state.selected("RUN").wait(timeout=0.1)
while not pipeline.record.recorder.pushed.is_set():
Publisher.update()
Publisher.publish(pub)
time.sleep(0.1)
pipeline.record.recorder.pushed.wait(timeout=3.0)
pipeline.write.fired.wait(timeout=3.0)
pipeline.state.selected("STOP").wait(timeout=1.0)
print(pipeline.record.recorder.complete)
for chunk in pipeline.record.recorder:
print("{0}: {1}".format(chunk, pipeline.record.recorder[chunk].lastindex))
assert pipeline.record.recorder.pushed.is_set()
assert pipeline.write.fired.is_set()
assert pipeline.record.recorder.chunkcount == 1
with h5py.File(filename, 'r') as f:
for name in Publisher.keys():
assert name in f
g = f[name]
assert g['data'].shape[0] == chunksize
np.testing.assert_allclose(g['data'][-1], Publisher[name].array)
|
import pytest
import h5py
import time
import numpy as np
from ..pipeline import create_pipeline
@pytest.fixture
def chunksize():
"""The size of chunks."""
return 10
@pytest.fixture
def pipeline(address, context, chunksize, filename):
"""Pipeline"""
ioloop = create_pipeline(address, context, chunksize, filename)
yield ioloop
ioloop.cancel()
def test_create_pipeline(address, context, chunksize, filename):
"""Test creating a pipeline."""
ioloop = create_pipeline(address, context, chunksize, filename)
print("Created")
ioloop.cancel(timeout=0.1)
print("Canceled")
def test_run_pipeline(pipeline, Publisher, pub, filename, chunksize):
"""Test running the pipeline."""
with pipeline.running(timeout=0.1):
pipeline.state.selected("RUN").wait(timeout=0.1)
for i in range(10):
Publisher.update()
Publisher.publish(pub)
time.sleep(0.1)
pipeline.record.recorder.pushed.wait(timeout=3.0)
pipeline.write.fired.wait(timeout=3.0)
pipeline.state.selected("STOP").wait(timeout=1.0)
print(pipeline.record.recorder.complete)
for chunk in pipeline.record.recorder:
print("{0}: {1}".format(chunk, pipeline.record.recorder[chunk].lastindex))
assert pipeline.record.recorder.pushed.is_set()
assert pipeline.write.fired.is_set()
assert pipeline.record.recorder.chunkcount == 1
with h5py.File(filename, 'r') as f:
for name in Publisher.keys():
assert name in f
g = f[name]
assert g['data'].shape[0] == chunksize
np.testing.assert_allclose(g['data'][-1], Publisher[name].array)
|
bsd-3-clause
|
Python
|
f9f01b57e6e2085786908a5ebcf0be61fefe3a51
|
Change version
|
Shir0kamii/slack-client
|
slack_client/__init__.py
|
slack_client/__init__.py
|
from .api import SlackAPI
from .channel import SlackChannel
__version__ = '0.3.0'
|
from .api import SlackAPI
from .channel import SlackChannel
__version__ = '0.2.16'
|
mit
|
Python
|
d3847357c446c4a1ac50735b983b20cf57f9c7c6
|
Fix args and return of CounterController functions
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
malcolm/controllers/countercontroller.py
|
malcolm/controllers/countercontroller.py
|
from malcolm.core.controller import Controller
from malcolm.core.attribute import Attribute
from malcolm.core.numbermeta import NumberMeta
from malcolm.core.method import takes, returns
import numpy as np
class CounterController(Controller):
def create_attributes(self):
self.counter = Attribute(NumberMeta("counter", "A counter", np.int32))
self.counter.set_put_function(self.counter.set_value)
self.counter.set_value(0)
yield self.counter
@takes()
@returns()
def reset(self, args=None):
self.counter.set_value(0)
return {}
@takes()
@returns()
def increment(self, args=None):
self.counter.set_value(self.counter.value + 1)
return {}
|
from malcolm.core.controller import Controller
from malcolm.core.attribute import Attribute
from malcolm.core.numbermeta import NumberMeta
from malcolm.core.method import takes
import numpy as np
class CounterController(Controller):
def create_attributes(self):
self.counter = Attribute(NumberMeta("counter", "A counter", np.int32))
self.counter.set_put_function(self.counter.set_value)
self.counter.set_value(0)
yield self.counter
@takes()
def reset(self):
self.counter.set_value(0)
@takes()
def increment(self):
self.counter.set_value(self.counter.value + 1)
|
apache-2.0
|
Python
|
270af43ffbe8974698d17ff6d5cae20fbf410f73
|
Add url enter delete element on riak
|
jgabriellima/mining,avelino/mining,chrisdamba/mining,seagoat/mining,avelino/mining,AndrzejR/mining,mlgruby/mining,mlgruby/mining,mining/mining,mlgruby/mining,mining/mining,chrisdamba/mining,AndrzejR/mining,seagoat/mining,jgabriellima/mining
|
admin/urls.py
|
admin/urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import CubeHandler, ConnectionHandler, DeleteHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/delete/(?P<bucket>[\w-]+)/(?P<slug>[\w-]+)", DeleteHandler),
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import CubeHandler, ConnectionHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
mit
|
Python
|
c4fadf89161e99514037e8af7953fca0ab13b28e
|
Fix import.
|
setten/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,dongsenfo/pymatgen,matk86/pymatgen,montoyjh/pymatgen,gVallverdu/pymatgen,gpetretto/pymatgen,aykol/pymatgen,Bismarrck/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,gVallverdu/pymatgen,montoyjh/pymatgen,tallakahath/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,dongsenfo/pymatgen,nisse3000/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,tschaume/pymatgen,tallakahath/pymatgen,Bismarrck/pymatgen,mbkumar/pymatgen,setten/pymatgen,xhqu1981/pymatgen,tallakahath/pymatgen,richardtran415/pymatgen,czhengsci/pymatgen,vorwerkc/pymatgen,blondegeek/pymatgen,aykol/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,gmatteo/pymatgen,czhengsci/pymatgen,montoyjh/pymatgen,Bismarrck/pymatgen,johnson1228/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,ndardenne/pymatgen,davidwaroquiers/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,dongsenfo/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,matk86/pymatgen,richardtran415/pymatgen,aykol/pymatgen,gpetretto/pymatgen,Bismarrck/pymatgen,matk86/pymatgen,fraricci/pymatgen,setten/pymatgen,vorwerkc/pymatgen,tschaume/pymatgen,setten/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,mbkumar/pymatgen,xhqu1981/pymatgen,ndardenne/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,matk86/pymatgen
|
pymatgen/symmetry/tests/test_groups.py
|
pymatgen/symmetry/tests/test_groups.py
|
#!/usr/bin/env python
"""
TODO: Modify unittest doc.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "4/10/14"
import unittest
import numpy as np
from pymatgen.symmetry.groups import PointGroup, SpaceGroup
class PointGroupTest(unittest.TestCase):
def test_order(self):
order = {"mmm": 8, "432": 24, "-6m2": 12}
for k, v in order.items():
pg = PointGroup(k)
self.assertEqual(order[k], len(pg.symmetry_ops))
class SpaceGroupTest(unittest.TestCase):
def test_order_symm_ops(self):
for name in SpaceGroup.SG_SYMBOLS:
sg = SpaceGroup(name)
self.assertEqual(len(sg.symmetry_ops), sg.order)
def test_crystal_system(self):
sg = SpaceGroup("R-3c")
self.assertEqual(sg.crystal_system, "Trigonal")
sg = SpaceGroup("R-3cH")
self.assertEqual(sg.crystal_system, "Trigonal")
def test_get_orbit(self):
sg = SpaceGroup("Fm-3m")
p = np.random.random_integers(0, 100, size=(3,))
p /= 100
self.assertLessEqual(len(sg.get_orbit(p)), sg.order)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
"""
TODO: Modify unittest doc.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "4/10/14"
import unittest
import numpy as np
from symmetry.groups import PointGroup, SpaceGroup
class PointGroupTest(unittest.TestCase):
def test_order(self):
order = {"mmm": 8, "432": 24, "-6m2": 12}
for k, v in order.items():
pg = PointGroup(k)
self.assertEqual(order[k], len(pg.symmetry_ops))
class SpaceGroupTest(unittest.TestCase):
def test_order_symm_ops(self):
for name in SpaceGroup.SG_SYMBOLS:
sg = SpaceGroup(name)
self.assertEqual(len(sg.symmetry_ops), sg.order)
def test_crystal_system(self):
sg = SpaceGroup("R-3c")
self.assertEqual(sg.crystal_system, "Trigonal")
sg = SpaceGroup("R-3cH")
self.assertEqual(sg.crystal_system, "Trigonal")
def test_get_orbit(self):
sg = SpaceGroup("Fm-3m")
p = np.random.random_integers(0, 100, size=(3,))
p /= 100
self.assertLessEqual(len(sg.get_orbit(p)), sg.order)
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
48e589b200894121f32bd96b39f29ad5c0120991
|
add test_delete_task_id_not_integer
|
pyfarm/pyfarm-agent,pyfarm/pyfarm-agent,guidow/pyfarm-agent,pyfarm/pyfarm-agent,guidow/pyfarm-agent,guidow/pyfarm-agent
|
tests/test_agent/test_http_api_tasks.py
|
tests/test_agent/test_http_api_tasks.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from json import dumps
try:
from httplib import OK, BAD_REQUEST
except ImportError: # pragma: no cover
from http.client import OK, BAD_REQUEST
from pyfarm.agent.config import config
from pyfarm.agent.testutil import BaseAPITestCase
from pyfarm.agent.http.api.tasks import Tasks
class TestTasks(BaseAPITestCase):
URI = "/tasks/"
CLASS = Tasks
def test_master_contacted(self):
try:
last_master_contact = config["last_master_contact"]
except KeyError:
last_master_contact = None
request = self.get(headers={"User-Agent": config["master_user_agent"]})
tasks = Tasks()
tasks.render(request)
self.assertNotEqual(last_master_contact, config["last_master_contact"])
def test_returns_current_assignments(self):
# NOTE: current_assignments is improperly constructed here but we
# only care about the values.
config["current_assignments"] = {
"a": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 1}]},
"b": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 2}]},
"c": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 3}]}
}
current_tasks = []
for item in config["current_assignments"].values():
current_tasks += item["tasks"]
request = self.get()
tasks = Tasks()
tasks.render(request)
self.assertEqual(request.written, [dumps(current_tasks)])
def test_delete_task_id_not_integer(self):
request = self.delete(
uri=["aaa"],
headers={"User-Agent": config["master_user_agent"]})
tasks = Tasks()
tasks.render(request)
self.assertEqual(
request.written, ['{"error": "Task id was not an integer"}'])
self.assertEqual(request.responseCode, BAD_REQUEST)
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from json import dumps
try:
from httplib import OK
except ImportError: # pragma: no cover
from http.client import OK
from json import loads
from datetime import datetime
from twisted.web.server import NOT_DONE_YET
from pyfarm.agent.config import config
from pyfarm.agent.testutil import BaseAPITestCase
from pyfarm.agent.http.api.tasks import Tasks
class TestGetTasks(BaseAPITestCase):
URI = "/tasks/"
CLASS = Tasks
def test_master_contacted(self):
try:
last_master_contact = config["last_master_contact"]
except KeyError:
last_master_contact = None
request = self.get(headers={"User-Agent": config["master_user_agent"]})
tasks = Tasks()
tasks.render(request)
self.assertNotEqual(last_master_contact, config["last_master_contact"])
def test_returns_current_assignments(self):
# NOTE: current_assignments is improperly constructed here but we
# only care about the values.
config["current_assignments"] = {
"a": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 1}]},
"b": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 2}]},
"c": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 3}]}
}
current_tasks = []
for item in config["current_assignments"].values():
current_tasks += item["tasks"]
request = self.get()
tasks = Tasks()
tasks.render(request)
self.assertEqual(request.written, [dumps(current_tasks)])
|
apache-2.0
|
Python
|
4e12aea0a5479bad8289cbf6c9f460931d51f701
|
Add autocommit to 1 to avoid select cache ¿WTF?
|
p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos
|
database.py
|
database.py
|
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
|
import MySQLdb
class database(object):
def __init__(self):
config = {}
execfile("config.py",config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
|
agpl-3.0
|
Python
|
e5d2ed715d83be506ec452ecdd0a22748a84a007
|
Fix test_pull_doc (missing request id when creating messages)
|
draperjames/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,aavanian/bokeh,philippjfr/bokeh,philippjfr/bokeh,mindriot101/bokeh,aiguofer/bokeh,dennisobrien/bokeh,msarahan/bokeh,msarahan/bokeh,DuCorey/bokeh,philippjfr/bokeh,msarahan/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,clairetang6/bokeh,htygithub/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,aavanian/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,phobson/bokeh,timsnyder/bokeh,jakirkham/bokeh,gpfreitas/bokeh,phobson/bokeh,ericmjl/bokeh,philippjfr/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,quasiben/bokeh,jakirkham/bokeh,ericmjl/bokeh,aiguofer/bokeh,jakirkham/bokeh,timsnyder/bokeh,rs2/bokeh,aavanian/bokeh,gpfreitas/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,maxalbert/bokeh,htygithub/bokeh,justacec/bokeh,justacec/bokeh,ptitjano/bokeh,azjps/bokeh,mindriot101/bokeh,azjps/bokeh,ptitjano/bokeh,bokeh/bokeh,bokeh/bokeh,rs2/bokeh,msarahan/bokeh,ericmjl/bokeh,dennisobrien/bokeh,mindriot101/bokeh,gpfreitas/bokeh,philippjfr/bokeh,rs2/bokeh,phobson/bokeh,percyfal/bokeh,schoolie/bokeh,aiguofer/bokeh,azjps/bokeh,clairetang6/bokeh,timsnyder/bokeh,htygithub/bokeh,phobson/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,htygithub/bokeh,gpfreitas/bokeh,timsnyder/bokeh,maxalbert/bokeh,quasiben/bokeh,rs2/bokeh,justacec/bokeh,aiguofer/bokeh,ptitjano/bokeh,ericmjl/bokeh,dennisobrien/bokeh,schoolie/bokeh,ptitjano/bokeh,DuCorey/bokeh,percyfal/bokeh,stonebig/bokeh,ericmjl/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,mindriot101/bokeh,aavanian/bokeh,DuCorey/bokeh,dennisobrien/bokeh,quasiben/bokeh,dennisobrien/bokeh,bokeh/bokeh,stonebig/bokeh,stonebig/bokeh,DuCorey/bokeh,maxalbert/bokeh,rs2/bokeh,schoolie/bokeh,bokeh/bokeh,azjps/bokeh,draperjames/bokeh,DuCorey/bokeh,timsnyder/bokeh,maxalbert/bokeh,clairetang6/bokeh,draperjames/bokeh,jakirkham/bokeh,jakirkham/bokeh,clairetang6/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,aiguofer/bokeh,stonebig/bokeh
|
bokeh/server/protocol/messages/tests/test_pull_doc.py
|
bokeh/server/protocol/messages/tests/test_pull_doc.py
|
from __future__ import absolute_import, print_function
import unittest
import bokeh.document as document
from bokeh.plot_object import PlotObject
from bokeh.properties import Int, Instance
from bokeh.server.protocol import Protocol
class AnotherModel(PlotObject):
bar = Int(1)
class SomeModel(PlotObject):
foo = Int(2)
child = Instance(PlotObject)
class TestPullDocument(unittest.TestCase):
def _sample_doc(self):
doc = document.Document()
another = AnotherModel()
doc.add_root(SomeModel(child=another))
doc.add_root(SomeModel())
return doc
def test_create_req(self):
msg = Protocol("1.0").create("PULL-DOC-REQ", 'fakesession')
def test_create_reply(self):
sample = self._sample_doc()
msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakereqid', 'fakesession', sample)
def test_create_reply_then_parse(self):
sample = self._sample_doc()
msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakereqid', 'fakesession', sample)
copy = document.Document()
msg.push_to_document(copy)
assert len(sample.roots) == 2
assert len(copy.roots) == 2
|
from __future__ import absolute_import, print_function
import unittest
import bokeh.document as document
from bokeh.plot_object import PlotObject
from bokeh.properties import Int, Instance
from bokeh.server.protocol import Protocol
class AnotherModel(PlotObject):
bar = Int(1)
class SomeModel(PlotObject):
foo = Int(2)
child = Instance(PlotObject)
class TestPullDocument(unittest.TestCase):
def _sample_doc(self):
doc = document.Document()
another = AnotherModel()
doc.add_root(SomeModel(child=another))
doc.add_root(SomeModel())
return doc
def test_create_req(self):
msg = Protocol("1.0").create("PULL-DOC-REQ", 'fakesession')
def test_create_reply(self):
sample = self._sample_doc()
msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakesession', sample)
def test_create_reply_then_parse(self):
sample = self._sample_doc()
msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakesession', sample)
copy = document.Document()
msg.push_to_document(copy)
assert len(sample.roots) == 2
assert len(copy.roots) == 2
|
bsd-3-clause
|
Python
|
a74fbbd6c822b1384d9cd5f1501c8a01fb2ed9fe
|
Update deauthorization callback
|
vstoykov/django4facebook
|
django4facebook/views.py
|
django4facebook/views.py
|
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
import facebook
from .conf import settings
@csrf_exempt
def deauthorize_callback(request):
"""
When user deauthorize this application from facebook then
we deactivate the user from our system
"""
signed_request = request.REQUEST.get('signed_request')
if not signed_request:
return HttpResponseBadRequest()
data = facebook.parse_signed_request(signed_request,
settings.SECRET_KEY)
if not data.get('user_id'):
return HttpResponseBadRequest()
User.objects.filter(pk=data['user_id']).update(is_active=False)
return HttpResponse('Ok')
|
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseBadRequest
def deauthorize_callback(request):
"""
When user deauthorize this application from facebook then
we deactivate the user from our system
"""
if not request.facebook:
return HttpResponseBadRequest()
User.objects.filter(pk=request.facebook.uid).update(is_active=False)
return HttpResponse('Ok')
|
mit
|
Python
|
19a3ead211cc4c00b219329ac63177420cdb71e6
|
Make all functions available from raysect.core.math.function.
|
raysect/source,raysect/source
|
raysect/core/math/function/__init__.py
|
raysect/core/math/function/__init__.py
|
# cython: language_level=3
# Copyright (c) 2014-2018, Dr Alex Meakins, Raysect Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Raysect Project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from .function1d import *
from .function2d import *
from .function3d import *
|
# cython: language_level=3
# Copyright (c) 2014-2018, Dr Alex Meakins, Raysect Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Raysect Project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from .function1d import Function1D
from .function2d import Function2D
from .function3d import Function3D
|
bsd-3-clause
|
Python
|
f25e0fe435f334e19fc84a9c9458a1bea4a051f9
|
Allow to reverse the order of the CSV for a proper reading
|
shakaran/casterly,shakaran/casterly
|
money/parser/__init__.py
|
money/parser/__init__.py
|
import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
if reverse_order:
rows.reverse()
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected
|
import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected
|
bsd-3-clause
|
Python
|
fa067545657d3b1bb80a4047f175353c4856dd7c
|
Implement extension normalizer for NamedAccess
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
thinglang/parser/values/named_access.py
|
thinglang/parser/values/named_access.py
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodePopDereferenced, OpcodeDereference
from thinglang.lexer.tokens.access import LexicalAccess
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.numeric import NumericValue
from thinglang.parser.errors import InvalidIndexedAccess
from thinglang.parser.nodes.base_node import BaseNode
from thinglang.parser.rule import ParserRule
from thinglang.utils.type_descriptors import ValueType
class NamedAccess(BaseNode, ValueType):
"""
Represents a named dereference operation.
Examples:
person.walk
person.info.age
"""
def __init__(self, target):
super(NamedAccess, self).__init__(target)
self.target = target
self.type = None
self.arguments = []
def __repr__(self):
return '{}'.format('.'.join(str(x) for x in self.target))
def transpile(self):
return '->'.join(x.transpile() for x in self.target)
def compile(self, context: CompilationBuffer, pop_last=False, without_last=False):
if without_last and not self.extensions:
return self[0].compile(context)
ref = context.push_ref(context.resolve(self.root), self.source_ref)
for ext, last in self.extensions:
if last and without_last:
break
ref = context.symbols.resolve_partial(ref, ext)
cls = OpcodePopDereferenced if pop_last and last else OpcodeDereference
context.append(cls(ref.element_index), self.source_ref)
return ref
@property
def root(self):
return NamedAccess(self.target[:2])
@property
def extensions(self):
last = self.target[-1]
return [(x, x is last) for x in self.target[2:]]
def __getitem__(self, item):
return self.target[item]
def __eq__(self, other):
return type(self) == type(other) and self.target == other.target
def __len__(self):
size = len(self.target)
assert size >= 2
return size
def append(self, other):
self.target.append(other)
return self
@classmethod
def extend(cls, base, extension: Identifier) -> 'NamedAccess':
if isinstance(base, NamedAccess):
return NamedAccess(base.target + [extension])
return NamedAccess([base, extension])
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodePopDereferenced, OpcodeDereference
from thinglang.lexer.tokens.access import LexicalAccess
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.numeric import NumericValue
from thinglang.parser.errors import InvalidIndexedAccess
from thinglang.parser.nodes.base_node import BaseNode
from thinglang.parser.rule import ParserRule
from thinglang.utils.type_descriptors import ValueType
class NamedAccess(BaseNode, ValueType):
"""
Represents a named dereference operation.
Examples:
person.walk
person.info.age
"""
def __init__(self, target):
super(NamedAccess, self).__init__(target)
self.target = target
self.type = None
self.arguments = []
def __repr__(self):
return '{}'.format('.'.join(str(x) for x in self.target))
def transpile(self):
return '->'.join(x.transpile() for x in self.target)
def compile(self, context: CompilationBuffer, pop_last=False, without_last=False):
if without_last and not self.extensions:
return self[0].compile(context)
ref = context.push_ref(context.resolve(self.root), self.source_ref)
for ext, last in self.extensions:
if last and without_last:
break
ref = context.symbols.resolve_partial(ref, ext)
cls = OpcodePopDereferenced if pop_last and last else OpcodeDereference
context.append(cls(ref.element_index), self.source_ref)
return ref
@property
def root(self):
return NamedAccess(self.target[:2])
@property
def extensions(self):
last = self.target[-1]
return [(x, x is last) for x in self.target[2:]]
def __getitem__(self, item):
return self.target[item]
def __eq__(self, other):
return type(self) == type(other) and self.target == other.target
def __len__(self):
size = len(self.target)
assert size >= 2
return size
def append(self, other):
self.target.append(other)
return self
|
mit
|
Python
|
27fe88a325251c4b12a4b5f020c1d6c5e83b4b59
|
Change var to be more consistent
|
115100/untz_manager
|
untz_manager/encoder.py
|
untz_manager/encoder.py
|
"""Encoding related operations"""
import logging
import subprocess
import sys
import taglib
LOGGER = logging.getLogger(__name__)
def _get_vorbis_comments(audio_file, pattern):
macros = (('%g', 'GENRE'),
('%n', 'TRACKNUMBER'),
('%t', 'TITLE'),
('%d', 'DATE'))
params_dict = {'%g': '-G',
'%n': '-N',
'%t': '-t',
'%d': '-d'}
vorbis_comments = {}
afp = taglib.File(audio_file) # pylint: disable=E1103
for macro, tag in macros:
if macro in pattern:
vorbis_comments[params_dict[macro]] = afp.tags.get(tag)[0] or '(none)'
vorbis_comments['-a'] = (afp.tags.get('ALBUM ARTIST', [None])[0] or
afp.tags.get('ARTIST', [None])[0] or
'Unknown artist')
vorbis_comments['-l'] = afp.tags.get('ALBUM', [None])[0] or 'Unknown album'
afp.close()
return vorbis_comments
def encode_file(audio_file, base_dir, pattern, quality, passthrough):
"""Run oggenc and encode file, storing in a logical manner."""
process_args = ['oggenc',
'-q', str(quality),
'-n', '{base_dir}/%a/%l/{pattern}.ogg'.format(base_dir=base_dir,
pattern=pattern)]
if passthrough:
process_args.append(passthrough)
vorbis_comments = _get_vorbis_comments(audio_file, pattern)
for tag, value in vorbis_comments.items():
process_args.append(tag)
process_args.append(value)
process_args.append(audio_file)
LOGGER.debug('Running "%s"', ' '.join(process_args))
process = subprocess.Popen(process_args)
process.communicate()
if process.returncode:
LOGGER.critical('Non-zero return code. Exiting.')
sys.exit(process.returncode)
|
"""Encoding related operations"""
import logging
import subprocess
import sys
import taglib
LOGGER = logging.getLogger(__name__)
def _get_vorbis_comments(audio_file, pattern):
macros = (('%g', 'GENRE'),
('%n', 'TRACKNUMBER'),
('%t', 'TITLE'),
('%d', 'DATE'))
params_dict = {'%g': '-G',
'%n': '-N',
'%t': '-t',
'%d': '-d'}
vorbis_comments = {}
afp = taglib.File(audio_file) # pylint: disable=E1103
for macro, tag in macros:
if macro in pattern:
vorbis_comments[params_dict[macro]] = afp.tags.get(tag)[0] or '(none)'
vorbis_comments['-a'] = (afp.tags.get('ALBUM ARTIST', [None])[0] or
afp.tags.get('ARTIST', [None])[0] or
'Unknown artist')
vorbis_comments['-l'] = afp.tags.get('ALBUM', [None])[0] or 'Unknown album'
afp.close()
return vorbis_comments
def encode_file(audio_file, output_dir, pattern, quality, passthrough):
"""Run oggenc and encode file, storing in a logical manner."""
process_args = ['oggenc',
'-q', str(quality),
'-n', '{output_dir}/%a/%l/{pattern}.ogg'.format(output_dir=output_dir,
pattern=pattern)]
if passthrough:
process_args.append(passthrough)
vorbis_comments = _get_vorbis_comments(audio_file, pattern)
for tag, value in vorbis_comments.items():
process_args.append(tag)
process_args.append(value)
process_args.append(audio_file)
LOGGER.debug('Running "%s"', ' '.join(process_args))
process = subprocess.Popen(process_args)
process.communicate()
if process.returncode:
LOGGER.critical('Non-zero return code. Exiting.')
sys.exit(process.returncode)
|
bsd-3-clause
|
Python
|
234609000de3da9449dacb363e58bf60c0e3a4d8
|
Change DATABASES default db to PostgreSQL
|
LitPalimpsest/parser-api-search,LitPalimpsest/parser-api-search,LitPalimpsest/parser-api-search
|
site/litlong/settings.py
|
site/litlong/settings.py
|
"""
Django settings for litlong project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from YamJam import yamjam
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
CFG = yamjam(os.path.join(PROJECT_PATH, 'etc/yamjam/config.yaml'))['litlong']
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = CFG['secret-key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'litlong.urls'
WSGI_APPLICATION = 'litlong.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': CFG['database']['engine'],
'NAME': CFG['database']['name'],
'USER': CFG['database']['username'],
'PASSWORD': CFG['database']['password'],
'HOST': CFG['database']['host'],
'PORT': CFG['database']['port'],
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
"""
Django settings for litlong project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from YamJam import yamjam
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
CFG = yamjam(os.path.join(PROJECT_PATH, 'etc/yamjam/config.yaml'))['litlong']
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = CFG['secret-key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'litlong.urls'
WSGI_APPLICATION = 'litlong.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
mit
|
Python
|
b13edc289905dd4d2c331eddffa490305f9ef827
|
fix a typo
|
mozilla/relman-auto-nag,mozilla/relman-auto-nag,mozilla/relman-auto-nag,mozilla/bztools,anoopvalluthadam/bztools
|
bugzilla/agents.py
|
bugzilla/agents.py
|
import urllib
from bugzilla.models import *
from bugzilla.utils import *
class InvalidAPI_ROOT(Exception):
def __str__(self):
return "Invalid API url specified. " + \
"Please set BZ_API_ROOT in your environment " + \
"or pass it to the agent constructor"
class BugzillaAgent(object):
def __init__(self, api_root=None, api_key=None):
if not api_root:
api_root = os.environ.get('BZ_API_ROOT')
if not api_root:
raise InvalidAPI_ROOT
self.API_ROOT = api_root
self.api_key = api_key
def get_bug(self, bug, include_fields='_default,token,cc,keywords,whiteboard,comments', exclude_fields=None, params={}):
params['include_fields'] = [include_fields]
params['exclude_fields'] = [exclude_fields]
url = urljoin(self.API_ROOT, 'bug/%s?%s' % (bug, self.qs(**params)))
return Bug.get(url)
def get_bug_list(self, params={}):
params = urllib.urlencode(params) + '&Bugzilla_api_key=%s' % self.api_key
url = self.API_ROOT + 'bug/?' + params
return BugSearch.get(url).bugs
def qs(self, **params):
if self.api_key:
params['api_key'] = [self.api_key]
return params
class BMOAgent(BugzillaAgent):
def __init__(self, api_key=None):
super(BMOAgent, self).__init__('https://bugzilla.mozilla.org/bzapi/', api_key)
|
import urllib
from bugzilla.models import *
from bugzilla.utils import *
class InvalidAPI_ROOT(Exception):
def __str__(self):
return "Invalid API url specified. " + \
"Please set BZ_API_ROOT in your environment " + \
"or pass it to the agent constructor"
class BugzillaAgent(object):
def __init__(self, api_root=None, api_key=None)
if not api_root:
api_root = os.environ.get('BZ_API_ROOT')
if not api_root:
raise InvalidAPI_ROOT
self.API_ROOT = api_root
self.api_key = api_key
def get_bug(self, bug, include_fields='_default,token,cc,keywords,whiteboard,comments', exclude_fields=None, params={}):
params['include_fields'] = [include_fields]
params['exclude_fields'] = [exclude_fields]
url = urljoin(self.API_ROOT, 'bug/%s?%s' % (bug, self.qs(**params)))
return Bug.get(url)
def get_bug_list(self, params={}):
params = urllib.urlencode(params) + '&Bugzilla_api_key=%s' % self.api_key
url = self.API_ROOT + 'bug/?' + params
return BugSearch.get(url).bugs
def qs(self, **params):
if self.api_key:
params['api_key'] = [self.api_key]
return params
class BMOAgent(BugzillaAgent):
def __init__(self, api_key=None):
super(BMOAgent, self).__init__('https://bugzilla.mozilla.org/bzapi/', api_key)
|
bsd-3-clause
|
Python
|
c4ef7fe24477d9160214c1cd2938aa8f5135d84b
|
Add other needed method stubs
|
stuy-tetrabyte/graduation-req-tracker
|
utils/database_setup.py
|
utils/database_setup.py
|
import pandas
import argparse
def get_excel(filepath):
"""
Returns a Pandas datafile that contains the contents of a Microsoft Excel
Spreadsheet
Params:
filepath - A string containing the path to the file
Returns:
A Pandas datafile
"""
return pandas.read_excel(filepath)
def get_column_names(datafile):
"""
Returns a list containing the column names of a Pandas datafile as Python
strings
Params:
datafile - A Pandas datafile
Returns:
A list of strings
"""
return [ str(s) for s in datafile.columns ]
def check_if_mysql_installed():
"""
Verifies if MySQL is installed on the current system. Will throw an
error if MySQL fails to run
"""
pass
def is_table_set_up():
"""
Returns True if this project's MySQL table is set up, False otherwise
"""
pass
def create_project_table(column_names):
"""
Sets up MySQL with a table with a set schema given a list of column_names.
Does nothing if the table is already set up.
Params:
column_names - A list of strings containing column names
Returns:
None
"""
pass
def delete_project_table():
"""
Deletes the table in MySQL that this project uses. Will do nothing if the
table does not yet exist.
"""
pass
def reset_project_table():
"""
Resets the table for this project by calling 'delete' and 'setup'
"""
delete_project_table()
create_project_table()
def load_excel_file(datafile):
"""
Takes a Pandas datafile and inserts the data into the project's MySQL table.
If the project's table is not yet created, this function will call 'create'.
"""
if not is_table_set_up():
create_project_table()
# TODO: Fill in this part
|
import pandas
def load_excel(filepath):
"""
Returns a Pandas datafile that contains the contents of a Microsoft Excel
Spreadsheet
Params:
filepath - A string containing the path to the file
Returns:
A Pandas datafile
"""
return pandas.read_excel(filepath)
def get_column_names(datafile):
"""
Returns a list containing the column names of a Pandas datafile as Python
strings
Params:
datafile - A Pandas datafile
Returns:
A list of strings
"""
return [ str(s) for s in datafile.columns ]
def check_if_mysql_installed():
"""
Verifies if MySQL is installed on the current system. Will throw an
error if MySQL fails to run
"""
pass
def set_up_mysql_schema():
"""
Sets up MySQL with a table with a set schema
"""
pass
|
mit
|
Python
|
e838370958c90ce1123aa1a5ab0823169257cfa9
|
Make configuration per model instead of per project.
|
samastur/django-admin-filters
|
adminfilters/admin.py
|
adminfilters/admin.py
|
from django.contrib.admin.views.main import ChangeList
from django.contrib.admin.options import ModelAdmin
from django.contrib.admin.filterspecs import FilterSpec
class GenericFilterSpec(FilterSpec):
def __init__(self, data, request, title):
self.data = data
self.request = request
self._title = title
def title(self):
return self._title
def has_output(self):
return True
def choices(self, changelist):
if callable(self.data):
choices = list(self.data())
else:
choices = list(self.data)
for choice in [dict(zip(['selected', 'query_string', 'display'], x)) for x in choices]:
yield choice
class GenericFilterChangeList(ChangeList):
def __init__(self, request, *args, **kwargs):
self.request = request
super(GenericFilterChangeList, self).__init__(request, *args, **kwargs)
@property
def generic_filters(self):
return getattr(self.model_admin, 'generic_filters', None)
@property
def generic_filters_on_top(self):
return getattr(self.model_admin, 'generic_filters_on_top', False)
def build_filter_spec(self, choices, title):
return GenericFilterSpec(choices, self.request, title)
def get_filters(self, request):
"""
Extend ChangeList.get_filters to include generic_filters.
"""
filter_specs = super(GenericFilterChangeList, self).get_filters(request)[0]
generic_filters = []
if self.generic_filters:
for fname in self.generic_filters:
func = getattr(self.model_admin, fname)
spec = func(request, self)
if spec and spec.has_output():
generic_filters.append(spec)
if self.generic_filters_on_top:
filter_specs = generic_filters + filter_specs
else:
filter_specs = filter_specs + generic_filters
return filter_specs, bool(filter_specs)
class GenericFilterAdmin(ModelAdmin):
def get_changelist(self, request, **kwargs):
return GenericFilterChangeList
|
from django.contrib.admin.views.main import ChangeList
from django.contrib.admin.options import ModelAdmin
from django.contrib.admin.filterspecs import FilterSpec
from django.conf import settings
GENERIC_FILTERS_ON_TOP = getattr(settings, "GENERIC_FILTERS_ON_TOP", False)
class GenericFilterSpec(FilterSpec):
def __init__(self, data, request, title):
self.data = data
self.request = request
self._title = title
def title(self):
return self._title
def has_output(self):
return True
def choices(self, changelist):
if callable(self.data):
choices = list(self.data())
else:
choices = list(self.data)
for choice in [dict(zip(['selected', 'query_string', 'display'], x)) for x in choices]:
yield choice
class GenericFilterChangeList(ChangeList):
def __init__(self, request, *args, **kwargs):
self.request = request
super(GenericFilterChangeList, self).__init__(request, *args, **kwargs)
@property
def generic_filters(self):
return getattr(self.model_admin, 'generic_filters', None)
def build_filter_spec(self, choices, title):
return GenericFilterSpec(choices, self.request, title)
def get_filters(self, request):
"""
Extend ChangeList.get_filters to include generic_filters.
"""
filter_specs = super(GenericFilterChangeList, self).get_filters(request)[0]
generic_filters = []
if self.generic_filters:
for fname in self.generic_filters:
func = getattr(self.model_admin, fname)
spec = func(request, self)
if spec and spec.has_output():
generic_filters.append(spec)
if GENERIC_FILTERS_ON_TOP:
filter_specs = generic_filters + filter_specs
else:
filter_specs = filter_specs + generic_filters
return filter_specs, bool(filter_specs)
class GenericFilterAdmin(ModelAdmin):
def get_changelist(self, request, **kwargs):
return GenericFilterChangeList
|
bsd-3-clause
|
Python
|
cce88a16cc367ef8df9533b848e6fae29ac8a4d1
|
update build setup
|
sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs
|
build_win_setup.py
|
build_win_setup.py
|
"""
@file
@brief Builds a setup for the teachings: ensae_teaching_cs
"""
try:
import pymyinstall
except ImportError:
import sys
sys.path.append("../pymyinstall/src")
import pymyinstall
try:
import pyquickhelper
except ImportError:
import sys
sys.path.append("../pyquickhelper/src")
import pyquickhelper
if __name__ == "__main__":
import sys
sys.path.append("src")
from pyquickhelper import fLOG
fLOG(OutputPrint=True)
from ensae_teaching_cs.automation.win_setup_helper import last_function
from pymyinstall import win_python_setup
from pymyinstall.packaged import ensae_fullset
list_modules = ensae_fullset()
win_python_setup(module_list=list_modules, verbose=True,
download_only=False,
no_setup=False,
last_function=last_function,
selection={"R", "VS"},
documentation=False,
fLOG=fLOG)
|
"""
@file
@brief Builds a setup for the teachings: ensae_teaching_cs
"""
try:
import pymyinstall
except ImportError:
import sys
sys.path.append("../pymyinstall/src")
import pymyinstall
try:
import pyquickhelper
except ImportError:
import sys
sys.path.append("../pyquickhelper/src")
import pyquickhelper
if __name__ == "__main__":
import sys
sys.path.append("src")
from pyquickhelper import fLOG
fLOG(OutputPrint=True)
from actuariat_python.automation.win_setup_helper import last_function
from pymyinstall import win_python_setup
from pymyinstall.packaged import ensae_fullset
list_modules = ensae_fullset()
win_python_setup(module_list=list_modules, verbose=True,
download_only=False,
no_setup=False,
last_function=last_function,
selection={"R", "VS"},
documentation=False,
fLOG=fLOG)
|
mit
|
Python
|
ec295698b683dd5f04df1fff49f9d1e2afdf0a86
|
fix bugs in writing out failures
|
alvinlindstam/ol3,fperucic/ol3,Antreasgr/ol3,v5analytics/openlayers,openlayers/ol2,stweil/openlayers,georchestra/openlayers,fredj/ol3,avinet/openlayers,denilsonsa/ol3,sanak/ol2,pgiraud/openlayers,thomasmoelhave/ol3,mzur/ol3,boundlessgeo/openlayers,geekdenz/ol3,stweil/ol3,geoext/GXM,CandoImage/ol3,llambanna/ol3,tschaub/ol3,PublicStuff/openlayers,Andrey-Pavlov/ol3,pmlrsg/ol3,ahocevar/ol3,altamiracorp/openlayers,elemoine/ol3,sbrunner/openlayers,sbrunner/openlayers,geekdenz/openlayers,ahocevar/ol2,pvalsecc/openlayers,bill-chadwick/ol3,elemoine/ol3,ahocevar/openlayers,landonb/ol3,freylis/ol3,llambanna/ol3,PublicStuff/openlayers,owenmead/openlayers,shevawen/openlayers,Distem/ol3,pvalsecc/openlayers,klokantech/ol3,wet-boew/openlayers,thomasmoelhave/ol3,jmiller-boundless/ol3,stweil/openlayers,elemoine/ol3,klokantech/ol3raster,alexbrault/ol3,bartvde/ol3,mechdrew/ol3,geekdenz/openlayers,WayTo/openlayers,v5analytics/openlayers,dracic/openlayers,t27/ol3,geonux/ol3,landonb/ol3,PetrDlouhy/openlayers,fredj/ol3,epointal/ol3,NOAA-ORR-ERD/ol3,richstoner/ol3,altamiracorp/openlayers,colllin/openlayers,adube/ol3,wet-boew/openlayers,tsauerwein/ol3,tschaub/ol3,richstoner/ol3,freylis/ol3,geonux/ol3,jmiller-boundless/ol3,kkuunnddaannkk/ol3,sbrunner/openlayers,wet-boew/openlayers,tschaub/openlayers-raster,klokantech/ol3raster,tamarmot/ol3,tsauerwein/ol3,openlayers/ol2,t27/ol3,xiaoqqchen/ol3,Morgul/ol3,planetlabs/ol3,ahocevar/ol3,bartvde/ol3,Morgul/ol3,yinxiaomei/ol3,Andrey-Pavlov/ol3,shevawen/openlayers,colllin/openlayers,hafenr/ol3,CandoImage/ol3,shevawen/openlayers,bjornharrtell/ol3,mzur/ol3,jmiller-boundless/ol3,kkuunnddaannkk/ol3,pvalsecc/openlayers,pmlrsg/ol3,bogdanvaduva/ol3,bjornharrtell/ol3,thomasmoelhave/ol3,bill-chadwick/ol3,bill-chadwick/ol3,NOAA-ORR-ERD/ol3,adube/ol3,tamarmot/ol3,das-peter/ol3,fperucic/ol3,colllin/openlayers,planetlabs/ol3,kjelderg/ol3,bogdanvaduva/ol3,klokantech/ol3,elbakai/openlayers,PetrDlouhy/openlayers,jacmendt/ol3,llambanna/ol3,Morgul/ol3,WayTo/openlayers,thhomas/ol3,itayod/ol3,geonux/ol3,wlerner/ol3,klokantech/ol3,bogdanvaduva/ol3,pgiraud/openlayers,hafenr/ol3,WayTo/openlayers,fblackburn/ol3,PetrDlouhy/openlayers,openlayers/openlayers,stweil/openlayers,fperucic/ol3,geekdenz/ol3,Gnafu/openlayers,boundlessgeo/openlayers,mechdrew/ol3,Gnafu/openlayers,thhomas/ol3,Andrey-Pavlov/ol3,jmiller-boundless/ol3,dracic/openlayers,shevawen/openlayers,denilsonsa/ol3,Andrey-Pavlov/ol3,mzur/ol3,Gnafu/openlayers,kkuunnddaannkk/ol3,aisaacs/ol3,openlayers/ol2,das-peter/ol3,gingerik/ol3,hafenr/ol3,CandoImage/ol3,yinxiaomei/ol3,openlayers/openlayers,PublicStuff/openlayers,adh1365/openlayers,fredj/ol3,alexbrault/ol3,owenmead/openlayers,sbrunner/openlayers,georchestra/openlayers,tschaub/ol3,wlerner/ol3,jacmendt/ol3,aisaacs/ol3,alvinlindstam/ol3,denilsonsa/ol3,tschaub/ol3,oterral/ol3,elbakai/openlayers,sanak/ol2,antonio83moura/ol3,adh1365/openlayers,colllin/openlayers,tamarmot/ol3,richstoner/ol3,dracic/openlayers,elbakai/openlayers,freylis/ol3,tschaub/openlayers-raster,alvinlindstam/ol3,t27/ol3,kjelderg/ol3,fperucic/ol3,tsauerwein/ol3,Distem/ol3,kjelderg/ol3,WayTo/openlayers,v5analytics/openlayers,geekdenz/openlayers,denilsonsa/ol3,ahocevar/openlayers,stweil/ol3,oterral/ol3,adube/ol3,pmlrsg/ol3,boundlessgeo/openlayers,NOAA-ORR-ERD/ol3,klokantech/ol3raster,gingerik/ol3,landonb/ol3,freylis/ol3,klokantech/ol3raster,avinet/openlayers,ahocevar/ol2,antonio83moura/ol3,Distem/ol3,Antreasgr/ol3,PublicStuff/openlayers,jacmendt/ol3,Morgul/ol3,thhomas/ol3,pgiraud/openlayers,ahocevar/openlayers,pvalsecc/openlayers,v5analytics/openlayers,dracic/openlayers,yinxiaomei/ol3,epointal/ol3,aisaacs/ol3,pmlrsg/ol3,ahocevar/ol2,planetlabs/ol3,pgiraud/openlayers,alexbrault/ol3,CandoImage/ol3,fblackburn/ol3,geekdenz/ol3,stweil/ol3,tschaub/openlayers-raster,avinet/openlayers,elbakai/openlayers,altamiracorp/openlayers,bjornharrtell/ol3,t27/ol3,ahocevar/ol3,openlayers/ol2,tamarmot/ol3,kjelderg/ol3,Antreasgr/ol3,bogdanvaduva/ol3,ahocevar/ol3,klokantech/ol3,gingerik/ol3,Gnafu/openlayers,aisaacs/ol3,kartverket/openlayers,hafenr/ol3,avinet/openlayers,jacmendt/ol3,bartvde/ol3,geonux/ol3,sanak/ol2,kkuunnddaannkk/ol3,bartvde/ol3,owenmead/openlayers,boundlessgeo/openlayers,wlerner/ol3,adh1365/openlayers,openlayers/openlayers,xiaoqqchen/ol3,yinxiaomei/ol3,antonio83moura/ol3,landonb/ol3,stweil/ol3,elemoine/ol3,Distem/ol3,planetlabs/ol3,georchestra/openlayers,xiaoqqchen/ol3,llambanna/ol3,wlerner/ol3,alexbrault/ol3,antonio83moura/ol3,itayod/ol3,fredj/ol3,sanak/ol2,thhomas/ol3,itayod/ol3,kartverket/openlayers,PetrDlouhy/openlayers,mechdrew/ol3,kartverket/openlayers,epointal/ol3,fblackburn/ol3,das-peter/ol3,itayod/ol3,owenmead/openlayers,adh1365/openlayers,fblackburn/ol3,gingerik/ol3,kartverket/openlayers,mzur/ol3,alvinlindstam/ol3,das-peter/ol3,geoext/GXM,thomasmoelhave/ol3,xiaoqqchen/ol3,epointal/ol3,jmiller-boundless/ol3,bill-chadwick/ol3,tsauerwein/ol3,geekdenz/ol3,NOAA-ORR-ERD/ol3,altamiracorp/openlayers,richstoner/ol3,mechdrew/ol3,wet-boew/openlayers,Antreasgr/ol3,oterral/ol3
|
tests/selenium/remotecontrol/test_ol.py
|
tests/selenium/remotecontrol/test_ol.py
|
from selenium import selenium
import time
import sys
from ConfigParser import ConfigParser
if len(sys.argv) > 2:
filename = sys.argv[2]
else:
filename = "config.cfg"
c = ConfigParser()
c.read(filename)
targets = {}
sections = c.sections()
for s in sections:
targets[s] = dict(c.items(s))
targets[s]['name'] = s
if sys.argv[1] == "all":
browsers = list(targets.values())
elif sys.argv[1] not in targets:
print "Invalid target"
sys.exit()
else:
browsers = [targets[sys.argv[1]]]
if 1:
for b in browsers:
print "Running %s on %s" % (b['name'], b['host'])
s = selenium(b['host'], 4444, "*%s" % b['browsercmd'], "http://openlayers.org/")
s.start()
try:
s.open("/dev/tests/run-tests.html?run=all")
count = 0
while count == 0:
count = int(s.get_eval("window.document.getElementById('testtable').getElementsByTagName('tr').length"))
time.sleep(5)
ok = 0
fail = 0
while True:
ok = int(s.get_eval('window.Test.AnotherWay._g_ok_pages'))
fail = int(s.get_eval('window.Test.AnotherWay._g_fail_pages'))
if (ok + fail) >= count:
break
time.sleep(10)
if fail:
print "Failed: %s" % fail
html = s.get_eval("window.document.getElementById('results').innerHTML").encode("utf-8")
all_html = """<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type" />
</head>
<body>%s</body></html>""" % html
f = open("fail.%s.%s.html" % (time.time(), b['name']), "w")
f.write(all_html)
f.close()
except Exception, E:
print "Error: ", E
s.stop()
|
from selenium import selenium
import time
import sys
from ConfigParser import ConfigParser
if len(sys.argv) > 2:
filename = sys.argv[2]
else:
filename = "config.cfg"
c = ConfigParser()
c.read(filename)
targets = {}
sections = c.sections()
for s in sections:
targets[s] = dict(c.items(s))
targets[s]['name'] = s
if sys.argv[1] == "all":
browsers = list(targets.values())
elif sys.argv[1] not in targets:
print "Invalid target"
sys.exit()
else:
browsers = [targets[sys.argv[1]]]
if 1:
for b in browsers:
print "Running %s on %s" % (b['name'], b['host'])
s = selenium(b['host'], 4444, "*%s" % b['browsercmd'], "http://openlayers.org/")
s.start()
try:
s.open("/dev/tests/run-tests.html?run=all")
count = 0
while count == 0:
count = int(s.get_eval("window.document.getElementById('testtable').getElementsByTagName('tr').length"))
time.sleep(5)
ok = 0
fail = 0
while True:
ok = int(s.get_eval('window.Test.AnotherWay._g_ok_pages'))
fail = int(s.get_eval('window.Test.AnotherWay._g_fail_pages'))
if (ok + fail) >= count:
break
time.sleep(10)
if fail:
print "Failed: %s" % fail
html = s.get_eval("window.document.getElementById('results').innerHTML").decode("utf-8")
all_html = """<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type" />
</head>
<body>%s</body></html>""" % html
f = open("fail.%s.%s.html" % (time.time(), b['name']), "w")
f.write(all_html.encode)
f.close()
except Exception, E:
print "Error: ", E
s.stop()
|
bsd-2-clause
|
Python
|
be92cf747a718bc004bd2024dbdcb527205d9b44
|
remove an extra import
|
StoDevX/course-data-tools,StoDevX/course-data-tools
|
scripts/lib/flattened.py
|
scripts/lib/flattened.py
|
def flatten(l):
# from http://stackoverflow.com/a/2158532/2347774
for el in l:
if isinstance(el, list) and not isinstance(el, str):
yield from flatten(el)
else:
yield el
|
import collections
def flatten(l):
# from http://stackoverflow.com/a/2158532/2347774
for el in l:
if isinstance(el, list) and not isinstance(el, str):
yield from flatten(el)
else:
yield el
|
mit
|
Python
|
bf163f45d1e7a28db34396b20209778668103f0a
|
remove password option for redis
|
jhao104/proxy_pool,jhao104/proxy_pool
|
Run/main.py
|
Run/main.py
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: main.py
Description : 运行主函数
Author : JHao
date: 2017/4/1
-------------------------------------------------
Change Activity:
2017/4/1:
-------------------------------------------------
"""
__author__ = 'JHao'
import sys
from multiprocessing import Process
sys.path.append('../')
from Api.ProxyApi import run as ProxyApiRun
from Schedule.ProxyValidSchedule import run as ValidRun
from Schedule.ProxyRefreshSchedule import run as RefreshRun
from Util.GetConfig import GetConfig
def run(host,port):
p_list = list()
p1 = Process(target=ProxyApiRun, name='ProxyApiRun')
p_list.append(p1)
p2 = Process(target=ValidRun, name='ValidRun')
p_list.append(p2)
p3 = Process(target=RefreshRun, name='RefreshRun')
p_list.append(p3)
for p in p_list:
p.start()
for p in p_list:
p.join()
if __name__ == '__main__':
run()
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: main.py
Description : 运行主函数
Author : JHao
date: 2017/4/1
-------------------------------------------------
Change Activity:
2017/4/1:
-------------------------------------------------
"""
__author__ = 'JHao'
import sys
from multiprocessing import Process
from Util.GetConfig import GetConfig
sys.path.append('../')
from Api.ProxyApi import run as ProxyApiRun
from Schedule.ProxyValidSchedule import run as ValidRun
from Schedule.ProxyRefreshSchedule import run as RefreshRun
def run(host,port):
p_list = list()
p1 = Process(target=ProxyApiRun, name='ProxyApiRun')
p_list.append(p1)
p2 = Process(target=ValidRun, name='ValidRun')
p_list.append(p2)
p3 = Process(target=RefreshRun, name='RefreshRun')
p_list.append(p3)
for p in p_list:
p.start()
for p in p_list:
p.join()
if __name__ == '__main__':
run()
|
mit
|
Python
|
a157ee8bc8c740ba7482f8e4e9116213fb18c935
|
fix of type in merging
|
ternaus/kaggle_avito
|
src/gl_lr.py
|
src/gl_lr.py
|
from __future__ import division
__author__ = 'Vladimir Iglovikov'
'''
I will try to use logistic regression from Graphlab to predict
'''
import graphlab as gl
import os
print 'reading train'
train = gl.SFrame(os.path.join('..', 'data', 'trainSearch_1'))
print
print 'train shape'
print train.shape
print 'reading AdsInfo'
ads = gl.SFrame(os.path.join('..', 'data', 'ads_1'))
print
print 'ads shape'
print ads.shape
print 'merging train and ads'
training = train.join(ads, on='AdID')
print
print 'training shape'
print training.shape
print 'splitting set'
sf_train, sf_test = training.random_split(0.5,
seed=42)
features=['Position',
'HistCTR',
'Price',
'CategoryID',
'AdID',
'LocationID']
model = gl.logistic_classifier.create(training,
target='IsClick',
features=features)
|
from __future__ import division
__author__ = 'Vladimir Iglovikov'
'''
I will try to use logistic regression from Graphlab to predict
'''
import graphlab as gl
import os
print 'reading train'
train = gl.SFrame(os.path.join('..', 'data', 'trainSearch_1'))
print
print 'train shape'
print train.shape
print 'reading AdsInfo'
ads = gl.SFrame(os.path.join('..', 'data', 'ads_1'))
print
print 'ads shape'
print ads.shape
print 'merging train and ads'
training = train.join(ads, on='AdId')
print
print 'training shape'
print training.shape
print 'splitting set'
sf_train, sf_test = training.random_split(0.5,
seed=42)
features=['Position',
'HistCTR',
'Price',
'CategoryID',
'AdID',
'LocationID']
model = gl.logistic_classifier.create(training,
target='IsClick',
features=features)
|
mit
|
Python
|
b914f700687f6fbf6ccc0aac08d92ffaac76f89a
|
bump retry timeout, 20s is too low during meltdowns
|
liampauling/flumine
|
flumine/streams/basestream.py
|
flumine/streams/basestream.py
|
import threading
import queue
import logging
import betfairlightweight
from betfairlightweight import StreamListener
from tenacity import wait_exponential
logger = logging.getLogger(__name__)
class BaseStream(threading.Thread):
LISTENER = StreamListener
MAX_LATENCY = 0.5
RETRY_WAIT = wait_exponential(multiplier=1, min=2, max=60)
def __init__(
self,
flumine,
stream_id: int,
streaming_timeout: float, # snaps listener if no update
conflate_ms: int,
market_filter: dict = None,
market_data_filter: dict = None,
client=None,
output_queue: bool = True,
event_processing: bool = False,
event_id: str = None,
operation: str = "marketSubscription",
**listener_kwargs,
):
threading.Thread.__init__(self, daemon=True, name=self.__class__.__name__)
self.flumine = flumine
self.stream_id = stream_id
self.market_filter = market_filter
self.market_data_filter = market_data_filter
self.streaming_timeout = streaming_timeout
self.conflate_ms = conflate_ms
self._client = client
self._stream = None
self._output_queue = queue.Queue() if output_queue else None
self.event_processing = event_processing
self.event_id = event_id
self.operation = operation
self._listener = self.LISTENER(
output_queue=self._output_queue,
max_latency=self.MAX_LATENCY,
**listener_kwargs,
)
self._output_thread = threading.Thread(
name="{0}_output_thread".format(self.name),
target=self.handle_output,
daemon=True,
)
def run(self) -> None:
raise NotImplementedError
def handle_output(self) -> None:
raise NotImplementedError
def stop(self) -> None:
if self._stream:
self._stream.stop()
@property
def betting_client(self) -> betfairlightweight.APIClient:
return self.client.betting_client
@property
def client(self):
if self._client:
return self._client
else:
return self.flumine.client
|
import threading
import queue
import logging
import betfairlightweight
from betfairlightweight import StreamListener
from tenacity import wait_exponential
logger = logging.getLogger(__name__)
class BaseStream(threading.Thread):
LISTENER = StreamListener
MAX_LATENCY = 0.5
RETRY_WAIT = wait_exponential(multiplier=1, min=2, max=20)
def __init__(
self,
flumine,
stream_id: int,
streaming_timeout: float, # snaps listener if no update
conflate_ms: int,
market_filter: dict = None,
market_data_filter: dict = None,
client=None,
output_queue: bool = True,
event_processing: bool = False,
event_id: str = None,
operation: str = "marketSubscription",
**listener_kwargs,
):
threading.Thread.__init__(self, daemon=True, name=self.__class__.__name__)
self.flumine = flumine
self.stream_id = stream_id
self.market_filter = market_filter
self.market_data_filter = market_data_filter
self.streaming_timeout = streaming_timeout
self.conflate_ms = conflate_ms
self._client = client
self._stream = None
self._output_queue = queue.Queue() if output_queue else None
self.event_processing = event_processing
self.event_id = event_id
self.operation = operation
self._listener = self.LISTENER(
output_queue=self._output_queue,
max_latency=self.MAX_LATENCY,
**listener_kwargs,
)
self._output_thread = threading.Thread(
name="{0}_output_thread".format(self.name),
target=self.handle_output,
daemon=True,
)
def run(self) -> None:
raise NotImplementedError
def handle_output(self) -> None:
raise NotImplementedError
def stop(self) -> None:
if self._stream:
self._stream.stop()
@property
def betting_client(self) -> betfairlightweight.APIClient:
return self.client.betting_client
@property
def client(self):
if self._client:
return self._client
else:
return self.flumine.client
|
mit
|
Python
|
277e3ef7544a64ddb2fa9f31b66597036a38e65b
|
Remove widget and test paths.
|
nprapps/borders-map,nprapps/borders-map,nprapps/borders-map,nprapps/borders-map
|
app.py
|
app.py
|
#!/usr/bin/env python
import argparse
from flask import Flask, render_template
import app_config
from render_utils import make_context, urlencode_filter
import static
app = Flask(app_config.PROJECT_NAME)
app.jinja_env.filters['urlencode'] = urlencode_filter
# Example application views
@app.route('/')
def index():
"""
Example view demonstrating rendering a simple HTML page.
"""
context = make_context()
# Nav needs to be a list of lists.
# The inner list should only have four objects max.
# Because of reasons.
context['nav'] = []
contents = list(context['COPY']['content'])
not_yet_four = []
for idx, row in enumerate(contents):
row = dict(zip(row.__dict__['_columns'], row.__dict__['_row']))
row_title = row.get('chapter_title', None)
if row_title:
if row_title not in [u'chapter_title']:
not_yet_four.append(row)
if len(not_yet_four) == 4:
context['nav'].append(not_yet_four)
not_yet_four = []
if (idx + 1) == len(contents):
if len(not_yet_four) > 0:
context['nav'].append(not_yet_four)
return render_template('index.html', **context)
app.register_blueprint(static.static)
# Boilerplate
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port')
args = parser.parse_args()
server_port = 8000
if args.port:
server_port = int(args.port)
app.run(host='0.0.0.0', port=server_port, debug=app_config.DEBUG)
|
#!/usr/bin/env python
import argparse
from flask import Flask, render_template
import app_config
from render_utils import make_context, urlencode_filter
import static
app = Flask(app_config.PROJECT_NAME)
app.jinja_env.filters['urlencode'] = urlencode_filter
# Example application views
@app.route('/')
def index():
"""
Example view demonstrating rendering a simple HTML page.
"""
context = make_context()
# Nav needs to be a list of lists.
# The inner list should only have four objects max.
# Because of reasons.
context['nav'] = []
contents = list(context['COPY']['content'])
not_yet_four = []
for idx, row in enumerate(contents):
row = dict(zip(row.__dict__['_columns'], row.__dict__['_row']))
row_title = row.get('chapter_title', None)
if row_title:
if row_title not in [u'chapter_title']:
not_yet_four.append(row)
if len(not_yet_four) == 4:
context['nav'].append(not_yet_four)
not_yet_four = []
if (idx + 1) == len(contents):
if len(not_yet_four) > 0:
context['nav'].append(not_yet_four)
return render_template('index.html', **context)
@app.route('/widget.html')
def widget():
"""
Embeddable widget example page.
"""
return render_template('widget.html', **make_context())
@app.route('/test_widget.html')
def test_widget():
"""
Example page displaying widget at different embed sizes.
"""
return render_template('test_widget.html', **make_context())
@app.route('/test/test.html')
def test_dir():
return render_template('index.html', **make_context())
app.register_blueprint(static.static)
# Boilerplate
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port')
args = parser.parse_args()
server_port = 8000
if args.port:
server_port = int(args.port)
app.run(host='0.0.0.0', port=server_port, debug=app_config.DEBUG)
|
mit
|
Python
|
a0883d386f6c35f8cb70c6d21ad1cc37dccb90b6
|
Update host
|
yassineAlouini/image-recognition-as-a-service,yassineAlouini/image-recognition-as-a-service
|
app.py
|
app.py
|
from flask import Flask
# from image_classification import ImageClassifier
app = Flask(__name__)
PORT = 33507
HOST = '0.0.0.0'
@app.route('/')
def home():
return 'Hello classification world!'
if __name__ == '__main__':
app.run(host=HOST, port=PORT)
|
from flask import Flask
# from image_classification import ImageClassifier
app = Flask(__name__)
PORT = 33507
@app.route('/')
def home():
return 'Hello classification world!'
if __name__ == '__main__':
app.run(port=PORT)
|
mit
|
Python
|
355629e1e2e2423a4ea1ad859506e380e6ddbc89
|
define special route for twitter
|
piccolbo/rightload,piccolbo/rightload
|
app.py
|
app.py
|
# ingredients: tinydb joblib.Memory
from flask import Flask
from ml import store_feedback
from ml import learn
from proxy import proxy
import sys
import trace
import logging as log
# should be set at the project level
log.basicConfig(filename="./log", level=log.INFO)
# create a Trace object, telling it what to ignore, and whether to
# do tracing or line-counting or both.
_tracer = trace.Trace(
ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0)
app = Flask(__name__)
# app.wsgiapp = ProfilerMiddleware(app.wsgiapp)
@app.route('/feed/<path:url>')
def _feed(url):
return proxy(url)
@app.route('/twitter/<string:id>')
def _twitter(id):
return proxy('https://twitrss.me/twitter_user_to_rss/?user=' + id)
@app.route('/feedback/<feedback>/<path:url>')
def _feedback(feedback, url):
store_feedback(url=url, feedback=feedback == 'l', explicit=True)
log.info("storing feedback {feedback} for {url}".format(
feedback=feedback, url=url))
return ('''
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US" xml:lang="en-US">
<head>
<title>Thanks for your feedback</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
</head>
<body onload="self.close()">
<h1>Thank you for your feedback, your filter has been updated</h1>
</body>
</html>
''', 200, {})
@app.route('/learn')
def _learn():
learn()
return ("Done", 204, {})
if __name__ == 'main':
app.run()
|
# ingredients: tinydb joblib.Memory
from flask import Flask
from ml import store_feedback
from ml import learn
from proxy import proxy
import sys
import trace
import logging as log
# should be set at the project level
log.basicConfig(filename="./log", level=log.INFO)
# create a Trace object, telling it what to ignore, and whether to
# do tracing or line-counting or both.
_tracer = trace.Trace(
ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0)
app = Flask(__name__)
# app.wsgiapp = ProfilerMiddleware(app.wsgiapp)
@app.route('/feed/<path:url>')
def _feed(url):
return proxy(url)
@app.route('/feedback/<feedback>/<path:url>')
def _feedback(feedback, url):
store_feedback(url=url, feedback=feedback == 'l', explicit=True)
log.info("storing feedback {feedback} for {url}".format(
feedback=feedback, url=url))
return ('''
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US" xml:lang="en-US">
<head>
<title>Thanks for your feedback</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
</head>
<body onload="self.close()">
<h1>Thank you for your feedback, your filter has been updated</h1>
</body>
</html>
''', 200, {})
@app.route('/learn')
def _learn():
learn()
return ("Done", 204, {})
if __name__ == 'main':
app.run()
|
agpl-3.0
|
Python
|
58dfa1e8df073cafc23871e76d317172758b05a6
|
change app.py
|
marcoscastro/personal-page,marcoscastro/personal-page
|
app.py
|
app.py
|
from bottle import route, run
from bottle import static_file, request
from bottle import template, get, error
import os
# static routes
@get('/<filename:re:.*\.css>')
def stylesheets(filename):
return static_file(filename, root='static/css')
@get('/<filename:re:.*\.js>')
def javascripts(filename):
return static_file(filename, root='static/js')
@get('/<filename:re:.*\.(jpg|png|gif|ico)>')
def images(filename):
return static_file(filename, root='static/img')
@get('/<filename:re:.*\.(eot|ttf|woff|svg)>')
def fonts(filename):
return static_file(filename, root='static/fonts')
@route('/')
def index():
return template('index')
@route('/skills')
def skills():
return template('skills')
@route('/about')
def about():
return template('about')
@route('/courses')
def courses():
return template('courses')
@route('/contact', method='POST')
def acao_login():
name = request.forms.get('name')
email = request.forms.get('email')
site = request.forms.get('site')
message = request.forms.get('message')
print(message)
return template('contact', name=name)
@error(404)
def error404(error):
return template('oops')
if __name__ == "__main__":
if os.environ.get('APP_LOCATION') == 'heroku':
run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))
else:
run(host='localhost', port=8080, debug=True, reloader=True)
|
from bottle import route, run
from bottle import static_file, request
from bottle import template, get, error
import os
# static routes
@get('/<filename:re:.*\.css>')
def stylesheets(filename):
return static_file(filename, root='static/css')
@get('/<filename:re:.*\.js>')
def javascripts(filename):
return static_file(filename, root='static/js')
@get('/<filename:re:.*\.(jpg|png|gif|ico)>')
def images(filename):
return static_file(filename, root='static/img')
@get('/<filename:re:.*\.(eot|ttf|woff|svg)>')
def fonts(filename):
return static_file(filename, root='static/fonts')
@route('/')
def index():
return template('index')
@route('/skills')
def skills():
return template('skills')
@route('/about')
def about():
return template('about')
@route('/courses')
def courses():
return template('courses')
@route('/contact', method='POST')
def acao_login():
name = request.forms.get('name')
email = request.forms.get('email')
site = request.forms.get('site')
message = request.forms.get('message')
print(message)
return template('contact', name=name)
@error(404)
def error404(error):
return template('oops')
if __name__ == "__main__":
if os.environ.get('APP_LOCATION') == 'heroku':
run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))
else:
print('ENTROU AQUI')
run(host='localhost', port=8080, debug=True, reloader=True)
|
mit
|
Python
|
d6d67b8a831959d79a94e927bae3373bcbd4ef0a
|
print request args
|
hezhao/bitaps-api-demo,hezhao/bitaps-api-demo
|
app.py
|
app.py
|
import json
from flask import Flask, request, jsonify
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
@app.route("/paid", methods=['POST'])
def paid():
print(json.dumps(request.args))
return jsonify(request.args)
if __name__ == "__main__":
app.run()
|
from flask import Flask, request, jsonify
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
@app.route("/paid", methods=['POST'])
def paid():
# print(request.args.get('invoice'))
return jsonify(request.args)
if __name__ == "__main__":
app.run()
|
bsd-2-clause
|
Python
|
c274325f89ef9a8fa25128b85b6d25dc634fe4a2
|
Fix flood control exception
|
sevazhidkov/leonard
|
bot.py
|
bot.py
|
import os
import sys
import logging
from time import sleep
from flask import request
import telegram
from telegram.error import NetworkError, Unauthorized, RetryAfter
from leonard import Leonard
from libs import shrt
WEBHOOK_HOSTNAME = os.environ.get('WEBHOOK_HOSTNAME', 'https://leonardbot.herokuapp.com')
debug = False
if 'BOT_DEBUG' in os.environ and os.environ['BOT_DEBUG'] == '1':
debug = True
print('Starting bot')
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('leonard')
logger.setLevel(logging.INFO)
print('Creating bot')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client, debug)
print('Collecting plugins')
bot.collect_plugins()
print('Setting routes')
@bot.app.route('/webhook/<token>', methods=['POST'])
def webhook(token):
if token != os.environ['BOT_TOKEN']:
return 'bad token'
update = telegram.Update.de_json(request.get_json(force=True), bot.telegram)
bot.process_update(update)
return 'ok'
shrt.get_link_route = bot.app.route('/l/<query>')(shrt.get_link_route)
if len(sys.argv) > 1 and sys.argv[1] == 'polling':
bot.telegram.setWebhook('')
try:
update_id = telegram_client.getUpdates()[0].update_id
except IndexError:
update_id = None
while True:
try:
for update in telegram_client.getUpdates(offset=update_id, timeout=10):
update_id = update.update_id + 1
bot.process_update(update)
except NetworkError:
sleep(1)
except Unauthorized:
update_id += 1
exit()
print('Setting webhook')
# Register webhook
webhook_url = WEBHOOK_HOSTNAME + '/webhook/' + os.environ['BOT_TOKEN']
try:
bot.telegram.setWebhook(webhook_url)
except (NetworkError, RetryAfter):
sleep(1)
bot.telegram.setWebhook(webhook_url)
if __name__ == '__main__':
bot.app.run(port=8888)
|
import os
import sys
import logging
from time import sleep
from flask import request
import telegram
from telegram.error import NetworkError, Unauthorized
from leonard import Leonard
from libs import shrt
WEBHOOK_HOSTNAME = os.environ.get('WEBHOOK_HOSTNAME', 'https://leonardbot.herokuapp.com')
debug = False
if 'BOT_DEBUG' in os.environ and os.environ['BOT_DEBUG'] == '1':
debug = True
print('Starting bot')
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('leonard')
logger.setLevel(logging.INFO)
print('Creating bot')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client, debug)
print('Collecting plugins')
bot.collect_plugins()
print('Setting routes')
@bot.app.route('/webhook/<token>', methods=['POST'])
def webhook(token):
if token != os.environ['BOT_TOKEN']:
return 'bad token'
update = telegram.Update.de_json(request.get_json(force=True), bot.telegram)
bot.process_update(update)
return 'ok'
shrt.get_link_route = bot.app.route('/l/<query>')(shrt.get_link_route)
if len(sys.argv) > 1 and sys.argv[1] == 'polling':
bot.telegram.setWebhook('')
try:
update_id = telegram_client.getUpdates()[0].update_id
except IndexError:
update_id = None
while True:
try:
for update in telegram_client.getUpdates(offset=update_id, timeout=10):
update_id = update.update_id + 1
bot.process_update(update)
except NetworkError:
sleep(1)
except Unauthorized:
update_id += 1
exit()
print('Setting webhook')
# Register webhook
webhook_url = WEBHOOK_HOSTNAME + '/webhook/' + os.environ['BOT_TOKEN']
try:
bot.telegram.setWebhook(webhook_url)
except NetworkError:
sleep(1)
bot.telegram.setWebhook(webhook_url)
if __name__ == '__main__':
bot.app.run(port=8888)
|
mit
|
Python
|
d52c4340a62802bcd0fcbd68516c5ac66fb10436
|
Update function name used in the streamtester
|
LuminosoInsight/python-ftfy
|
ftfy/streamtester/__init__.py
|
ftfy/streamtester/__init__.py
|
"""
This file defines a general method for evaluating ftfy using data that arrives
in a stream. A concrete implementation of it is found in `twitter_tester.py`.
"""
from __future__ import print_function, unicode_literals
from ftfy.fixes import fix_encoding
from ftfy.chardata import possible_encoding
class StreamTester:
"""
Take in a sequence of texts, and show the ones that will be changed by
ftfy. This will also periodically show updates, such as the proportion of
texts that changed.
"""
def __init__(self):
self.num_fixed = 0
self.count = 0
def check_ftfy(self, text):
"""
Given a single text input, check whether `ftfy.fix_text_encoding`
would change it. If so, display the change.
"""
self.count += 1
if not possible_encoding(text, 'ascii'):
fixed = fix_encoding(text)
if text != fixed:
# possibly filter common bots before printing
print(u'\nText:\t{text}\nFixed:\t{fixed}\n'.format(
text=text, fixed=fixed
))
self.num_fixed += 1
# Print status updates once in a while
if self.count % 100 == 0:
print('.', end='', flush=True)
if self.count % 10000 == 0:
print('\n%d/%d fixed' % (self.num_fixed, self.count))
|
"""
This file defines a general method for evaluating ftfy using data that arrives
in a stream. A concrete implementation of it is found in `twitter_tester.py`.
"""
from __future__ import print_function, unicode_literals
from ftfy.fixes import fix_text_encoding
from ftfy.chardata import possible_encoding
class StreamTester:
"""
Take in a sequence of texts, and show the ones that will be changed by
ftfy. This will also periodically show updates, such as the proportion of
texts that changed.
"""
def __init__(self):
self.num_fixed = 0
self.count = 0
def check_ftfy(self, text):
"""
Given a single text input, check whether `ftfy.fix_text_encoding`
would change it. If so, display the change.
"""
self.count += 1
if not possible_encoding(text, 'ascii'):
fixed = fix_text_encoding(text)
if text != fixed:
# possibly filter common bots before printing
print(u'\nText:\t{text}\nFixed:\t{fixed}\n'.format(
text=text, fixed=fixed
))
self.num_fixed += 1
# Print status updates once in a while
if self.count % 100 == 0:
print('.', end='', flush=True)
if self.count % 10000 == 0:
print('\n%d/%d fixed' % (self.num_fixed, self.count))
|
mit
|
Python
|
7a786fd031c3faa057256abc5d9cb47618041696
|
Configure max build age on the monitoring side
|
stripe/stripe-datadog-checks,stripe/datadog-checks
|
checks.d/veneur.py
|
checks.d/veneur.py
|
import datetime
from urlparse import urljoin
import requests
# project
from checks import AgentCheck
class Veneur(AgentCheck):
VERSION_METRIC_NAME = 'veneur.deployed_version'
BUILDAGE_METRIC_NAME = 'veneur.build_age'
def check(self, instance):
success = 0
host = instance['host']
try:
r = requests.get(urljoin(host, '/version'))
sha = r.text
success = 1
r = requests.get(urljoin(host, '/builddate'))
builddate = datetime.datetime.fromtimestamp(int(r.text))
tdelta = datetime.datetime.now() - builddate
except:
success = 0
raise
finally:
self.gauge(self.VERSION_METRIC_NAME, success, tags = ['sha:{0}'.format(sha)])
self.histogram(self.BUILDAGE_METRIC_NAME, tdelta.seconds)
|
import datetime
from urlparse import urljoin
import requests
# project
from checks import AgentCheck
class Veneur(AgentCheck):
VERSION_METRIC_NAME = 'veneur.deployed_version'
BUILDAGE_METRIC_NAME = 'veneur.build_age'
MAX_AGE_CHECK_NAME = 'veneur.build_age.fresh'
# Check that the build is no more than one week old
MAX_DEPLOYMENT_INTERVAL = 604800
def check(self, instance):
success = 0
host = instance['host']
try:
r = requests.get(urljoin(host, '/version'))
sha = r.text
success = 1
r = requests.get(urljoin(host, '/builddate'))
builddate = datetime.datetime.fromtimestamp(int(r.text))
tdelta = datetime.datetime.now() - builddate
if tdelta.seconds > self.MAX_DEPLOYMENT_INTERVAL:
self.service_check(self.MAX_AGE_CHECK_NAME, AgentCheck.CRITICAL,
message='Build date {0} is too old (build must be no more than {1} seconds old)'.format(builddate.strftime('%Y-%m-%d %H:%M:%S'), self.MAX_DEPLOYMENT_INTERVAL))
except:
success = 0
raise
finally:
self.gauge(self.VERSION_METRIC_NAME, success, tags = ['sha:{0}'.format(sha)])
self.histogram(self.BUILDAGE_METRIC_NAME, tdelta.seconds)
|
mit
|
Python
|
ef6e0b681c1c7812e9d11fcd2fffd36468c00513
|
Create custom field : SearchButtonField (#77)
|
ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp
|
cineapp/widgets.py
|
cineapp/widgets.py
|
# -*- coding: utf-8 -*-
from wtforms import fields, widgets
# Define wtforms widget and field
class CKTextAreaWidget(widgets.TextArea):
def __call__(self, field, **kwargs):
kwargs.setdefault('class_', 'ckeditor')
html_string = super(CKTextAreaWidget, self).__call__(field, **kwargs)
html_string += ("""<script>
CKEDITOR.replace( '%s', {
enterMode: CKEDITOR.ENTER_BR
} );
</script>""" % field.id)
return widgets.HTMLString(html_string)
class CKTextAreaField(fields.TextAreaField):
widget = CKTextAreaWidget()
# Widget which returns a complete search bar with a glyphicon button
class SearchButtonWidget(widgets.SubmitInput):
html_params = staticmethod(widgets.html_params)
input_type = 'submit'
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
kwargs.setdefault('type', self.input_type)
kwargs.setdefault('value', field.label.text)
if 'value' not in kwargs:
kwargs['value'] = field._value()
return widgets.HTMLString('<button %s><i class="glyphicon glyphicon-search"></i></button>' % self.html_params(name=field.name, **kwargs))
# SearchButtonField used for display the previous widget
class SearchButtonField(fields.BooleanField):
widget = SearchButtonWidget()
|
# -*- coding: utf-8 -*-
from wtforms import fields, widgets
# Define wtforms widget and field
class CKTextAreaWidget(widgets.TextArea):
def __call__(self, field, **kwargs):
kwargs.setdefault('class_', 'ckeditor')
html_string = super(CKTextAreaWidget, self).__call__(field, **kwargs)
html_string += ("""<script>
CKEDITOR.replace( '%s', {
enterMode: CKEDITOR.ENTER_BR
} );
</script>""" % field.id)
return widgets.HTMLString(html_string)
class CKTextAreaField(fields.TextAreaField):
widget = CKTextAreaWidget()
|
mit
|
Python
|
b54a3fa45cca86fddcd6130e67a306d93a079fff
|
update Config File parsing to new API
|
paroj/ogre,OGRECave/ogre,paroj/ogre,OGRECave/ogre,OGRECave/ogre,OGRECave/ogre,paroj/ogre,paroj/ogre,paroj/ogre,OGRECave/ogre
|
Samples/Python/sample.py
|
Samples/Python/sample.py
|
import Ogre
import OgreRTShader
class SGResolver(Ogre.MaterialManager_Listener):
def __init__(self, shadergen):
Ogre.MaterialManager_Listener.__init__(self)
self.shadergen = shadergen
def handleSchemeNotFound(self, idx, name, mat, lod_idx, rend):
if name != OgreRTShader.cvar.ShaderGenerator_DEFAULT_SCHEME_NAME:
return None
def_name = Ogre.cvar.MaterialManager_DEFAULT_SCHEME_NAME
succ = self.shadergen.createShaderBasedTechnique(mat.getName(), def_name, name)
if not succ:
return None
self.shadergen.validateMaterial(name, mat.getName())
return mat.getTechnique(1)
def main():
root = Ogre.Root("plugins.cfg", "ogre.cfg", "")
cfg = Ogre.ConfigFile()
cfg.loadDirect("resources.cfg")
rgm = Ogre.ResourceGroupManager.getSingleton()
for sec, settings in cfg.getSettingsBySection().items():
for kind, loc in settings.items():
rgm.addResourceLocation(loc, kind, sec)
arch = cfg.getSettings("General").values()[0]
rgm.addResourceLocation(arch + "/materials/programs/GLSL", "FileSystem", "General");
if not root.restoreConfig():
root.showConfigDialog(Ogre.ConfigDialog())
win = root.initialise(True)
OgreRTShader.ShaderGenerator.initialize()
shadergen = OgreRTShader.ShaderGenerator.getSingleton()
sgres = SGResolver(shadergen)
Ogre.MaterialManager.getSingleton().addListener(sgres)
rgm.initialiseAllResourceGroups()
rs = shadergen.getRenderState(OgreRTShader.cvar.ShaderGenerator_DEFAULT_SCHEME_NAME)
rs.addTemplateSubRenderState(shadergen.createSubRenderState(OgreRTShader.cvar.PerPixelLighting_Type));
scn_mgr = root.createSceneManager(Ogre.ST_GENERIC)
shadergen.addSceneManager(scn_mgr)
scn_mgr.setAmbientLight(Ogre.ColourValue(.1, .1, .1))
light = scn_mgr.createLight("MainLight")
light.setPosition(0, 10, 15)
cam = scn_mgr.createCamera("myCam")
cam.setPosition(0, 0, 15)
cam.setNearClipDistance(5)
cam.lookAt(0, 0, -1)
vp = win.addViewport(cam)
vp.setBackgroundColour(Ogre.ColourValue(.3, .3, .3))
ent = scn_mgr.createEntity("Sinbad.mesh")
node = scn_mgr.getRootSceneNode().createChildSceneNode()
node.attachObject(ent)
root.startRendering()
if __name__ == "__main__":
main()
|
import Ogre
import OgreRTShader
class SGResolver(Ogre.MaterialManager_Listener):
def __init__(self, shadergen):
Ogre.MaterialManager_Listener.__init__(self)
self.shadergen = shadergen
def handleSchemeNotFound(self, idx, name, mat, lod_idx, rend):
if name != OgreRTShader.cvar.ShaderGenerator_DEFAULT_SCHEME_NAME:
return None
def_name = Ogre.cvar.MaterialManager_DEFAULT_SCHEME_NAME
succ = self.shadergen.createShaderBasedTechnique(mat.getName(), def_name, name)
if not succ:
return None
self.shadergen.validateMaterial(name, mat.getName())
return mat.getTechnique(1)
def main():
root = Ogre.Root("plugins.cfg", "ogre.cfg", "")
cfg = Ogre.ConfigFile()
cfg.loadDirect("resources.cfg")
rgm = Ogre.ResourceGroupManager.getSingleton()
for sec in ("Essential", "Popular"):
for kind in ("Zip", "FileSystem"):
for loc in cfg.getMultiSetting(kind, sec):
rgm.addResourceLocation(loc, kind, sec)
if not root.restoreConfig():
root.showConfigDialog(Ogre.ConfigDialog())
win = root.initialise(True)
OgreRTShader.ShaderGenerator.initialize()
shadergen = OgreRTShader.ShaderGenerator.getSingleton()
sgres = SGResolver(shadergen)
Ogre.MaterialManager.getSingleton().addListener(sgres)
rgm.initialiseAllResourceGroups()
rs = shadergen.getRenderState(OgreRTShader.cvar.ShaderGenerator_DEFAULT_SCHEME_NAME)
rs.addTemplateSubRenderState(shadergen.createSubRenderState(OgreRTShader.cvar.PerPixelLighting_Type));
scn_mgr = root.createSceneManager(Ogre.ST_GENERIC)
shadergen.addSceneManager(scn_mgr)
scn_mgr.setAmbientLight(Ogre.ColourValue(.1, .1, .1))
light = scn_mgr.createLight("MainLight")
light.setPosition(0, 10, 15)
cam = scn_mgr.createCamera("myCam")
cam.setPosition(0, 0, 15)
cam.setNearClipDistance(5)
cam.lookAt(0, 0, -1)
vp = win.addViewport(cam)
vp.setBackgroundColour(Ogre.ColourValue(.3, .3, .3))
ent = scn_mgr.createEntity("Sinbad.mesh")
node = scn_mgr.getRootSceneNode().createChildSceneNode()
node.attachObject(ent)
root.startRendering()
if __name__ == "__main__":
main()
|
mit
|
Python
|
13752105bfc595acfc143df92306c07e2d6782ff
|
Adjust operation should still return valid json even when no changes (None) returned
|
project-ncl/repour,project-ncl/repour,jsenko/repour,jsenko/repour
|
repour/adjust.py
|
repour/adjust.py
|
import asyncio
import logging
from . import asgit
from . import asutil
from . import exception
logger = logging.getLogger(__name__)
#
# Utility
#
expect_ok = asutil.expect_ok_closure(exception.AdjustCommandError)
@asyncio.coroutine
def commit_adjustments(repo_dir, repo_url, original_ref, adjust_type):
d = yield from asgit.push_new_dedup_branch(
expect_ok=expect_ok,
repo_dir=repo_dir,
repo_url=repo_url,
operation_name="Adjust",
operation_description="""Original Reference: {original_ref}
Adjust Type: {adjust_type}
""".format(**locals()),
no_change_ok=True,
)
return d
#
# Adjust operation
#
@asyncio.coroutine
def adjust(adjustspec, repo_provider, adjust_provider):
with asutil.TemporaryDirectory(suffix="git") as d:
repo_url = yield from repo_provider(adjustspec["name"], create=False)
# Non-shallow, but branch-only clone of internal repo
yield from expect_ok(
cmd=["git", "clone", "--branch", adjustspec["ref"], "--", repo_url.readwrite, d],
desc="Could not clone with git",
)
yield from asgit.setup_commiter(expect_ok, d)
adjust_type = yield from adjust_provider(d)
result = yield from commit_adjustments(
repo_dir=d,
repo_url=repo_url,
original_ref=adjustspec["ref"],
adjust_type=adjust_type,
)
return result or {}
#
# Adjust providers
#
def adjust_noop():
@asyncio.coroutine
def adjust(repo_dir):
return "NoOp"
return adjust
@asyncio.coroutine
def adjust_subprocess(description, cmd):
@asyncio.coroutine
def adjust(repo_dir):
filled_cmd = [repo_dir if p == "{repo_dir}" else p for p in cmd]
yield from expect_ok(filled_cmd, "Alignment subprocess failed")
return description
return adjust
#
# Supported
#
provider_types = {
"noop": adjust_noop,
"subprocess": adjust_subprocess,
}
|
import asyncio
import logging
from . import asgit
from . import asutil
from . import exception
logger = logging.getLogger(__name__)
#
# Utility
#
expect_ok = asutil.expect_ok_closure(exception.AdjustCommandError)
@asyncio.coroutine
def commit_adjustments(repo_dir, repo_url, original_ref, adjust_type):
d = yield from asgit.push_new_dedup_branch(
expect_ok=expect_ok,
repo_dir=repo_dir,
repo_url=repo_url,
operation_name="Adjust",
operation_description="""Original Reference: {original_ref}
Adjust Type: {adjust_type}
""".format(**locals()),
no_change_ok=True,
)
return d
#
# Adjust operation
#
@asyncio.coroutine
def adjust(adjustspec, repo_provider, adjust_provider):
with asutil.TemporaryDirectory(suffix="git") as d:
repo_url = yield from repo_provider(adjustspec["name"], create=False)
# Non-shallow, but branch-only clone of internal repo
yield from expect_ok(
cmd=["git", "clone", "--branch", adjustspec["ref"], "--", repo_url.readwrite, d],
desc="Could not clone with git",
)
yield from asgit.setup_commiter(expect_ok, d)
adjust_type = yield from adjust_provider(d)
result = yield from commit_adjustments(
repo_dir=d,
repo_url=repo_url,
original_ref=adjustspec["ref"],
adjust_type=adjust_type,
)
return result
#
# Adjust providers
#
def adjust_noop():
@asyncio.coroutine
def adjust(repo_dir):
return "NoOp"
return adjust
@asyncio.coroutine
def adjust_subprocess(description, cmd):
@asyncio.coroutine
def adjust(repo_dir):
filled_cmd = [repo_dir if p == "{repo_dir}" else p for p in cmd]
yield from expect_ok(filled_cmd, "Alignment subprocess failed")
return description
return adjust
#
# Supported
#
provider_types = {
"noop": adjust_noop,
"subprocess": adjust_subprocess,
}
|
apache-2.0
|
Python
|
01ec4fd2e294bcb524c6724d6727da7b1a882f0d
|
Exit code 2 for normal not running remote status
|
guildai/guild,guildai/guild,guildai/guild,guildai/guild
|
guild/commands/remote_impl.py
|
guild/commands/remote_impl.py
|
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import guild.remote
from guild import cli
from . import remote_support
def start(args):
remote = remote_support.remote_for_args(args)
_remote_op(remote.start, "start", remote, True, args)
def stop(args):
remote = remote_support.remote_for_args(args)
_remote_op(remote.stop, "stop", remote, False, args)
def _remote_op(op, desc, remote, default_resp, args):
if not args.yes:
cli.out("You are about to %s %s" % (desc, remote.name))
if args.yes or cli.confirm("Continue?", default_resp):
try:
op()
except guild.remote.OperationNotSupported as e:
cli.error(e)
except guild.remote.OperationError as e:
cli.error(e)
def status(args):
remote = remote_support.remote_for_args(args)
try:
remote.status(args.verbose)
except guild.remote.Down as e:
cli.error(
"remote %s is not available (%s)" % (remote.name, e),
exit_status=2)
except guild.remote.OperationError as e:
cli.error(e)
|
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import guild.remote
from guild import cli
from . import remote_support
def start(args):
remote = remote_support.remote_for_args(args)
_remote_op(remote.start, "start", remote, True, args)
def stop(args):
remote = remote_support.remote_for_args(args)
_remote_op(remote.stop, "stop", remote, False, args)
def _remote_op(op, desc, remote, default_resp, args):
if not args.yes:
cli.out("You are about to %s %s" % (desc, remote.name))
if args.yes or cli.confirm("Continue?", default_resp):
try:
op()
except guild.remote.OperationNotSupported as e:
cli.error(e)
except guild.remote.OperationError as e:
cli.error(e)
def status(args):
remote = remote_support.remote_for_args(args)
try:
remote.status(args.verbose)
except guild.remote.Down as e:
cli.error("remote %s is not available (%s)" % (remote.name, e))
except guild.remote.OperationError as e:
cli.error(e)
|
apache-2.0
|
Python
|
859d5ce6553b7651f05f27adec28e8c4330ca9bb
|
Add id of node generating the supervisor event
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
handler/supervisor_to_serf.py
|
handler/supervisor_to_serf.py
|
#!/usr/bin/env python
import json
import sys
from utils import serf_event
def write_stdout(s):
sys.stdout.write(s)
sys.stdout.flush()
def write_stderr(s):
sys.stderr.write(s)
sys.stderr.flush()
def main():
while True:
write_stdout('READY\n') # transition from ACKNOWLEDGED to READY
line = sys.stdin.readline() # read header line from stdin
headers = dict(x.split(':') for x in line.split())
data = sys.stdin.read(int(headers['len'])) # read the event payload
data_dict = dict(x.split(':') for x in data.split())
data_dict['eventname'] = headers['eventname']
data_dict['node'] = serf('info')['agent']['name']
serf_event('supervisor', json.dumps(data_dict))
write_stdout('RESULT 2\nOK') # transition from READY to ACKNOWLEDGED
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import sys
from utils import serf_event
def write_stdout(s):
sys.stdout.write(s)
sys.stdout.flush()
def write_stderr(s):
sys.stderr.write(s)
sys.stderr.flush()
def main():
while True:
write_stdout('READY\n') # transition from ACKNOWLEDGED to READY
line = sys.stdin.readline() # read header line from stdin
headers = dict(x.split(':') for x in line.split())
data = sys.stdin.read(int(headers['len'])) # read the event payload
data_dict = dict(x.split(':') for x in data.split())
data_dict['eventname'] = headers['eventname']
serf_event('myevent', json.dumps(data_dict))
write_stdout('RESULT 2\nOK') # transition from READY to ACKNOWLEDGED
if __name__ == '__main__':
main()
|
mit
|
Python
|
e3916e6403b8933d9d8896b7289321c45b9990d2
|
Refactor if clause to check for string in list
|
genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2
|
wqflask/wqflask/docs.py
|
wqflask/wqflask/docs.py
|
import codecs
from flask import g
from wqflask.database import database_connection
class Docs:
def __init__(self, entry, start_vars={}):
results = None
with database_connection() as conn, conn.cursor() as cursor:
cursor.execute("SELECT Docs.title, CAST(Docs.content AS BINARY) "
"FROM Docs WHERE Docs.entry LIKE %s", (str(entry),))
result = cursor.fetchone()
self.entry = entry
if result:
self.title = result[0]
self.content = result[1].decode("utf-8")
else:
self.title = self.entry.capitalize()
self.content = ""
self.editable = "false"
# ZS: Removing option to edit to see if text still gets vandalized
try:
if g.user_session.record['user_email_address'] == "[email protected]" or g.user_session.record['user_email_address'] == "[email protected]":
self.editable = "true"
except:
pass
def update_text(start_vars):
content = start_vars['ckcontent']
content = content.replace('%', '%%').replace(
'"', '\\"').replace("'", "\\'")
try:
if g.user_session.record.get('user_email_address') in ["[email protected]", "[email protected]"]:
with database_connection() as conn, conn.cursor() as cursor:
cursor.execute("UPDATE Docs SET content=%s WHERE entry=%s",
(content, start_vars.get("entry_type"),))
except:
pass
|
import codecs
from flask import g
from wqflask.database import database_connection
class Docs:
def __init__(self, entry, start_vars={}):
results = None
with database_connection() as conn, conn.cursor() as cursor:
cursor.execute("SELECT Docs.title, CAST(Docs.content AS BINARY) "
"FROM Docs WHERE Docs.entry LIKE %s", (str(entry),))
result = cursor.fetchone()
self.entry = entry
if result:
self.title = result[0]
self.content = result[1].decode("utf-8")
else:
self.title = self.entry.capitalize()
self.content = ""
self.editable = "false"
# ZS: Removing option to edit to see if text still gets vandalized
try:
if g.user_session.record['user_email_address'] == "[email protected]" or g.user_session.record['user_email_address'] == "[email protected]":
self.editable = "true"
except:
pass
def update_text(start_vars):
content = start_vars['ckcontent']
content = content.replace('%', '%%').replace(
'"', '\\"').replace("'", "\\'")
try:
if g.user_session.record['user_email_address'] == "[email protected]" or g.user_session.record['user_email_address'] == "[email protected]":
with database_connection() as conn, conn.cursor() as cursor:
cursor.execute("UPDATE Docs SET content=%s WHERE entry=%s",
(content, start_vars.get("entry_type"),))
except:
pass
|
agpl-3.0
|
Python
|
665943c0736cd83662bc8bebe072045f163b28c9
|
Revise func docstrring
|
bowen0701/algorithms_data_structures
|
alg_insertion_sort.py
|
alg_insertion_sort.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def insertion_sort(nums):
"""Insertion sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Starting at pos i >= 1, swap (num[j-1], num[j]), for j=i,i-1,...,1,
# if order is not correct.
for i in range(1, len(nums)):
for j in range(i, -1, -1):
if j > 0 and nums[j - 1] > nums[j]:
nums[j - 1], nums[j] = nums[j], nums[j - 1]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
insertion_sort(nums)
print(nums)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def insertion_sort(nums):
"""Insertion Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Starting at pos i >= 1, swap (num[j-1], num[j]), for j=i,i-1,...,1,
# if order is not correct.
for i in range(1, len(nums)):
for j in range(i, -1, -1):
if j > 0 and nums[j - 1] > nums[j]:
nums[j - 1], nums[j] = nums[j], nums[j - 1]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
insertion_sort(nums)
print(nums)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
1b47086e3ef45b6e668ed330ac017badc0afae96
|
Add opbeat contrib
|
daviferreira/defprogramming,daviferreira/defprogramming,daviferreira/defprogramming
|
defprogramming/settings.py
|
defprogramming/settings.py
|
# Django settings for defprogramming project.
import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db/development.sqlite3'),
}
}
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
SECRET_KEY = 'secret'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.syndication',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
'quotes',
'sorl.thumbnail',
'django_medusa',
'opbeat.contrib.django',
)
MEDUSA_RENDERER_CLASS = "django_medusa.renderers.DiskStaticSiteRenderer"
MEDUSA_MULTITHREAD = False
MEDUSA_DEPLOY_DIR = os.path.join(BASE_DIR, 'cache', 'html')
DEFAULT_CACHE_TIME = 60 * 15
try:
from settings_local import * # NOQA
except ImportError:
from warnings import warn
msg = "You don't have settings_local.py file, using defaults settings."
try:
# don't work in Python 2.4 or before
warn(msg, category=ImportWarning)
except NameError:
warn(msg)
|
# Django settings for defprogramming project.
import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db/development.sqlite3'),
}
}
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
SECRET_KEY = 'secret'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.syndication',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'south',
'quotes',
'sorl.thumbnail',
'django_medusa',
)
MEDUSA_RENDERER_CLASS = "django_medusa.renderers.DiskStaticSiteRenderer"
MEDUSA_MULTITHREAD = False
MEDUSA_DEPLOY_DIR = os.path.join(BASE_DIR, 'cache', 'html')
DEFAULT_CACHE_TIME = 60 * 15
try:
from settings_local import * # NOQA
except ImportError:
from warnings import warn
msg = "You don't have settings_local.py file, using defaults settings."
try:
# don't work in Python 2.4 or before
warn(msg, category=ImportWarning)
except NameError:
warn(msg)
|
mit
|
Python
|
22029728795a850d1b57824c6a91ddd5378f9760
|
fix some typos
|
sassoftware/robj
|
robj/__init__.py
|
robj/__init__.py
|
#
# Copyright (c) 2010 rPath, Inc.
#
# This program is distributed under the terms of the MIT License as found
# in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/mit-license.php.
#
# This program is distributed in the hope that it will be useful, but
# without any waranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the MIT License for full details.
#
"""
rObj REST Client Library
This library is primarily intended for use with rPath provided REST APIs, but
should be able to interact with other REST based services that follow similar
best practices.
Example usage:
>>> import robj
>>> api = robj.connect('http://www.rpath.org/api/')
>>> products = api.products
>>> print products[0].name
"""
from robj.glue import HTTPClient as _HTTPClient
from robj.lib.log import setupLogging as _setupLogging
__ALL__ = ['rObj', 'connect', 'open', ]
def rObj(uri, headers=None, maxClients=None, maxConnections=None,
logging=True):
"""
@param uri: URI for connectiong to the root of the desired web service. This
may contain user information and must be http or https.
@type uri: str
@param headers: Any headers that should be included in all requets.
@type headers: dict
@param maxClients: The maximum number of workers that will be created to
handle requets. Works are created as needed, rather than
being preallocated. (default: 10)
@type maxClients: int
@param maxConnections: The maximum number of connections each client thread
should cache. Client threads only cache one
connection per host. This should only matter if you
are talking to multiple hosts. (default: 2)
@type maxConnections: int
@param logging: Set up a logger.
@type logging: boolean
"""
# Setup logging if requested.
if logging:
# FIXME: Let people specify log files somehow.
_setupLogging()
# Instantiate the http client.
client = _HTTPClient(uri, headers=headers, maxClients=maxClients,
maxConnections=maxConnections)
# Get the root rObj
robj = client.do_GET('/')
return robj
connect = open = rObj
|
#
# Copyright (c) 2010 rPath, Inc.
#
# This program is distributed under the terms of the MIT License as found
# in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/mit-license.php.
#
# This program is distributed in the hope that it will be useful, but
# without any waranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the MIT License for full details.
#
"""
rObj REST Client Library
This library is primarily intened for use with rPath provied REST APIs, but
should be able to interact with other REST based services that follow similar
best practices.
Example usage:
>>> import robj
>>> api = robj.connect('http://www.rpath.org/api/')
>>> products = api.products
>>> print products[0].name
"""
from robj.glue import HTTPClient as _HTTPClient
from robj.lib.log import setupLogging as _setupLogging
__ALL__ = ['rObj', 'connect', 'open', ]
def rObj(uri, headers=None, maxClients=None, maxConnections=None,
logging=True):
"""
@param uri: URI for connectiong to the root of the desired web service. This
may contain user information and must be http or https.
@type uri: str
@param headers: Any headers that should be included in all requets.
@type headers: dict
@param maxClients: The maximum number of workers that will be created to
handle requets. Works are created as needed, rather than
being preallocated. (default: 10)
@type maxClients: int
@param maxConnections: The maximum number of connections each client thread
should cache. Client threads only cache one
connection per host. This should only matter if you
are talking to multiple hosts. (default: 2)
@type maxConnections: int
@param logging: Set up a logger.
@type logging: boolean
"""
# Setup logging if requested.
if logging:
# FIXME: Let people specify log files somehow.
_setupLogging()
# Instantiate the http client.
client = _HTTPClient(uri, headers=headers, maxClients=maxClients,
maxConnections=maxConnections)
# Get the root rObj
robj = client.do_GET('/')
return robj
connect = open = rObj
|
apache-2.0
|
Python
|
1b6b7de39dcb80ff083bd21c6665c0dcaa5200fa
|
Update last_api_activity in Tooltron add_card_event view.
|
CMU-Robotics-Club/roboticsclub.org,sreidy/roboticsclub.org,CMU-Robotics-Club/roboticsclub.org,sreidy/roboticsclub.org,sreidy/roboticsclub.org,CMU-Robotics-Club/roboticsclub.org
|
robocrm/views.py
|
robocrm/views.py
|
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.contrib.auth import authenticate, login
from api.models import APIRequest
from django.views.decorators.http import require_POST
from projects.models import Project
from django.utils import timezone
from .models import *
def roboauth(request, rfid_tag, mach_num):
r = RoboUser.objects.filter(rfid=rfid_tag)
if r.count() > 0:
us = r[0]
else:
return HttpResponse("0")
auth_machines = us.machines.filter(id=mach_num)
if auth_machines.count() > 0 :
return HttpResponse("1")
else :
return HttpResponse("0")
def roboauthall(request, rfid_tag):
r = RoboUser.objects.filter(rfid=rfid_tag)
if r.count() > 0:
us = r[0]
else:
return HttpResponse("0")
auth = 0
for mach in us.machines.all():
auth |= 1 << int(mach.id)
return HttpResponse(str(auth))
@require_POST
def add_card_event(request):
if 'username' in request.POST and 'password' in request.POST:
user = authenticate(username=request.POST['username'],
password=request.POST['password'])
if user is not None and user.is_active:
login(request, user)
tstart = request.POST.get('tstart') # TODO: convert to date
tend = request.POST.get('tend')
user_id = request.POST.get('user_id', 0)
succ = request.POST.get('succ') == '1'
machine_id = int(request.POST.get('machine_id', 1))
try:
robouser = RoboUser.objects.get(rfid__iexact=user_id)
except ObjectDoesNotExist:
robouser = None
machine = Machine.objects.get(id__exact=machine_id)
tooltron = Project.objects.get(name="Tooltron")
api_request = APIRequest(
endpoint="/rfid/",
updater_object=tooltron,
user=robouser,
success=succ,
meta=machine.type,
api_client="",
)
api_request.save()
# Cannot update updated_datetime with tend
# because would be overwritten on save however
# does not matter because Tooltron pushes
# card events every 70ms which a lower resolution
# that what tend even provides so update_datetime
# being the value when this save() is called is okay
api_request.created_datetime = tstart
api_request.save()
# Since Tooltron (for now) does not use Standard API
# manually update it's last_activity field so Officers have the benefit
# of easily being able to see if it is working
tooltron.last_api_activity = timezone.now()
tooltron.save()
return HttpResponse()
|
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.contrib.auth import authenticate, login
from api.models import APIRequest
from django.views.decorators.http import require_POST
from projects.models import Project
from .models import *
def roboauth(request, rfid_tag, mach_num):
r = RoboUser.objects.filter(rfid=rfid_tag)
if r.count() > 0:
us = r[0]
else:
return HttpResponse("0")
auth_machines = us.machines.filter(id=mach_num)
if auth_machines.count() > 0 :
return HttpResponse("1")
else :
return HttpResponse("0")
def roboauthall(request, rfid_tag):
r = RoboUser.objects.filter(rfid=rfid_tag)
if r.count() > 0:
us = r[0]
else:
return HttpResponse("0")
auth = 0
for mach in us.machines.all():
auth |= 1 << int(mach.id)
return HttpResponse(str(auth))
@require_POST
def add_card_event(request):
if 'username' in request.POST and 'password' in request.POST:
user = authenticate(username=request.POST['username'],
password=request.POST['password'])
if user is not None and user.is_active:
login(request, user)
tstart = request.POST['tstart'] # TODO: convert to date
tend = request.POST['tend']
user_id = request.POST['user_id']
succ = request.POST['succ'] == '1'
machine_id = int(request.POST['machine_id'])
try:
robouser = RoboUser.objects.get(rfid__iexact=user_id)
except ObjectDoesNotExist:
robouser = None
machine = Machine.objects.get(id__exact=machine_id)
api_request = APIRequest(
endpoint="/rfid/",
updater_object=Project.objects.get(name="Tooltron"),
user=robouser,
success=succ,
meta=machine.type,
api_client="",
)
api_request.save()
# Cannot update updated_datetime with tend
# because would be overwritten on save however
# does not matter because Tooltron pushes
# card events every 70ms which a lower resolution
# that what tend even provides so update_datetime
# being the value when this save() is called is okay
api_request.created_datetime = tstart
api_request.save()
return HttpResponse()
|
mit
|
Python
|
2a984234d6bef4667af9549459e1fd85fb213626
|
Bump version to v1.14.20
|
Cal-CS-61A-Staff/ok-client
|
client/__init__.py
|
client/__init__.py
|
__version__ = 'v1.14.20'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
__version__ = 'v1.14.19'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
apache-2.0
|
Python
|
c13f78f358b3befe71539804abc80df9179b6bfa
|
bump to v1.7.6
|
Cal-CS-61A-Staff/ok-client
|
client/__init__.py
|
client/__init__.py
|
__version__ = 'v1.7.6'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
__version__ = 'v1.7.5'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
apache-2.0
|
Python
|
c41e4827116635acf37861b19d369f167900bf74
|
Reduce test memory footprint, remove unused variable
|
poldracklab/fmriprep,oesteban/preprocessing-workflow,oesteban/fmriprep,poldracklab/preprocessing-workflow,poldracklab/fmriprep,poldracklab/fmriprep,poldracklab/preprocessing-workflow,oesteban/fmriprep,oesteban/preprocessing-workflow,oesteban/fmriprep
|
fmriprep/interfaces/tests/test_images.py
|
fmriprep/interfaces/tests/test_images.py
|
import time
import numpy as np
import nibabel as nb
from nipype.interfaces import nilearn as nl
from .. import images as im
import pytest
@pytest.mark.parametrize('nvols, nmasks, ext, factor', [
(500, 10, '.nii', 2),
(500, 10, '.nii.gz', 5),
(200, 3, '.nii', 1.1),
(200, 3, '.nii.gz', 2),
(200, 10, '.nii', 1.1),
(200, 10, '.nii.gz', 2),
])
def test_signal_extraction_equivalence(tmpdir, nvols, nmasks, ext, factor):
tmpdir.chdir()
vol_shape = (64, 64, 40)
img_fname = 'img' + ext
masks_fname = 'masks' + ext
random_data = np.random.random(size=vol_shape + (nvols,)) * 2000
random_mask_data = np.random.random(size=vol_shape + (nmasks,)) < 0.2
nb.Nifti1Image(random_data, np.eye(4)).to_filename(img_fname)
nb.Nifti1Image(random_mask_data.astype(np.uint8), np.eye(4)).to_filename(masks_fname)
se1 = nl.SignalExtraction(in_file=img_fname, label_files=masks_fname,
class_labels=['a%d' % i for i in range(nmasks)],
out_file='nlsignals.tsv')
se2 = im.SignalExtraction(in_file=img_fname, label_files=masks_fname,
class_labels=['a%d' % i for i in range(nmasks)],
out_file='imsignals.tsv')
tic = time.time()
se1.run()
toc = time.time()
se2.run()
toc2 = time.time()
tab1 = np.loadtxt('nlsignals.tsv', skiprows=1)
tab2 = np.loadtxt('imsignals.tsv', skiprows=1)
assert np.allclose(tab1, tab2)
t1 = toc - tic
t2 = toc2 - toc
assert t2 < t1 / factor
|
import time
import numpy as np
import nibabel as nb
from nipype.interfaces import nilearn as nl
from .. import images as im
import pytest
@pytest.mark.parametrize('nvols, nmasks, ext, factor', [
(1000, 10, '.nii', 2),
(1000, 10, '.nii.gz', 5),
(200, 3, '.nii', 1.1),
(200, 3, '.nii.gz', 2),
(200, 10, '.nii', 1.1),
(200, 10, '.nii.gz', 2),
])
def test_signal_extraction_equivalence(tmpdir, nvols, nmasks, ext, factor):
orig_dir = tmpdir.chdir()
vol_shape = (64, 64, 40)
img_fname = 'img' + ext
masks_fname = 'masks' + ext
random_data = np.random.random(size=vol_shape + (nvols,)) * 2000
random_mask_data = np.random.random(size=vol_shape + (nmasks,)) < 0.2
nb.Nifti1Image(random_data, np.eye(4)).to_filename(img_fname)
nb.Nifti1Image(random_mask_data.astype(np.uint8), np.eye(4)).to_filename(masks_fname)
se1 = nl.SignalExtraction(in_file=img_fname, label_files=masks_fname,
class_labels=['a%d' % i for i in range(nmasks)],
out_file='nlsignals.tsv')
se2 = im.SignalExtraction(in_file=img_fname, label_files=masks_fname,
class_labels=['a%d' % i for i in range(nmasks)],
out_file='imsignals.tsv')
tic = time.time()
se1.run()
toc = time.time()
se2.run()
toc2 = time.time()
tab1 = np.loadtxt('nlsignals.tsv', skiprows=1)
tab2 = np.loadtxt('imsignals.tsv', skiprows=1)
assert np.allclose(tab1, tab2)
t1 = toc - tic
t2 = toc2 - toc
assert t2 < t1 / factor
|
bsd-3-clause
|
Python
|
76b087986aa90967918ec52b459a857c11743203
|
Update patterns
|
vuolter/pyload,vuolter/pyload,vuolter/pyload
|
module/plugins/hoster/ZippyshareCom.py
|
module/plugins/hoster/ZippyshareCom.py
|
# -*- coding: utf-8 -*-
import re
from os import path
from urllib import unquote
from urlparse import urljoin
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
__version__ = "0.60"
__pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
__description__ = """Zippyshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
NAME_PATTERN = r'("\d{6,}/"[ ]*\+.+?"/|<title>Zippyshare.com - )(?P<N>.+?)("|</title>)'
SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'>File does not exist on this server<'
COOKIES = [(".zippyshare.com", "ziplocale", "en")]
def setup(self):
self.multiDL = True
self.chunkLimit = -1
self.resumeDownload = True
def handleFree(self):
url = self.get_link()
self.download(url)
def getFileInfo(self):
info = super(ZippyshareCom, self).getFileInfo()
self.pyfile.name = info['name'] = unquote(info['name'])
return info
def get_checksum(self):
try:
m = re.search(r'\+[ ]*\((\d+)[ ]*\%[ ]*(\d+)[ ]*\+[ ]*(\d+)[ ]*\%[ ]*(\d+)\)[ ]*\+', self.html)
if m:
a1, a2, c1, c2 = map(int, m.groups())
else:
a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups())
c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups())
b = (a1 % a2) * (c1 % c2)
except:
self.error(_("Unable to calculate checksum"))
else:
return b + 18
def get_link(self):
checksum = self.get_checksum()
p_url = path.join("d", self.info['KEY'], str(checksum), self.pyfile.name)
dl_link = urljoin(self.info['HOST'], p_url)
return dl_link
getInfo = create_getInfo(ZippyshareCom)
|
# -*- coding: utf-8 -*-
import re
from os import path
from urllib import unquote
from urlparse import urljoin
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
__version__ = "0.60"
__pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
__description__ = """Zippyshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
NAME_PATTERN = r'("/[di]/\d+/".+?"/|<title>Zippyshare.com - )(?P<N>.+?)("|</title>)'
SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'>File does not exist on this server<'
COOKIES = [(".zippyshare.com", "ziplocale", "en")]
def setup(self):
self.multiDL = True
self.chunkLimit = -1
self.resumeDownload = True
def handleFree(self):
url = self.get_link()
self.download(url)
def getFileInfo(self):
info = super(ZippyshareCom, self).getFileInfo()
self.pyfile.name = info['name'] = unquote(info['name'])
return info
def get_checksum(self):
try:
a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups())
c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups())
b = (a1 % a2) * (c1 % c2)
except:
self.error(_("Unable to calculate checksum"))
else:
return b + 18
def get_link(self):
checksum = self.get_checksum()
p_url = path.join("d", self.info['KEY'], str(checksum), self.pyfile.name)
dl_link = urljoin(self.info['HOST'], p_url)
return dl_link
getInfo = create_getInfo(ZippyshareCom)
|
agpl-3.0
|
Python
|
3ce75ad5f3e0178394e9d496327c2e11bb74c6ac
|
save schedule to SQL
|
paulfantom/Central-Heating-webpage,paulfantom/Central-Heating-webpage,paulfantom/Central-Heating-webpage,paulfantom/Central-Heating-webpage
|
app/data.py
|
app/data.py
|
from .models import Settings
from app import db
def get_query(db_model):
try:
q = db.session.query(db_model).order_by(db_model.index.desc()).first()
except AttributeError:
try:
q = db.session.query(db_model).order_by(db_model.index.desc()).first()
except AttributeError:
return None
return q
def get_last_row(db_model=Settings):
q = get_query(db_model)
d = {}
for col in q.__table__.columns._data.keys():
d[col] = getattr(q,col)
return d
def get_value(column,db_model=Settings):
q = get_query(db_model)
try:
return getattr(q,column)
except AttributeError:
return None
except TypeError:
return None
def change_setting(name,value):
d = get_last_row(Settings)
if name in d.keys():
d[name] = value
del d['index']
del d['timestamp']
db.session.add(Settings(**d))
db.session.commit()
def parse_timetable(input_json):
return None
|
from .models import Settings
from app import db
def get_query(db_model):
try:
q = db.session.query(db_model).order_by(db_model.index.desc()).first()
except AttributeError:
try:
q = db.session.query(db_model).order_by(db_model.index.desc()).first()
except AttributeError:
return None
return q
def get_last_row(db_model=Settings):
q = get_query(db_model)
d = {}
for col in q.__table__.columns._data.keys():
d[col] = getattr(q,col)
return d
def get_value(column,db_model=Settings):
q = get_query(db_model)
try:
return getattr(q,column)
except AttributeError:
return None
except TypeError:
return None
def change_setting(name,value):
d = get_last_row(Settings)
if name in d.keys():
d[name] = value
del d['index']
del d['timestamp']
db.session.add(Settings(**d))
db.session.commit()
|
mpl-2.0
|
Python
|
e77bc141c8e2564509f093059e61cfb98be79e56
|
Add module docstring to __init__
|
par2/lamana
|
lamana/__init__.py
|
lamana/__init__.py
|
# -----------------------------------------------------------------------------
'''The main init file that stores the package version number.'''
# __version__ is used by find_version() in setup.py
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
__title__ = 'lamana'
__version__ = '0.4.12-dev' # PEP 440 style
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
|
# -----------------------------------------------------------------------------
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
__title__ = 'lamana'
__version__ = '0.4.12-dev' # PEP 440 style
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
|
bsd-3-clause
|
Python
|
2cd901a3975691eb06f695f5e352c0bc46c923a0
|
Bump version to 0.4.11
|
par2/lamana
|
lamana/__init__.py
|
lamana/__init__.py
|
# -----------------------------------------------------------------------------
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
__title__ = 'lamana'
__version__ = '0.4.11.dev0' # PEP 440 style
##__version__ = '0.4.11'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
|
# -----------------------------------------------------------------------------
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
__title__ = 'lamana'
__version__ = '0.4.11.dev0' # PEP 440 style
##__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
|
bsd-3-clause
|
Python
|
edaaaf23bc13996bf571946128f206013045efbb
|
Resolve comilation issue for darwin-framework-tool on M1 (#21761)
|
project-chip/connectedhomeip,project-chip/connectedhomeip,project-chip/connectedhomeip,project-chip/connectedhomeip,project-chip/connectedhomeip,project-chip/connectedhomeip
|
scripts/build/build_darwin_framework.py
|
scripts/build/build_darwin_framework.py
|
#!/usr/bin/env -S python3 -B
# Copyright (c) 2022 Project Matter Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from subprocess import PIPE, Popen
import platform
def run_command(command):
returncode = -1
command_log = b''
print("Running {}".format(command))
with Popen(command, cwd=os.getcwd(), stdout=PIPE, stderr=PIPE) as process:
for line in process.stdout:
command_log += line
for line in process.stderr:
command_log += line
process.wait()
returncode = process.returncode
with open(args.log_path, "wb") as f:
f.write(command_log)
return returncode
def build_darwin_framework(args):
abs_path = os.path.abspath(args.out_path)
if not os.path.exists(abs_path):
os.mkdir(abs_path)
command = [
'xcodebuild',
'-scheme',
args.target,
'-sdk',
'macosx',
'-project',
args.project_path,
'-derivedDataPath',
abs_path,
"PLATFORM_PREFERRED_ARCH={}".format(platform.machine())
]
command_result = run_command(command)
print("Build Framework Result: {}".format(command_result))
exit(command_result)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Build the Matter Darwin framework")
parser.add_argument(
"--project_path",
default="src/darwin/Framework/Matter.xcodeproj",
help="Set the project path",
required=True,
)
parser.add_argument(
"--out_path",
default="/tmp/macos_framework_output",
help="Output lpath for framework",
required=True,
)
parser.add_argument("--target",
default="Matter",
help="Name of target to build",
required=True)
parser.add_argument("--log_path",
help="Output log file destination",
required=True)
args = parser.parse_args()
build_darwin_framework(args)
|
#!/usr/bin/env -S python3 -B
# Copyright (c) 2022 Project Matter Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from subprocess import PIPE, Popen
def run_command(command):
returncode = -1
command_log = b''
print("Running {}".format(command))
with Popen(command, cwd=os.getcwd(), stdout=PIPE, stderr=PIPE) as process:
for line in process.stdout:
command_log += line
for line in process.stderr:
command_log += line
process.wait()
returncode = process.returncode
with open(args.log_path, "wb") as f:
f.write(command_log)
return returncode
def build_darwin_framework(args):
abs_path = os.path.abspath(args.out_path)
if not os.path.exists(abs_path):
os.mkdir(abs_path)
command = ['xcodebuild', '-scheme', args.target, '-sdk', 'macosx', '-project', args.project_path, '-derivedDataPath', abs_path]
command_result = run_command(command)
print("Build Framework Result: {}".format(command_result))
exit(command_result)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Build the Matter Darwin framework")
parser.add_argument(
"--project_path",
default="src/darwin/Framework/Matter.xcodeproj",
help="Set the project path",
required=True,
)
parser.add_argument(
"--out_path",
default="/tmp/macos_framework_output",
help="Output lpath for framework",
required=True,
)
parser.add_argument("--target",
default="Matter",
help="Name of target to build",
required=True)
parser.add_argument("--log_path",
help="Output log file destination",
required=True)
args = parser.parse_args()
build_darwin_framework(args)
|
apache-2.0
|
Python
|
35d39957d1a4fd847509384dab429092a39715e3
|
Load pre-trained GloVe
|
natsheh/sensim,natsheh/sensim
|
distance.py
|
distance.py
|
# coding: utf-8
# Author: Hussein AL-NATSHEH <[email protected]>
# License: BSD 3 clause
# 2016
import pandas as pd
import argparse
import numpy as np
def load_data(dataset, verbose=0):
if dataset == 'sts':
#Load STS data (combined 2012-2014 and cleaned)
data = pd.read_csv('data/sts_gs_all.csv', dtype={'Score': np.float32})
if verbose == 2:
print data.shape
print data.head(n=10)
elif verbose == 1:
print data.shape
X = data.as_matrix(columns=["Sent1", "Sent2"])
y = data.as_matrix(columns=["Score"])
return X, y
def load_glove(filepath, verbose=0):
data = pd.read_csv(filepath, sep=' ', compression='gzip', skiprows=9, index_col=0, header=None, encoding='utf-8')
if verbose == 2:
print data.shape
print data.head(n=10)
elif verbose == 1:
print data.shape
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--dataset", default='sts', type=str)
parser.add_argument("--verbose", default=1, type=int)
parser.add_argument("--glovefile", default='data/glove.6B.300d.tar.gz', type=str)
args = parser.parse_args()
X, y = load_data (args.dataset, args.verbose)
gloveb300d = load_glove(args.glovefile, args.verbose)
|
# coding: utf-8
# Author: Hussein AL-NATSHEH <[email protected]>
# License: BSD 3 clause
# 2016
import pandas as pd
import argparse
import numpy as np
def load_data(dataset, verbose=0):
if dataset == "sts":
#Load STS data (combined 2012-2014 and cleaned)
data = pd.read_csv('data/sts_gs_all.csv', dtype={'Score': np.float32})
if verbose == 2:
print data.shape
print data.head(n=10)
elif verbose == 1:
print data.shape
X = data.as_matrix(columns=["Sent1", "Sent2"])
y = data.as_matrix(columns=["Score"])
return X, y
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--dataset", required=True, type=str)
parser.add_argument("--verbose", default=1, type=int)
args = parser.parse_args()
X, y = load_data (args.dataset, args.verbose)
|
bsd-3-clause
|
Python
|
d258bbe78be9cdf8ca2251add74a903f054b032a
|
add login/logout views. closes #7
|
hackupc/backend,hackupc/backend,hackupc/backend,hackupc/backend
|
app/urls.py
|
app/urls.py
|
"""testP URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^admin/', admin.site.urls),
# Auth views. Look at this in order to see who to use
# https://docs.djangoproject.com/en/1.10/topics/auth/default/
url(r'^accounts/login/$', auth_views.login, {'template_name': 'admin/login.html'}, name='login'),
url(r'^accounts/logout/$', auth_views.logout, name='logout'),
url(r'^accounts/password/$', auth_views.password_change, name='password_change'),
url(r'^accounts/password/done/$', auth_views.password_change_done, name='password_change_done'),
url(r'^', include('register.urls'))
]
|
"""testP URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from register import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('register.urls'))
]
|
mit
|
Python
|
c136d416c2cb53449e1c175412eeaa46a2f78db1
|
Fix syntax error in email service
|
cgwire/zou
|
zou/app/utils/emails.py
|
zou/app/utils/emails.py
|
from flask_mail import Message
from zou.app import mail, app
def send_email(subject, body, recipient_email, html=None):
"""
Send an email with given subject and body to given recipient.
"""
if html is None:
html = body
with app.app_context():
message = Message(
sender="Kitsu Bot <[email protected]>",
body=body,
html=html,
subject=subject,
recipients=[recipient_email]
)
mail.send(message)
|
from flask_mail import Message
from zou.app import mail, app
def send_email(subject, body, recipient_email, html=None):
"""
Send an email with given subject and body to given recipient.
"""
if html is None:
html = body
with app.app_context():
message = Message(
sender="Kitsu Bot <[email protected]>"
body=body,
html=html,
subject=subject,
recipients=[recipient_email]
)
mail.send(message)
|
agpl-3.0
|
Python
|
d3adfcbcf281f00aa454d4d8e45f6d5502495bde
|
Add get_absolute_url to UserSerializer
|
ZobairAlijan/osf.io,RomanZWang/osf.io,njantrania/osf.io,saradbowman/osf.io,doublebits/osf.io,samchrisinger/osf.io,MerlinZhang/osf.io,jinluyuan/osf.io,mluo613/osf.io,abought/osf.io,CenterForOpenScience/osf.io,reinaH/osf.io,kwierman/osf.io,samanehsan/osf.io,kwierman/osf.io,barbour-em/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,fabianvf/osf.io,lyndsysimon/osf.io,cldershem/osf.io,leb2dg/osf.io,doublebits/osf.io,kch8qx/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,jeffreyliu3230/osf.io,erinspace/osf.io,mluke93/osf.io,brianjgeiger/osf.io,hmoco/osf.io,sbt9uc/osf.io,jnayak1/osf.io,arpitar/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,laurenrevere/osf.io,jnayak1/osf.io,reinaH/osf.io,felliott/osf.io,wearpants/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,aaxelb/osf.io,hmoco/osf.io,caseyrollins/osf.io,samanehsan/osf.io,mattclark/osf.io,abought/osf.io,arpitar/osf.io,Nesiehr/osf.io,bdyetton/prettychart,HarryRybacki/osf.io,ckc6cz/osf.io,dplorimer/osf,jnayak1/osf.io,haoyuchen1992/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,felliott/osf.io,adlius/osf.io,RomanZWang/osf.io,sbt9uc/osf.io,asanfilippo7/osf.io,acshi/osf.io,danielneis/osf.io,baylee-d/osf.io,ckc6cz/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,rdhyee/osf.io,cosenal/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,haoyuchen1992/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,emetsger/osf.io,reinaH/osf.io,kch8qx/osf.io,jolene-esposito/osf.io,acshi/osf.io,samchrisinger/osf.io,cosenal/osf.io,cldershem/osf.io,njantrania/osf.io,abought/osf.io,rdhyee/osf.io,erinspace/osf.io,hmoco/osf.io,mluo613/osf.io,Nesiehr/osf.io,crcresearch/osf.io,njantrania/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,zachjanicki/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,chrisseto/osf.io,wearpants/osf.io,mluo613/osf.io,GageGaskins/osf.io,adlius/osf.io,caneruguz/osf.io,wearpants/osf.io,baylee-d/osf.io,petermalcolm/osf.io,caneruguz/osf.io,sloria/osf.io,icereval/osf.io,mfraezz/osf.io,GageGaskins/osf.io,barbour-em/osf.io,jmcarp/osf.io,fabianvf/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,pattisdr/osf.io,doublebits/osf.io,ckc6cz/osf.io,njantrania/osf.io,mfraezz/osf.io,cslzchen/osf.io,Nesiehr/osf.io,billyhunt/osf.io,fabianvf/osf.io,doublebits/osf.io,MerlinZhang/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,icereval/osf.io,fabianvf/osf.io,jinluyuan/osf.io,bdyetton/prettychart,danielneis/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,sbt9uc/osf.io,pattisdr/osf.io,mluke93/osf.io,SSJohns/osf.io,Ghalko/osf.io,kch8qx/osf.io,rdhyee/osf.io,amyshi188/osf.io,RomanZWang/osf.io,amyshi188/osf.io,barbour-em/osf.io,brandonPurvis/osf.io,icereval/osf.io,ticklemepierce/osf.io,amyshi188/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,caseyrygt/osf.io,cldershem/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,cwisecarver/osf.io,cslzchen/osf.io,acshi/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,dplorimer/osf,SSJohns/osf.io,barbour-em/osf.io,caseyrygt/osf.io,jmcarp/osf.io,dplorimer/osf,caneruguz/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,arpitar/osf.io,wearpants/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,ticklemepierce/osf.io,asanfilippo7/osf.io,kch8qx/osf.io,caseyrygt/osf.io,aaxelb/osf.io,rdhyee/osf.io,jinluyuan/osf.io,cosenal/osf.io,emetsger/osf.io,adlius/osf.io,jolene-esposito/osf.io,crcresearch/osf.io,amyshi188/osf.io,acshi/osf.io,GageGaskins/osf.io,jeffreyliu3230/osf.io,reinaH/osf.io,caneruguz/osf.io,lyndsysimon/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,Johnetordoff/osf.io,danielneis/osf.io,emetsger/osf.io,binoculars/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,billyhunt/osf.io,chennan47/osf.io,baylee-d/osf.io,mluo613/osf.io,billyhunt/osf.io,alexschiller/osf.io,pattisdr/osf.io,felliott/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,alexschiller/osf.io,MerlinZhang/osf.io,abought/osf.io,billyhunt/osf.io,alexschiller/osf.io,kwierman/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,chrisseto/osf.io,arpitar/osf.io,acshi/osf.io,chrisseto/osf.io,GageGaskins/osf.io,SSJohns/osf.io,zamattiac/osf.io,alexschiller/osf.io,jeffreyliu3230/osf.io,chennan47/osf.io,samanehsan/osf.io,mattclark/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,jmcarp/osf.io,SSJohns/osf.io,felliott/osf.io,mluke93/osf.io,Nesiehr/osf.io,sloria/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,caseyrygt/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,dplorimer/osf,petermalcolm/osf.io,doublebits/osf.io,jmcarp/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,cosenal/osf.io,mluke93/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,kwierman/osf.io,sloria/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,Ghalko/osf.io,erinspace/osf.io,binoculars/osf.io,jeffreyliu3230/osf.io,KAsante95/osf.io,binoculars/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,petermalcolm/osf.io,TomHeatwole/osf.io,emetsger/osf.io,chennan47/osf.io,Ghalko/osf.io,billyhunt/osf.io,danielneis/osf.io,jinluyuan/osf.io,KAsante95/osf.io,zamattiac/osf.io,ckc6cz/osf.io,TomHeatwole/osf.io,bdyetton/prettychart,mattclark/osf.io,hmoco/osf.io,mluo613/osf.io,mfraezz/osf.io,cldershem/osf.io,adlius/osf.io,sbt9uc/osf.io,zamattiac/osf.io
|
api/users/serializers.py
|
api/users/serializers.py
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def absolute_url(self, obj):
return obj.absolute_url
def update(self, instance, validated_data):
# TODO
pass
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
apache-2.0
|
Python
|
4b3f79ae5e30de867941d363d1f186d3c2494b4b
|
Remove obsolete token code.
|
google/gfw-toolkit
|
api_sample/http_utils.py
|
api_sample/http_utils.py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for http response and content handling.
Used for common cracking of the content document returned from a request.
"""
import json
import logging
import sys
# Http response codes to retry - includes quota issues.
# 402: Payment required
# 408: Request timeout
# 503: Service unavailable
# 504: Gateway timeout
RETRY_RESPONSE_CODES = [402, 408, 503, 504]
def _FromJsonString(json_string):
"""Helper to safely attempt a conversion from a json string to an object.
Args:
json_string: Presumably properly formatted json string.
Returns:
Object reflecting the conversion of the json.
"""
try:
return json.loads(json_string)
except ValueError as e:
print 'ERROR: response is not valid json: %s\n%s.' % (e, json_string)
sys.exit(1)
def ParseHttpResult(url, response, content):
"""Helper to more clearly find and return error messages.
Args:
url: full url including https:// for the RESTful API command.
response: response with headers from http.
content: content from the url (unzipped if necessary).
Returns:
If error text is discovered, returns a string with the error text
otherwise returns an object containing the content.
"""
logging.getLogger().debug('----------------------------------------')
logging.getLogger().debug('status=%d' % response.status)
logging.getLogger().debug('----------------------------------------')
logging.getLogger().debug('content=\n%s' % content)
logging.getLogger().debug('----------------------------------------')
if response.status in RETRY_RESPONSE_CODES:
print 'Possible quota problem (%d). %s. You should retry.' % (
response.status, url)
sys.exit(1)
content = _FromJsonString(content)
if 'error' in content:
error_text = ['ERROR: status=%d.' % response.status]
error_text += ['url=%s.' % url]
# The content:error.message seems to be more useful to users. Retrieve it.
message = content.get('error', {}).get('message')
if message:
error_text += ['message=%s' % message]
else:
error_text += ['content=%s' % content]
# The provisioning API is not available if the box is not checked.
if (response.status == 403 and
message == 'Domain cannot use apis.'):
error_text = [message, 'You should check "Enable provisioning API" '
'in your Domain Settings->User Settings.']
return '\n'.join(error_text)
return content
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils for http response and content handling.
Used for common cracking of the content document returned from a request.
"""
import json
import logging
import sys
# Http response codes to retry - includes quota issues.
# 402: Payment required
# 408: Request timeout
# 503: Service unavailable
# 504: Gateway timeout
RETRY_RESPONSE_CODES = [402, 408, 503, 504]
def _FromJsonString(json_string):
"""Helper to safely attempt a conversion from a json string to an object.
Args:
json_string: Presumably properly formatted json string.
Returns:
Object reflecting the conversion of the json.
"""
try:
return json.loads(json_string)
except ValueError as e:
print 'ERROR: response is not valid json: %s\n%s.' % (e, json_string)
sys.exit(1)
def ParseHttpResult(url, response, content):
"""Helper to more clearly find and return error messages.
Args:
url: full url including https:// for the RESTful API command.
response: response with headers from http.
content: content from the url (unzipped if necessary).
Returns:
If error text is discovered, returns a string with the error text
otherwise returns an object containing the content.
"""
logging.getLogger().debug('----------------------------------------')
logging.getLogger().debug('status=%d' % response.status)
logging.getLogger().debug('----------------------------------------')
logging.getLogger().debug('content=\n%s' % content)
logging.getLogger().debug('----------------------------------------')
if response.status in RETRY_RESPONSE_CODES:
print 'Possible quota problem (%d). %s. You should retry.' % (
response.status, url)
sys.exit(1)
content = _FromJsonString(content)
if 'error' in content:
error_text = ['ERROR: status=%d.' % response.status]
error_text += ['url=%s.' % url]
# The content:error.message seems to be more useful to users. Retrieve it.
message = content.get('error', {}).get('message')
if message:
error_text += ['message=%s' % message]
else:
error_text += ['content=%s' % content]
# The provisioning API is not available if the box is not checked.
if (response.status == 403 and
message == 'Domain cannot use apis.'):
error_text = [message, 'You should check "Enable provisioning API" '
'in your Domain Settings->User Settings.']
# When requesting tokens for a specific client_id, if no tokens
# are found, the API server responds with an unexpected 500 error.
# Notice that specific case and fail a little more gracefully.
elif (response.status == 500 and
message == 'No tokens exist for the specified client id'):
error_text = [message]
return '\n'.join(error_text)
return content
|
apache-2.0
|
Python
|
45689b8b2d91310e4002a63253009fddae947bb3
|
Bump copyright year in sphinx docs
|
babab/pycommand,babab/pycommand
|
doc/conf.py
|
doc/conf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pycommand
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pycommand'
copyright = '2013-2015, Benjamin Althues'
version = pycommand.__version__
release = pycommand.__version__
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'nature'
man_pages = [
('index', 'pycommand', 'pycommand Documentation',
['Benjamin Althues'], 3)
]
texinfo_documents = [
('index', 'pycommand', 'pycommand Documentation',
'Benjamin Althues', 'pycommand', pycommand.__doc__,
'Miscellaneous'),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from pycommand import __version__ as pycommand_version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pycommand'
copyright = '2013, Benjamin Althues'
version = pycommand_version
release = pycommand_version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'nature'
man_pages = [
('index', 'pycommand', 'pycommand Documentation',
['Benjamin Althues'], 3)
]
texinfo_documents = [
('index', 'pycommand', 'pycommand Documentation',
'Benjamin Althues', 'pycommand', 'One line description of project.',
'Miscellaneous'),
]
|
isc
|
Python
|
69912ea520a35f8c35a62d7a3c6efe1a9367f03f
|
Fix typo
|
aptivate/ckanext-mapactiontheme,aptivate/ckanext-mapactiontheme,aptivate/ckanext-mapactiontheme,aptivate/ckanext-mapactiontheme,aptivate/ckanext-mapactiontheme
|
ckanext/mapactiontheme/tests/test_admin_controller.py
|
ckanext/mapactiontheme/tests/test_admin_controller.py
|
from ckan.plugins.toolkit import config
import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
from ckan.plugins import load
class TestCustomAdminController(helpers.FunctionalTestBase):
def setup(self):
super(TestCustomAdminController, self).setup()
self.admin = factories.User(name='adminuser', sysadmin=True)
self.editor = factories.User(name='editor')
self.user = factories.User(name='user')
self.organization = factories.Organization(
name='mapaction', user=self.admin)
self.site_url = config.get("ckan.site_url")
self.app = self._get_test_app()
def test_editor_user(self):
context = {'ignore_auth': True}
helpers.call_action(
'organization_member_create',
context,
id=self.organization['id'],
username=self.editor['name'],
role='editor'
)
env = {'REMOTE_USER': self.editor['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
)
assert "Purge" in response.body
assert response.status_int == 200
def test_admin_user(self):
env = {'REMOTE_USER': self.admin['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
)
assert "Purge" in response.body
assert response.status_int == 200
def test_random_user(self):
env = {'REMOTE_USER': self.user['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
expect_errors=True
)
assert response.status_int == 403
def test_no_user(self):
"""Test without a logged in user"""
env = {'REMOTE_USER': ''}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
expect_errors=True
)
assert response.status_int == 403
|
from ckan.plugins.toolkit import config
import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
from ckan.plugins import load
class TestCustomAdminController(helpers.FunctionalTestBase):
def setup(self):
super(TestCustomAdminController, self).setup()
self.admin = factories.User(name='adminuser', sysadmin=True)
self.editor = factories.User(name='editor')
self.user = factories.User(name='user')
self.organization = factories.Organization(
name='mapaction', user=self.admin)
self.site_url = config.get("ckan.site_url")
self.app = self._get_test_app()
def test_editor_user(self):
context = {'ignore_auth': True}
helpers.call_action(
'organization_member_create',
context,
id=self.organization['id'],
username=self.editor['name'],
role='editor'
)
env = {'REMOTE_USER': self.editor['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
)
assert "Purge" in response.body
assert response.status_int == 200
def test_admin_user(self):
env = {'REMOTE_USER': self.admin['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
)
assert "Purge" in response.body
assert response.status_int == 200
def test_random_user(self):
env = {'REMOTE_USER': self.user['name'].encode('utf-8')}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
expect_errors=True
)
assert response.status_int == 403
def test_no_user(self):
"""Test without a loged in user"""
env = {'REMOTE_USER': ''}
response = self.app.get(
url='%s/ckan-admin/trash' % self.site_url,
extra_environ=env,
expect_errors=True
)
assert response.status_int == 403
|
agpl-3.0
|
Python
|
3ae496284e86815304736196bd66052fbfc9b81d
|
Support 'I Want you'
|
litrin/YoClient
|
YoClient.py
|
YoClient.py
|
#!/usr/bin/env python
import httplib
import urllib
class YoClient:
Host = 'api.justyo.co'
Port = 80
NoticeAPI = '/yo/'
BroadcastAPI = '/yoall/'
Headers = {'Cache-Control': 'no-cache',
'Content-Type': 'application/x-www-form-urlencoded'}
#Proxy = 'PROXY-HOSTNAME:PORT'
Proxy = None
Token = '' #Parse your token here
Error = None
link = None
def notice(self, username, link=None):
username = username.upper()
self.setLink(link)
param = {
'username' : username,
'api_token' : self.Token,
}
if self.link is not None:
param['link'] = self.link
return self._action(self.NoticeAPI, param)
def broadcast(self, link=None):
self.setLink(link)
param = { 'api_token' : self.Token }
if self.link is not None:
param['link'] = self.link
return self._action(self.BroadcastAPI, param)
def setLink(self, link):
if link is not None:
self.link = link
def _action(self, API, param):
param = urllib.urlencode(param)
if self.Proxy is not None:
conn = httplib.HTTPConnection(self.Proxy)
API = 'http://' + self.Host + API
else:
conn = httplib.HTTPConnection(host=self.Host, port=self.Port)
conn.request("POST", API, param, self.Headers)
result = conn.getresponse()
status = result.status / 100 == 2
if not status:
self.Error = result.read()
conn.close()
return status
class IWantYo(YoClient):
_imageGenratorUrl = 'http://www.hetemeel.com/unclesamshow.php'
def setLink(self, text):
YoClient.setLink(self, self._genrateUrl(text))
def _genrateUrl(self, text):
return "%s?%s" % (self._imageGenratorUrl, urllib.urlencode({'text' : text}))
setText = setLink
if __name__ == '__main__':
import sys
conn = IWantYo()
link = 'https://github.com/litrin/YoClient'
if len(sys.argv) > 1:
username = sys.argv[1]
status = conn.notice(username, link)
else:
status = conn.broadcast(link)
if (status): exit(0)
exit(1)
|
#!/usr/bin/env python
import httplib
import urllib
class YoClient:
Host = 'api.justyo.co'
Port = 80
NoticeAPI = '/yo/'
BroadcastAPI = '/yoall/'
Headers = {'Cache-Control': 'no-cache',
'Content-Type': 'application/x-www-form-urlencoded'}
#Proxy = 'PROXY-HOSTNAME:PORT'
Proxy = None
Token = '' #Parse your token here
Error = None
link = None
def notice(self, username, link=None):
username = username.upper()
self.setLink(link)
param = {
'username' : username,
'api_token' : self.Token,
}
if self.link is not None:
param['link'] = self.link
return self._action(self.NoticeAPI, param)
def broadcast(self, link=None):
self.setLink(link)
param = { 'api_token' : self.Token }
if self.link is not None:
param['link'] = self.link
return self._action(self.BroadcastAPI, param)
def setLink(self, link):
if link is not None:
self.link = link
def _action(self, API, param):
param = urllib.urlencode(param)
if self.Proxy is not None:
conn = httplib.HTTPConnection(self.Proxy)
API = 'http://' + self.Host + API
else:
conn = httplib.HTTPConnection(host=self.Host, port=self.Port)
conn.request("POST", API, param, self.Headers)
result = conn.getresponse()
status = result.status / 100 == 2
if not status:
self.Error = result.read()
conn.close()
return status
if __name__ == '__main__':
import sys
conn = YoClient()
link = 'https://github.com/litrin/YoClient'
if len(sys.argv) > 1:
username = sys.argv[1]
status = conn.notice(username, link)
else:
status = conn.broadcast(link)
if (status): exit(0)
exit(1)
|
bsd-2-clause
|
Python
|
9ccbc97652db1b7e6c7888b783722eee9f438104
|
make cbpro visible to tests
|
danpaquin/GDAX-Python,danpaquin/coinbase-gdax-python
|
__init__.py
|
__init__.py
|
# for tests
from cbpro.authenticated_client import AuthenticatedClient
from cbpro.public_client import PublicClient
|
mit
|
Python
|
|
020015cccceb3c2391c4764ee2ec29dfc5c461c6
|
Update plugin's register functions to return the object instance instead of performing the registration themselves
|
Curahelper/Cura,bq/Ultimaker-Cura,ad1217/Cura,bq/Ultimaker-Cura,senttech/Cura,lo0ol/Ultimaker-Cura,quillford/Cura,derekhe/Cura,ynotstartups/Wanhao,markwal/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,DeskboxBrazil/Cura,ynotstartups/Wanhao,totalretribution/Cura,ad1217/Cura,fieldOfView/Cura,quillford/Cura,fxtentacle/Cura,derekhe/Cura,hmflash/Cura,DeskboxBrazil/Cura,fieldOfView/Cura,totalretribution/Cura,Curahelper/Cura,markwal/Cura,fxtentacle/Cura,hmflash/Cura
|
__init__.py
|
__init__.py
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
return LayerView.LayerView()
|
from . import LayerView
def getMetaData():
return { "name": "LayerView", "type": "View" }
def register(app):
app.getController().addView("LayerView", LayerView.LayerView())
|
agpl-3.0
|
Python
|
90656a3b4eedac9ae87dbdb5485994c58c2f78d9
|
add pydq
|
Pyiner/pydq
|
__init__.py
|
__init__.py
|
# -*- coding: utf-8 -*-
import six
__title__ = 'pydq'
__version__ = '0.0.1'
__author__ = 'Pyiner'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Pyiner'
__all__ = ['DataQuery']
class DataQuery(object):
def __init__(self, data):
self.data = data
@staticmethod
def item_exist(item, **kwargs):
exist = True
for k, v in kwargs.items():
if k not in item or item[k] != v:
exist = False
break
return exist
def query(self, negate, **kwargs):
d = []
for item in self.data:
exist = self.item_exist(item, **kwargs)
if exist is negate:
d.append(item)
return self.__class__(data=d)
def filter(self, **kwargs):
return self.query(True, **kwargs)
def exclude(self, **kwargs):
return self.query(False, **kwargs)
def order_by(self, field):
desc = field.startswith('-')
field = field.strip('-')
d = sorted(self.data, key=lambda x: x[field], reverse=desc)
return self.__class__(data=d)
def __iter__(self):
return self.data
def __getitem__(self, k):
if not isinstance(k, (slice,) + six.integer_types):
raise TypeError
data = self.data
if isinstance(k, slice):
return data[k.start:k.stop:k.step]
return data[k]
if __name__ == '__main__':
xdata = [{
'a': 1,
'b': 2,
'c': 3
}, {
'a': 2,
'b': 1,
'c': 3
}, {
'a': 3,
'b': 2,
'c': 1
}]
dq = DataQuery(xdata)
for i in dq.filter(c=1):
print i
|
# -*- coding: utf-8 -*-
import six
__title__ = 'requests'
__version__ = '0.0.1'
__author__ = 'Pyiner'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Pyiner'
__all__ = ['DataQuery']
class DataQuery(object):
def __init__(self, data):
self.data = data
@staticmethod
def item_exist(item, **kwargs):
exist = True
for k, v in kwargs.items():
if k not in item or item[k] != v:
exist = False
break
return exist
def query(self, negate, **kwargs):
d = []
for item in self.data:
exist = self.item_exist(item, **kwargs)
if exist is negate:
d.append(item)
return self.__class__(data=d)
def filter(self, **kwargs):
return self.query(True, **kwargs)
def exclude(self, **kwargs):
return self.query(False, **kwargs)
def order_by(self, field):
desc = field.startswith('-')
field = field.strip('-')
d = sorted(self.data, key=lambda x: x[field], reverse=desc)
return self.__class__(data=d)
def __iter__(self):
return self.data
def __getitem__(self, k):
if not isinstance(k, (slice,) + six.integer_types):
raise TypeError
data = self.data
if isinstance(k, slice):
return data[k.start:k.stop:k.step]
return data[k]
if __name__ == '__main__':
xdata = [{
'a': 1,
'b': 2,
'c': 3
}, {
'a': 2,
'b': 1,
'c': 3
}, {
'a': 3,
'b': 2,
'c': 1
}]
dq = DataQuery(xdata)
for i in dq.filter(c=1):
print i
|
apache-2.0
|
Python
|
b186ed26e3250d8b02c94f5bb3b394c35986bcf6
|
Remove an import which snuck in but does not belong.
|
platipy/spyral
|
__init__.py
|
__init__.py
|
"""
Spyral, an awesome library for making games.
"""
__version__ = '0.1.1'
__license__ = 'MIT'
__author__ = 'Robert Deaton'
import compat
import memoize
import point
import camera
import sprite
import scene
import _lib
import event
import animator
import animation
import pygame
import image
import color
import rect
import signal
import vector
Scene = scene.Scene
Image = image.Image
Sprite = sprite.Sprite
Group = sprite.Group
AnimationSprite = animation.AnimationSprite
AnimationGroup = animation.AnimationGroup
Rect = rect.Rect
Signal = signal.Signal
Vec2D = vector.Vec2D
keys = event.keys
director = scene.Director()
def init():
event.init()
pygame.init()
pygame.font.init()
def quit():
pygame.quit()
director._stack = []
|
"""
Spyral, an awesome library for making games.
"""
__version__ = '0.1.1'
__license__ = 'MIT'
__author__ = 'Robert Deaton'
import compat
import memoize
import point
import camera
import sprite
import scene
import _lib
import event
import animator
import animation
import pygame
import image
import color
import rect
import signal
import font
import vector
Scene = scene.Scene
Image = image.Image
Sprite = sprite.Sprite
Group = sprite.Group
AnimationSprite = animation.AnimationSprite
AnimationGroup = animation.AnimationGroup
Rect = rect.Rect
Signal = signal.Signal
Vec2D = vector.Vec2D
keys = event.keys
director = scene.Director()
def init():
event.init()
pygame.init()
pygame.font.init()
def quit():
pygame.quit()
director._stack = []
|
lgpl-2.1
|
Python
|
354738da43c25e7f0de5f439f065b823bb97a413
|
sort by importance
|
cr33dog/pyxfce,cr33dog/pyxfce,cr33dog/pyxfce
|
__init__.py
|
__init__.py
|
__all__ = [ "gui", "mcs", "netk", "panel", "util" ]
import util
import gui
import netk
import mcs
import panel
|
__all__ = [ "gui", "mcs", "netk", "panel", "util" ]
import gui
import mcs
import netk
import panel
import util
|
bsd-3-clause
|
Python
|
c4a4c9333c874b38c121ce1181c12e7ed5aacc88
|
Add __init__.py
|
m4rx9/shellgraphics
|
__init__.py
|
__init__.py
|
from shellgraphics import *
|
from ShellGraphics import *
|
lgpl-2.1
|
Python
|
7bee2061fc0609205bb81dc781efbcd833ca74bb
|
Add save() interface on Workbook
|
kz26/PyExcelerate
|
Workbook.py
|
Workbook.py
|
import Worksheet
from Writer import Writer
class Workbook(object):
def __init__(self, encoding='utf-8'):
self._worksheets = []
self._encoding = encoding
self._writer = Writer(self)
def add_sheet(self, worksheet):
self._worksheets.append(worksheet)
def new_sheet(self, sheet_name):
worksheet = Worksheet.Worksheet(sheet_name, self)
self._worksheets.append(worksheet)
return worksheet
def get_xml_data(self):
for index, ws in enumerate(self._worksheets, 1):
yield (i, ws)
def save(self, output_filename):
self._writer.save(output_filename)
|
import Worksheet
class Workbook(object):
def __init__(self, encoding='utf-8'):
self._worksheets = []
self._encoding = encoding
def add_sheet(self, worksheet):
self._worksheets.append(worksheet)
def new_sheet(self, sheet_name):
worksheet = Worksheet.Worksheet(sheet_name, self)
self._worksheets.append(worksheet)
return worksheet
def get_xml_data(self):
for index, ws in enumerate(self._worksheets, 1):
yield (i, ws)
|
bsd-2-clause
|
Python
|
1ba0a6f93683a0e8f020abe5f34624db33c3a470
|
Update the package version
|
jason0x43/jcalfred
|
__init__.py
|
__init__.py
|
'''Utility classes and functions for creating Alfred workflows'''
__version__ = '20130913.0'
from .alfred import Workflow, WorkflowInfo, Item, JsonFile, LINE
from .keychain import Keychain
|
'''Utility classes and functions for creating Alfred workflows'''
__version__ = '20130909.0'
from .alfred import Workflow, WorkflowInfo, Item, JsonFile, LINE
from .keychain import Keychain
|
mit
|
Python
|
8df7fecb367d27d59402797ecfb9c13ba4f438b7
|
Fix reference to DEFAULT_USER_INTERFACE
|
Ghostkeeper/Luna
|
__main__.py
|
__main__.py
|
#!/usr/bin/env python
#This is free and unencumbered software released into the public domain.
#
#Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
#software, either in source code form or as a compiled binary, for any purpose,
#commercial or non-commercial, and by any means.
#
#In jurisdictions that recognise copyright laws, the author or authors of this
#software dedicate any and all copyright interest in the software to the public
#domain. We make this dedication for the benefit of the public at large and to
#the detriment of our heirs and successors. We intend this dedication to be an
#overt act of relinquishment in perpetuity of all present and future rights to
#this software under copyright law.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
#ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
#WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#For more information, please refer to <https://unlicense.org/>.
"""
Provides a base class for the application, and then starts the application.
"""
import os #For finding the root directory of Luna.
import sys #For reading command line arguments.
import luna.plugins #To initiate the plug-in loading and use the APIs.
class Luna:
"""
Base instance of the application.
"""
DEFAULT_USER_INTERFACE = "automatic"
"""
The default user interface to start with, unless instructed otherwise.
If this user interface does not exist, an error is thrown and the
application closes.
"""
def run(self):
"""
.. function:: run()
Starts the application.
This process will start the plug-in registering, and then selects a user
interface based on the command line arguments.
:returns: ``True`` if the application was finished successfully, or ``False`` if something went wrong.
"""
base_dir = os.path.dirname(os.path.abspath(__file__)) #Add the plugin directories.
luna.plugins.add_plugin_location(os.path.join(base_dir, "plugins"))
luna.plugins.discover()
logger = luna.plugins.api("logger")
logger.set_log_levels([logger.Level.ERROR, logger.Level.CRITICAL, logger.Level.WARNING, logger.Level.INFO, logger.Level.DEBUG])
user_interface_name = self.DEFAULT_USER_INTERFACE
if len(sys.argv) >= 2:
user_interface_name = sys.argv[1]
try:
if not luna.plugins.api("userinterface").exists(user_interface_name):
logger.error("Could not load the user interface {userinterface}. Aborting.", userinterface=user_interface_name)
return False
except ImportError:
logger.error("Could not load the user interface plug-in type. Aborting.")
return False
luna.plugins.api("userinterface").start(user_interface_name)
luna.plugins.api("userinterface").join(user_interface_name)
return True #Success.
#Launches Luna if called from the command line.
if __name__ == "__main__":
_application = Luna()
_application.run()
|
#!/usr/bin/env python
#This is free and unencumbered software released into the public domain.
#
#Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
#software, either in source code form or as a compiled binary, for any purpose,
#commercial or non-commercial, and by any means.
#
#In jurisdictions that recognise copyright laws, the author or authors of this
#software dedicate any and all copyright interest in the software to the public
#domain. We make this dedication for the benefit of the public at large and to
#the detriment of our heirs and successors. We intend this dedication to be an
#overt act of relinquishment in perpetuity of all present and future rights to
#this software under copyright law.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
#ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
#WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#For more information, please refer to <https://unlicense.org/>.
"""
Provides a base class for the application, and then starts the application.
"""
import os #For finding the root directory of Luna.
import sys #For reading command line arguments.
import luna.plugins #To initiate the plug-in loading and use the APIs.
class Luna:
"""
Base instance of the application.
"""
DEFAULT_USER_INTERFACE = "automatic"
"""
The default user interface to start with, unless instructed otherwise.
If this user interface does not exist, an error is thrown and the
application closes.
"""
def run(self):
"""
.. function:: run()
Starts the application.
This process will start the plug-in registering, and then selects a user
interface based on the command line arguments.
:returns: ``True`` if the application was finished successfully, or ``False`` if something went wrong.
"""
base_dir = os.path.dirname(os.path.abspath(__file__)) #Add the plugin directories.
luna.plugins.add_plugin_location(os.path.join(base_dir, "plugins"))
luna.plugins.discover()
logger = luna.plugins.api("logger")
logger.set_log_levels([logger.Level.ERROR, logger.Level.CRITICAL, logger.Level.WARNING, logger.Level.INFO, logger.Level.DEBUG])
user_interface_name = self.DEFAULT_INTERFACE
if len(sys.argv) >= 2:
user_interface_name = sys.argv[1]
try:
if not luna.plugins.api("userinterface").exists(user_interface_name):
logger.error("Could not load the user interface {userinterface}. Aborting.", userinterface=user_interface_name)
return False
except ImportError:
logger.error("Could not load the user interface plug-in type. Aborting.")
return False
luna.plugins.api("userinterface").start(user_interface_name)
luna.plugins.api("userinterface").join(user_interface_name)
return True #Success.
#Launches Luna if called from the command line.
if __name__ == "__main__":
_application = Luna()
_application.run()
|
cc0-1.0
|
Python
|
0b1b1ef6d3af248021125135b7534b3870517890
|
Add tasks urls
|
toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile
|
task/urls.py
|
task/urls.py
|
from django.conf.urls import include, url
from views import *
from rest_framework_jwt.views import obtain_jwt_token, refresh_jwt_token, verify_jwt_token
urlpatterns = [
]
|
apache-2.0
|
Python
|
|
85610d7e69abe8f75c9f64ede243dfaeb97b5601
|
Update spider.py
|
mascalx/PiSpider,mascalx/PiSpider
|
script/spider.py
|
script/spider.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import thread, time, cv2
import numpy as np
from gpiozero import Motor, PWMLED
# Eyelib can be managed by following variables:
# blinking : If True start a blink animation (1 cycle)
# blinkspd : Speed of blinking
# eyeangle :Ddirection of view
# eyedistance : Distance of pupil from center
# eyelid : Lid aperture (0=full open, 100=full close)
# autoblink : If True blinking is automatic
# eye : Eye image
# lid : Lid base image
import eyelib
# Last dewarped frame can be accessed by using variable "panorama"
# A copy should be used to access data in order to avoid in-between garbage
import dewarp
# Constants
M_FWD = 0 # GPIO pin for forward movement
M_BWD = 0 # GPIO pin for backward movement
M_CKW = 0 # GPIO pin for clockwise rotation
M_CCW = 0 # GPIO pin for counterclockwise rotation
BLIGHT = 0 # GPIO pin for TFT backlight control
ANG_SPD = 0 # Angular speed for head rotation
Facing = 0 # Current direction (approximate)
# Actuators creation
motor = Motor(M_FWD, M_BWD, pwm=True)
head = Motor(M_CKW, M_CCW, pwm=True)
backlight = PWMLED(BLIGHT)
# Move the spider forward or backward. Speed -1..0 = backward, 0..1 = forward
def Move(spd):
if (spd>0):
motor.forward(spd)
else:
motor.backward(abs(spd))
if (spd==0):
motor.stop()
# Rotate the head. Angle is the approximate rotation (0..360). Speed -1..0 = counterclockwise, 0..1 = clockwise
def Rotate(angle,spd):
global Facing
if (spd>0):
head.forward(spd)
else:
head.backward(abs(spd))
if (spd<>0):
time.sleep(ANG_SPD(angle/abs(spd)))
head.stop()
Facing=(Facing-(angle*(np.sign(spd))))%360
# Returns the angles of the brightest spot in the panoramic frame (x = horizontal, y = vertical)
def FindBrightestSpot(img):
gray = cv2.cvtColor(dewarp.panorama, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (11, 11), 0)
minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(gray)
X = ((dewarp.Wd-maxLoc[0])*360)/dewarp.Wd # Offset to be added to fix start position of the panorama
Y = ((dewarp.Hd-maxLoc[0])*30)/dewarp.Hd # Should be fixed after computing the right vertical FOV of the panorama
return X,Y
# Main program
if __name__ == '__main__':
backlight.value=1 # Start with backlight at full brightness
motor.stop() # Be sure the robot is not moving
head.stop() # Be sure the robot is not moving
thread.start_new_thread(Eye, ()) # Eye thread
thread.start_new_thread(UnWarp, ()) # Unwarping thread
while True: # Loop forever
pass
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import thread, time, cv2
import numpy as np
from gpiozero import Motor, PWMLED
# Eyelib can be managed by following variables:
# blinking : If True start a blink animation (1 cycle)
# blinkspd : Speed of blinking
# eye : Eye image
# lid : Lid base image
import eyelib
# Last dewarped frame can be accessed by using variable "panorama"
# A copy should be used to access data in order to avoid in-between garbage
import dewarp
# Constants
M_FWD = 0 # GPIO pin for forward movement
M_BWD = 0 # GPIO pin for backward movement
M_CKW = 0 # GPIO pin for clockwise rotation
M_CCW = 0 # GPIO pin for counterclockwise rotation
BLIGHT = 0 # GPIO pin for TFT backlight control
ANG_SPD = 0 # Angular speed for head rotation
Facing = 0 # Current direction (approximate)
# Actuators creation
motor = Motor(M_FWD, M_BWD, pwm=True)
head = Motor(M_CKW, M_CCW, pwm=True)
backlight = PWMLED(BLIGHT)
# Move the spider forward or backward. Speed -1..0 = backward, 0..1 = forward
def Move(spd):
if (spd>0):
motor.forward(spd)
else:
motor.backward(abs(spd))
if (spd==0):
motor.stop()
# Rotate the head. Angle is the approximate rotation (0..360). Speed -1..0 = counterclockwise, 0..1 = clockwise
def Rotate(angle,spd):
global Facing
if (spd>0):
head.forward(spd)
else:
head.backward(abs(spd))
if (spd<>0):
time.sleep(ANG_SPD(angle/abs(spd)))
head.stop()
Facing=(Facing-(angle*(np.sign(spd))))%360
# Returns the angles of the brightest spot in the panoramic frame (x = horizontal, y = vertical)
def FindBrightestSpot(img):
gray = cv2.cvtColor(dewarp.panorama, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (11, 11), 0)
minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(gray)
X = ((dewarp.Wd-maxLoc[0])*360)/dewarp.Wd # Offset to be added to fix start position of the panorama
Y = ((dewarp.Hd-maxLoc[0])*30)/dewarp.Hd # Should be fixed after computing the right vertical FOV of the panorama
return X,Y
# Main program
if __name__ == '__main__':
backlight.value=1 # Start with backlight at full brightness
motor.stop() # Be sure the robot is not moving
head.stop() # Be sure the robot is not moving
thread.start_new_thread(Eye, ()) # Eye thread
thread.start_new_thread(UnWarp, ()) # Unwarping thread
while True: # Loop forever
pass
|
apache-2.0
|
Python
|
f37f556ed497cf9c69f780290465610216ed6f5c
|
refactor ProfileViewTestCase: add setUp inherited by super class
|
jupiny/EnglishDiary,jupiny/EnglishDiary,jupiny/EnglishDiary
|
english_diary/profiles/tests/test_views.py
|
english_diary/profiles/tests/test_views.py
|
from django.test import Client
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
import datetime
from core.tests.base import BaseTestCase
from profiles.utils import set_expiration_date
class ProfileViewTestCase(BaseTestCase):
def setUp(self):
super(ProfileViewTestCase, self).setUp()
self.client = Client()
def test_verify_user_verification_key(self):
response = self.client.get(
reverse(
"profiles:email_verification",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
follow=True,
)
self.assertEqual(
response.status_code,
200,
)
self.assertRedirects(
response,
"/signin/?next=/",
)
self.assertTrue(
get_user_model().objects.last().is_verified,
)
def test_user_verification_key_expires(self):
# Make verification_key expire
self.user.profile.key_expires = set_expiration_date(-1)
self.user.profile.save()
response = self.client.get(
reverse(
"profiles:email_verification",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
follow=True,
)
self.assertTrue(
get_user_model().objects.last().profile.is_expired_key,
)
self.assertEqual(
response.status_code,
200,
)
self.assertRedirects(
response,
reverse(
"profiles:key_expires",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
)
|
from django.test import Client
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
import datetime
from core.tests.base import BaseTestCase
from profiles.utils import set_expiration_date
class ProfileViewTestCase(BaseTestCase):
def test_verify_user_verification_key(self):
client = Client()
response = client.get(
reverse(
"profiles:email_verification",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
follow=True,
)
self.assertEqual(
response.status_code,
200,
)
self.assertRedirects(
response,
"/signin/?next=/",
)
self.assertTrue(
get_user_model().objects.last().is_verified,
)
def test_user_verification_key_expires(self):
# Make verification_key expire
self.user.profile.key_expires = set_expiration_date(-1)
self.user.profile.save()
client = Client()
response = client.get(
reverse(
"profiles:email_verification",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
follow=True,
)
self.assertTrue(
get_user_model().objects.last().profile.is_expired_key,
)
self.assertEqual(
response.status_code,
200,
)
self.assertRedirects(
response,
reverse(
"profiles:key_expires",
kwargs={
"verification_key": self.user.profile.verification_key,
}
),
)
|
mit
|
Python
|
ce45df98d7fbd9b2679d20ac21a188a18295b667
|
remove debug print
|
hkernbach/arangodb,joerg84/arangodb,joerg84/arangodb,Simran-B/arangodb,Simran-B/arangodb,hkernbach/arangodb,hkernbach/arangodb,hkernbach/arangodb,joerg84/arangodb,Simran-B/arangodb,arangodb/arangodb,graetzer/arangodb,Simran-B/arangodb,joerg84/arangodb,hkernbach/arangodb,Simran-B/arangodb,joerg84/arangodb,wiltonlazary/arangodb,arangodb/arangodb,wiltonlazary/arangodb,fceller/arangodb,fceller/arangodb,graetzer/arangodb,arangodb/arangodb,joerg84/arangodb,fceller/arangodb,graetzer/arangodb,joerg84/arangodb,wiltonlazary/arangodb,hkernbach/arangodb,Simran-B/arangodb,graetzer/arangodb,graetzer/arangodb,wiltonlazary/arangodb,arangodb/arangodb,hkernbach/arangodb,joerg84/arangodb,joerg84/arangodb,arangodb/arangodb,fceller/arangodb,hkernbach/arangodb,Simran-B/arangodb,graetzer/arangodb,fceller/arangodb,wiltonlazary/arangodb,joerg84/arangodb,hkernbach/arangodb,joerg84/arangodb,wiltonlazary/arangodb,fceller/arangodb,graetzer/arangodb,fceller/arangodb,arangodb/arangodb,Simran-B/arangodb,joerg84/arangodb,graetzer/arangodb,hkernbach/arangodb,joerg84/arangodb,Simran-B/arangodb,wiltonlazary/arangodb,graetzer/arangodb,graetzer/arangodb,fceller/arangodb,graetzer/arangodb,hkernbach/arangodb,graetzer/arangodb,fceller/arangodb,wiltonlazary/arangodb,Simran-B/arangodb,hkernbach/arangodb,hkernbach/arangodb,graetzer/arangodb,fceller/arangodb,arangodb/arangodb,joerg84/arangodb,hkernbach/arangodb,arangodb/arangodb,graetzer/arangodb
|
3rdParty/V8/v5.7.0.0/gypfiles/gyp_v8.py
|
3rdParty/V8/v5.7.0.0/gypfiles/gyp_v8.py
|
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This file is (possibly, depending on python version) imported by
# gyp_v8 when GYP_PARALLEL=1 and it creates sub-processes through the
# multiprocessing library.
# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for imports
# that don't end in .py (and aren't directories with an __init__.py). This
# wrapper makes "import gyp_v8" work with those old versions and makes it
# possible to execute gyp_v8.py directly on Windows where the extension is
# useful.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..'))
if ("-Dbyteorder=big" not in sys.argv and "-Dbyteorder=little" not in sys.argv):
sys.argv.append("-Dbyteorder=" + sys.byteorder)
sys.argv.append("-DPYTHON_EXECUTABLE=" + sys.executable)
sys.byteorder
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_v8'))
|
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This file is (possibly, depending on python version) imported by
# gyp_v8 when GYP_PARALLEL=1 and it creates sub-processes through the
# multiprocessing library.
# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for imports
# that don't end in .py (and aren't directories with an __init__.py). This
# wrapper makes "import gyp_v8" work with those old versions and makes it
# possible to execute gyp_v8.py directly on Windows where the extension is
# useful.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..'))
if ("-Dbyteorder=big" not in sys.argv and "-Dbyteorder=little" not in sys.argv):
sys.argv.append("-Dbyteorder=" + sys.byteorder)
sys.argv.append("-DPYTHON_EXECUTABLE=" + sys.executable)
print("x"*80)
print(sys.argv)
sys.byteorder
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_v8'))
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.