commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
e103191a347baf446e76a12b9bc77986aa95dc2d
|
udata/__init__.py
|
udata/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
uData
'''
from __future__ import unicode_literals
__version__ = '1.2.11.dev'
__description__ = 'Open data portal'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
uData
'''
from __future__ import unicode_literals
__version__ = '1.3.0.dev'
__description__ = 'Open data portal'
|
Set working version to 1.3.0.dev
|
Set working version to 1.3.0.dev
|
Python
|
agpl-3.0
|
opendatateam/udata,opendatateam/udata,opendatateam/udata,etalab/udata,etalab/udata,etalab/udata
|
84e80ed4d36b102761e2c9b6900d46c4e126e9e6
|
main.py
|
main.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from google.cloud import error_reporting
from google.cloud import logging
from analysis import Analysis
from trading import Trading
from twitter import Twitter
# Analyzes Trump tweets, makes stock trades, and sends tweet alerts.
def twitter_callback(text, link):
companies = analysis.find_companies(text)
logger.log_text("Using companies: %s" % companies, severity="DEBUG")
if companies:
trading.make_trades(companies)
twitter.tweet(companies, link)
if __name__ == "__main__":
logger = logging.Client(use_gax=False).logger("main")
error_client = error_reporting.Client()
# Restart in a loop if there are any errors so we stay up.
while True:
logger.log_text("Starting new session.", severity="INFO")
analysis = Analysis()
trading = Trading()
twitter = Twitter(twitter_callback)
try:
twitter.start_streaming()
except Exception as exception:
error_client.report_exception()
logger.log_text("Exception on main thread: %s" % exception,
severity="ERROR")
finally:
logger.log_text("Ending session.", severity="INFO")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from google.cloud import error_reporting
from google.cloud import logging
from analysis import Analysis
from trading import Trading
from twitter import Twitter
# Analyzes Trump tweets, makes stock trades, and sends tweet alerts.
def twitter_callback(text, link):
# Initialize these here to create separate httplib2 instances per thread.
analysis = Analysis()
trading = Trading()
companies = analysis.find_companies(text)
logger.log_text("Using companies: %s" % companies, severity="DEBUG")
if companies:
trading.make_trades(companies)
twitter.tweet(companies, link)
if __name__ == "__main__":
logger = logging.Client(use_gax=False).logger("main")
error_client = error_reporting.Client()
# Restart in a loop if there are any errors so we stay up.
while True:
logger.log_text("Starting new session.", severity="INFO")
twitter = Twitter(twitter_callback)
try:
twitter.start_streaming()
except Exception as exception:
error_client.report_exception()
logger.log_text("Exception on main thread: %s" % exception,
severity="ERROR")
finally:
logger.log_text("Ending session.", severity="INFO")
|
Use separate instances of analysis and trading for each thread
|
Use separate instances of analysis and trading for each thread
|
Python
|
mit
|
maxbbraun/trump2cash
|
bd7ef1be82a6cd68060dee47046d90202b3a9e0c
|
tempest/api/volume/test_availability_zone.py
|
tempest/api/volume/test_availability_zone.py
|
# Copyright 2014 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest.lib import decorators
class AvailabilityZoneTestJSON(base.BaseVolumeTest):
"""Tests Availability Zone API List"""
@classmethod
def setup_clients(cls):
super(AvailabilityZoneTestJSON, cls).setup_clients()
cls.client = cls.availability_zone_client
@decorators.idempotent_id('01f1ae88-eba9-4c6b-a011-6f7ace06b725')
def test_get_availability_zone_list(self):
# List of availability zone
availability_zone = (self.client.list_availability_zones()
['availabilityZoneInfo'])
self.assertNotEmpty(availability_zone)
|
# Copyright 2014 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest.lib import decorators
class AvailabilityZoneTestJSON(base.BaseVolumeTest):
"""Tests Availability Zone API List"""
@decorators.idempotent_id('01f1ae88-eba9-4c6b-a011-6f7ace06b725')
def test_get_availability_zone_list(self):
# List of availability zone
availability_zone = (
self.availability_zone_client.list_availability_zones()
['availabilityZoneInfo'])
self.assertNotEmpty(availability_zone)
|
Remove unnecessary client alias in AvailabilityZoneTestJson
|
Remove unnecessary client alias in AvailabilityZoneTestJson
The class AvailabilityZoneTestJson is inherited from base.BaseVolumeTest,
and the latter has already declared the availability_zone_client. This
patch removes the unnecessary client alias for availability_zone_client.
Change-Id: I287d742087a72928774325681bb70837ecad72f7
|
Python
|
apache-2.0
|
masayukig/tempest,masayukig/tempest,Juniper/tempest,Juniper/tempest,openstack/tempest,cisco-openstack/tempest,cisco-openstack/tempest,openstack/tempest
|
bbfb09205974efa969fc636b6e1079a84dad3619
|
mcstatus/application.py
|
mcstatus/application.py
|
from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
try:
query = MinecraftQuery("mc.voltaire.sh", 25565, 10, 3)
basicQuery = query.get_status()
fullQuery = query.get_rules()
except socket.error as e:
if not options.quiet:
return "Server is down or unreachable:\n" + e.message
if not options.quiet:
numOnline = 'The server has %d players filling %d total slots. There are %d free slots.' % (basicQuery['numplayers'], basicQuery['maxplayers'], basicQuery['maxplayers'] - basic_status['numplayers'])
playersOnline = 'Online now: %s' % (fullQuery['players'])
return numOnline + "\n" + playersOnline
return "ermahgerd"
if __name__ == '__main__':
app.run()
|
from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
query = MinecraftQuery("142.54.162.42", 25565)
basic_status = query.get_status()
all_status = query.get_rules()
server_info = 'The server has %d / %d players.' % (basic_status['numplayers'], basic_status['maxplayers'])
status_info = 'Online now: %s' % (all_status['players'])
return "<pre>" + server_info + "\n" + status_info + "</pre>"
if __name__ == '__main__':
app.run()
|
Revert "check for connection failure"
|
Revert "check for connection failure"
This reverts commit cf4bd49e150f5542a5a7abba908ca81ebe1b9e75.
|
Python
|
bsd-3-clause
|
voltaire/minecraft-site-old,voltaire/minecraft-site-old
|
906247c6431b85c90f8aec8a7f4f73f1064abeba
|
mezzanine/pages/urls.py
|
mezzanine/pages/urls.py
|
from django.conf.urls.defaults import patterns, url
# Page patterns.
urlpatterns = patterns("mezzanine.pages.views",
url("^admin_page_ordering/$", "admin_page_ordering",
name="admin_page_ordering"),
url("^(?P<slug>.*)/$", "page", name="page"),
)
|
from django.conf.urls.defaults import patterns, url
from django.conf import settings
# Page patterns.
urlpatterns = patterns("mezzanine.pages.views",
url("^admin_page_ordering/$", "admin_page_ordering",
name="admin_page_ordering"),
url("^(?P<slug>.*)" + ("/" if settings.APPEND_SLASH else "") + "$", "page", name="page"),
)
|
Use Page URLs without trailing slash when settings.APPEND_SLASH is False
|
Use Page URLs without trailing slash when settings.APPEND_SLASH is False
|
Python
|
bsd-2-clause
|
biomassives/mezzanine,AlexHill/mezzanine,spookylukey/mezzanine,frankchin/mezzanine,saintbird/mezzanine,adrian-the-git/mezzanine,jjz/mezzanine,agepoly/mezzanine,geodesign/mezzanine,jerivas/mezzanine,saintbird/mezzanine,wyzex/mezzanine,Skytorn86/mezzanine,industrydive/mezzanine,ryneeverett/mezzanine,eino-makitalo/mezzanine,theclanks/mezzanine,mush42/mezzanine,tuxinhang1989/mezzanine,orlenko/sfpirg,sjuxax/mezzanine,theclanks/mezzanine,mush42/mezzanine,orlenko/plei,orlenko/plei,frankier/mezzanine,molokov/mezzanine,dustinrb/mezzanine,Skytorn86/mezzanine,promil23/mezzanine,wrwrwr/mezzanine,dekomote/mezzanine-modeltranslation-backport,viaregio/mezzanine,stephenmcd/mezzanine,biomassives/mezzanine,ZeroXn/mezzanine,promil23/mezzanine,tuxinhang1989/mezzanine,dovydas/mezzanine,stephenmcd/mezzanine,nikolas/mezzanine,christianwgd/mezzanine,promil23/mezzanine,eino-makitalo/mezzanine,theclanks/mezzanine,orlenko/plei,sjdines/mezzanine,wbtuomela/mezzanine,jerivas/mezzanine,wbtuomela/mezzanine,wrwrwr/mezzanine,Cajoline/mezzanine,sjuxax/mezzanine,frankchin/mezzanine,SoLoHiC/mezzanine,scarcry/snm-mezzanine,AlexHill/mezzanine,Kniyl/mezzanine,gbosh/mezzanine,batpad/mezzanine,molokov/mezzanine,orlenko/sfpirg,readevalprint/mezzanine,webounty/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,sjdines/mezzanine,sjuxax/mezzanine,SoLoHiC/mezzanine,douglaskastle/mezzanine,fusionbox/mezzanine,ryneeverett/mezzanine,Kniyl/mezzanine,christianwgd/mezzanine,wyzex/mezzanine,emile2016/mezzanine,scarcry/snm-mezzanine,readevalprint/mezzanine,douglaskastle/mezzanine,joshcartme/mezzanine,damnfine/mezzanine,gradel/mezzanine,Cicero-Zhao/mezzanine,cccs-web/mezzanine,damnfine/mezzanine,damnfine/mezzanine,wyzex/mezzanine,dovydas/mezzanine,webounty/mezzanine,Cajoline/mezzanine,vladir/mezzanine,frankier/mezzanine,douglaskastle/mezzanine,Kniyl/mezzanine,dustinrb/mezzanine,orlenko/sfpirg,PegasusWang/mezzanine,dovydas/mezzanine,emile2016/mezzanine,sjdines/mezzanine,SoLoHiC/mezzanine,emile2016/mezzanine,jjz/mezzanine,readevalprint/mezzanine,gbosh/mezzanine,wbtuomela/mezzanine,agepoly/mezzanine,Cicero-Zhao/mezzanine,fusionbox/mezzanine,stbarnabas/mezzanine,ZeroXn/mezzanine,webounty/mezzanine,viaregio/mezzanine,dsanders11/mezzanine,scarcry/snm-mezzanine,eino-makitalo/mezzanine,adrian-the-git/mezzanine,nikolas/mezzanine,jerivas/mezzanine,dekomote/mezzanine-modeltranslation-backport,geodesign/mezzanine,Cajoline/mezzanine,ryneeverett/mezzanine,frankier/mezzanine,joshcartme/mezzanine,vladir/mezzanine,stbarnabas/mezzanine,mush42/mezzanine,dustinrb/mezzanine,jjz/mezzanine,PegasusWang/mezzanine,frankchin/mezzanine,dekomote/mezzanine-modeltranslation-backport,gradel/mezzanine,tuxinhang1989/mezzanine,saintbird/mezzanine,viaregio/mezzanine,spookylukey/mezzanine,nikolas/mezzanine,industrydive/mezzanine,PegasusWang/mezzanine,joshcartme/mezzanine,agepoly/mezzanine,gradel/mezzanine,stephenmcd/mezzanine,geodesign/mezzanine,adrian-the-git/mezzanine,biomassives/mezzanine,industrydive/mezzanine,cccs-web/mezzanine,vladir/mezzanine,batpad/mezzanine,dsanders11/mezzanine,gbosh/mezzanine,ZeroXn/mezzanine,spookylukey/mezzanine,christianwgd/mezzanine,dsanders11/mezzanine
|
a9a5b9007f44495806bbdb876553974cd2d44e04
|
src/libpepper/utils/type_is.py
|
src/libpepper/utils/type_is.py
|
# Copyright (C) 2013 Andy Balaam and The Pepper Developers
# Released under the MIT License. See the file COPYING.txt for details.
# I assure you before God that what I am writing to you is no lie.
# Galations 1 v20
import types
import abc
def type_is( typ, inst ):
"""
Check that instance is of (exact) type typ.
Throws an Assertion error if not.
The arguments order is supposed to be reminiscent of C/Java style
function declarations.
"""
# Arguably, we should allow duck typing to take its course
# here - there could be some other metaclass we need to
# add later, but I prefer the easier-to-understand error
# message we get by checking the type.
if not (
type(typ) == type or
type(typ) == types.ClassType or
type(typ) == abc.ABCMeta
):
raise AssertionError(
"Wrong arguments to type_is: the first argument must be a " +
"class or type, not %s" % typ.__class__.__name__
)
if inst.__class__ != typ:
raise AssertionError(
"type_is check failed: expected a %s but found a %s." % (
typ.__name__, inst.__class__.__name__ ) )
|
# Copyright (C) 2013 Andy Balaam and The Pepper Developers
# Released under the MIT License. See the file COPYING.txt for details.
# I assure you before God that what I am writing to you is no lie.
# Galations 1 v20
import types
import abc
def type_is( typ, inst ):
"""
Check that inst is of (exact) type typ.
Throws an Assertion error if not.
The arguments order is supposed to be reminiscent of C/Java style
function declarations.
"""
# Arguably, we should allow duck typing to take its course
# here - there could be some other metaclass we need to
# add later, but I prefer the easier-to-understand error
# message we get by checking the type.
if not (
type(typ) == type or
type(typ) == types.ClassType or
type(typ) == abc.ABCMeta
):
raise AssertionError(
"Wrong arguments to type_is: the first argument must be a " +
"class or type, not %s" % typ.__class__.__name__
)
if inst.__class__ != typ:
raise AssertionError(
"type_is check failed: expected a %s but found a %s." % (
typ.__name__, inst.__class__.__name__ ) )
|
Fix incorrect name in comment.
|
Fix incorrect name in comment.
|
Python
|
mit
|
andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper
|
32d5f2bfcaad65230632f1854fabd2e9de1c151b
|
tests/query_test/test_chars.py
|
tests/query_test/test_chars.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
Fix char test to only run on test/none.
|
Fix char test to only run on test/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <[email protected]>
|
Python
|
apache-2.0
|
ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC,ibmsoe/ImpalaPPC
|
9909ce7e900097b74b701ad52c0aae5ba68b8823
|
morenines/repository.py
|
morenines/repository.py
|
import os
from morenines import output
from morenines import util
from morenines.index import Index
from morenines.ignores import Ignores
NAMES = {
'repo_dir': '.morenines',
'index': 'index',
'ignore': 'ignore',
}
class Repository(object):
def __init__(self):
self.path = None
self.index = None
self.ignores = None
def open(self, path):
repo_dir_path = find_repo(path)
if not repo_dir_path:
output.error("Cannot find repository in '{}' or any parent dir".format(path))
util.abort()
self.path = repo_dir_path
self.index = Index.read(os.path.join(self.path, NAMES['index']))
self.ignores = Ignores.read(os.path.join(self.path, NAMES['ignore']))
def find_repo(start_path):
if start_path == '/':
return None
path = os.path.join(start_path, NAMES['repo_dir'])
if os.path.isdir(path):
return path
parent = os.path.split(start_path)[0]
return find_repo(parent)
|
import os
from morenines import output
from morenines import util
from morenines.index import Index
from morenines.ignores import Ignores
NAMES = {
'mn_dir': '.morenines',
'index': 'index',
'ignore': 'ignore',
}
class Repository(object):
def __init__(self):
self.path = None
self.index = None
self.ignore = None
def init_paths(self, repo_path):
self.path = repo_path
self.mn_dir_path = os.path.join(self.path, NAMES['mn_dir'])
self.index_path = os.path.join(self.mn_dir_path, NAMES['index'])
self.ignore_path = os.path.join(self.mn_dir_path, NAMES['ignore'])
def open(self, path):
repo_path = find_repo(path)
if not repo_path:
output.error("Cannot find repository in '{}' or any parent dir".format(path))
util.abort()
self.init_paths(repo_path)
if os.path.isfile(self.index_path):
self.index = Index.read(self.index_path)
else:
self.index = Index(self.path)
if os.path.isfile(self.ignore_path):
self.ignore = Ignores.read(self.ignore_path)
else:
self.ignore = Ignores()
def find_repo(start_path):
if start_path == '/':
return None
mn_dir_path = os.path.join(start_path, NAMES['mn_dir'])
if os.path.isdir(mn_dir_path):
return start_path
parent = os.path.split(start_path)[0]
return find_repo(parent)
|
Rework Repository to include paths
|
Rework Repository to include paths
|
Python
|
mit
|
mcgid/morenines,mcgid/morenines
|
df95836c39f859a455afd06e32840f5e68fc2c1d
|
XIA2Version.py
|
XIA2Version.py
|
#!/usr/bin/env python
# XIA2Version.py
# Copyright (C) 2006 CCLRC, Graeme Winter
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
#
# 6th June 2006
#
# A file containing the version number of the current xia2. Generally useful.
#
VersionNumber = "0.3.8.0"
Version = "XIA2 %s" % VersionNumber
CVSTag = "xia2-%s" % VersionNumber.replace('.', '_')
Directory = "xia2-%s" % VersionNumber
if __name__ == '__main__':
print 'This is XIA 2 version %s' % VersionNumber
print 'This should be in a directory called "%s"' % Directory
print 'And should be CVS tagged as "%s"' % CVSTag
|
#!/usr/bin/env python
# XIA2Version.py
# Copyright (C) 2006 CCLRC, Graeme Winter
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
#
# 6th June 2006
#
# A file containing the version number of the current xia2. Generally useful.
#
VersionNumber = "0.4.0.0"
Version = "XIA2 %s" % VersionNumber
CVSTag = "xia2-%s" % VersionNumber.replace('.', '_')
Directory = "xia2-%s" % VersionNumber
if __name__ == '__main__':
print 'This is XIA 2 version %s' % VersionNumber
print 'This should be in a directory called "%s"' % Directory
print 'And should be CVS tagged as "%s"' % CVSTag
|
Update xia2 version prior to release of DIALS 1.0
|
Update xia2 version prior to release of DIALS 1.0
|
Python
|
bsd-3-clause
|
xia2/xia2,xia2/xia2
|
0fe3d41a4aa69f1ce39f61623d10985c20e012e8
|
network/ipv4_network.py
|
network/ipv4_network.py
|
import network
import math
class Ipv4Net(network.Network):
def __init__(self, net_id, netmask):
self.netmask = netmask
self.network_id = net_id
@property
def network_id(self):
return self._netid
@network_id.setter
def network_id(self, addr):
self._netid = addr & self.netmask
@property
def netmask(self):
return self._mask
@netmask.setter
def netmask(self, addr):
# TODO: Bug test
if math.log(addr + 1, 2) % 1 == 0:
self._mask = addr
else:
raise ValueError("Improper value for netmask %s, must be address \
with only consecutive bits" %addr)
# do i even need these functions or should I instead use something like pop/set/builtins etc
def addhost(self, host):
# TODO: make sure it's isinstance(ipv4) etc etc
pass
def delhost(self, host):
# TODO: everything o_o
pass
__all__ = ['Ipv4Net']
|
import network
import math
class Ipv4Net(network.Network):
def __init__(self, net_id, netmask):
self.netmask = netmask
self.network_id = net_id
@property
def network_id(self):
return self._netid
@network_id.setter
def network_id(self, addr):
self._netid = addr & self.netmask
@property
def netmask(self):
return self._mask
@netmask.setter
def netmask(self, addr):
# TODO: Bug test
cidr = int(math.log(addr + 1, 2))
if 2**cidr != addr + 1:
raise ValueError("Improper value for netmask %s, must be address \
with only consecutive bits" %addr)
else:
self._mask = addr
# do i even need these functions or should I instead use something like pop/set/builtins etc
def addhost(self, host):
# TODO: make sure it's isinstance(ipv4) etc etc
pass
def delhost(self, host):
# TODO: everything o_o
pass
__all__ = ['Ipv4Net']
|
Revert "Minor tweak to make netmask checking more flashy :V"
|
Revert "Minor tweak to make netmask checking more flashy :V"
This reverts commit 6a1bfc3ae8dda1932555a7aada3dd5f0e792507f.
|
Python
|
mit
|
foxfluff/network-py
|
dc071e4961c7db7e98e7dfdcd74cce368ce31039
|
dataportal/tests/test_examples.py
|
dataportal/tests/test_examples.py
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
import subprocess
from nose.tools import assert_true, assert_equal
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
examples = [temperature_ramp, multisource_event, image_and_scalar]
def run_example_programmatically(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def run_example_from_commandline(example):
command = ['python', example.__file__]
p = subprocess.Popen(command)
return_code = p.wait()
assert_equal(return_code, 0) # successful execution
def test_examples_programmatically():
for example in examples:
yield run_example_programmatically, example
def test_examples_from_commandline():
for example in examples:
yield run_example_from_commandline, example
|
Test commandline execution of examples.
|
TST: Test commandline execution of examples.
|
Python
|
bsd-3-clause
|
tacaswell/dataportal,NSLS-II/datamuxer,tacaswell/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/datamuxer,NSLS-II/dataportal,ericdill/databroker,ericdill/datamuxer,ericdill/databroker,danielballan/dataportal,danielballan/datamuxer,danielballan/dataportal
|
cba6d3639f348d60133069fadec223837fde0005
|
setup.py
|
setup.py
|
""" Setup script for python packaging """
from setuptools import setup
setup(
name="nbwavedrom",
packages=["nbwavedrom"],
version="0.3.0",
description="Wavedrom timing diagrams for Jupyter Notebook",
author="witchard",
author_email="[email protected]",
url="https://github.com/witchard/nbwavedrom",
download_url="https://github.com/witchard/nbwavedrom/tarball/0.3.0",
keywords=["wavedrom", "jupyter", "notebook"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
)
# DONT FORGET TO CHANGE DOWNLOAD_URL WHEN DOING A RELEASE!
# Thanks to this guide: http://peterdowns.com/posts/first-time-with-pypi.html
# Release with:
# git tag <version>
# git push --tags
# python setup.py sdist upload -r pypi
|
""" Setup script for python packaging """
from setuptools import setup
setup(
name="nbwavedrom",
packages=["nbwavedrom"],
version="0.3.0",
description="Wavedrom timing diagrams for Jupyter Notebook",
author="witchard",
author_email="[email protected]",
url="https://github.com/witchard/nbwavedrom",
download_url="https://github.com/witchard/nbwavedrom/tarball/0.3.0",
keywords=["wavedrom", "jupyter", "notebook"],
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
],
)
# Release with:
# Modify release URL above to new version
# git tag <version>
# git push --tags
# python setup.py sdist
# twine upload dist/nbwavedrom-<version>.tar.gz
|
Update notes on how to publish
|
Update notes on how to publish
|
Python
|
mit
|
witchard/ipython-wavedrom,witchard/ipython-wavedrom,witchard/ipython-wavedrom
|
e6cbf0c51e3bd0f639584a3bbd97c394bd844c57
|
setup.py
|
setup.py
|
from setuptools import setup
import addict
SHORT='Addict is a dictionary whose items can be set using both attribute and item syntax.'
LONG=('Addict is a module that exposes a dictionary subclass that allows items to be set like attributes. '
'Values are gettable and settable using both attribute and item syntax. '
'For more info check out the README at \'github.com/mewwts/addict\'.')
setup(
name='addict',
version=addict.__version__,
packages=['addict'],
url='https://github.com/mewwts/addict',
author=addict.__author__,
author_email='[email protected]',
classifiers=(
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
),
description=SHORT,
long_description=LONG,
test_suite='test_addict'
)
|
from setuptools import setup
import addict
SHORT='Addict is a dictionary whose items can be set using both attribute and item syntax.'
LONG=('Addict is a module that exposes a dictionary subclass that allows items to be set like attributes. '
'Values are gettable and settable using both attribute and item syntax. '
'For more info check out the README at \'github.com/mewwts/addict\'.')
setup(
name='addict',
version=addict.__version__,
packages=['addict'],
url='https://github.com/mewwts/addict',
author=addict.__author__,
author_email='[email protected]',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
),
description=SHORT,
long_description=LONG,
test_suite='test_addict'
)
|
Include versions tested on Travis in Trove classifiers
|
Include versions tested on Travis in Trove classifiers
|
Python
|
mit
|
mewwts/addict
|
1915cde046c1817c45317ad8ce882e807671fca3
|
oauth_api/permissions.py
|
oauth_api/permissions.py
|
from rest_framework.permissions import BasePermission
SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS']
class OAuth2ScopePermission(BasePermission):
"""
Make sure request is authenticated and token has right scope set.
"""
def has_permission(self, request, view):
token = request.auth
read_only = request.method in SAFE_METHODS
if not token:
return False
if hasattr(token, 'scope'):
scopes = self.get_scopes(request, view)
if scopes['required'] is not None:
is_valid = token.is_valid(scopes['required'])
if is_valid == False:
return False
else:
# View did not define any required scopes
is_valid = False
# Check for method specific scopes
if read_only:
if scopes['read'] is not None:
return token.is_valid(scopes['read'])
else:
if scopes['write'] is not None:
return token.is_valid(scopes['write'])
return is_valid
return False
def get_scopes(self, request, view):
return {
'required': getattr(view, 'required_scopes', None),
'read': getattr(view, 'read_scopes', None),
'write': getattr(view, 'write_scopes', None),
}
|
from django.core.exceptions import ImproperlyConfigured
from rest_framework.permissions import BasePermission
SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS']
class OAuth2ScopePermission(BasePermission):
"""
Make sure request is authenticated and token has right scope set.
"""
def has_permission(self, request, view):
token = request.auth
read_only = request.method in SAFE_METHODS
if not token:
return False
if hasattr(token, 'scope'):
scopes = self.get_scopes(request, view)
if scopes['required'] is not None:
is_valid = token.is_valid(scopes['required'])
if is_valid == False:
return False
else:
# View did not define any required scopes
is_valid = False
# Check for method specific scopes
if read_only:
if scopes['read'] is not None:
return token.is_valid(scopes['read'])
else:
if scopes['write'] is not None:
return token.is_valid(scopes['write'])
return is_valid
assert False, ('OAuth2ScopePermission requires the '
'`oauth_api.authentication.OAuth2Authentication` '
'class to be used.')
def get_scopes(self, request, view):
required = getattr(view, 'required_scopes', None)
read = getattr(view, 'read_scopes', None)
write = getattr(view, 'write_scopes', None)
if not required and not read and not write:
raise ImproperlyConfigured('OAuth protected resources requires scopes. Please add required_scopes, read_scopes or write_scopes.')
return {
'required': required,
'read': read,
'write': write,
}
|
Raise ImproperlyConfigured if resource has no scopes defined
|
Raise ImproperlyConfigured if resource has no scopes defined
|
Python
|
bsd-2-clause
|
eofs/django-oauth-api,eofs/django-oauth-api
|
846a005c20ddc2e7702be48f5b2839b1c9fd1576
|
project/apps/api/management/commands/denormalize.py
|
project/apps/api/management/commands/denormalize.py
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
|
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
Remove ranking from denormalization command
|
Remove ranking from denormalization command
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore
|
5da0d563070f3a966f005a6987dd3e83d52bcaf9
|
tailorscad/builder/openscad.py
|
tailorscad/builder/openscad.py
|
import subprocess
import os
#from tailorscad.config import ScadConfig
BASE_DIR = '/usr/bin/'
DEFAULT = BASE_DIR + 'openscad'
def build_with_openscad(state):
args = build_args_from_state(state)
out_call = ''
for arg in args:
out_call += ' ' + arg
print 'args:', out_call
try:
subprocess.check_call(args)
return True
except subprocess.CalledProcessError as (e):
print str(e)
return False
def build_args_from_state(state):
#executable = ScadConfig.open_scad if ScadConfig.open_scad else DEFAULT
executable = 'openscad'
replace = ['-D']
if state.params:
print 'state params:', state.params
replace = [':'.join((key, str(value)))
for key, value in state.params.iteritems()]
output = os.path.join(state.output_directory, state.name + ".stl")
args = [executable, '-o', output]
if len(replace) > 1:
args.extend(replace)
args.append(state.main_path)
return args
|
import subprocess
import os
#from tailorscad.config import ScadConfig
BASE_DIR = '/usr/bin/'
DEFAULT = BASE_DIR + 'openscad'
def build_with_openscad(state):
args = build_args_from_state(state)
out_call = ''
for arg in args:
out_call += ' ' + arg
print 'args:', out_call
try:
subprocess.check_call(args)
return True
except subprocess.CalledProcessError as (e):
print str(e)
return False
def build_args_from_state(state):
#executable = ScadConfig.open_scad if ScadConfig.open_scad else DEFAULT
executable = 'openscad'
replace = []
if state.params:
print 'state params:', state.params
# TODO: Handle string exceptions
replace = ['-D ' + '='.join((key, str(value)))
for key, value in state.params.iteritems()]
print 'state replace:', replace
# TODO: Handle different output types
output = os.path.join(state.output_directory, state.name + ".stl")
args = [executable, '-o', output]
if len(replace) >= 1:
args.extend(replace)
args.append(state.main_path)
return args
|
Fix params not being generated for other files
|
Fix params not being generated for other files
|
Python
|
mit
|
savorywatt/tailorSCAD
|
b98ab1c800a13792bdca69c5788e91bc07f1e215
|
rpp/encoder.py
|
rpp/encoder.py
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += name
for value, strvalue in zip(values, strvalues):
if isinstance(value, Symbol):
result += '\n' + (' ' * (level + 1) * indent) + strvalue
else:
result += ' ' + strvalue
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
Insert new line for each Symbol object
|
Insert new line for each Symbol object
|
Python
|
bsd-3-clause
|
Perlence/rpp
|
ec768a1aac8429f0c2c1f27c521c8a5f40d32411
|
enocean/consolelogger.py
|
enocean/consolelogger.py
|
# -*- encoding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
import logging
def init_logging(level=logging.DEBUG):
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('enocean')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
|
# -*- encoding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
import logging
def init_logging(level=logging.DEBUG):
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('enocean')
logger.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
ch.setFormatter(formatter)
logger.addHandler(ch)
|
Use proper logging level in `init_logging`.
|
Use proper logging level in `init_logging`.
|
Python
|
mit
|
Ethal/enocean,kipe/enocean,kipe/enocean,Ethal/enocean
|
42be31155361e5b91c445844da8aefbfd0f44348
|
ezoutlet/__init__.py
|
ezoutlet/__init__.py
|
# Copyright (C) 2015 Schweitzer Engineering Laboratories, Inc.
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from .ez_outlet import EzOutlet, EzOutletError, EzOutletUsageError, main
__version__ = '0.0.1-dev2'
|
# Copyright (C) 2015 Schweitzer Engineering Laboratories, Inc.
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from .ez_outlet import EzOutlet, EzOutletError, EzOutletUsageError, main
__version__ = '0.0.1-dev3'
|
Increment version number for release.
|
Increment version number for release.
|
Python
|
mit
|
jtpereyda/ezoutlet
|
2b70879e2d73453d81ed738f34cf20d39afdc3ad
|
byceps/blueprints/authorization/decorators.py
|
byceps/blueprints/authorization/decorators.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.authorization.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from functools import wraps
from flask import abort, g
def permission_required(permission):
"""Ensure the current user has the given permission."""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
if permission not in g.current_user.permissions:
abort(403)
return func(*args, **kwargs)
return wrapper
return decorator
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.authorization.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from functools import wraps
from flask import abort, g
def permission_required(permission):
"""Ensure the current user has the given permission."""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
if not g.current_user.has_permission(permission):
abort(403)
return func(*args, **kwargs)
return wrapper
return decorator
|
Call `has_permission` method instead of accessing the collection directly
|
Call `has_permission` method instead of accessing the collection directly
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
c7d522393930dabc1bb6997bbc2b450d043817e8
|
wagtail/utils/widgets.py
|
wagtail/utils/widgets.py
|
from django.forms.widgets import Widget
from django.utils.safestring import mark_safe
class WidgetWithScript(Widget):
def render_html(self, name, value, attrs):
"""Render the HTML (non-JS) portion of the field markup"""
return super().render(name, value, attrs)
def render(self, name, value, attrs=None):
# no point trying to come up with sensible semantics for when 'id' is missing from attrs,
# so let's make sure it fails early in the process
try:
id_ = attrs['id']
except (KeyError, TypeError):
raise TypeError("WidgetWithScript cannot be rendered without an 'id' attribute")
widget_html = self.render_html(name, value, attrs)
js = self.render_js_init(id_, name, value)
out = '{0}<script>{1}</script>'.format(widget_html, js)
return mark_safe(out)
def render_js_init(self, id_, name, value):
return ''
|
from django.forms.widgets import Widget
from django.utils.safestring import mark_safe
class WidgetWithScript(Widget):
def render_html(self, name, value, attrs):
"""Render the HTML (non-JS) portion of the field markup"""
return super().render(name, value, attrs)
def render(self, name, value, attrs=None, renderer=None):
# no point trying to come up with sensible semantics for when 'id' is missing from attrs,
# so let's make sure it fails early in the process
try:
id_ = attrs['id']
except (KeyError, TypeError):
raise TypeError("WidgetWithScript cannot be rendered without an 'id' attribute")
widget_html = self.render_html(name, value, attrs)
js = self.render_js_init(id_, name, value)
out = '{0}<script>{1}</script>'.format(widget_html, js)
return mark_safe(out)
def render_js_init(self, id_, name, value):
return ''
|
Fix WidgetWithScript to accept renderer kwarg
|
Fix WidgetWithScript to accept renderer kwarg
|
Python
|
bsd-3-clause
|
FlipperPA/wagtail,nimasmi/wagtail,timorieber/wagtail,kaedroho/wagtail,wagtail/wagtail,takeflight/wagtail,zerolab/wagtail,kaedroho/wagtail,gasman/wagtail,timorieber/wagtail,zerolab/wagtail,wagtail/wagtail,mikedingjan/wagtail,jnns/wagtail,zerolab/wagtail,mixxorz/wagtail,nimasmi/wagtail,zerolab/wagtail,mixxorz/wagtail,wagtail/wagtail,timorieber/wagtail,mikedingjan/wagtail,gasman/wagtail,gasman/wagtail,nimasmi/wagtail,torchbox/wagtail,torchbox/wagtail,nealtodd/wagtail,mikedingjan/wagtail,thenewguy/wagtail,jnns/wagtail,gasman/wagtail,timorieber/wagtail,FlipperPA/wagtail,nimasmi/wagtail,thenewguy/wagtail,kaedroho/wagtail,jnns/wagtail,nealtodd/wagtail,gasman/wagtail,nealtodd/wagtail,thenewguy/wagtail,kaedroho/wagtail,zerolab/wagtail,kaedroho/wagtail,mixxorz/wagtail,rsalmaso/wagtail,torchbox/wagtail,rsalmaso/wagtail,takeflight/wagtail,nealtodd/wagtail,takeflight/wagtail,takeflight/wagtail,wagtail/wagtail,rsalmaso/wagtail,thenewguy/wagtail,FlipperPA/wagtail,wagtail/wagtail,torchbox/wagtail,rsalmaso/wagtail,FlipperPA/wagtail,mixxorz/wagtail,jnns/wagtail,rsalmaso/wagtail,mixxorz/wagtail,mikedingjan/wagtail,thenewguy/wagtail
|
ea3576e16b0a8278cd9d35715c8881e9d136eec8
|
paystackapi/tests/test_cpanel.py
|
paystackapi/tests/test_cpanel.py
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.cpanel import ControlPanel
class TestPage(BaseTestCase):
@httpretty.activate
def test_fetch_payment_session_timeout(self):
"""Method defined to test fetch payment session timeout."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/integration/payment_session_timeout"),
content_type='text/json',
body='{"status": true, "message": "Payment session timeout retrieved"}',
status=201,
)
response = ControlPanel.fetch_payment_session_timeout()
self.assertTrue(response['status'])
|
import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.cpanel import ControlPanel
class TestPage(BaseTestCase):
@httpretty.activate
def test_fetch_payment_session_timeout(self):
"""Method defined to test fetch payment session timeout."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/integration/payment_session_timeout"),
content_type='text/json',
body='{"status": true, "message": "Payment session timeout retrieved"}',
status=201,
)
response = ControlPanel.fetch_payment_session_timeout()
self.assertTrue(response['status'])
@httpretty.activate
def test_fetch_payment_session_timeout(self):
"""Method defined to test update payment session timeout."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/integration/payment_session_timeout"),
content_type='text/json',
body='{"status": true, "message": "Payment session timeout updated"}',
status=201,
)
response = ControlPanel.update_payment_session_timeout(timeout=30)
self.assertTrue(response['status'])
|
Add control panel test for update
|
Add control panel test for update
|
Python
|
mit
|
andela-sjames/paystack-python
|
3d88e62cdd2521472d0475a5f4c8598d49f88571
|
code/python/echomesh/sound/CPlayer.py
|
code/python/echomesh/sound/CPlayer.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import cechomesh
from echomesh.expression.Envelope import Envelope
from echomesh.sound import PlayerSetter
from echomesh.util import Log
from echomesh.util.thread.MasterRunnable import MasterRunnable
LOGGER = Log.logger(__name__)
def test():
print('test!')
class CPlayer(MasterRunnable):
def __init__(self, element, level=1, pan=0, loops=1, length=-1, **kwds):
super(CPlayer, self).__init__()
PlayerSetter.evaluate_player(
self, element, level=level, pan=pan, loops=loops, length=length, **kwds)
self.test2 = self.test
self.player = cechomesh.AudioSource(
self._filename, self._loops, self._begin, self._end,
self._length, "", 2, self._level, self._pan, self.pause)
def __del__(self):
super(CPlayer, self).__del__()
self.unload()
def _on_begin(self):
return self.player.begin()
def _on_run(self):
return self.player.run()
def _on_pause(self):
return self.player.pause()
def test(self):
print('test2!!')
def unload(self):
super(CPlayer, self).unload()
self.player.unload()
|
from __future__ import absolute_import, division, print_function, unicode_literals
import cechomesh
from echomesh.expression.Envelope import Envelope
from echomesh.sound import PlayerSetter
from echomesh.util import Log
from echomesh.util.thread.MasterRunnable import MasterRunnable
LOGGER = Log.logger(__name__)
class CPlayer(MasterRunnable):
def __init__(self, element, level=1, pan=0, loops=1, length=-1, **kwds):
super(CPlayer, self).__init__()
PlayerSetter.evaluate_player(
self, element, level=level, pan=pan, loops=loops, length=length, **kwds)
self.test2 = self.test
self.player = cechomesh.AudioSource(
self._filename, self._loops, self._begin, self._end,
self._length, "", 2, self._level, self._pan, self.pause)
def __del__(self):
super(CPlayer, self).__del__()
self.unload()
def _on_begin(self):
return self.player.begin()
def _on_run(self):
return self.player.run()
def _on_pause(self):
return self.player.pause()
def unload(self):
super(CPlayer, self).unload()
self.player.unload()
|
Remove tiny amounts of cruft.
|
Remove tiny amounts of cruft.
|
Python
|
mit
|
rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh
|
8e0d61aa69a15a9efc967ec263bc73c3018f9b3d
|
process_to_only_word.py
|
process_to_only_word.py
|
# -*- coding: utf-8 -*-
import re
import sys
###########################################################################
# This code is developing yet!!
# Target file(Already morphological analysis file) to process to word only.
###########################################################################
argvs = sys.argv
argc = len(argvs)
if (argc != 2):
print('Usage: # python %s filename' % argvs[0])
quit()
with open(argvs[1],mode='r', encoding='utf-8') as read_file:
line = read_file.readlines()
for l in line:
split_text_list = str(l).split()
if(len(split_text_list) == 0):
continue
print(split_text_list[0])
|
# -*- coding: utf-8 -*-
import re
import sys
###########################################################################
# This code is developing yet!!
# Target file(Already morphological analysis file) to process to word only.
# <How to use>
# python process_to_only_word.py <Target file(Already morphological analysis file)> > result.txt
###########################################################################
argvs = sys.argv
argc = len(argvs)
if (argc != 2):
print('Usage: # python %s filename' % argvs[0])
quit()
with open(argvs[1],mode='r', encoding='utf-8') as read_file:
line = read_file.readlines()
for l in line:
split_text_list = str(l).split()
if(len(split_text_list) == 0):
continue
print(split_text_list[0])
|
Add a "How to use(comment)"
|
Add a "How to use(comment)"
|
Python
|
mit
|
shinshin86/little-magnifying-py-glass,shinshin86/little-magnifying-py-glass
|
c2a8e69a5deee8f72c561f50732570801d4fc9ae
|
tests/test_stack_operations.py
|
tests/test_stack_operations.py
|
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
|
import pytest
from thinglang.execution.errors import UnknownVariable
from thinglang.runner import run
def test_stack_resolution_in_block():
assert run("""
thing Program
does start
number i = 0
Output.write("outside before, i =", i)
if true
Output.write("inside before, i =", i)
i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""").output == """
outside before, i = 0
inside before, i = 0
inside after, i = 10
outside after, i = 10""".strip()
def test_stack_resolution_error_during_access_after_nested_deceleration():
with pytest.raises(UnknownVariable):
run("""
thing Program
does start
if true
number i = 10
Output.write("inside after, i =", i)
Output.write("outside after, i =", i)
""")
|
Add test for variables declared inside a scope (and accessed outside)
|
Add test for variables declared inside a scope (and accessed outside)
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
aefa8a3d6d4c809c7e470b22a0c9fb2c0875ba8b
|
project/project/urls.py
|
project/project/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk', app_name='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app', app_name='example_app')
),
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app')
),
url(
r'^admin/',
admin.site.urls
),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Remove unneeded app_name from test project to be django 2 compatible
|
Remove unneeded app_name from test project to be django 2 compatible
|
Python
|
mit
|
crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,django-silk/silk,django-silk/silk,jazzband/silk,django-silk/silk,crunchr/silk,mtford90/silk,jazzband/silk,mtford90/silk,django-silk/silk
|
aae29a385129e6a1573fac2c631eff8db8ea3079
|
stackdio/stackdio/__init__.py
|
stackdio/stackdio/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import sys
from .version import __version__, __version_info__ # NOQA
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
try:
from .celery import app as celery_app
except ImportError:
sys.stderr.write('Not importing celery... Ignore if this is running setup.py.\n')
__copyright__ = "Copyright 2014, Digital Reasoning"
__license__ = "Apache License Version 2.0, January 2004"
__maintainer__ = "https://github.com/stackdio/stackdio"
|
# -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import sys
from .version import __version__, __version_info__ # NOQA
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
try:
from .celery import app as celery_app
except ImportError:
sys.stderr.write("Not importing celery... "
"Ignore if this if you're currently running setup.py.\n")
__copyright__ = "Copyright 2014, Digital Reasoning"
__license__ = "Apache License Version 2.0, January 2004"
__maintainer__ = "https://github.com/stackdio/stackdio"
|
Print a more useful warning message
|
Print a more useful warning message
|
Python
|
apache-2.0
|
stackdio/stackdio,clarkperkins/stackdio,stackdio/stackdio,clarkperkins/stackdio,clarkperkins/stackdio,clarkperkins/stackdio,stackdio/stackdio,stackdio/stackdio
|
f457cb13325cbc4b83a3450032856e86fe1285cf
|
kokki/cookbooks/mongodb/libraries/server.py
|
kokki/cookbooks/mongodb/libraries/server.py
|
import os
from kokki import *
def setup(name, **kwargs):
env = Environment.get_instance()
config = env.config.mongodb.copy()
config.update(kwargs)
config['configpath'] = "/etc/mongodb/%s.conf" % name
if 'dbpath' not in kwargs:
config['dbpath'] = os.path.join(config.dbpath, name)
if 'logfilename' not in kwargs:
config['logfilename'] = "%s.log" % name
Directory("/etc/mongodb",
owner = "root",
group = "root",
mode = 0755)
Directory(config.dbpath,
owner = "mongodb",
group = "mongodb",
mode = 0755,
recursive = True)
Service("mongodb-%s" % name)
File("/etc/init/mongodb-%s" % name,
owner = "root",
group = "root",
mode = 0644,
content = Template("mongodb/upstart.conf.j2", variables=dict(mongodb=config)),
notifies = [
("reload", env.resources["Service"]["mongodb-%s" % name], True),
])
File(config.configpath,
owner = "root",
group = "root",
mode = 0644,
content = Template("mongodb/mongodb.conf.j2", variables=dict(mongodb=config)),
notifies = [("restart", env.resources["Service"]["mongodb-%s" % name])])
|
import os
from kokki import *
def setup(name, **kwargs):
env = Environment.get_instance()
config = env.config.mongodb.copy()
config.update(kwargs)
config['configpath'] = "/etc/mongodb/%s.conf" % name
if 'dbpath' not in kwargs:
config['dbpath'] = os.path.join(config.dbpath, name)
if 'logfilename' not in kwargs:
config['logfilename'] = "%s.log" % name
Directory("/etc/mongodb",
owner = "root",
group = "root",
mode = 0755)
Directory(config.dbpath,
owner = "mongodb",
group = "mongodb",
mode = 0755,
recursive = True)
Service("mongodb-%s" % name)
File("/etc/init/mongodb-%s.conf" % name,
owner = "root",
group = "root",
mode = 0644,
content = Template("mongodb/upstart.conf.j2", variables=dict(mongodb=config)),
notifies = [
("reload", env.resources["Service"]["mongodb-%s" % name], True),
])
File(config.configpath,
owner = "root",
group = "root",
mode = 0644,
content = Template("mongodb/mongodb.conf.j2", variables=dict(mongodb=config)),
notifies = [("restart", env.resources["Service"]["mongodb-%s" % name])])
|
Make sure to end upstart script with .conf
|
Make sure to end upstart script with .conf
|
Python
|
bsd-3-clause
|
samuel/kokki
|
067bbbc6c9edbf55606fe6f236c70affd86a1fc0
|
tests/convert/test_unit.py
|
tests/convert/test_unit.py
|
from unittest.mock import patch
from smif.convert.unit import parse_unit
def test_parse_unit_valid():
"""Parse a valid unit
"""
meter = parse_unit('m')
assert str(meter) == 'meter'
@patch('smif.convert.unit.LOGGER.warning')
def test_parse_unit_invalid(warning_logger):
"""Warn if unit not recognised
"""
unit = 'unrecognisable'
parse_unit(unit)
msg = "Unrecognised unit: %s"
warning_logger.assert_called_with(msg, unit)
|
import numpy as np
from unittest.mock import patch
from smif.convert.unit import parse_unit
from smif.convert import UnitConvertor
def test_parse_unit_valid():
"""Parse a valid unit
"""
meter = parse_unit('m')
assert str(meter) == 'meter'
@patch('smif.convert.unit.LOGGER.warning')
def test_parse_unit_invalid(warning_logger):
"""Warn if unit not recognised
"""
unit = 'unrecognisable'
parse_unit(unit)
msg = "Unrecognised unit: %s"
warning_logger.assert_called_with(msg, unit)
def test_convert_unit():
data = np.array([[1, 2], [3, 4]], dtype=float)
convertor = UnitConvertor()
actual = convertor.convert(data, 'liter', 'milliliter')
expected = np.array([[1000, 2000], [3000, 4000]], dtype=float)
np.allclose(actual, expected)
|
Add test for normal unit conversion
|
Add test for normal unit conversion
|
Python
|
mit
|
tomalrussell/smif,tomalrussell/smif,nismod/smif,nismod/smif,tomalrussell/smif,nismod/smif,nismod/smif,willu47/smif,willu47/smif,willu47/smif,willu47/smif,tomalrussell/smif
|
c56a6c2f861d50d2bdc38ee33d30e4ef614a2de0
|
tests/sim/test_entities.py
|
tests/sim/test_entities.py
|
import unittest
from hunting.sim.entities import *
class TestFighter(unittest.TestCase):
def test_minimum_speed_is_one(self):
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=-5).speed, 1)
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=0).speed, 1)
|
import unittest
from hunting.sim.entities import *
class TestPropertyEffect(unittest.TestCase):
def setUp(self):
self.fighter = Fighter(100, 100, 100, 0, base_speed=100)
def test_add_remove_power(self):
power_buff = PropertyEffect(PROPERTY_POWER, value=100)
self.fighter.add_effect(power_buff)
self.assertEqual(self.fighter.power, 200)
self.fighter.remove_effect(power_buff)
self.assertEqual(self.fighter.power, 100)
def test_add_remove_speed(self):
speed_buff = PropertyEffect(PROPERTY_SPEED, value=100)
self.fighter.add_effect(speed_buff)
self.assertEqual(self.fighter.speed, 200)
self.fighter.remove_effect(speed_buff)
self.assertEqual(self.fighter.speed, 100)
def test_add_remove_defense(self):
defense_buff = PropertyEffect(PROPERTY_DEFENSE, value=100)
self.fighter.add_effect(defense_buff)
self.assertEqual(self.fighter.defense, 200)
self.fighter.remove_effect(defense_buff)
self.assertEqual(self.fighter.defense, 100)
class TestFighter(unittest.TestCase):
def test_minimum_speed_is_one(self):
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=-5).speed, 1)
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=0).speed, 1)
|
Add failing tests for buffs
|
Add failing tests for buffs
|
Python
|
mit
|
MoyTW/RL_Arena_Experiment
|
51660291b043b88eab599c59d8c1ef7ae9dc74d7
|
src/core/models.py
|
src/core/models.py
|
from django.db import models
from django.contrib.auth.models import User
from util.session import get_or_generate_session_name
class Session(models.Model):
name = models.CharField(max_length=255)
user = models.ForeignKey(User, blank=True, null=True)
started_at = models.DateTimeField('started at', auto_now_add=True)
# On Python 3: def __str__(self):
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
existing_session_names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
self.name = get_or_generate_session_name(self.name, existing_session_names)
super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
class Spec(models.Model):
code = models.TextField()
session = models.ForeignKey(Session)
author = models.ForeignKey(User, verbose_name='The author of this last update.')
tests_passed = models.NullBooleanField(default=False)
saved_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'saved_at'
|
from django.db import models
from django.contrib.auth.models import User
from util.session import get_or_generate_session_name
from util.session import DEFAULT_SESSION_NAME_PREFIX
class Session(models.Model):
name = models.CharField(max_length=255)
user = models.ForeignKey(User, blank=True, null=True)
started_at = models.DateTimeField('started at', auto_now_add=True)
# On Python 3: def __str__(self):
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
existing_session_names = Session.objects.filter(name__startswith=DEFAULT_SESSION_NAME_PREFIX, user=self.user).only('name')
self.name = get_or_generate_session_name(self.name, existing_session_names)
super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
class Spec(models.Model):
code = models.TextField()
session = models.ForeignKey(Session)
author = models.ForeignKey(User, verbose_name='The author of this last update.')
tests_passed = models.NullBooleanField(default=False)
saved_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'saved_at'
|
Use the existing default name.
|
Use the existing default name.
|
Python
|
mit
|
uxebu/tddbin-backend,uxebu/tddbin-backend
|
9c6f3e1994f686e57092a7cd947c49b4f857743e
|
apps/predict/urls.py
|
apps/predict/urls.py
|
"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url
from .views import *
def url_tree(regex, *urls):
"""Quick access to stitching url patterns"""
return url(regex, include(patterns('', *urls)))
urlpatterns = patterns('',
url(r'^$', Datasets.as_view(), name="view_my_datasets"),
url_tree(r'^upload/',
url(r'^$', UploadChoices.as_view(), name="upload"),
url(r'^manual/$', UploadManual.as_view(), name="upload_manual"),
url_tree(r'^(?P<type>[\w-]+)/',
url(r'^$', UploadView.as_view(), name="upload"),
url(r'^(?P<fastq>[\w-]+)/$', UploadView.as_view(), name="upload"),
),
),
url_tree(r'^(?P<slug>\w{32})/',
url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
url(r'^callback/$', Callback.as_view(), name="callback"),
url(r'^note/$', AddNote.as_view(), name="add_note"),
),
)
|
"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url
from .views import *
def url_tree(regex, *urls):
"""Quick access to stitching url patterns"""
return url(regex, include(patterns('', *urls)))
urlpatterns = patterns('',
url(r'^$', Datasets.as_view(), name="view_my_datasets"),
url_tree(r'^upload/',
url(r'^$', UploadChoices.as_view(), name="upload"),
url(r'^(?P<type>[\w-]+)/', UploadView.as_view(), name="upload"),
),
url_tree(r'^(?P<slug>\w{32})/',
url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
url(r'^note/$', AddNote.as_view(), name="add_note"),
),
)
|
Remove callback url and bring uploads together
|
Remove callback url and bring uploads together
|
Python
|
agpl-3.0
|
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
|
a077a5b7731e7d609b5c3adc8f8176ad79053f17
|
rmake/lib/twisted_extras/tools.py
|
rmake/lib/twisted_extras/tools.py
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
self._lock.release()
del self._waiting[d]
return result
return d
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
del self._waiting[d]
self._lock.release()
return result
return d
|
Fix Serializer locking bug that caused it to skip calls it should have made
|
Fix Serializer locking bug that caused it to skip calls it should have made
|
Python
|
apache-2.0
|
sassoftware/rmake3,sassoftware/rmake3,sassoftware/rmake3
|
42e16bf376a64995a8b70a91829a82d7b0f3e1a1
|
gameanalysis/__main__.py
|
gameanalysis/__main__.py
|
"""Command line module"""
import argparse
import pkgutil
import sys
import gameanalysis
from gameanalysis import script
def create_parser():
"""Create the default parser"""
modules = [imp.find_module(name).load_module(name) for imp, name, _
in pkgutil.iter_modules(script.__path__)]
parser = argparse.ArgumentParser(
description="""Command line access to the game analysis toolkit.""")
parser.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(gameanalysis.__version__))
subparsers = parser.add_subparsers(
title='commands', dest='command', metavar='<command>', help="""The
commands to execute. Available commands are:""")
for module in modules:
subparser = module.add_parser(subparsers)
subparser.main = module.main
return parser, subparsers.choices
def amain(*argv):
"""Entry point for game analysis"""
parser, commands = create_parser()
args = parser.parse_args(argv)
if args.command is None:
parser.print_help()
sys.exit(1)
else:
commands[args.command].main(args)
def main():
"""Entry point for game analysis"""
amain(*sys.argv[1:])
if __name__ == '__main__':
main()
|
"""Command line module"""
import argparse
import logging
import pkgutil
import sys
import gameanalysis
from gameanalysis import script
def create_parser():
"""Create the default parser"""
modules = [imp.find_module(name).load_module(name) for imp, name, _
in pkgutil.iter_modules(script.__path__)]
parser = argparse.ArgumentParser(
description="""Command line access to the game analysis toolkit.""")
parser.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(gameanalysis.__version__))
parser.add_argument(
'-v', '--verbose', action='count', default=0, help="""Set the verbosity
level depending on the number of times specified, up to a maximum of
three.""")
subparsers = parser.add_subparsers(
title='commands', dest='command', metavar='<command>', help="""The
commands to execute. Available commands are:""")
for module in modules:
subparser = module.add_parser(subparsers)
subparser.main = module.main
return parser, subparsers.choices
def amain(*argv):
"""Entry point for game analysis"""
parser, commands = create_parser()
args = parser.parse_args(argv)
if args.command is None:
parser.print_help()
sys.exit(1)
else:
logging.basicConfig(level=40 - 10 * min(args.verbose, 3))
commands[args.command].main(args)
def main():
"""Entry point for game analysis"""
amain(*sys.argv[1:])
if __name__ == '__main__':
main()
|
Add logging verbosity to game analysis
|
Add logging verbosity to game analysis
|
Python
|
apache-2.0
|
egtaonline/GameAnalysis
|
ccc57956f83af2c8ef8c9e064fadfe5db2155302
|
avatar/conf.py
|
avatar/conf.py
|
from django.conf import settings
from PIL import Image
from appconf import AppConf
class AvatarConf(AppConf):
DEFAULT_SIZE = 80
RESIZE_METHOD = Image.ANTIALIAS
STORAGE_DIR = 'avatars'
STORAGE_PARAMS = {}
GRAVATAR_FIELD = 'email'
GRAVATAR_BASE_URL = 'http://www.gravatar.com/avatar/'
GRAVATAR_BACKUP = True
GRAVATAR_DEFAULT = None
DEFAULT_URL = 'avatar/img/default.jpg'
MAX_AVATARS_PER_USER = 42
MAX_SIZE = 1024 * 1024
THUMB_FORMAT = 'JPEG'
THUMB_QUALITY = 85
USERID_AS_USERDIRNAME = False
HASH_FILENAMES = False
HASH_USERDIRNAMES = False
ALLOWED_FILE_EXTS = None
CACHE_TIMEOUT = 60 * 60
STORAGE = settings.DEFAULT_FILE_STORAGE
CLEANUP_DELETED = False
AUTO_GENERATE_SIZES = (DEFAULT_SIZE,)
AVATAR_ALLOWED_MIMETYPES = []
def configure_auto_generate_avatar_sizes(self, value):
return value or getattr(settings, 'AVATAR_AUTO_GENERATE_SIZES',
(self.DEFAULT_SIZE,))
|
from django.conf import settings
from PIL import Image
from appconf import AppConf
class AvatarConf(AppConf):
DEFAULT_SIZE = 80
RESIZE_METHOD = Image.ANTIALIAS
STORAGE_DIR = 'avatars'
STORAGE_PARAMS = {}
GRAVATAR_FIELD = 'email'
GRAVATAR_BASE_URL = 'https://www.gravatar.com/avatar/'
GRAVATAR_BACKUP = True
GRAVATAR_DEFAULT = None
DEFAULT_URL = 'avatar/img/default.jpg'
MAX_AVATARS_PER_USER = 42
MAX_SIZE = 1024 * 1024
THUMB_FORMAT = 'JPEG'
THUMB_QUALITY = 85
USERID_AS_USERDIRNAME = False
HASH_FILENAMES = False
HASH_USERDIRNAMES = False
ALLOWED_FILE_EXTS = None
CACHE_TIMEOUT = 60 * 60
STORAGE = settings.DEFAULT_FILE_STORAGE
CLEANUP_DELETED = False
AUTO_GENERATE_SIZES = (DEFAULT_SIZE,)
AVATAR_ALLOWED_MIMETYPES = []
def configure_auto_generate_avatar_sizes(self, value):
return value or getattr(settings, 'AVATAR_AUTO_GENERATE_SIZES',
(self.DEFAULT_SIZE,))
|
Use https url to avoid serving insecure content
|
Gravatar: Use https url to avoid serving insecure content
|
Python
|
bsd-3-clause
|
tbabej/django-avatar,tbabej/django-avatar
|
f5d0f8cd145c759cff6d5f6cfeb46459efaa63ca
|
sale_line_description/__openerp__.py
|
sale_line_description/__openerp__.py
|
# -*- coding: utf-8 -*-
#
#
# Copyright (C) 2013-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': "Sale line description",
'version': '1.0',
'category': 'Sales Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale',
],
"data": [
'security/sale_security.xml',
'res_config_view.xml',
],
"active": False,
"installable": True
}
|
# -*- coding: utf-8 -*-
#
#
# Copyright (C) 2013-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': "Sale line description",
'version': '1.0',
'category': 'Sales Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale',
],
"data": [
'security/sale_security.xml',
'res_config_view.xml',
],
"installable": True
}
|
Remove active key since is deprecated
|
Remove active key since is deprecated
|
Python
|
agpl-3.0
|
anas-taji/sale-workflow,brain-tec/sale-workflow,luistorresm/sale-workflow,factorlibre/sale-workflow,BT-fgarbely/sale-workflow,adhoc-dev/sale-workflow,jjscarafia/sale-workflow,richard-willowit/sale-workflow,VitalPet/sale-workflow,akretion/sale-workflow,ddico/sale-workflow,fevxie/sale-workflow,xpansa/sale-workflow,Endika/sale-workflow,alexsandrohaag/sale-workflow,acsone/sale-workflow,kittiu/sale-workflow,BT-cserra/sale-workflow,akretion/sale-workflow,numerigraphe/sale-workflow,diagramsoftware/sale-workflow,BT-ojossen/sale-workflow,thomaspaulb/sale-workflow,damdam-s/sale-workflow,jabibi/sale-workflow,numerigraphe/sale-workflow,clubit/sale-workflow,BT-jmichaud/sale-workflow,open-synergy/sale-workflow,Eficent/sale-workflow,anybox/sale-workflow,Antiun/sale-workflow,guewen/sale-workflow,acsone/sale-workflow,kittiu/sale-workflow,brain-tec/sale-workflow,Rona111/sale-workflow
|
032ce88cbee5399c97486122e7bd3b8013e88dda
|
djangae/__init__.py
|
djangae/__init__.py
|
import os
import sys
extra_library_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "lib")
if extra_library_path not in sys.path:
sys.path.insert(1, extra_library_path)
default_app_config = 'djangae.apps.DjangaeConfig'
from patches import json
json.patch()
|
import os
import sys
extra_library_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "lib")
if extra_library_path not in sys.path:
sys.path.insert(1, extra_library_path)
default_app_config = 'djangae.apps.DjangaeConfig'
from .patches import json
json.patch()
|
Make import of `patches` explicitly relative.
|
Make import of `patches` explicitly relative.
Avoids potential conflict if you add a global package called `patches`, and will avoid breakage if this moves to Python 3.
|
Python
|
bsd-3-clause
|
armirusco/djangae,potatolondon/djangae,armirusco/djangae,potatolondon/djangae,kirberich/djangae,grzes/djangae,kirberich/djangae,armirusco/djangae,asendecka/djangae,grzes/djangae,kirberich/djangae,asendecka/djangae,asendecka/djangae,grzes/djangae
|
c6926dda0a9e6e1515721e54788c29d0ef8b58a4
|
tests/test_sqlcompletion.py
|
tests/test_sqlcompletion.py
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
|
from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
|
Add a test for where clause and rename all tests functions.
|
Add a test for where clause and rename all tests functions.
|
Python
|
bsd-3-clause
|
thedrow/pgcli,d33tah/pgcli,n-someya/pgcli,bitmonk/pgcli,joewalnes/pgcli,yx91490/pgcli,TamasNo1/pgcli,MattOates/pgcli,TamasNo1/pgcli,j-bennet/pgcli,lk1ngaa7/pgcli,zhiyuanshi/pgcli,koljonen/pgcli,dbcli/vcli,dbcli/pgcli,lk1ngaa7/pgcli,dbcli/pgcli,j-bennet/pgcli,suzukaze/pgcli,janusnic/pgcli,darikg/pgcli,johshoff/pgcli,nosun/pgcli,w4ngyi/pgcli,darikg/pgcli,stuartquin/pgcli,dbcli/vcli,bitemyapp/pgcli,d33tah/pgcli,suzukaze/pgcli,bitemyapp/pgcli,koljonen/pgcli,thedrow/pgcli,joewalnes/pgcli,nosun/pgcli,yx91490/pgcli,johshoff/pgcli,janusnic/pgcli,n-someya/pgcli,bitmonk/pgcli,zhiyuanshi/pgcli,w4ngyi/pgcli,MattOates/pgcli
|
52982c735f729ddf0a9c020d495906c4a4899462
|
txircd/modules/rfc/umode_i.py
|
txircd/modules/rfc/umode_i.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class InvisibleMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "InvisibleMode"
core = True
affectedActions = [ "showchanneluser" ]
def actions(self):
return [ ("modeactioncheck-user-i-showchanneluser", 1, self.isInvisible) ]
def userModes(self):
return [ ("i", ModeType.NoParam, self) ]
def isInvisible(self, user, channel, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def apply(self, actionName, user, param, channel, fromUser, sameUser):
if user != sameUser:
return None
if not channel or fromUser not in channel.users:
return False
return None
invisibleMode = InvisibleMode()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class InvisibleMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "InvisibleMode"
core = True
affectedActions = [ "showchanneluser", "showuser" ]
def actions(self):
return [ ("modeactioncheck-user-i-showchanneluser", 1, self.isInvisibleChan),
("modeactioncheck-user-i-showuser", 1, self.isInvisibleUser) ]
def userModes(self):
return [ ("i", ModeType.NoParam, self) ]
def isInvisibleChan(self, user, channel, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def isInvisibleUser(self, user, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def apply(self, actionName, user, param, *params):
if actionName == "showchanneluser":
return self.applyChannels(user, *params)
return self.applyUsers(user, *params)
def applyChannels(self, user, channel, fromUser, sameUser):
if user != sameUser:
return None
if not channel or fromUser not in channel.users:
return False
return None
def applyUsers(self, user, fromUser, sameUser):
if user != sameUser:
return None
for channel in fromUser.channels:
if user in channel.users:
return None
return False
invisibleMode = InvisibleMode()
|
Make the invisible check action not necessarily require an accompanying channel
|
Make the invisible check action not necessarily require an accompanying channel
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd
|
8d8eeaa9fd06cd1fc9860a4e5b215e8ed7f107af
|
openprocurement/tender/esco/views/award_complaint.py
|
openprocurement/tender/esco/views/award_complaint.py
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.utils import optendersresource
from openprocurement.tender.openeu.views.award_complaint import TenderEUAwardComplaintResource
@optendersresource(name='esco.EU:TenderAward Complaints',
collection_path='/tenders/{tender_id}/awards/{award_id}/complaints',
path='/tenders/{tender_id}/awards/{award_id}/complaints/{complaint_id}',
procurementMethodType='esco.EU',
description="Tender ESCO EU Award complaints")
class TenderESCOEUAwardComplaintResource(TenderEUAwardComplaintResource):
""" Tender ESCO EU Award Complaint Resource """
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.utils import optendersresource
from openprocurement.tender.openeu.views.award_complaint import TenderEUAwardComplaintResource
@optendersresource(name='esco.EU:Tender Award Complaints',
collection_path='/tenders/{tender_id}/awards/{award_id}/complaints',
path='/tenders/{tender_id}/awards/{award_id}/complaints/{complaint_id}',
procurementMethodType='esco.EU',
description="Tender ESCO EU Award complaints")
class TenderESCOEUAwardComplaintResource(TenderEUAwardComplaintResource):
""" Tender ESCO EU Award Complaint Resource """
|
Fix in award complaint view
|
Fix in award complaint view
|
Python
|
apache-2.0
|
Scandie/openprocurement.tender.esco,openprocurement/openprocurement.tender.esco
|
0ce8e39bc641c2b68c3c2bd460a38efc5e4eff01
|
automate.py
|
automate.py
|
import login
import time
import navigate
import modal
def automate(username, password, tag):
driver = login.login(username, password)
navigate.tag(driver, tag)
modal.open(driver)
while True:
try:
modal.follow(driver)
modal.next(driver)
except Exception as e:
print(e.__doc__)
print(e.message)
navigate.tag(driver, tag)
modal.open(driver)
time.sleep(3)
|
import login
import time
import navigate
import modal
def automate(username, password, tag):
driver = login.login(username, password)
navigate.tag(driver, tag)
modal.open(driver)
while True:
try:
modal.follow(driver)
modal.next(driver)
except Exception as e:
print(e.__doc__)
print(e.message)
navigate.tag(driver, tag)
modal.open(driver)
time.sleep(30)
|
Increase timer on automation to avoid raising suspicion
|
Increase timer on automation to avoid raising suspicion
|
Python
|
mit
|
jshaker/igbot
|
c447ca3d85d9862be38034be85b2328e3d6b02a3
|
vcproj/tests/test_solution.py
|
vcproj/tests/test_solution.py
|
import vcproj.solution
import tempfile, filecmp
import pytest
@pytest.fixture(scope="session")
def test_sol():
return vcproj.solution.parse('vcproj/tests/test_solution/vc15sol/vc15sol.sln')
def test_all_projects(test_sol):
projects = test_sol.project_names()
len(list(projects)) == 59
def test_project_names(test_sol):
projects = test_sol.project_names()
assert 'Helper' in projects
assert 'MDraw' in projects
def test_project_files(test_sol):
proj_files = list(test_sol.project_files())
assert 'PrivateLib\\PrivateLib.vcxproj' in proj_files
assert 'Helper\\Helper.vcxproj' in proj_files
assert 'Resource\\Resource.vcxproj' in proj_files
def test_dependencies(test_sol):
deps = list(test_sol.dependencies('DXHHTest'))
assert deps == ['Public', 'MDraw']
def test_set_dependencies():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
s.set_dependencies('lib1', ['lib2'])
assert list(s.dependencies('lib1')) == ['lib2']
def test_write():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
temp = tempfile.NamedTemporaryFile()
temp.close()
s.write(temp.name)
assert filecmp.cmp('vcproj/tests/test_solution/test.sln', temp.name)
|
import vcproj.solution
import tempfile, filecmp
import pytest
@pytest.fixture(scope="session")
def test_sol():
return vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
def test_project_files(test_sol):
assert list(test_sol.project_files()) == ['test\\test.vcxproj', 'lib1\\lib1.vcxproj', 'lib2\\lib2.vcxproj']
def test_dependencies(test_sol):
assert list(test_sol.dependencies('test')) == ['lib1', 'lib2']
def test_project_names(test_sol):
assert list(test_sol.project_names()) == ['test', 'lib1', 'lib2']
def test_set_dependencies(test_sol):
test_sol.set_dependencies('lib1', ['lib2'])
assert list(test_sol.dependencies('lib1')) == ['lib2']
def test_write():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
temp = tempfile.NamedTemporaryFile()
temp.close()
s.write(temp.name)
assert filecmp.cmp('vcproj/tests/test_solution/test.sln', temp.name)
|
Add back in test of 2010 solution
|
Add back in test of 2010 solution
|
Python
|
unlicense
|
jhandley/pyvcproj,jhandley/pyvcproj,jhandley/pyvcproj
|
fb91bf1e7c1677124f4aa1ce9c534fb437145980
|
pygametemplate/helper.py
|
pygametemplate/helper.py
|
"""Module containing helper functions for using pygame."""
def load_class_assets(calling_object, assets_dict):
"""Load class assets. Only call if class_assets_loaded is False."""
calling_class = type(calling_object)
for attribute_name in assets_dict:
setattr(calling_class, attribute_name, assets_dict[attribute_name])
setattr(calling_class, "class_assets_loaded", True)
def wrap_text(text, font, max_width):
"""
Returns an array of lines which can be blitted beneath each other
in the given font in a box of the given maximum width.
"""
def wrap_paragraph(paragraph):
"""Wraps text that doesn't contain newlines."""
def too_long(string):
return font.size(string)[0] > max_width
def raise_word_too_long_error(word):
raise ValueError("\"%s\" is too long to be wrapped." % word)
lines = []
words = paragraph.split()
line = words.pop(0)
if too_long(line):
raise_word_too_long_error(line)
for word in words:
if too_long(word):
raise_word_too_long_error(word)
if too_long(" ".join((line, word))):
lines.append(line)
line = word
else:
line = " ".join((line, word))
lines.append(line)
return lines
paragraphs = text.split("\n")
return sum(map(wrap_paragraph, paragraphs), [])
|
"""Module containing helper functions for using pygame."""
def load_class_assets(calling_object, assets_dict):
"""Load class assets. Only call if class_assets_loaded is False."""
calling_class = type(calling_object)
for attribute_name in assets_dict:
setattr(calling_class, attribute_name, assets_dict[attribute_name])
setattr(calling_class, "class_assets_loaded", True)
def wrap_text(text, font, max_width):
"""
Returns an array of lines which can be blitted beneath each other
in the given font in a box of the given maximum width.
"""
def wrap_paragraph(paragraph):
"""Wraps text that doesn't contain newlines."""
def too_long(string):
return font.size(string)[0] > max_width
def raise_word_too_long_error(word):
raise ValueError(f"'{word}' is too long to be wrapped.")
lines = []
words = paragraph.split()
line = words.pop(0)
if too_long(line):
raise_word_too_long_error(line)
for word in words:
if too_long(word):
raise_word_too_long_error(word)
if too_long(" ".join((line, word))):
lines.append(line)
line = word
else:
line = " ".join((line, word))
lines.append(line)
return lines
paragraphs = text.split("\n")
return sum(map(wrap_paragraph, paragraphs), [])
|
Replace % with f-string :)
|
Replace % with f-string :)
|
Python
|
mit
|
AndyDeany/pygame-template
|
b57d0b0d3d65995270318d94b551d8bacda73d22
|
baseline.py
|
baseline.py
|
#/usr/bin/python
""" Baseline example that needs to be beaten """
import numpy as np
import matplotlib.pyplot as plt
x, y, yerr = np.loadtxt("data/data.txt", unpack=True)
A = np.vstack((np.ones_like(x), x)).T
C = np.diag(yerr * yerr)
cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A)))
b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y)))
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr, c="k", fmt="o")
x_range = np.array([min(x), max(x)])
ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100)
ax.set_xlabel("x")
ax.set_ylabel("y")
fig.savefig("assets/result.png")
print m_ls, b_ls
|
#/usr/bin/python
""" Baseline example that needs to be beaten """
import os
import numpy as np
import matplotlib.pyplot as plt
x, y, yerr = np.loadtxt("data/data.txt", unpack=True)
A = np.vstack((np.ones_like(x), x)).T
C = np.diag(yerr * yerr)
cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A)))
b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y)))
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr, c="k", fmt="o")
x_range = np.array([min(x), max(x)])
ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100)
ax.set_xlabel("x")
ax.set_ylabel("y")
fig.savefig("assets/result.png")
print("Results of m, b: ({0:.4f} {1:.4f})".format(m_ls, b_ls))
# Let's store result parameters in environment variables, and we will deal
# with more complex values (e.g., uncertainties, etc) later
os.environ["RESULT_M"] = "{0:.5f}".format(m_ls)
os.environ["RESULT_B"] = "{0:.5f}".format(b_ls)
|
Add results to environment parameters RESULT_M, RESULT_B
|
Add results to environment parameters RESULT_M, RESULT_B
|
Python
|
mit
|
arfon/dottravis,arfon/dottravis
|
b960962472f1c40fbaa1338d2cba316810ba119b
|
tt_dailyemailblast/admin.py
|
tt_dailyemailblast/admin.py
|
from django.contrib import admin
from django.db import models as django_models
from tinymce.widgets import TinyMCE
from .models import (Recipient, RecipientList, DailyEmailBlast,
DailyEmailBlastType)
def send_blasts(model_admin, request, qs):
for blast in qs:
print blast.send()
class RecipientInline(admin.TabularInline):
model = RecipientList.recipients.through
verbose_name = 'recipient'
verbose_name_plural = 'recipients'
class RecipientListAdmin(admin.ModelAdmin):
model = RecipientList
inlines = [RecipientInline]
class RecipientListInline(admin.TabularInline):
model = DailyEmailBlast.recipient_lists.through
verbose_name = 'recipient list'
verbose_name_plural = 'recipient lists'
class DailyEmailBlastAdmin(admin.ModelAdmin):
model = DailyEmailBlast
inlines = [RecipientListInline]
formfield_overrides = {
django_models.TextField: {'widget': TinyMCE()},
}
actions = [send_blasts]
admin.site.register(DailyEmailBlastType)
admin.site.register(Recipient)
admin.site.register(RecipientList, RecipientListAdmin)
admin.site.register(DailyEmailBlast, DailyEmailBlastAdmin)
|
from django.contrib import admin
from django.db import models as django_models
from tinymce.widgets import TinyMCE
from .models import (Recipient, RecipientList, DailyEmailBlast,
DailyEmailBlastType)
def send_blasts(model_admin, request, qs):
for blast in qs:
print blast.send()
class RecipientInline(admin.TabularInline):
model = RecipientList.recipients.through
verbose_name = 'recipient'
verbose_name_plural = 'recipients'
class RecipientListAdmin(admin.ModelAdmin):
model = RecipientList
inlines = [RecipientInline]
class RecipientListInline(admin.TabularInline):
model = DailyEmailBlast.recipient_lists.through
verbose_name = 'recipient list'
verbose_name_plural = 'recipient lists'
class DailyEmailBlastAdmin(admin.ModelAdmin):
model = DailyEmailBlast
inlines = [RecipientListInline]
list_display = ('blast_type', 'created_on', 'sent_on',
'send_completed_on',)
formfield_overrides = {
django_models.TextField: {'widget': TinyMCE()},
}
actions = [send_blasts]
admin.site.register(DailyEmailBlastType)
admin.site.register(Recipient)
admin.site.register(RecipientList, RecipientListAdmin)
admin.site.register(DailyEmailBlast, DailyEmailBlastAdmin)
|
Include all dates in blast list display
|
Include all dates in blast list display
|
Python
|
apache-2.0
|
texastribune/tt_dailyemailblast,texastribune/tt_dailyemailblast
|
ef404dad280ec2f7317e0176d3e91b20d1bbe7c0
|
inbox/notify/__init__.py
|
inbox/notify/__init__.py
|
from redis import StrictRedis, BlockingConnectionPool
from inbox.config import config
import json
REDIS_HOSTNAME = config.get('NOTIFY_QUEUE_REDIS_HOSTNAME')
REDIS_DB = int(config.get('NOTIFY_QUEUE_REDIS_DB'))
MAX_CONNECTIONS = 40
redis_pool = BlockingConnectionPool(
max_connections=MAX_CONNECTIONS,
host=REDIS_HOSTNAME, port=6379, db=REDIS_DB)
def notify_transaction(transaction, db_session):
from inbox.models import Namespace
# We're only interested in "message created" events
if transaction.command != 'insert' or transaction.object_type != 'message':
return
namespace = db_session.query(Namespace).get(transaction.namespace_id)
redis_client = StrictRedis(connection_pool=redis_pool)
job = {
'class': 'ProcessMessageQueue',
'args': [
'nylas_notification',
namespace.public_id,
transaction.object_public_id
]
}
pipeline = redis_client.pipeline()
pipeline.sadd('resque:queues', 'nylas_default')
pipeline.lpush('resque:queue:nylas_default', json.dumps(job))
pipeline.execute()
pipeline.reset()
|
import json
from redis import StrictRedis, BlockingConnectionPool
from inbox.config import config
from nylas.logging import get_logger
log = get_logger()
REDIS_HOSTNAME = config.get('NOTIFY_QUEUE_REDIS_HOSTNAME')
REDIS_PORT = int(config.get('NOTIFY_QUEUE_REDIS_PORT', 6379))
REDIS_DB = int(config.get('NOTIFY_QUEUE_REDIS_DB'))
MAX_CONNECTIONS = 40
redis_pool = BlockingConnectionPool(
max_connections=MAX_CONNECTIONS,
host=REDIS_HOSTNAME, port=REDIS_PORT, db=REDIS_DB)
def notify_transaction(transaction, db_session):
from inbox.models import Namespace
# We're only interested in "message created" events
if transaction.command != 'insert' or transaction.object_type != 'message':
return
log.info('Transaction prepared to enqueue',
transaction_id=transaction.record_id)
namespace = db_session.query(Namespace).get(transaction.namespace_id)
redis_client = StrictRedis(connection_pool=redis_pool)
job = {
'class': 'ProcessMessageQueue',
'args': [
'nylas_notification',
namespace.public_id,
transaction.object_public_id
]
}
try:
pipeline = redis_client.pipeline()
pipeline.sadd('resque:queues', 'nylas_default')
pipeline.lpush('resque:queue:nylas_default', json.dumps(job))
log.info('Transaction enqueued',
transaction_id=transaction.record_id,
namespace_id=transaction.namespace_id,
job_details=job)
pipeline.execute()
pipeline.reset()
except Exception as e:
log.error('Transaction not enqueued!',
transaction_id=transaction.record_id,
namespace_id=transaction.namespace_id,
job_details=job,
error=e)
raise e
|
Add logger an try/except logic
|
Add logger an try/except logic
|
Python
|
agpl-3.0
|
jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine
|
45261d57bdb1ee23c84ea6c5d83550b7e84c26f1
|
highlander/highlander.py
|
highlander/highlander.py
|
from functools import wraps
from logging import getLogger
from os import getcwd, unlink
from os.path import join, realpath, isfile
from psutil import Process
logger = getLogger(__name__)
def one(f):
@wraps(f)
def decorator():
pid_file = realpath(join(getcwd(), '.pid'))
if _is_running(pid_file):
exit(0)
_set_running(pid_file)
try:
f()
finally:
unlink(pid_file)
return decorator
def _is_running():
pass
def _read_pid_file(filename):
if not isfile(str(filename)):
return None
with open(filename, 'r') as f:
pid, create_time = f.read().split(',')
return Process(int(pid))
def _set_running(filename):
p = Process()
with open(filename, 'w') as f:
f.write('{},{}'.format(p.pid, p.create_time()))
|
from functools import wraps
from logging import getLogger
from os import getcwd, unlink
from os.path import join, realpath, isfile
from psutil import Process
logger = getLogger(__name__)
def one(f):
@wraps(f)
def decorator():
pid_file = realpath(join(getcwd(), '.pid'))
if _is_running(pid_file):
exit(0)
_set_running(pid_file)
try:
f()
finally:
unlink(pid_file)
return decorator
def _is_running():
pass
def _read_pid_file(filename):
if not isfile(str(filename)):
return None
with open(filename, 'r') as f:
pid, create_time = f.read().split(',')
return Process(int(pid))
def _set_running(filename):
if isfile(str(filename)):
raise Exception('PID file already exists.')
p = Process()
with open(filename, 'w') as f:
f.write('{},{}'.format(p.pid, p.create_time()))
|
Check to make sure the file doesn't exist if we get to the set running state.
|
Check to make sure the file doesn't exist if we get to the set running state.
|
Python
|
mit
|
chriscannon/highlander
|
3d1612e5f9e20cf74a962dd4ca1b538776d5ec7e
|
StationPopWithoutTrain.py
|
StationPopWithoutTrain.py
|
def before_train_station_pop(station, escalator):
# calculate the number of people waiting to depart on the train by the time the train arive.
station.travelers_departing = station.travelers_departing + (escalator.rate * escalators.entering * station.train_wait)
# number of people who have arived and want to leave.
station.travelers_arriving = station.travelers_arriving - (escalator.rate * station.train_time
# Get the total station population.
population = station.pop
return population
|
"""This module calculates the number of people in the station by the time the next train arives"""
def before_train_station_pop(station, escalator):
"""This function calculates the total number of people as a sume of people
waiting to board the next train, and the number of people waiting to leave
the station by the elebvators."""
station.travelers_departing = station.travelers_departing + (escalator.rate * station.escalators_entering * station.train_wait)
# number of people who have arived and want to leave.
station.travelers_arriving = station.travelers_arriving - (escalator.rate * station.escalators_exiting * station.train_wait)
|
Simplify the function to calculate the platform population between trains
|
Simplify the function to calculate the platform population between trains
The function to calculate the change in platform population in the time
between trains was needlessly complex. It has now been simplified.
ref #17
|
Python
|
mit
|
ForestPride/rail-problem
|
88bd31ebfcaafe7de386f8d00869eed6286066f7
|
cetacean/response.py
|
cetacean/response.py
|
#!/usr/bin/env python
# encoding: utf-8
import re
class Response(object):
"""Represents an HTTP response that is hopefully a HAL document."""
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._hal_regex = re.compile(r"application/hal\+json")
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return self._hal_regex.match(self._response.headers['content-type'])
|
#!/usr/bin/env python
# encoding: utf-8
import re
class Response(object):
"""Represents an HTTP response that is hopefully a HAL document."""
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._hal_regex = re.compile(r"application/hal\+json")
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
|
Make Response.is_hal() return a literal boolean.
|
Make Response.is_hal() return a literal boolean.
|
Python
|
mit
|
nanorepublica/cetacean-python,benhamill/cetacean-python
|
b3befb47d4b48e83b42fc6b10a10269d32cafb4e
|
src-backend/api/urls.py
|
src-backend/api/urls.py
|
from django.conf.urls import url, include
from views import ProcedureViewSet
from rest_framework import routers
router = routers.SimpleRouter()
router.register(r'procedures', ProcedureViewSet)
urlpatterns = [
url(r'^', include(router.urls))
]
|
from django.conf.urls import url, include
from views import ProcedureViewSet
from rest_framework import routers
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'procedures', ProcedureViewSet)
urlpatterns = [
url(r'^', include(router.urls))
]
|
Remove trailing slash from the router
|
Remove trailing slash from the router
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
c40fc13dca5a0596a72d5c26214777f8a2845675
|
tests/test_repr.py
|
tests/test_repr.py
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = p.__str__()
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = p.__str__()
# verify
assert isinstance(value, str)
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
|
Use str(p) and not p.__str__()
|
Use str(p) and not p.__str__()
|
Python
|
isc
|
bangi123/pexpect,Depado/pexpect,dongguangming/pexpect,Depado/pexpect,quatanium/pexpect,Wakeupbuddy/pexpect,Wakeupbuddy/pexpect,Depado/pexpect,crdoconnor/pexpect,crdoconnor/pexpect,quatanium/pexpect,nodish/pexpect,quatanium/pexpect,crdoconnor/pexpect,dongguangming/pexpect,Wakeupbuddy/pexpect,Wakeupbuddy/pexpect,Depado/pexpect,bangi123/pexpect,blink1073/pexpect,blink1073/pexpect,nodish/pexpect,blink1073/pexpect,bangi123/pexpect,nodish/pexpect,dongguangming/pexpect,bangi123/pexpect,dongguangming/pexpect
|
6110bc1137f5e3f1f12249c366323c6c0b48dbe3
|
IPython/nbconvert/utils/base.py
|
IPython/nbconvert/utils/base.py
|
"""Global configuration class."""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import List
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.traitlets import Unicode
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class NbConvertBase(LoggingConfigurable):
"""Global configurable class for shared config
Useful for display data priority that might be use by many transformers
"""
display_data_priority = List(['javascript', 'html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'],
config=True,
help= """
An ordered list of preferred output type, the first
encountered will usually be used when converting discarding
the others.
"""
)
default_language = Unicode('ipython', config=True, help='default highlight language')
def __init__(self, **kw):
super(NbConvertBase, self).__init__(**kw)
|
"""Global configuration class."""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import List
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.traitlets import Unicode
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class NbConvertBase(LoggingConfigurable):
"""Global configurable class for shared config
Useful for display data priority that might be use by many transformers
"""
display_data_priority = List(['html', 'javascript', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'],
config=True,
help= """
An ordered list of preferred output type, the first
encountered will usually be used when converting discarding
the others.
"""
)
default_language = Unicode('ipython', config=True, help='default highlight language')
def __init__(self, **kw):
super(NbConvertBase, self).__init__(**kw)
|
Revert "Moved JS in front of HTML"
|
Revert "Moved JS in front of HTML"
This reverts commit 8b0164edde418138d4e28c20d63fa422931ae6a8.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
9a6467688f567abc405a3fca6c4bfda7b6cd0351
|
FileWatcher.py
|
FileWatcher.py
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if event.src_path == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = filePath
self.callback = callback
self.eventHandler = MyEventHandler(filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
Handle filepaths in an OS independent manner.
|
Handle filepaths in an OS independent manner.
--CAR
|
Python
|
apache-2.0
|
BBN-Q/PyQLab,calebjordan/PyQLab,Plourde-Research-Lab/PyQLab,rmcgurrin/PyQLab
|
b50ef13cb25c795a1ad3b2bfdbbb47b709fcbd39
|
binding/python/__init__.py
|
binding/python/__init__.py
|
# This file is part of SpaceVecAlg.
#
# SpaceVecAlg is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpaceVecAlg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with SpaceVecAlg. If not, see <http://www.gnu.org/licenses/>.
from _rbdyn import *
|
# This file is part of RBDyn.
#
# RBDyn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RBDyn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with RBDyn. If not, see <http://www.gnu.org/licenses/>.
from _rbdyn import *
|
Fix bad copy/past in licence header.
|
Fix bad copy/past in licence header.
|
Python
|
bsd-2-clause
|
jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn
|
02ea7389a364fe1f7df48542d4727c32374da452
|
scripts/master/factory/dart/channels.py
|
scripts/master/factory/dart/channels.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.2', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.3', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
Update stable channel builders to pull from 1.3
|
Update stable channel builders to pull from 1.3
Review URL: https://codereview.chromium.org/225263024
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262391 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
c1e5822f07e2fe4ca47633ed3dfda7d7bee64b6c
|
nvchecker/source/aiohttp_httpclient.py
|
nvchecker/source/aiohttp_httpclient.py
|
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <[email protected]>, et al.
import atexit
import aiohttp
connector = aiohttp.TCPConnector(limit=20)
__all__ = ['session', 'HTTPError']
class HTTPError(Exception):
def __init__(self, code, message, response):
self.code = code
self.message = message
self.response = response
class BetterClientSession(aiohttp.ClientSession):
async def _request(self, *args, **kwargs):
if hasattr(self, "nv_config") and self.nv_config.get("proxy"):
kwargs.setdefault("proxy", self.nv_config.get("proxy"))
res = await super(BetterClientSession, self)._request(
*args, **kwargs)
if res.status >= 400:
raise HTTPError(res.status, res.reason, res)
return res
session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5)
atexit.register(session.close)
|
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <[email protected]>, et al.
import atexit
import asyncio
import aiohttp
connector = aiohttp.TCPConnector(limit=20)
__all__ = ['session', 'HTTPError']
class HTTPError(Exception):
def __init__(self, code, message, response):
self.code = code
self.message = message
self.response = response
class BetterClientSession(aiohttp.ClientSession):
async def _request(self, *args, **kwargs):
if hasattr(self, "nv_config") and self.nv_config.get("proxy"):
kwargs.setdefault("proxy", self.nv_config.get("proxy"))
res = await super(BetterClientSession, self)._request(
*args, **kwargs)
if res.status >= 400:
raise HTTPError(res.status, res.reason, res)
return res
session = BetterClientSession(connector=connector)
@atexit.register
def cleanup():
loop = asyncio.get_event_loop()
loop.run_until_complete(session.close())
|
Handle graceful exit and timeout
|
Handle graceful exit and timeout
Timeout was refactored and the defaults work correctly here.
|
Python
|
mit
|
lilydjwg/nvchecker
|
9f81cc80e1a82f4a26e400dd2ae5c290abe48382
|
Discord/tree.py
|
Discord/tree.py
|
from discord import app_commands
import logging
import sys
import traceback
import sentry_sdk
class CommandTree(app_commands.CommandTree):
async def on_error(self, interaction, command, error):
sentry_sdk.capture_exception(error)
print(
f"Ignoring exception in slash command {command.name}",
# TODO: Use full name
file = sys.stderr
)
traceback.print_exception(
type(error), error, error.__traceback__, file = sys.stderr
)
logging.getLogger("errors").error(
"Uncaught exception\n",
exc_info = (type(error), error, error.__traceback__)
)
|
from discord import app_commands
import logging
import sys
import traceback
import sentry_sdk
class CommandTree(app_commands.CommandTree):
async def on_error(self, interaction, error):
sentry_sdk.capture_exception(error)
print(
f"Ignoring exception in slash command {interaction.command.name}",
# TODO: Use full name
file = sys.stderr
)
traceback.print_exception(
type(error), error, error.__traceback__, file = sys.stderr
)
logging.getLogger("errors").error(
"Uncaught exception\n",
exc_info = (type(error), error, error.__traceback__)
)
|
Update CommandTree.on_error to only take two parameters
|
[Discord] Update CommandTree.on_error to only take two parameters
Remove command parameter, matching discord.py update, and use Interaction.command instead
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
9bab06465d2b2665efaac4edea2e3c538c600908
|
installer/hooks/hook-moduleManager.py
|
installer/hooks/hook-moduleManager.py
|
import os
import sys
if sys.platform.startswith('win'):
sys.path.insert(0, 'c:/work/code/dscas3/')
else:
sys.path.insert(0, '/home/cpbotha/work/code/dscas3/')
import modules
# * we need to give the module paths relative to the directory moduleManager
# is in (I think, since this is the hook for moduleManager)
# * the installer will treat these imports as if they were explicitly
# imported by the moduleManager, so THEIR dependecies will automatically
# be analysed.
ml2 = ["modules." + i for i in modules.module_list]
hiddenimports = ml2
print "[*] hook-moduleManager.py - HIDDENIMPORTS"
print hiddenimports
|
import os
import sys
if sys.platform.startswith('win'):
sys.path.insert(0, 'c:/work/code/dscas3/')
else:
sys.path.insert(0, '/home/cpbotha/work/code/dscas3/')
import modules
# * we need to give the module paths relative to the directory moduleManager
# is in (I think, since this is the hook for moduleManager)
# * the installer will treat these imports as if they were explicitly
# imported by the moduleManager, so THEIR dependecies will automatically
# be analysed.
ml2 = ["modules." + i for i in modules.moduleList]
hiddenimports = ml2
print "[*] hook-moduleManager.py - HIDDENIMPORTS"
print hiddenimports
|
Update installer to new scheme.
|
Update installer to new scheme.
|
Python
|
bsd-3-clause
|
chrisidefix/devide,zhangfangyan/devide,nagyistoce/devide,ivoflipse/devide,fvpolpeta/devide,chrisidefix/devide,fvpolpeta/devide,ivoflipse/devide,nagyistoce/devide,zhangfangyan/devide
|
3856b48af3e83f49a66c0c29b81e0a80ad3248d9
|
nubes/connectors/aws/connector.py
|
nubes/connectors/aws/connector.py
|
import boto3.session
from nubes.connectors import base
class AWSConnector(base.BaseConnector):
def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
self.connection = boto3.session.Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name)
@classmethod
def name(cls):
return "aws"
def create_server(self, image_id, min_count, max_count, **kwargs):
ec2_resource = self.connection.resource("ec2")
server = ec2_resource.create_instances(ImageId=image_id,
MinCount=min_count,
MaxCount=max_count,
**kwargs)
return server
def list_servers(self):
ec2_client = self.connection.client("ec2")
desc = ec2_client.describe_instances()
return desc
def delete_server(self, instance_id):
ec2_resource = self.connection.resource("ec2")
ec2_resource.instances.filter(
InstanceIds=[instance_id]).stop()
ec2_resource.instances.filter(
InstanceIds=[instance_id]).terminate()
|
import boto3.session
from nubes.connectors import base
class AWSConnector(base.BaseConnector):
def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
self.connection = boto3.session.Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name)
self.ec2_resource = self.connection.resource("ec2")
self.ec2_client = self.connection.client("ec2")
@classmethod
def name(cls):
return "aws"
def create_server(self, image_id, min_count, max_count, **kwargs):
server = self.ec2_resource.create_instances(ImageId=image_id,
MinCount=min_count,
MaxCount=max_count,
**kwargs)
return server
def list_servers(self):
desc = self.ec2_client.describe_instances()
return desc
def delete_server(self, instance_id):
self.ec2_resource.instances.filter(
InstanceIds=[instance_id]).stop()
self.ec2_resource.instances.filter(
InstanceIds=[instance_id]).terminate()
|
Move client and resource to __init__
|
Move client and resource to __init__
* moved the calls to create the ec2 session resource session client
to the init
|
Python
|
apache-2.0
|
omninubes/nubes
|
770bbf80a78d2f418e47ca2dc641c7dccbb86cac
|
rollbar/test/asgi_tests/helper.py
|
rollbar/test/asgi_tests/helper.py
|
import asyncio
import functools
from rollbar.contrib.asgi import ASGIApp
def async_test_func_wrapper(asyncfunc):
@functools.wraps(asyncfunc)
def wrapper(*args, **kwargs):
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(asyncfunc(*args, **kwargs))
finally:
loop.close()
else:
loop.run_until_complete(asyncfunc(*args, **kwargs))
return wrapper
@ASGIApp
class FailingTestASGIApp:
def __init__(self):
self.asgi_app = async_test_func_wrapper(self.asgi_app)
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
import asyncio
import functools
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def wrap_async(asyncfunc):
@functools.wraps(asyncfunc)
def wrapper(*args, **kwargs):
run(asyncfunc(*args, **kwargs))
return wrapper
@ASGIApp
class FailingTestASGIApp:
def __init__(self):
self.asgi_app = wrap_async(self.asgi_app)
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
Refactor async wrapper. Use asyncio.run() for Py3.7
|
Refactor async wrapper. Use asyncio.run() for Py3.7
|
Python
|
mit
|
rollbar/pyrollbar
|
c32e87894d4baf404d5b300459fc68a6d9d973c8
|
zun/db/__init__.py
|
zun/db/__init__.py
|
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
|
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
|
Remove the duplicated config sqlite_db
|
Remove the duplicated config sqlite_db
The config sqlite_db has been removed from oslo.db. See here:
https://review.openstack.org/#/c/449437/
Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
a0b7ba97cd996209ce2e9770ba0ec25111c85dd2
|
rapcom/__init__.py
|
rapcom/__init__.py
|
# -*- coding: utf-8 -*-
"""The primary module for the program.
Variables:
__version_info__: A tuple containing the individual parts of the version.
__version__: The version string.
"""
from __future__ import unicode_literals
__version_info__ = (0, 1, 0)
__version__ = '.'.join(map(str, __version_info__))
|
# -*- coding: utf-8 -*-
"""The primary module for the program.
Variables:
__version_info__: A tuple containing the individual parts of the version.
__version__: The version string.
"""
from __future__ import unicode_literals
__version_info__ = (0, 1, 1, 'dev')
__version__ = '.'.join(map(str, __version_info__))
|
Update the version to 0.1.1.dev
|
Update the version to 0.1.1.dev
|
Python
|
mit
|
contains-io/rcli
|
b8ac8edbd12c6b021815e4fa4fd68cfee7dc18cf
|
frigg/builds/api.py
|
frigg/builds/api.py
|
# -*- coding: utf8 -*-
import json
from django.http import HttpResponse, Http404
from django.http.response import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from frigg.decorators import token_required
from .models import Build, Project
@token_required
@csrf_exempt
def report_build(request):
try:
payload = json.loads(request.body)
build = Build.objects.get(pk=payload['id'])
build.handle_worker_report(payload)
response = JsonResponse({'message': 'Thanks for building it'})
except Build.DoesNotExist:
response = JsonResponse({'error': 'Build not found'})
response.status_code = 404
return response
@csrf_exempt
def build_badge(request, owner, project, branch='master'):
project = get_object_or_404(Project, owner=owner, name=project)
badge = project.get_badge(branch)
if badge is None:
raise Http404
return HttpResponse(content=badge, content_type='image/svg+xml')
|
# -*- coding: utf8 -*-
import json
from django.http import HttpResponse, Http404
from django.http.response import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from frigg.decorators import token_required
from .models import Build, Project
@token_required
@csrf_exempt
def report_build(request):
try:
payload = json.loads(request.body)
build = Build.objects.get(pk=payload['id'])
build.handle_worker_report(payload)
response = JsonResponse({'message': 'Thanks for building it'})
except Build.DoesNotExist:
response = JsonResponse({'error': 'Build not found'})
response.status_code = 404
return response
@never_cache
@csrf_exempt
def build_badge(request, owner, project, branch='master'):
project = get_object_or_404(Project, owner=owner, name=project)
badge = project.get_badge(branch)
if badge is None:
raise Http404
return HttpResponse(content=badge, content_type='image/svg+xml')
|
Add @never_cache decorator to the badge view
|
Add @never_cache decorator to the badge view
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
04b7e79ce3fed1afac129098badb632ca226fdee
|
dispatch.py
|
dispatch.py
|
#!/usr/bin/env python
"""
Copyright (c) 2008-2011, Anthony Garcia <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
#!/usr/bin/env python
"""
Copyright (c) 2008-2011, Anthony Garcia <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
# wsgi
application = render.application.wsgifunc()
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
Add wsgi handler by default
|
Add wsgi handler by default
|
Python
|
isc
|
Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2
|
67255ac86d2ef91ce355655112c919f2e08045b4
|
django_uwsgi/urls.py
|
django_uwsgi/urls.py
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^$', views.UwsgiStatus.as_view(), name='uwsgi_index'),
url(r'^reload/$', views.UwsgiReload.as_view(), name='uwsgi_reload'),
url(r'^clear_cache/$', views.UwsgiCacheClear.as_view(), name='uwsgi_cache_clear'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.UwsgiStatus.as_view(), name='uwsgi_index'),
url(r'^reload/$', views.UwsgiReload.as_view(), name='uwsgi_reload'),
url(r'^clear_cache/$', views.UwsgiCacheClear.as_view(), name='uwsgi_cache_clear'),
]
|
Remove usage of patterns in import line
|
Remove usage of patterns in import line
|
Python
|
mit
|
unbit/django-uwsgi,unbit/django-uwsgi
|
e2ac721cb3e745a149e039aec0f71d33d1e28efc
|
api/base/renderers.py
|
api/base/renderers.py
|
import re
from rest_framework.renderers import JSONRenderer, BrowsableAPIRenderer
class JSONAPIRenderer(JSONRenderer):
format = "jsonapi"
media_type = 'application/vnd.api+json'
def render(self, data, accepted_media_type=None, renderer_context=None):
stuff = super(JSONAPIRenderer, self).render(data, accepted_media_type, renderer_context)
new_stuff = re.sub(r'"<esi:include src=\\"(.*?)\\"\/>"', r'<esi:include src="\1"/>', stuff)
return new_stuff
class BrowsableAPIRendererNoForms(BrowsableAPIRenderer):
"""
Renders browsable API but omits HTML forms
"""
def get_context(self, *args, **kwargs):
context = super(BrowsableAPIRendererNoForms, self).get_context(*args, **kwargs)
unwanted_forms = ('put_form', 'post_form', 'delete_form', 'raw_data_put_form',
'raw_data_post_form', 'raw_data_patch_form', 'raw_data_put_or_patch_form')
for form in unwanted_forms:
del context[form]
return context
|
import re
from rest_framework.renderers import JSONRenderer, BrowsableAPIRenderer
class JSONAPIRenderer(JSONRenderer):
format = "jsonapi"
media_type = 'application/vnd.api+json'
def render(self, data, accepted_media_type=None, renderer_context=None):
# TODO: There should be a way to do this that is conditional on esi being requested and
# TODO: In such a way that it doesn't use regex unless there's absolutely no other way.
initial_rendering = super(JSONAPIRenderer, self).render(data, accepted_media_type, renderer_context)
augmented_rendering = re.sub(r'"<esi:include src=\\"(.*?)\\"\/>"', r'<esi:include src="\1"/>', initial_rendering)
return augmented_rendering
class BrowsableAPIRendererNoForms(BrowsableAPIRenderer):
"""
Renders browsable API but omits HTML forms
"""
def get_context(self, *args, **kwargs):
context = super(BrowsableAPIRendererNoForms, self).get_context(*args, **kwargs)
unwanted_forms = ('put_form', 'post_form', 'delete_form', 'raw_data_put_form',
'raw_data_post_form', 'raw_data_patch_form', 'raw_data_put_or_patch_form')
for form in unwanted_forms:
del context[form]
return context
|
Add TODO and clean up render function
|
Add TODO and clean up render function
[#OSF-5081]
|
Python
|
apache-2.0
|
adlius/osf.io,brandonPurvis/osf.io,mattclark/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,emetsger/osf.io,kch8qx/osf.io,samchrisinger/osf.io,samchrisinger/osf.io,kch8qx/osf.io,rdhyee/osf.io,binoculars/osf.io,aaxelb/osf.io,samchrisinger/osf.io,hmoco/osf.io,binoculars/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,rdhyee/osf.io,chrisseto/osf.io,doublebits/osf.io,crcresearch/osf.io,billyhunt/osf.io,cslzchen/osf.io,mfraezz/osf.io,abought/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,amyshi188/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,sloria/osf.io,GageGaskins/osf.io,caneruguz/osf.io,chrisseto/osf.io,saradbowman/osf.io,abought/osf.io,doublebits/osf.io,laurenrevere/osf.io,mattclark/osf.io,wearpants/osf.io,adlius/osf.io,emetsger/osf.io,chennan47/osf.io,leb2dg/osf.io,KAsante95/osf.io,felliott/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,zachjanicki/osf.io,alexschiller/osf.io,zachjanicki/osf.io,KAsante95/osf.io,cwisecarver/osf.io,doublebits/osf.io,mfraezz/osf.io,acshi/osf.io,binoculars/osf.io,alexschiller/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,wearpants/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,mluo613/osf.io,acshi/osf.io,doublebits/osf.io,felliott/osf.io,Nesiehr/osf.io,mluo613/osf.io,caneruguz/osf.io,chrisseto/osf.io,emetsger/osf.io,baylee-d/osf.io,chrisseto/osf.io,abought/osf.io,zachjanicki/osf.io,jnayak1/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,mluo613/osf.io,erinspace/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,RomanZWang/osf.io,billyhunt/osf.io,billyhunt/osf.io,chennan47/osf.io,RomanZWang/osf.io,mfraezz/osf.io,abought/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,emetsger/osf.io,icereval/osf.io,Nesiehr/osf.io,chennan47/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,wearpants/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,mattclark/osf.io,kwierman/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,felliott/osf.io,adlius/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,adlius/osf.io,sloria/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,mluke93/osf.io,caseyrollins/osf.io,mluke93/osf.io,erinspace/osf.io,saradbowman/osf.io,leb2dg/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,KAsante95/osf.io,wearpants/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,pattisdr/osf.io,rdhyee/osf.io,zachjanicki/osf.io,caneruguz/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,alexschiller/osf.io,alexschiller/osf.io,cslzchen/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,kch8qx/osf.io,acshi/osf.io,brandonPurvis/osf.io,pattisdr/osf.io,GageGaskins/osf.io,kch8qx/osf.io,brandonPurvis/osf.io,mluke93/osf.io,baylee-d/osf.io,mluke93/osf.io,icereval/osf.io,jnayak1/osf.io,zamattiac/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,felliott/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,cslzchen/osf.io,baylee-d/osf.io,pattisdr/osf.io,aaxelb/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,hmoco/osf.io,hmoco/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,erinspace/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,kwierman/osf.io,jnayak1/osf.io,mluo613/osf.io,acshi/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,leb2dg/osf.io,billyhunt/osf.io,caneruguz/osf.io,mluo613/osf.io,kwierman/osf.io,aaxelb/osf.io,Nesiehr/osf.io
|
b2cac05be3f6c510edfaf1ae478fabdcf06fd19a
|
mgsv_names.py
|
mgsv_names.py
|
import random
global adjectives, animals, rares
with open('adjectives.txt') as f:
adjectives = f.readlines()
with open('animals.txt') as f:
animals = f.readlines()
with open('rares.txt') as f:
rares = f.readlines()
uncommons = {
# Adjectives:
'master': 'miller',
'raging': 'bull',
'hidden': 'dragon',
'humming': 'bird',
'spicy': 'sandworm',
# Animals:
'ocelot': 'revolver',
'lion': 'snooping',
'tiger': 'crouching',
'hippo': 'hungry',
'falcon': 'punching',
}
def get_name():
adj = random.choice(adjectives).strip()
anim = random.choice(animals).strip()
r = random.random()
if r < 0.001 or r >= 0.999:
return random.choice(rares).strip()
elif r < 0.3 and adj in uncommons:
return ' '.join((adj, uncommons[adj]))
elif r >= 0.7 and anim in uncommons:
return ' '.join((uncommons[anim], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(get_name())
|
import random, os
global adjectives, animals, rares
with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f:
adjectives = f.readlines()
with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f:
animals = f.readlines()
with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f:
rares = f.readlines()
uncommons = {
# Adjectives:
'master': 'miller',
'raging': 'bull',
'hidden': 'dragon',
'humming': 'bird',
'spicy': 'sandworm',
# Animals:
'ocelot': 'revolver',
'lion': 'snooping',
'tiger': 'crouching',
'hippo': 'hungry',
'falcon': 'punching',
}
def generate_name():
adj = random.choice(adjectives).strip()
anim = random.choice(animals).strip()
r = random.random()
if r < 0.001 or r >= 0.999:
return random.choice(rares).strip()
elif r < 0.3 and adj in uncommons:
return ' '.join((adj, uncommons[adj]))
elif r >= 0.7 and anim in uncommons:
return ' '.join((uncommons[anim], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(generate_name())
|
Load text files from the same dir as the script.
|
Load text files from the same dir as the script.
Also renamed our name generator.
|
Python
|
unlicense
|
rotated8/mgsv_names
|
40edd4a635dd8f83a21f15f22883e7dae8d8d0a8
|
test/test_modes/test_backspace.py
|
test/test_modes/test_backspace.py
|
from pyqode.qt import QtCore
from pyqode.qt.QtTest import QTest
from pyqode.core.api import TextHelper
from pyqode.core import modes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(modes.SmartBackSpaceMode)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_key_pressed(editor):
QTest.qWait(1000)
TextHelper(editor).goto_line(20, 4)
assert editor.textCursor().positionInBlock() == 4
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 0
TextHelper(editor).goto_line(19, 5)
assert editor.textCursor().positionInBlock() == 5
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 4
TextHelper(editor).goto_line(20, 0)
assert editor.textCursor().positionInBlock() == 0
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 0
|
from pyqode.qt import QtCore
from pyqode.qt.QtTest import QTest
from pyqode.core.api import TextHelper
from pyqode.core import modes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(modes.SmartBackSpaceMode)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_key_pressed(editor):
QTest.qWait(1000)
TextHelper(editor).goto_line(21, 4)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 4
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 0
TextHelper(editor).goto_line(19, 5)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 5
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 4
TextHelper(editor).goto_line(20, 0)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 0
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 28
|
Fix test backspace (this test has to be changed since the parent implementation is now called when there is no space to eat)
|
Fix test backspace (this test has to be changed since the parent implementation is now called when there is no space to eat)
|
Python
|
mit
|
pyQode/pyqode.core,zwadar/pyqode.core,pyQode/pyqode.core
|
e68836173dec1e1fe80e07cca8eb67ebe19e424e
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
|
Use a less pathetic method to retrieve the PyCEGUI dirname
|
MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
|
Python
|
mit
|
ruleless/CEGUI,OpenTechEngine/CEGUI,ruleless/CEGUI,ruleless/CEGUI,ruleless/CEGUI,OpenTechEngine/CEGUI,OpenTechEngine/CEGUI,OpenTechEngine/CEGUI
|
cd30723af9f82b7a91d1ad1e2a5b86f88d8f4b17
|
harvester/post_processing/dedup_sourceresource.py
|
harvester/post_processing/dedup_sourceresource.py
|
# pass in a Couchdb doc, get back one with de-duplicated sourceResource values
def dedup_sourceresource(doc):
''' Look for duplicate values in the doc['sourceResource'] and
remove.
Values must be *exactly* the same
'''
for key, value in doc['sourceResource'].items():
if not isinstance(value, basestring):
new_list = []
for item in value:
if item not in new_list:
new_list.append(item)
doc['sourceResource'][key] = new_list
return doc
|
# pass in a Couchdb doc, get back one with de-duplicated sourceResource values
def dedup_sourceresource(doc):
''' Look for duplicate values in the doc['sourceResource'] and
remove.
Values must be *exactly* the same
'''
for key, value in doc['sourceResource'].items():
if isinstance(value, list):
# can't use set() because of dict values (non-hashable)
new_list = []
for item in value:
if item not in new_list:
new_list.append(item)
doc['sourceResource'][key] = new_list
return doc
|
Make sure dedup item is a list.
|
Make sure dedup item is a list.
|
Python
|
bsd-3-clause
|
barbarahui/harvester,ucldc/harvester,ucldc/harvester,mredar/harvester,mredar/harvester,barbarahui/harvester
|
b98bd25a8b25ca055ca92393f24b6a04382457a8
|
forms.py
|
forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length
def flash_errors(form):
""" Universal interface to handle form error.
Handles form error with the help of flash message
"""
for field, errors in form.errors.items():
for error in errors:
flash(u'Error in the %s field - %s' % (
getattr(form, field).label.text,
error
))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
|
Add universal interface for validation error message
|
Add universal interface for validation error message
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
1af120a5ce7f2fc35aeb7e77a747b0e8382bba51
|
api_tests/utils.py
|
api_tests/utils.py
|
from blinker import ANY
from urlparse import urlparse
from contextlib import contextmanager
from addons.osfstorage import settings as osfstorage_settings
def create_test_file(node, user, filename='test_file', create_guid=True):
osfstorage = node.get_addon('osfstorage')
root_node = osfstorage.get_root()
test_file = root_node.append_file(filename)
if create_guid:
test_file.get_guid(create=True)
test_file.create_version(user, {
'object': '06d80e',
'service': 'cloud',
osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
}, {
'size': 1337,
'contentType': 'img/png'
}).save()
return test_file
def urlparse_drop_netloc(url):
url = urlparse(url)
if url[4]:
return url[2] + '?' + url[4]
return url[2]
@contextmanager
def disconnected_from_listeners(signal):
"""Temporarily disconnect all listeners for a Blinker signal."""
listeners = list(signal.receivers_for(ANY))
for listener in listeners:
signal.disconnect(listener)
yield
for listener in listeners:
signal.connect(listener)
|
from blinker import ANY
from urlparse import urlparse
from contextlib import contextmanager
from addons.osfstorage import settings as osfstorage_settings
def create_test_file(target, user, filename='test_file', create_guid=True):
osfstorage = target.get_addon('osfstorage')
root_node = osfstorage.get_root()
test_file = root_node.append_file(filename)
if create_guid:
test_file.get_guid(create=True)
test_file.create_version(user, {
'object': '06d80e',
'service': 'cloud',
osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
}, {
'size': 1337,
'contentType': 'img/png'
}).save()
return test_file
def urlparse_drop_netloc(url):
url = urlparse(url)
if url[4]:
return url[2] + '?' + url[4]
return url[2]
@contextmanager
def disconnected_from_listeners(signal):
"""Temporarily disconnect all listeners for a Blinker signal."""
listeners = list(signal.receivers_for(ANY))
for listener in listeners:
signal.disconnect(listener)
yield
for listener in listeners:
signal.connect(listener)
|
Update api test util to create files to use target name instead
|
Update api test util to create files to use target name instead
|
Python
|
apache-2.0
|
mattclark/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,adlius/osf.io,pattisdr/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mattclark/osf.io,mfraezz/osf.io,mfraezz/osf.io,cslzchen/osf.io,felliott/osf.io,adlius/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,saradbowman/osf.io,felliott/osf.io,Johnetordoff/osf.io,erinspace/osf.io,caseyrollins/osf.io,aaxelb/osf.io,erinspace/osf.io,brianjgeiger/osf.io,adlius/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,felliott/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,baylee-d/osf.io,mfraezz/osf.io,felliott/osf.io,caseyrollins/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,adlius/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,Johnetordoff/osf.io
|
21dbb8af7412c04b768a9d68e1f8566786d5100c
|
mdot_rest/serializers.py
|
mdot_rest/serializers.py
|
from .models import Resource, ResourceLink, IntendedAudience
from rest_framework import serializers
class ResourceLinkSerializer(serializers.ModelSerializer):
class Meta:
model = ResourceLink
fields = ('link_type', 'url',)
class IntendedAudienceSerializer(serializers.ModelSerializer):
class Meta:
model = IntendedAudience
fields = ('audience',)
class ResourceSerializer(serializers.ModelSerializer):
resource_links = ResourceLinkSerializer(many=True, read_only=True)
intended_audiences = IntendedAudienceSerializer(many=True, read_only=True)
class Meta:
model = Resource
fields = (
'id',
'title',
'feature_desc',
'featured',
'accessible',
'responsive_web',
'resource_links',
'intended_audiences',
'campus_bothell',
'campus_tacoma',
'campus_seattle',
'created_date',
'last_modified',
)
|
from .models import Resource, ResourceLink, IntendedAudience
from rest_framework import serializers
class ResourceLinkSerializer(serializers.ModelSerializer):
class Meta:
model = ResourceLink
fields = ('link_type', 'url',)
class IntendedAudienceSerializer(serializers.ModelSerializer):
class Meta:
model = IntendedAudience
fields = ('audience',)
class ResourceSerializer(serializers.ModelSerializer):
resource_links = ResourceLinkSerializer(many=True, read_only=True)
intended_audiences = IntendedAudienceSerializer(many=True, read_only=True)
class Meta:
model = Resource
fields = (
'id',
'title',
'feature_desc',
'image',
'featured',
'accessible',
'responsive_web',
'resource_links',
'intended_audiences',
'campus_bothell',
'campus_tacoma',
'campus_seattle',
'created_date',
'last_modified',
)
|
Add image field to the resource serialization.
|
Add image field to the resource serialization.
|
Python
|
apache-2.0
|
uw-it-aca/mdot-rest,uw-it-aca/mdot-rest
|
f7c9bbd5ac49254d564a56ba3713b55abcfa4079
|
byceps/blueprints/news/views.py
|
byceps/blueprints/news/views.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.news.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
ITEMS_PER_PAGE = 4
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items = news_service.get_items_paginated(g.party.brand.id, page,
ITEMS_PER_PAGE)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.news.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, current_app, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items_per_page = _get_items_per_page_value()
items = news_service.get_items_paginated(g.party.brand.id, page,
items_per_page)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
def _get_items_per_page_value(default=4):
return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default))
|
Allow configuration of the number of news items per page
|
Allow configuration of the number of news items per page
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
5352e164b38099cbc7fe4eba87c00bc1c1d30d44
|
bluezero/eddystone.py
|
bluezero/eddystone.py
|
"""
Level 1 file for creating Eddystone beacons
"""
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url):
service_data = tools.url_to_advert(url, 0x10, 0x00)
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
|
"""
Level 1 file for creating Eddystone beacons
"""
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url, tx_power=0x08):
"""
The Eddystone-URL frame broadcasts a URL using a compressed encoding
format in order to fit more within the limited advertisement packet.
Example:
>>> from bluezero import eddystone
>>> eddystone.EddystoneURL('https://github.com/ukBaz')
:param url: String containing URL e.g. ('http://camjam.me')
:param tx_power:
"""
service_data = tools.url_to_advert(url, 0x10, tx_power)
if len(service_data) > 17:
raise Exception('URL too long')
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
|
Test for URL length error
|
Test for URL length error
|
Python
|
mit
|
ukBaz/python-bluezero,ukBaz/python-bluezero
|
8d229401ea69799638d8cd005bc4dc87bb4327a4
|
src/mist/io/tests/MyRequestsClass.py
|
src/mist/io/tests/MyRequestsClass.py
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or withour cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None):
self.headers = {'Cookie': cookie}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
import requests
class MyRequests(object):
"""
Simple class to make requests with or withour cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
|
Add csrf token in MyRequests class
|
Add csrf token in MyRequests class
|
Python
|
agpl-3.0
|
kelonye/mist.io,munkiat/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,kelonye/mist.io,zBMNForks/mist.io,Lao-liu/mist.io,johnnyWalnut/mist.io,munkiat/mist.io,DimensionDataCBUSydney/mist.io,Lao-liu/mist.io,zBMNForks/mist.io,zBMNForks/mist.io,johnnyWalnut/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,munkiat/mist.io,kelonye/mist.io
|
6dceb819f86fd469a4d817dec0156646a5f574cf
|
matchzoo/data_generator/callbacks/lambda_callback.py
|
matchzoo/data_generator/callbacks/lambda_callback.py
|
from matchzoo.data_generator.callbacks.callback import Callback
class LambdaCallback(Callback):
"""
LambdaCallback. Just a shorthand for creating a callback class.
See :class:`matchzoo.data_generator.callbacks.Callback` for more details.
Example:
>>> from matchzoo.data_generator.callbacks import LambdaCallback
>>> callback = LambdaCallback(on_batch_unpacked=print)
>>> callback.on_batch_unpacked('x', 'y')
x y
"""
def __init__(self, on_batch_data_pack=None, on_batch_unpacked=None):
"""Init."""
self._on_batch_unpacked = on_batch_unpacked
self._on_batch_data_pack = on_batch_data_pack
def on_batch_data_pack(self, data_pack):
"""`on_batch_data_pack`."""
if self._on_batch_data_pack:
self._on_batch_data_pack(data_pack)
def on_batch_unpacked(self, x, y):
"""`on_batch_unpacked`."""
if self._on_batch_unpacked:
self._on_batch_unpacked(x, y)
|
from matchzoo.data_generator.callbacks.callback import Callback
class LambdaCallback(Callback):
"""
LambdaCallback. Just a shorthand for creating a callback class.
See :class:`matchzoo.data_generator.callbacks.Callback` for more details.
Example:
>>> import matchzoo as mz
>>> from matchzoo.data_generator.callbacks import LambdaCallback
>>> data = mz.datasets.toy.load_data()
>>> batch_func = lambda x: print(type(x))
>>> unpack_func = lambda x, y: print(type(x), type(y))
>>> callback = LambdaCallback(on_batch_data_pack=batch_func,
... on_batch_unpacked=unpack_func)
>>> data_gen = mz.DataGenerator(
... data, batch_size=len(data), callbacks=[callback])
>>> _ = data_gen[0]
<class 'matchzoo.data_pack.data_pack.DataPack'>
<class 'dict'> <class 'numpy.ndarray'>
"""
def __init__(self, on_batch_data_pack=None, on_batch_unpacked=None):
"""Init."""
self._on_batch_unpacked = on_batch_unpacked
self._on_batch_data_pack = on_batch_data_pack
def on_batch_data_pack(self, data_pack):
"""`on_batch_data_pack`."""
if self._on_batch_data_pack:
self._on_batch_data_pack(data_pack)
def on_batch_unpacked(self, x, y):
"""`on_batch_unpacked`."""
if self._on_batch_unpacked:
self._on_batch_unpacked(x, y)
|
Update data generator lambda callback docs.
|
Update data generator lambda callback docs.
|
Python
|
apache-2.0
|
faneshion/MatchZoo,faneshion/MatchZoo
|
73e4f2c333e7b4f02dbb0ec344a3a671ba97cac3
|
library-examples/read-replace-export-excel.py
|
library-examples/read-replace-export-excel.py
|
"""
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
import shutil
from openpyxl import load_workbook, Workbook
shutil.copyfile('sample-input-fortest.xlsx','sample-input-fortest-out.xlsx')
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
print "can iterate sheets, rows and columns intuitively"
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
seq_no+=1
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
|
"""
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
import shutil
from openpyxl import load_workbook, Workbook
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
#go trhough the excel file, extract strings & replace with number.
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
cell.value=str(seq_no)
seq_no+=1
#save the file containing numbers that replaced the string.
wb2.save('sample-input-fortest-out.xlsx')
#save the extracted strings
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
|
Change so original input does not change.
|
Change so original input does not change.
|
Python
|
apache-2.0
|
iku000888/Excel_Translation_Helper
|
dfaffd1e2c189a9d85c493db76b9751b4c802bce
|
python/scannerpy/stdlib/tensorflow.py
|
python/scannerpy/stdlib/tensorflow.py
|
from ..kernel import Kernel
from scannerpy import DeviceType
class TensorFlowKernel(Kernel):
def __init__(self, config):
import tensorflow as tf
# If this is a CPU kernel, tell TF that it should not use
# any GPUs for its graph operations
cpu_only = True
visible_device_list = []
tf_config = tf.ConfigProto()
for handle in config.devices:
if handle.type == DeviceType.GPU.value:
visible_device_list.append(str(handle.id))
cpu_only = False
if cpu_only:
tf_config.device_count['GPU'] = 0
else:
tf_config.gpu_options.visible_device_list = ','.join(visible_device_list)
# TODO: wrap this in "with device"
self.config = config
self.tf_config = tf_config
self.graph = self.build_graph()
self.sess = tf.Session(config=self.tf_config, graph=self.graph)
self.sess.as_default()
self.protobufs = config.protobufs
def close(self):
self.sess.close()
def build_graph(self):
raise NotImplementedError
def execute(self):
raise NotImplementedError
|
from ..kernel import Kernel
from scannerpy import DeviceType
class TensorFlowKernel(Kernel):
def __init__(self, config):
import tensorflow as tf
# If this is a CPU kernel, tell TF that it should not use
# any GPUs for its graph operations
cpu_only = True
visible_device_list = []
tf_config = tf.ConfigProto()
for handle in config.devices:
if handle.type == DeviceType.GPU.value:
visible_device_list.append(str(handle.id))
tf_config.gpu_options.allow_growth = True
cpu_only = False
if cpu_only:
tf_config.device_count['GPU'] = 0
else:
tf_config.gpu_options.visible_device_list = ','.join(visible_device_list)
# TODO: wrap this in "with device"
self.config = config
self.tf_config = tf_config
self.graph = self.build_graph()
self.sess = tf.Session(config=self.tf_config, graph=self.graph)
self.sess.as_default()
self.protobufs = config.protobufs
def close(self):
self.sess.close()
def build_graph(self):
raise NotImplementedError
def execute(self):
raise NotImplementedError
|
Fix TF using all the GPU memory
|
Fix TF using all the GPU memory
|
Python
|
apache-2.0
|
scanner-research/scanner,scanner-research/scanner,scanner-research/scanner,scanner-research/scanner
|
9172166ee492e73865c69f76d76690034ef5a402
|
adcode/context_processors.py
|
adcode/context_processors.py
|
"Context processors for adding current sections and placements in the context."
import re
from .conf import SECTION_CONTEXT_KEY, PLACEMENTS_CONTEXT_KEY
from .models import Section, Placement
def current_placements(request):
"Match current section to request path and get related placements."
# TODO: Add caching
current = None
placements = Placement.objects.none()
sections = Section.objects.all()
for section in sections:
pattern = re.compile(section.pattern)
if pattern.search(request.path):
current = section
break
if current:
placements = Placement.objects.filter(sections=current).select_related('size')
return {SECTION_CONTEXT_KEY: current, PLACEMENTS_CONTEXT_KEY: placements}
|
"Context processors for adding current sections and placements in the context."
import re
from .conf import SECTION_CONTEXT_KEY, PLACEMENTS_CONTEXT_KEY
from .models import Section, Placement
def current_placements(request):
"Match current section to request path and get related placements."
# TODO: Add caching
current = None
placements = Placement.objects.none()
sections = Section.objects.order_by('-priority')
for section in sections:
pattern = re.compile(section.pattern)
if pattern.search(request.path):
current = section
break
if current:
placements = Placement.objects.filter(sections=current).select_related('size')
return {SECTION_CONTEXT_KEY: current, PLACEMENTS_CONTEXT_KEY: placements}
|
Order by priority in context_processor.
|
Order by priority in context_processor.
|
Python
|
bsd-2-clause
|
mlavin/django-ad-code,mlavin/django-ad-code
|
4426fe1b6c77bb3c38a4324470a3d46461ef4661
|
orchestrator/__init__.py
|
orchestrator/__init__.py
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.8'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.9'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
Prepare for next dev version
|
Prepare for next dev version
|
Python
|
mit
|
totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator
|
f26e6fb7e7fbf9eedd181bea65d091f24262a14f
|
backend/util.py
|
backend/util.py
|
"""General utilities."""
import urlparse
import logging
def ConstantTimeIsEqual(a, b):
"""Securely compare two strings without leaking timing information."""
if len(a) != len(b):
return False
acc = 0
for x, y in zip(a, b):
acc |= ord(x) ^ ord(y)
return acc == 0
# TODO(hjfreyer): Pull into some kind of middleware?
def EnableCors(handler):
"""Inside a request, set the headers to allow being called cross-domain."""
if 'Origin' in handler.request.headers:
origin = handler.request.headers['Origin']
_, netloc, _, _, _, _ = urlparse.urlparse(origin)
if not (handler.app.config['env'].app_name == 'local' or
netloc == 'mayone.us' or netloc.endswith('.mayone.us') or
netloc == 'mayday.us' or netloc.endswith('.mayday.us')):
logging.warning('Invalid origin: ' + origin)
handler.error(403)
return
handler.response.headers.add_header('Access-Control-Allow-Origin', origin)
handler.response.headers.add_header('Access-Control-Allow-Methods',
'GET, POST')
handler.response.headers.add_header('Access-Control-Allow-Headers',
'content-type, origin')
|
"""General utilities."""
import urlparse
import logging
def ConstantTimeIsEqual(a, b):
"""Securely compare two strings without leaking timing information."""
if len(a) != len(b):
return False
acc = 0
for x, y in zip(a, b):
acc |= ord(x) ^ ord(y)
return acc == 0
# TODO(hjfreyer): Pull into some kind of middleware?
def EnableCors(handler):
"""Inside a request, set the headers to allow being called cross-domain."""
if 'Origin' in handler.request.headers:
origin = handler.request.headers['Origin']
_, netloc, _, _, _, _ = urlparse.urlparse(origin)
if not (handler.app.config['env'].app_name == 'local' or
netloc == 'mayone.us' or netloc.endswith('.mayone.us') or
netloc == 'mayday.us' or netloc.endswith('.mayday.us') or
netloc == 'test-dot-mayday-pac-teams.appspot.com'):
logging.warning('Invalid origin: ' + origin)
handler.error(403)
return
handler.response.headers.add_header('Access-Control-Allow-Origin', origin)
handler.response.headers.add_header('Access-Control-Allow-Methods',
'GET, POST')
handler.response.headers.add_header('Access-Control-Allow-Headers',
'content-type, origin')
|
Allow test version in via CORS
|
Allow test version in via CORS
|
Python
|
apache-2.0
|
Rio517/pledgeservice,MayOneUS/pledgeservice,Rio517/pledgeservice,Rio517/pledgeservice,MayOneUS/pledgeservice
|
23d4081392f84f2d5359f44ed4dde41611bb4cd2
|
tests/race_deleting_keys_test.py
|
tests/race_deleting_keys_test.py
|
import nose.plugins.attrib
import time as _time
import subprocess
import sys
import redisdl
import unittest
import json
import os.path
from . import util
from . import big_data
@nose.plugins.attrib.attr('slow')
class RaceDeletingKeysTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_delete_race(self):
bd = big_data.BigData(self.r)
count = bd.determine_key_count()
# data is already inserted
big_data_path = os.path.join(os.path.dirname(__file__), 'big_data.py')
p = subprocess.Popen(
[sys.executable, big_data_path, 'delete', str(count)],
stdout=subprocess.PIPE,
)
_time.sleep(1)
start = _time.time()
dump = redisdl.dumps()
finish = _time.time()
out, err = p.communicate()
delete_start, delete_finish = [int(time) for time in out.split(' ')]
assert delete_start < start
assert delete_finish > finish
|
import nose.plugins.attrib
import time as _time
import subprocess
import sys
import redisdl
import unittest
import json
import os.path
from . import util
from . import big_data
@nose.plugins.attrib.attr('slow')
class RaceDeletingKeysTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_delete_race(self):
bd = big_data.BigData(self.r)
count = bd.determine_key_count()
# data is already inserted
big_data_path = os.path.join(os.path.dirname(__file__), 'big_data.py')
p = subprocess.Popen(
[sys.executable, big_data_path, 'delete', str(count)],
stdout=subprocess.PIPE,
)
_time.sleep(1)
start = _time.time()
dump = redisdl.dumps()
finish = _time.time()
out, err = p.communicate()
delete_start, delete_finish = [int(time) for time in out.split(' ')]
assert delete_start < start
assert finish > start + 5
assert delete_finish > start + 5
|
Replace finish order requirement with a duration requirement
|
Replace finish order requirement with a duration requirement
|
Python
|
bsd-2-clause
|
p/redis-dump-load,hyunchel/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load
|
93903d065cd1ff8f3f0c715668f05c804c5561f9
|
profile/linearsvc.py
|
profile/linearsvc.py
|
import cProfile
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
from sklearn.metrics import accuracy_score
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {})
svc.fit(X, y)
results = svc.predict(X)
accuracy = accuracy_score(y, results)
print("Accuracy: {}".format(accuracy))
|
import timeit
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
from sklearn.metrics import accuracy_score
setup = """
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
"""
time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1)
print("Time: {}".format(time))
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
svc.fit(X, y)
results = svc.predict(X)
accuracy = accuracy_score(y, results)
print("Accuracy: {}".format(accuracy))
|
Use timeit instead of cProfile
|
Use timeit instead of cProfile
|
Python
|
mit
|
JuliaPackageMirrors/SoftConfidenceWeighted.jl,IshitaTakeshi/SoftConfidenceWeighted.jl
|
7ee8bbc5cb1527c55a04aff1421f043fdfa292cf
|
sample-code/examples/python/android_web_view.py
|
sample-code/examples/python/android_web_view.py
|
import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(
os.path.dirname(__file__), '../../apps/WebViewDemo/target')
+ '/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName': "native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub',
desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(
os.path.dirname(__file__), '../../apps/WebViewDemo/target')
+ '/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName': "native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub',
desired_caps)
def test(self):
button = self.driver.find_element_by_id('buttonStartWebview')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
Update android web view example.
|
Update android web view example.
|
Python
|
apache-2.0
|
appium/appium,Sw0rdstream/appium,appium/appium,appium/appium,appium/appium,appium/appium,appium/appium
|
3ecfdf41da3eb3b881c112254b913ff907424bd7
|
Scripts/2-Upload.py
|
Scripts/2-Upload.py
|
import os
import json
# Get Steam settings
steamData = open("steam.json")
steamConfig = json.load(steamData)
steamSDKDir = steamConfig["sdkDir"]
steamBuilder = steamConfig["builder"]
steamCommand = steamConfig["command"]
steamAppFile = steamConfig["appFile"]
steamUser = steamConfig["user"]
steamPassword = steamConfig["password"]
steamData.close()
# Generate paths
buildAppFile = os.path.join("..", steamAppFile)
buildRootDir = os.path.join(steamSDKDir, "tools", "ContentBuilder")
# Generate full command line
commandLine = os.path.join(steamBuilder, steamCommand)
commandLine += " +login " + steamUser + " " + steamPassword
commandLine += " +run_app_build " + buildAppFile
commandLine += " +quit"
# Call
currentPath = os.getcwd()
os.chdir(buildRootDir)
os.system(commandLine)
os.chdir(currentPath)
|
#!/usr/bin/env python
import os
import json
# Get Steam settings
steamData = open("steam.json")
steamConfig = json.load(steamData)
steamSDKDir = steamConfig["sdkDir"]
steamBuilder = steamConfig["builder"]
steamCommand = steamConfig["command"]
steamAppFile = steamConfig["appFile"]
steamUser = steamConfig["user"]
steamPassword = steamConfig["password"]
steamData.close()
# Generate paths
buildAppFile = os.path.join("..", steamAppFile)
buildRootDir = os.path.join(steamSDKDir, "tools", "ContentBuilder")
# Generate full command line
commandLine = os.path.join(steamBuilder, steamCommand)
commandLine += " +login " + steamUser + " " + steamPassword
commandLine += " +run_app_build " + buildAppFile
commandLine += " +quit"
# Call
currentPath = os.getcwd()
os.chdir(buildRootDir)
os.system(commandLine)
os.chdir(currentPath)
|
Make steam upload script works for Linux
|
Make steam upload script works for Linux
|
Python
|
bsd-3-clause
|
arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain
|
92b7a5463e505f84862dd96e07c9caa5a97107a9
|
client/test/server_tests.py
|
client/test/server_tests.py
|
from nose.tools import *
from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class MyTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def test_get(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def test_get_command_not_found(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get)
|
from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get)
|
Change function names to camelCase
|
Change function names to camelCase
|
Python
|
mit
|
CaminsTECH/owncloud-test
|
52584725e462ab304bc2e976fa691f0d830e7efb
|
Speech/processor.py
|
Speech/processor.py
|
# Retrieve file from Facebook
import urllib, convert, re, os
# from speech_py import speech_to_text_offline as STT_o
# from speech_py import speech_to_text_google as STT
from speech_py import speech_to_text_ibm_rest as STT
def transcribe(audio_url):
if not os.path.isdir('./audio/retrieved_audio'):
os.makedirs('./audio/retrieved_audio')
reg_ex = '\w+.mp4'
file_name = re.search(reg_ex, audio_url).group(0)
urllib.urlretrieve(audio_url, './audio/retrieved_audio/{}'.format(file_name))
convert.convert('./audio/retrieved_audio/{}'.format(file_name))
# Converted in: ./converted/{name}.wav
return STT('./audio/converted/{}'.format(file_name[:-4]+".wav"))
|
# Retrieve file from Facebook
import urllib, convert, re, os
# from speech_py import speech_to_text_google as STT
from speech_py import speech_to_text_ibm_rest as STT
def transcribe(audio_url):
if not os.path.isdir('./audio/retrieved_audio'):
os.makedirs('./audio/retrieved_audio')
reg_ex = '\w+.mp4'
file_name = re.search(reg_ex, audio_url).group(0)
urllib.urlretrieve(audio_url, './audio/retrieved_audio/{}'.format(file_name))
convert.convert('./audio/retrieved_audio/{}'.format(file_name))
# Converted in: ./converted/{name}.wav
return STT('./audio/converted/{}'.format(file_name[:-4]+".wav"))
|
Modify ffmpeg path heroku 3
|
Modify ffmpeg path heroku 3
|
Python
|
mit
|
hungtraan/FacebookBot,hungtraan/FacebookBot,hungtraan/FacebookBot
|
14e000acafe7c374294a7de6ffe295c9d56df68f
|
tests/test_postgresql_specific.py
|
tests/test_postgresql_specific.py
|
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': 'dummy_data'})
assert tm.get_dirty_fields() == {}
tm.json_field = {'data': 'foo'}
assert tm.get_dirty_fields() == {'json_field': {'data': 'dummy_data'}}
|
import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': [1, 2, 3]})
data = tm.json_field['data']
data.append(4)
assert tm.get_dirty_fields(verbose=True) == {
'json_field': {
'current': {'data': [1, 2, 3, 4]},
'saved': {'data': [1, 2, 3]}
}
}
|
Update postgresql json_field to reflect deepcopy fix
|
Update postgresql json_field to reflect deepcopy fix
|
Python
|
bsd-3-clause
|
jdotjdot/django-dirtyfields,romgar/django-dirtyfields,smn/django-dirtyfields
|
db8e02661df65e1a50c5810968afef7ecd44db42
|
braid/bazaar.py
|
braid/bazaar.py
|
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
# FIXME (https://github.com/twisted-infra/braid/issues/5)
# We currently don't check that this the correct branch
run('bzr update {}'.format(location))
|
import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
run('bzr pull --overwrite -d {} {}'.format(location, branch))
|
Make bzr always pull from the specified remote.
|
Make bzr always pull from the specified remote.
Refs: #5.
|
Python
|
mit
|
alex/braid,alex/braid
|
3d5d6d093420294ed7b5fa834285d1d55da82d5d
|
pyroSAR/tests/test_snap_exe.py
|
pyroSAR/tests/test_snap_exe.py
|
import pytest
from contextlib import contextmanager
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_warn_snap(self):
with pytest.warns(UserWarning):
ExamineExe.examine('snap')
# def test_not_exception(self):
# SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
# with not_raises(ValueError):
# ExamineExe.examine(SNAP_EXECUTABLE)
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
# def test_not_exception(self):
# with not_raises(AssertionError):
# test_snap_exe = ExamineSnap()
|
from contextlib import contextmanager
import pytest
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_not_exception(self):
SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
with pytest.warns(None) as record:
ExamineExe.examine(SNAP_EXECUTABLE)
assert len(record) == 0
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineSnap(snap_executable='some_exe_file.exe')
def test_not_exception(self):
with pytest.warns(None) as record:
ExamineSnap()
assert len(record) == 0
|
Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly.
|
Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly.
Fixed a bug `assert len(record) == 1` in 'test_not_exception' method in class `TestExamineExe`.
|
Python
|
mit
|
johntruckenbrodt/pyroSAR,johntruckenbrodt/pyroSAR
|
b55c4c0536ca23484375d93f2ef011de0d5ce417
|
app/app.py
|
app/app.py
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello Docker + Nginx + Gunicorn + Flask!'
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello Docker + Nginx + Gunicorn + Flask!'
|
Remove __name__ == __main__ becuase it'll never be used
|
Remove __name__ == __main__ becuase it'll never be used
|
Python
|
mit
|
everett-toews/guestbook,rackerlabs/guestbook,everett-toews/guestbook,rackerlabs/guestbook
|
ed326fba4f44552eeb206f3c5af9ad6f5e89ca44
|
localeurl/models.py
|
localeurl/models.py
|
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
|
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
if reverse_kwargs!=None:
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
else:
locale = translation.get_language()
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
|
Handle situation when kwargs is None
|
Handle situation when kwargs is None
|
Python
|
mit
|
eugena/django-localeurl
|
a247cd42a79fa96d6b61fcd131a6e0c8d8cf57fe
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
import os.path
import urllib.parse
def readme():
with open('README.md') as f:
return f.read()
setup(name='herc',
version='0.1',
description='Herc is a webservice that dispatches jobs to Apache Aurora.',
long_description=readme(),
url='http://github.com/broadinstitute/herc',
author='The Broad Institute',
packages=find_packages(exclude='tests'),
package_data={
# Include everything in data/, both schemas and examples.
'': ['data/*']
},
install_requires=[
'tornado>=4.0',
'jsonschema',
'futures',
'Jinja2>=2.2',
'jsonref',
'pyhocon',
'mock',
'arrow',
'nose',
'requests',
'thrift==1.0.0-py3',
'munch'
],
entry_points={'console_scripts': ['herc = herc.webservice:main']},
dependency_links = [urllib.parse.urljoin('file:', os.path.join(os.getcwd(), 'thrift-1.0.0-py3.tar.gz'))],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose']
)
|
from setuptools import setup, find_packages
import os
import os.path
import urllib.parse
def readme():
with open('README.md') as f:
return f.read()
setup(name='herc',
version='0.1',
description='Herc is a webservice that dispatches jobs to Apache Aurora.',
long_description=readme(),
url='http://github.com/broadinstitute/herc',
author='The Broad Institute',
packages=find_packages(exclude='tests'),
data_files=[('data/aurora', ['data/aurora/api.thrift']),
('data/schemas', ['data/schemas/jobsubmit.json'])],
install_requires=[
'tornado>=4.0',
'jsonschema',
'futures',
'Jinja2>=2.2',
'jsonref',
'pyhocon',
'mock',
'arrow',
'nose',
'requests',
'thrift==1.0.0-py3',
'munch'
],
entry_points={'console_scripts': ['herc = herc.webservice:main']},
dependency_links = [urllib.parse.urljoin('file:', os.path.join(os.getcwd(), 'thrift-1.0.0-py3.tar.gz'))],
zip_safe=False,
test_suite='nose.collector',
tests_require=['nose']
)
|
Fix copying of files in herc/data during installation.
|
Fix copying of files in herc/data during installation.
|
Python
|
bsd-3-clause
|
broadinstitute/herc,broadinstitute/herc,broadinstitute/herc
|
4b35247fe384d4b2b206fa7650398511a493253c
|
setup.py
|
setup.py
|
from distutils.core import setup
import sys
import os
import re
PACKAGENAME = 'OpSimSummary'
packageDir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'opsimsummary')
versionFile = os.path.join(packageDir, 'version.py')
# Obtain the package version
with open(versionFile, 'r') as f:
s = f.read()
# Look up the string value assigned to __version__ in version.py using regexp
versionRegExp = re.compile("__VERSION__ = \"(.*?)\"")
# Assign to __version__
__version__ = versionRegExp.findall(s)[0]
print(__version__)
setup(# package information
name=PACKAGENAME,
version=__version__,
description='simple repo to study OpSim output summaries',
long_description=''' ''',
# What code to include as packages
packages=[PACKAGENAME],
packagedir={PACKAGENAME: 'opsimsummary'},
# What data to include as packages
include_package_data=True,
package_data={PACKAGENAME:['example_data/*.dat', 'example_data/*.simlib']}
)
|
from distutils.core import setup
import sys
import os
import re
PACKAGENAME = 'OpSimSummary'
packageDir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
PACKAGENAME)
versionFile = os.path.join(packageDir, 'version.py')
# Obtain the package version
with open(versionFile, 'r') as f:
s = f.read()
# Look up the string value assigned to __version__ in version.py using regexp
versionRegExp = re.compile("__VERSION__ = \"(.*?)\"")
# Assign to __version__
__version__ = versionRegExp.findall(s)[0]
print(__version__)
setup(# package information
name=PACKAGENAME,
version=__version__,
description='simple repo to study OpSim output summaries',
long_description=''' ''',
# What code to include as packages
packages=[PACKAGENAME],
packagedir={PACKAGENAME: 'opsimsummary'},
# What data to include as packages
include_package_data=True,
package_data={PACKAGENAME:['example_data/*.dat', 'example_data/*.simlib']}
)
|
Revert "Revert "Changed back due to problems, will fix later""
|
Revert "Revert "Changed back due to problems, will fix later""
This reverts commit 5e92c0ef714dea823e1deeef21b5141d9e0111a0.
modified: setup.py
|
Python
|
mit
|
rbiswas4/simlib
|
83c0cb83a5eeaff693765c7d297b470adfdcec9e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
version = __import__('stale').__version__
setup(
name="stale",
version=version,
description="Identifies (and optionally removes) stale Delicious links",
author="Jon Parise",
author_email="[email protected]",
url="http://bitbucket.org/jparise/stale/",
scripts = ['stale.py'],
license = "MIT License",
classifiers = ['License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'],
)
|
#!/usr/bin/env python
from distutils.core import setup
version = __import__('stale').__version__
setup(
name="stale",
version=version,
description="Identifies (and optionally removes) stale Delicious links",
author="Jon Parise",
author_email="[email protected]",
url="https://github.com/jparise/stale",
scripts = ['stale.py'],
license = "MIT License",
classifiers = ['License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'],
)
|
Use the GitHub URL instead of the BitBucket URL
|
Use the GitHub URL instead of the BitBucket URL
|
Python
|
mit
|
jparise/stale
|
2aeeb23e9771b67234dd6fef338e57000412b784
|
setup.py
|
setup.py
|
import os
import sys
from setuptools import setup, Extension
with open("README.rst") as fp:
long_description = fp.read()
extensions = []
if os.name == 'nt':
ext = Extension(
'trollius._overlapped', ['overlapped.c'], libraries=['ws2_32'],
)
extensions.append(ext)
requirements = ['six']
if sys.version_info < (3,):
requirements.append('futures')
setup(
name="trollius",
version="2.2.post2.dev0",
license="Apache License 2.0",
author='Victor Stinner',
author_email='[email protected]',
description="Deprecated, unmaintained port of the asyncio module (PEP 3156) on Python 2",
long_description=long_description,
url="https://github.com/jamadden/trollius",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
],
packages=[
"trollius",
],
zip_safe=False,
keywords="Deprecated Unmaintained asyncio backport",
ext_modules=extensions,
install_requires=requirements,
python_requires=">=2.7, < 3",
)
|
import os
import sys
from setuptools import setup, Extension
with open("README.rst") as fp:
long_description = fp.read()
extensions = []
if os.name == 'nt':
ext = Extension(
'trollius._overlapped', ['overlapped.c'], libraries=['ws2_32'],
)
extensions.append(ext)
requirements = ['six']
if sys.version_info < (3,):
requirements.append('futures')
setup(
name="trollius",
version="2.2.post2.dev0",
license="Apache License 2.0",
author='Victor Stinner',
author_email='[email protected]',
description="Deprecated, unmaintained port of the asyncio module (PEP 3156) on Python 2",
long_description=long_description,
url="https://github.com/jamadden/trollius",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 7 - Inactive",
],
packages=[
"trollius",
],
zip_safe=False,
keywords="Deprecated Unmaintained asyncio backport",
ext_modules=extensions,
install_requires=requirements,
python_requires=">=2.7, < 3",
)
|
Add Development Status :: 7 - Inactive classifier.
|
Add Development Status :: 7 - Inactive classifier.
|
Python
|
apache-2.0
|
haypo/trollius,haypo/trollius,haypo/trollius
|
2266ca63ec23fd768c659ee4b3988fce7cd523c6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# Create wheel with: python setup.py bdist_wheel
# Install with: pip install dist/loadconfig-*-none-any.whl
from os import environ
from os.path import dirname, abspath
from setuptools import setup
from six.moves.configparser import ConfigParser
c = ConfigParser()
c.read('{}/setup.cfg'.format(dirname(abspath(__file__))))
environ["PBR_VERSION"] = c.get('metadata', 'version')
setup(setup_requires=['pbr'], pbr=True)
|
#!/usr/bin/env python
# Create wheel with: python setup.py bdist_wheel
# Install with: pip install dist/loadconfig-*-none-any.whl
from os import environ
from os.path import dirname, abspath
from setuptools import setup
import sys
if sys.version_info[0] == 3:
from configparser import ConfigParser
else:
from ConfigParser import ConfigParser
c = ConfigParser()
c.read('{}/setup.cfg'.format(dirname(abspath(__file__))))
environ["PBR_VERSION"] = c.get('metadata', 'version')
setup(setup_requires=['pbr'], pbr=True)
|
Remove six dependency when pip installing from sources
|
Remove six dependency when pip installing from sources
Signed-off-by: Daniel Mizyrycki <[email protected]>
|
Python
|
mit
|
mzdaniel/loadconfig,mzdaniel/loadconfig
|
11f6fd6e2401af03730afccb14f843928c27c37a
|
setup.py
|
setup.py
|
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='savu',
version='0.1',
description='Savu Python Tomography Pipeline',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'
],
author='Mark Basham',
author_email='[email protected]',
license='Apache License, Version 2.0',
packages=['savu',
'savu.core',
'savu.data',
'savu.mpi_test',
'savu.mpi_test.dls',
'savu.plugins',
'savu.test'],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='savu',
version='0.1.1',
description='Savu Python Tomography Pipeline',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering'
],
author='Mark Basham',
author_email='[email protected]',
license='Apache License, Version 2.0',
packages=['savu',
'savu.core',
'savu.data',
'savu.mpi_test',
'savu.mpi_test.dls',
'savu.plugins',
'savu.test'],
include_package_data=True,
zip_safe=False)
|
Update to version 0.1.1 for the next push
|
Update to version 0.1.1 for the next push
|
Python
|
apache-2.0
|
mjn19172/Savu,swtp1v07/Savu,mjn19172/Savu,swtp1v07/Savu,swtp1v07/Savu,swtp1v07/Savu,mjn19172/Savu,mjn19172/Savu,mjn19172/Savu
|
32240a553a456e03b18c23fc6c32dd65865aa372
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='[email protected]',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='[email protected]',
install_requires=[
'Django>=1.7',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
Fix django version in requirements
|
Fix django version in requirements
|
Python
|
bsd-3-clause
|
diadzine/django-simple-multilingual-survey,diadzine/django-simple-multilingual-survey
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.