commit (stringlengths 40–40) | subject (stringlengths 4–1.73k) | repos (stringlengths 5–127k) | old_file (stringlengths 2–751) | new_file (stringlengths 2–751) | new_contents (stringlengths 1–8.98k) | old_contents (stringlengths 0–6.59k) | license (stringclasses, 13 values) | lang (stringclasses, 23 values)
---|---|---|---|---|---|---|---|---|
52e8734ef54772b4c67e40b887ca7d0ad925c6e8 | use project-version format for tag. | quantopian/ta-lib,quantopian/ta-lib | setup.py | setup.py |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
import os
import sys
if sys.platform == "darwin":
if os.path.exists("/opt/local/include/ta-lib"):
include_talib_dir = "/opt/local/include"
lib_talib_dir = "/opt/local/lib"
else:
include_talib_dir = "/usr/local/include/"
lib_talib_dir = "/usr/local/lib/"
elif sys.platform == "linux2" or "freebsd" in sys.platform:
include_talib_dir = "/usr/local/include/"
lib_talib_dir = "/usr/local/lib/"
elif sys.platform == "win32":
include_talib_dir = r"c:\msys\1.0\local\include"
lib_talib_dir = r"c:\msys\1.0\local\lib"
else:
raise NotImplementedError(sys.platform)
ext = Extension("talib", ["talib.pyx"],
include_dirs=[numpy.get_include(), include_talib_dir],
library_dirs=[lib_talib_dir],
libraries=["ta_lib"]
)
setup(
name = 'TA-Lib',
version = '0.4.1',
description = 'Python wrapper for TA-Lib',
author = 'John Benediktsson',
author_email = '[email protected]',
url = 'http://github.com/mrjbq7/ta-lib',
download_url = 'https://github.com/mrjbq7/ta-lib/archive/TA_Lib-0.4.1.zip',
classifiers = [
"Development Status :: 4 - Beta",
"Topic :: Scientific/Engineering :: Mathematics",
"License :: OSI Approved :: BSD License",
],
ext_modules=[ext],
cmdclass = {'build_ext': build_ext}
)
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
import os
import sys
if sys.platform == "darwin":
if os.path.exists("/opt/local/include/ta-lib"):
include_talib_dir = "/opt/local/include"
lib_talib_dir = "/opt/local/lib"
else:
include_talib_dir = "/usr/local/include/"
lib_talib_dir = "/usr/local/lib/"
elif sys.platform == "linux2" or "freebsd" in sys.platform:
include_talib_dir = "/usr/local/include/"
lib_talib_dir = "/usr/local/lib/"
elif sys.platform == "win32":
include_talib_dir = r"c:\msys\1.0\local\include"
lib_talib_dir = r"c:\msys\1.0\local\lib"
else:
raise NotImplementedError(sys.platform)
ext = Extension("talib", ["talib.pyx"],
include_dirs=[numpy.get_include(), include_talib_dir],
library_dirs=[lib_talib_dir],
libraries=["ta_lib"]
)
setup(
name = 'TA-Lib',
version = '0.4.1',
description = 'Python wrapper for TA-Lib',
author = 'John Benediktsson',
author_email = '[email protected]',
url = 'http://github.com/mrjbq7/ta-lib',
download_url = 'https://github.com/mrjbq7/ta-lib/archive/0.4.1.zip',
classifiers = [
"Development Status :: 4 - Beta",
"Topic :: Scientific/Engineering :: Mathematics",
"License :: OSI Approved :: BSD License",
],
ext_modules=[ext],
cmdclass = {'build_ext': build_ext}
)
| bsd-2-clause | Python |
4437ab3021d5d81c1e4f8682529456e17fe32e74 | Fix package_data installation. Code remains to be written to discover the path for namebench.cfg and alexa data | catap/namebench,jimmsta/namebench-1 | setup.py | setup.py |
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""distutils configuration: python setup.py install"""
__author__ = '[email protected] (Thomas Stromberg)'
from namebench import VERSION
from distutils.core import setup
setup(name='namebench',
version=VERSION,
py_modules=['namebench'],
description='DNS service benchmarking tool',
author='Thomas Stromberg',
author_email='[email protected]',
url='http://namebench.googlecode.com/',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache 2.0',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Networking',
],
packages=['libnamebench'],
platforms=['Any'],
requires=['graphy', 'dnspython', 'jinja2'],
license='Apache 2.0',
scripts=['namebench.py'],
data_files=[
('namebench', ['namebench.cfg']),
('namebench/data', ['data/alexa-top-10000-global.txt'])
]
)
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""distutils configuration."""
__author__ = '[email protected] (Thomas Stromberg)'
from namebench import VERSION
from distutils.core import setup
setup(name='namebench',
version=VERSION,
py_modules=['namebench'],
description='DNS service benchmarking tool',
author='Thomas Stromberg',
author_email='[email protected]',
url='http://namebench.googlecode.com/',
packages=('libnamebench',),
platforms=('Any',),
requires=['graphy', 'dnspython', 'jinja2'],
license='Apache 2.0',
scripts=['namebench.py'],
package_data = {'libnamebench': ['data/alexa-top-10000-global.txt',
'templates/ascii.tmpl',
'templates/html.tmpl',
'namebench.cfg']},
# package_data=[('data', ['data/alexa-top-10000-global.txt']),
# ('templates', ['templates/ascii.tmpl',
# 'templates/html.tmpl']),
# ('config', ['namebench.cfg'])]
)
| apache-2.0 | Python |
59cca112feb323630d4749c2a4fab9a2d59553b7 | add psycopg2 dependency | habalux/pg_testenv,habalux/pg_testenv | setup.py | setup.py |
from setuptools import setup
setup(
name='pg_testenv',
version="0.1",
author="Teemu Haapoja",
author_email="[email protected]",
description="PostgreSQL test instance creator",
license="BSD",
install_requires=['psycopg2'],
scripts = [
'pg_testenv'
],
)
|
from setuptools import setup
setup(
name='pg_testenv',
version="0.1",
author="Teemu Haapoja",
author_email="[email protected]",
description="PostgreSQL test instance creator",
license="BSD",
scripts = [
'pg_testenv'
],
)
| bsd-2-clause | Python |
ec5cf5b306ea20051cf5b02983fdd356f04a9a5e | switch to pyannote.core 0.8 | pyannote/pyannote-audio,pyannote/pyannote-audio,pyannote/pyannote-audio | setup.py | setup.py |
#!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2016 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
import versioneer
from setuptools import setup, find_packages
setup(
# package
namespace_packages=['pyannote'],
packages=find_packages(),
install_requires=[
'pyannote.core >= 0.8',
'pyannote.metrics >= 0.10.2',
'pyannote.generators >= 0.1.1',
'pyannote.database >= 0.4',
'keras >= 1.1.0',
'theano >= 0.8.2',
'scikit-optimize >= 0.2',
],
# versioneer
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
# PyPI
name='pyannote.audio',
description=('Audio processing'),
author='Hervé Bredin',
author_email='[email protected]',
url='http://herve.niderb.fr/',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering"
],
)
|
#!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2016 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
import versioneer
from setuptools import setup, find_packages
setup(
# package
namespace_packages=['pyannote'],
packages=find_packages(),
install_requires=[
'pyannote.core >= 0.7.2',
'pyannote.metrics >= 0.10.2',
'pyannote.generators >= 0.1.1',
'pyannote.database >= 0.4',
'keras >= 1.1.0',
'theano >= 0.8.2',
'scikit-optimize >= 0.2',
],
# versioneer
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
# PyPI
name='pyannote.audio',
description=('Audio processing'),
author='Hervé Bredin',
author_email='[email protected]',
url='http://herve.niderb.fr/',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering"
],
)
| mit | Python |
c2e97163f1ccfb45b28239e151102ef0bf7070d3 | Bump version, add classifiers. | jd-boyd/corker,vs-networks/corker | setup.py | setup.py |
from setuptools import setup, find_packages
setup(name='corker',
version='0.2',
description='Another WSGI Framework',
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
],
license='BSD',
author='Joshua D. Boyd',
author_email='[email protected]',
url='https://github.com/jd-boyd/corker',
packages=find_packages(),
package_data={'': ['README', 'LICENSE.txt']},
install_requires=['webob', 'routes'],
tests_require=['nose', 'webtest'],
)
|
from setuptools import setup, find_packages
setup(name='corker',
version='0.2-pre1',
description='Another WSGI Framework',
license='BSD',
author='Joshua D. Boyd',
author_email='[email protected]',
url='https://github.com/jd-boyd/corker',
packages=find_packages(),
package_data={'': ['README', 'LICENSE.txt']},
install_requires=['webob', 'routes'],
tests_require=['nose', 'webtest'],
)
| bsd-2-clause | Python |
c81fa8e41fb1411945ac0c045547c59a09a443c4 | bump version | danvk/webdiff,danvk/webdiff,daytonb/webdiff,daytonb/webdiff,danvk/webdiff,daytonb/webdiff,danvk/webdiff,danvk/webdiff,daytonb/webdiff | setup.py | setup.py |
from setuptools import setup, find_packages
setup(name='webdiff',
version='0.4.3',
description='Two-column web-based git difftool',
author='Dan Vanderkam',
author_email='[email protected]',
url='https://github.com/danvk/webdiff/',
entry_points={
'console_scripts': [
'webdiff = webdiff.app:run',
'git-webdiff = webdiff.gitwebdiff:run'
],
},
packages=find_packages(exclude=['tests*']),
install_requires=['flask'],
include_package_data=True,
package_data = {
'static': 'webdiff/static/*',
'templates': 'webdiff/templates/*'
},
classifiers=[
'Environment :: Console',
'Environment :: Web Environment',
'Framework :: Flask',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Version Control'
],
)
|
from setuptools import setup, find_packages
setup(name='webdiff',
version='0.4.2',
description='Two-column web-based git difftool',
author='Dan Vanderkam',
author_email='[email protected]',
url='https://github.com/danvk/webdiff/',
entry_points={
'console_scripts': [
'webdiff = webdiff.app:run',
'git-webdiff = webdiff.gitwebdiff:run'
],
},
packages=find_packages(exclude=['tests*']),
install_requires=['flask'],
include_package_data=True,
package_data = {
'static': 'webdiff/static/*',
'templates': 'webdiff/templates/*'
},
classifiers=[
'Environment :: Console',
'Environment :: Web Environment',
'Framework :: Flask',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Version Control'
],
)
| apache-2.0 | Python |
123d57e0ef22adb5ae5f09b435c69f0589f521d1 | Correct setup.py description field error when it contains new lines | secnot/rectpack | setup.py | setup.py |
from setuptools import setup
long_description = """A collection of heuristic algorithms for solving the 2D knapsack problem,
also known as the bin packing problem. In essence packing a set of rectangles into the
smallest number of bins."""
setup(
name="rectpack",
version="0.2.2",
description="2D Rectangle packing library",
long_description=long_description,
url="https://github.com/secnot/rectpack/",
author="SecNot",
keywords=["knapsack", "rectangle", "packing 2D", "bin", "binpacking"],
license="Apache-2.0",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
packages=["rectpack"],
zip_safe=False,
test_suite="nose.collector",
tests_require=["nose"],
)
|
from setuptools import setup
long_description = """A collection of heuristic algorithms for solving the 2D knapsack problem,
also known as the bin packing problem. In essence packing a set of rectangles into the
smallest number of bins."""
setup(
name="rectpack",
version="0.2.2",
description=long_description,
url="https://github.com/secnot/rectpack/",
author="SecNot",
keywords=["knapsack", "rectangle", "packing 2D", "bin", "binpacking"],
license="Apache-2.0",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
packages=["rectpack"],
zip_safe=False,
test_suite="nose.collector",
tests_require=["nose"],
)
| apache-2.0 | Python |
72cb87ea53a294c2395818c25f42379293b37308 | Bump version number | mwilliamson/mayo | setup.py | setup.py |
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='blah',
version='0.1.5',
description='Thin wrapper around source control systems',
long_description=read("README"),
author='Michael Williamson',
url='http://github.com/mwilliamson/blah',
scripts=["scripts/blah"],
packages=['blah'],
install_requires=["argparse==1.2.1"],
)
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='blah',
version='0.1.4',
description='Thin wrapper around source control systems',
long_description=read("README"),
author='Michael Williamson',
url='http://github.com/mwilliamson/blah',
scripts=["scripts/blah"],
packages=['blah'],
install_requires=["argparse==1.2.1"],
)
| bsd-2-clause | Python |
e0993dcf8dc38462604e38d7a31b80d3880e696b | Kill get_version hack | DasIch/relief,DasIch/relief | setup.py | setup.py |
# coding: utf-8
import os
import sys
from setuptools import setup
from relief import __version__
PACKAGE_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "relief"
)
if sys.version_info[:2] < (2, 7):
install_requires = ['ordereddict>=1.1', 'Counter>=1.0.0']
else:
install_requires = []
setup(
name="Relief",
version=__version__,
author="Daniel Neuhäuser",
author_email="[email protected]",
license="BSD",
description="datastructure validation",
packages=['relief', 'relief.schema'],
install_requires=install_requires,
classifiers=[
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries"
]
)
|
# coding: utf-8
import os
import sys
from setuptools import setup
PACKAGE_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "relief"
)
if sys.version_info[:2] < (2, 7):
install_requires = ['ordereddict>=1.1', 'Counter>=1.0.0']
else:
install_requires = []
def get_version():
path = os.path.join(PACKAGE_PATH, "__init__.py")
with open(path) as f:
for line in f:
if line.startswith("__version__"):
return line.split("=")[1].replace('"', '').strip()
else:
raise ValueError("__version__ not found in %s" % path)
setup(
name="Relief",
version=get_version(),
author="Daniel Neuhäuser",
author_email="[email protected]",
license="BSD",
description="datastructure validation",
packages=['relief', 'relief.schema'],
install_requires=install_requires,
classifiers=[
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries"
]
)
| bsd-3-clause | Python |
9232fa78e6a04f25f493fef91014384719c730ee | remove unused import from setup.py | twaldear/flask-secure-headers | setup.py | setup.py |
from setuptools import setup
setup(
name = 'flask-secure-headers',
packages = ['flask_secure_headers'],
include_package_data = True,
version = '0.2',
description = 'Secure Header Wrapper for Flask Applications',
long_description = """
Add security headers to a Flask application. This is intended to be a simplified version of the Twitter SecureHeaders Ruby Gem
""",
license='MIT',
author = 'Tristan Waldear',
author_email = '[email protected]',
url = 'https://github.com/twaldear/flask-secure-headers',
download_url = 'https://github.com/twaldear/flask-secure-headers/tarball/0.1',
keywords = ['flask', 'security', 'header'],
install_requires = ['flask'],
test_suite="nose.collector",
tests_require = ['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Flask',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup
import flask_secure_headers
setup(
name = 'flask-secure-headers',
packages = ['flask_secure_headers'],
include_package_data = True,
version = '0.2',
description = 'Secure Header Wrapper for Flask Applications',
long_description = """
Add security headers to a Flask application. This is intended to be a simplified version of the Twitter SecureHeaders Ruby Gem
""",
license='MIT',
author = 'Tristan Waldear',
author_email = '[email protected]',
url = 'https://github.com/twaldear/flask-secure-headers',
download_url = 'https://github.com/twaldear/flask-secure-headers/tarball/0.1',
keywords = ['flask', 'security', 'header'],
install_requires = ['flask'],
test_suite="nose.collector",
tests_require = ['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Flask',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| mit | Python |
bd3942724c7a2f04d5eb6277a35e5ebf54ebf6e0 | Add python 3.6 in the supported version | ningirsu/stepmania-server,ningirsu/stepmania-server | setup.py | setup.py |
""" Setup script """
import shutil
import os
import sys
import glob
from setuptools import setup, find_packages
try:
import py2exe
except ImportError:
pass
import smserver
for filename in glob.glob("cfg/*.yml*"):
shutil.copy(filename, "smserver/_fallback_conf")
conf_dir = None
if os.path.splitdrive(sys.executable)[0] != "":
conf_dir = "conf"
if not conf_dir and os.path.isdir("/etc/smserver"):
conf_dir = "/etc/smserver"
if not conf_dir:
try:
os.mkdir("/etc/smserver")
conf_dir = "/etc/smserver"
except:
pass
if not conf_dir:
conf_dir = "conf"
setup(
name='smserver',
version=smserver.__version__,
packages=find_packages(),
author="Sélim Menouar",
author_email="[email protected]",
description="An implementation of a Stepmania server",
long_description=open('README.rst').read(),
include_package_data=True,
url='http://github.com/ningirsu/stepmania-server',
classifiers=[
'Programming Language :: Python',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Arcade',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
extras_require={
':python_version=="3.3"': ['asyncio', 'enum34'],
},
install_requires=[
'pyyaml',
'sqlalchemy',
'websockets',
],
scripts=['scripts/smserver'],
console=['scripts/smserver'],
options={
"py2exe": {
'packages': ['smserver'],
"bundle_files": 0,
"optimize": 2
}
},
zipfile=None,
license="MIT",
data_files=[(conf_dir, ['cfg/conf.yml.orig'])],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import os
import sys
import glob
from setuptools import setup, find_packages
try:
import py2exe
except ImportError:
pass
import smserver
for filename in glob.glob("cfg/*.yml*"):
shutil.copy(filename, "smserver/_fallback_conf")
conf_dir = None
if os.path.splitdrive(sys.executable)[0] != "":
conf_dir = "conf"
if not conf_dir and os.path.isdir("/etc/smserver"):
conf_dir = "/etc/smserver"
if not conf_dir:
try:
os.mkdir("/etc/smserver")
conf_dir = "/etc/smserver"
except:
pass
if not conf_dir:
conf_dir = "conf"
setup(
name='smserver',
version=smserver.__version__,
packages=find_packages(),
author="Sélim Menouar",
author_email="[email protected]",
description="An implementation of a Stepmania server",
long_description=open('README.rst').read(),
include_package_data=True,
url='http://github.com/ningirsu/stepmania-server',
classifiers=[
'Programming Language :: Python',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Arcade',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
extras_require={
':python_version=="3.3"': ['asyncio', 'enum34'],
},
install_requires=[
'pyyaml',
'sqlalchemy',
'websockets',
],
scripts=['scripts/smserver'],
console=['scripts/smserver'],
options={
"py2exe": {
'packages': ['smserver'],
"bundle_files": 0,
"optimize": 2
}
},
zipfile=None,
license="MIT",
data_files=[(conf_dir, ['cfg/conf.yml.orig'])],
)
| mit | Python |
235609b1b370fee8f03cdf0d9d6a70075bd6e50e | Update to next dev version | openfisca/openfisca-web-api,openfisca/openfisca-web-api,sgmap/openfisca-web-api,sgmap/openfisca-web-api | setup.py | setup.py |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
setup(
name = 'OpenFisca-Web-API',
version = '0.5.1.dev0',
author = 'OpenFisca Team',
author_email = '[email protected]',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
],
description = u'Web API for OpenFisca',
keywords = 'api benefit microsimulation server social tax web',
license = 'http://www.fsf.org/licensing/licenses/agpl-3.0.html',
url = 'https://github.com/openfisca/openfisca-web-api',
data_files = [
('share/locale/fr/LC_MESSAGES', ['openfisca_web_api/i18n/fr/LC_MESSAGES/openfisca-web-api.mo']),
],
entry_points = {
'paste.app_factory': 'main = openfisca_web_api.application:make_app',
},
include_package_data = True,
install_requires = [
'Babel >= 0.9.4',
'Biryani >= 0.10.4',
'OpenFisca-Core >= 0.5dev',
'OpenFisca-Parsers >= 0.5dev',
'PasteDeploy',
'PasteScript',
'WebError >= 0.10',
'WebOb >= 1.1',
],
message_extractors = {'openfisca_web_api': [
('**.py', 'python', None),
]},
packages = find_packages(),
test_suite = 'nose.collector',
)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
setup(
name = 'OpenFisca-Web-API',
version = '0.5.0',
author = 'OpenFisca Team',
author_email = '[email protected]',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
],
description = u'Web API for OpenFisca',
keywords = 'api benefit microsimulation server social tax web',
license = 'http://www.fsf.org/licensing/licenses/agpl-3.0.html',
url = 'https://github.com/openfisca/openfisca-web-api',
data_files = [
('share/locale/fr/LC_MESSAGES', ['openfisca_web_api/i18n/fr/LC_MESSAGES/openfisca-web-api.mo']),
],
entry_points = {
'paste.app_factory': 'main = openfisca_web_api.application:make_app',
},
include_package_data = True,
install_requires = [
'Babel >= 0.9.4',
'Biryani >= 0.10.4',
'OpenFisca-Core >= 0.5dev',
'OpenFisca-Parsers >= 0.5dev',
'PasteDeploy',
'PasteScript',
'WebError >= 0.10',
'WebOb >= 1.1',
],
message_extractors = {'openfisca_web_api': [
('**.py', 'python', None),
]},
packages = find_packages(),
test_suite = 'nose.collector',
)
| agpl-3.0 | Python |
fdb461f000adefff0d1050464e5783c96222f364 | Add minimum version for pycryptodome | SCUEvals/scuevals-api,SCUEvals/scuevals-api | setup.py | setup.py |
from setuptools import setup
setup(
name='scuevals-api',
packages=['scuevals_api'],
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'app=scuevals_api.cmd:cli'
]
},
install_requires=[
'alembic==0.9.7',
'beautifulsoup4==4.6.0',
'blinker==1.4',
'coveralls==1.2.0',
'Flask-Caching==1.3.3',
'Flask-Cors==3.0.3',
'Flask-JWT-Extended==3.6.0',
'Flask-Migrate==2.1.1',
'Flask-RESTful==0.3.6',
'Flask-Rollbar==1.0.1',
'Flask-SQLAlchemy==2.3.2',
'Flask==0.12.2',
'gunicorn==19.7.1',
'newrelic==2.100.0.84',
'psycopg2==2.7.3.2',
'pycryptodome>=3.4.7'
'python-jose==2.0.1',
'PyYAML==3.12',
'requests==2.18.4',
'rollbar==0.13.17',
'vcrpy==1.11.1',
'webargs==1.8.1',
],
)
|
from setuptools import setup
setup(
name='scuevals-api',
packages=['scuevals_api'],
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'app=scuevals_api.cmd:cli'
]
},
install_requires=[
'alembic==0.9.7',
'beautifulsoup4==4.6.0',
'blinker==1.4',
'coveralls==1.2.0',
'Flask-Caching==1.3.3',
'Flask-Cors==3.0.3',
'Flask-JWT-Extended==3.6.0',
'Flask-Migrate==2.1.1',
'Flask-RESTful==0.3.6',
'Flask-Rollbar==1.0.1',
'Flask-SQLAlchemy==2.3.2',
'Flask==0.12.2',
'gunicorn==19.7.1',
'newrelic==2.100.0.84',
'psycopg2==2.7.3.2',
'python-jose==2.0.1',
'PyYAML==3.12',
'requests==2.18.4',
'rollbar==0.13.17',
'vcrpy==1.11.1',
'webargs==1.8.1',
],
)
| agpl-3.0 | Python |
3a279370ca4847abfe82f1be99111e04bf2de61b | Remove unnecessary import | DavidHowlett/pyserial-1,hoihu/pyserial | setup.py | setup.py |
# setup.py for pySerial
#
# Windows installer:
# "python setup.py bdist_wininst"
#
# Direct install (all systems):
# "python setup.py install"
#
# For Python 3.x use the corresponding Python executable,
# e.g. "python3 setup.py ..."
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# importing version does not work with Python 3 as files have not yet been
# converted.
import serial
version = serial.VERSION
setup(
name = "pyserial",
description = "Python Serial Port Extension",
version = version,
author = "Chris Liechti",
author_email = "[email protected]",
url = "https://github.com/pyserial/pyserial",
packages = ['serial', 'serial.tools', 'serial.urlhandler'],
license = "Python",
long_description = "Python Serial Port Extension for Win32, Linux, BSD, Jython, IronPython",
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Communications',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Terminals :: Serial',
],
platforms = 'any',
scripts = ['serial/tools/miniterm.py'],
)
|
# setup.py for pySerial
#
# Windows installer:
# "python setup.py bdist_wininst"
#
# Direct install (all systems):
# "python setup.py install"
#
# For Python 3.x use the corresponding Python executable,
# e.g. "python3 setup.py ..."
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# importing version does not work with Python 3 as files have not yet been
# converted.
import serial
version = serial.VERSION
setup(
name = "pyserial",
description = "Python Serial Port Extension",
version = version,
author = "Chris Liechti",
author_email = "[email protected]",
url = "https://github.com/pyserial/pyserial",
packages = ['serial', 'serial.tools', 'serial.urlhandler'],
license = "Python",
long_description = "Python Serial Port Extension for Win32, Linux, BSD, Jython, IronPython",
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Communications',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Terminals :: Serial',
],
platforms = 'any',
scripts = ['serial/tools/miniterm.py'],
)
| bsd-3-clause | Python |
3121572d452a58161f9bb9e7f813254b592b680b | bump version | hopshadoop/hops-util-py,hopshadoop/hops-util-py | setup.py | setup.py |
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='hops',
version='1.1.6',
author='Robin Andersson',
author_email='[email protected]',
description='A helper library for Hops that facilitates development by hiding the complexity of discovering services and setting up security.',
license='Apache License 2.0',
keywords='HOPS, Hadoop, TensorFlow, Spark',
url='https://github.com/hopshadoop/hops-util-py',
download_url = 'https://github.com/hopshadoop/hops-util-py/archive/1.1.6.tar.gz',
packages=['hops'],
long_description=read('README.rst'),
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
install_requires=[]
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='hops',
version='1.1.5',
author='Robin Andersson',
author_email='[email protected]',
description='A helper library for Hops that facilitates development by hiding the complexity of discovering services and setting up security.',
license='Apache License 2.0',
keywords='HOPS, Hadoop, TensorFlow, Spark',
url='https://github.com/hopshadoop/hops-util-py',
download_url = 'https://github.com/hopshadoop/hops-util-py/archive/1.1.5.tar.gz',
packages=['hops'],
long_description=read('README.rst'),
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
install_requires=[]
)
| apache-2.0 | Python |
426247045dacf56fc5c7324fffbc140e9ea1b2e2 | Bump tqdm from 4.37.0 to 4.38.0 | glidernet/ogn-python,glidernet/ogn-python,Meisterschueler/ogn-python,glidernet/ogn-python,Meisterschueler/ogn-python,glidernet/ogn-python,Meisterschueler/ogn-python,Meisterschueler/ogn-python | setup.py | setup.py |
#!/usr/bin/env python3
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ogn-python',
version='0.5.0',
description='A database backend for the Open Glider Network',
long_description=long_description,
url='https://github.com/glidernet/ogn-python',
author='Konstantin Gründger aka Meisterschueler, Fabian P. Schmidt aka kerel, Dominic Spreitz',
author_email='[email protected]',
license='AGPLv3',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='gliding ogn',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'Flask==1.1.1',
'Flask-SQLAlchemy==2.4.1',
'Flask-Migrate==2.5.2',
'Flask-Bootstrap==3.3.7.1',
'Flask-WTF==0.14.2',
'Flask-Caching==1.7.2',
'geopy==1.20.0',
'celery==4.3.0',
'redis==3.3.11',
'aerofiles==1.0.0',
'geoalchemy2==0.6.3',
'shapely==1.6.4.post2',
'ogn-client==0.9.5',
'psycopg2-binary==2.8.4',
'mgrs==1.3.5',
'xmlunittest==0.5.0',
'tqdm==4.38.0',
'requests==2.22.0',
],
test_require=[
'pytest==5.0.1',
'flake8==1.1.1',
'xmlunittest==0.4.0',
],
zip_safe=False
)
|
#!/usr/bin/env python3
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ogn-python',
version='0.5.0',
description='A database backend for the Open Glider Network',
long_description=long_description,
url='https://github.com/glidernet/ogn-python',
author='Konstantin Gründger aka Meisterschueler, Fabian P. Schmidt aka kerel, Dominic Spreitz',
author_email='[email protected]',
license='AGPLv3',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='gliding ogn',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'Flask==1.1.1',
'Flask-SQLAlchemy==2.4.1',
'Flask-Migrate==2.5.2',
'Flask-Bootstrap==3.3.7.1',
'Flask-WTF==0.14.2',
'Flask-Caching==1.7.2',
'geopy==1.20.0',
'celery==4.3.0',
'redis==3.3.11',
'aerofiles==1.0.0',
'geoalchemy2==0.6.3',
'shapely==1.6.4.post2',
'ogn-client==0.9.5',
'psycopg2-binary==2.8.4',
'mgrs==1.3.5',
'xmlunittest==0.5.0',
'tqdm==4.37.0',
'requests==2.22.0',
],
test_require=[
'pytest==5.0.1',
'flake8==1.1.1',
'xmlunittest==0.4.0',
],
zip_safe=False
)
| agpl-3.0 | Python |
2029b86059f538f6ab5ab3e472708a9632141010 | add holoviews to dependencies | michaelaye/pyciss | setup.py | setup.py |
from os import path
from setuptools import find_packages, setup
DISTNAME = 'pyciss'
DESCRIPTION = "Software for handling Cassini ISS data"
AUTHOR = "K.-Michael Aye"
AUTHOR_EMAIL = "[email protected]"
MAINTAINER_EMAIL = AUTHOR_EMAIL
URL = "https://github.com/michaelaye/pyciss"
LICENSE = "ISC"
KEYWORDS = ['CASSINI', 'science', 'saturn', 'imaging']
DOWNLOAD_URL = "https://github.com/michaelaye/pyciss"
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name=DISTNAME,
version="0.12.0",
packages=find_packages(),
install_requires=['pandas', 'numpy', 'matplotlib', 'pysis', 'astropy', 'xarray', 'holoviews'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
package_data={
'pyciss': ['data/*']
},
# metadata
author=AUTHOR,
maintainer=AUTHOR,
author_email=AUTHOR_EMAIL,
maintainer_email=AUTHOR_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license=LICENSE,
keywords=KEYWORDS,
url=URL,
download_url=DOWNLOAD_URL,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from os import path
from setuptools import find_packages, setup
DISTNAME = 'pyciss'
DESCRIPTION = "Software for handling Cassini ISS data"
AUTHOR = "K.-Michael Aye"
AUTHOR_EMAIL = "[email protected]"
MAINTAINER_EMAIL = AUTHOR_EMAIL
URL = "https://github.com/michaelaye/pyciss"
LICENSE = "ISC"
KEYWORDS = ['CASSINI', 'science', 'saturn', 'imaging']
DOWNLOAD_URL = "https://github.com/michaelaye/pyciss"
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name=DISTNAME,
version="0.12.0",
packages=find_packages(),
install_requires=['pandas', 'numpy', 'matplotlib', 'pysis', 'astropy', 'xarray'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
package_data={
'pyciss': ['data/*']
},
# metadata
author=AUTHOR,
maintainer=AUTHOR,
author_email=AUTHOR_EMAIL,
maintainer_email=AUTHOR_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license=LICENSE,
keywords=KEYWORDS,
url=URL,
download_url=DOWNLOAD_URL,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| isc | Python |
0cae8f8e892bf40e30598cee3fe32dc496a8ec7b | Fix setup.py setuptools reference | tnewman/PyKazoo | setup.py | setup.py |
#!/usr/bin/env python3
# noinspection PyUnresolvedReferences
import setuptools
from distutils.core import setup
setup(
name='PyKazoo',
version='0.0a1',
packages=['pykazoo'],
install_requires=['requests==2.7.0'],
url='https://github.com/tnewman/PyKazoo',
license='MIT',
author='Thomas Newman',
author_email='[email protected]',
description='PyKazoo is a Python API client for 2600hz Kazoo',
)
|
#!/usr/bin/env python3
from distutils.core import setup
setup(
name='PyKazoo',
version='0.0a1',
packages=['pykazoo'],
install_requires=['requests==2.7.0'],
url='https://github.com/tnewman/PyKazoo',
license='MIT',
author='Thomas Newman',
author_email='[email protected]',
description='PyKazoo is a Python API client for 2600hz Kazoo',
)
| mit | Python |
ea6a22678e8169ced9465269b68fbe9394aa2efb | Bump version to 0.2 | jamescooke/factory_djoy | setup.py | setup.py |
import os
import setuptools
setuptools.setup(
name='factory_djoy',
version='0.2',
description="Wrappers over Factory Boy's Django Factories",
url='http://github.com/jamescooke/factory_djoy',
author='James Cooke',
author_email='[email protected]',
license='MIT',
packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=[
'Django>=1.6',
'factory_boy>=2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import os
import setuptools
setuptools.setup(
name='factory_djoy',
version='0.1',
description="Wrappers over Factory Boy's Django Factories",
url='http://github.com/jamescooke/factory_djoy',
author='James Cooke',
author_email='[email protected]',
license='MIT',
packages=setuptools.find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=[
'Django>=1.6',
'factory_boy>=2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| mit | Python |
226654bfc455991f9f3134c18ac91fced704f09e | Bump version to 0.5.16. | wlanslovenija/datastream | setup.py | setup.py |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
VERSION = '0.5.16'
if __name__ == '__main__':
setup(
name='datastream',
version=VERSION,
description="Datastream API time-series library.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='wlan slovenija',
author_email='[email protected]',
url='https://github.com/wlanslovenija/datastream',
license='AGPLv3',
packages=find_packages(exclude=('*.tests', '*.tests.*', 'tests.*', 'tests')),
package_data={},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'pytz>=2012h',
],
extras_require={
'mongodb': [
'mongoengine>=0.8.1',
'pymongo>=2.7.1,<3.0.0',
],
'influxdb': [
'python-dateutil>=2.4.2',
'psycopg2>=2.6.1',
'influxdb>=2.10.0',
'backports.lzma>=0.0.6',
'cachetools>=1.1.6',
],
},
tests_require=[
'mongoengine>=0.8.1',
'pymongo>=2.7.1,<3.0.0',
'python-dateutil>=2.4.2',
'psycopg2>=2.6.1',
'influxdb>=2.10.0',
'backports.lzma>=0.0.6',
'cachetools>=1.1.6',
],
test_suite='tests',
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
VERSION = '0.5.15'
if __name__ == '__main__':
setup(
name='datastream',
version=VERSION,
description="Datastream API time-series library.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='wlan slovenija',
author_email='[email protected]',
url='https://github.com/wlanslovenija/datastream',
license='AGPLv3',
packages=find_packages(exclude=('*.tests', '*.tests.*', 'tests.*', 'tests')),
package_data={},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
include_package_data=True,
zip_safe=False,
install_requires=[
'pytz>=2012h',
],
extras_require={
'mongodb': [
'mongoengine>=0.8.1',
'pymongo>=2.7.1,<3.0.0',
],
'influxdb': [
'python-dateutil>=2.4.2',
'psycopg2>=2.6.1',
'influxdb>=2.10.0',
'backports.lzma>=0.0.6',
'cachetools>=1.1.6',
],
},
tests_require=[
'mongoengine>=0.8.1',
'pymongo>=2.7.1,<3.0.0',
'python-dateutil>=2.4.2',
'psycopg2>=2.6.1',
'influxdb>=2.10.0',
'backports.lzma>=0.0.6',
'cachetools>=1.1.6',
],
test_suite='tests',
)
| agpl-3.0 | Python |
a3f150be3ea89a87eca9902da58aa93d6623da42 | bump version to 0.2.6 | CivicTechTO/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic,CivicTechTO/django-councilmatic,CivicTechTO/django-councilmatic,datamade/django-councilmatic,CivicTechTO/django-councilmatic | setup.py | setup.py |
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-councilmatic',
version='0.2.6',
packages=['councilmatic_core'],
include_package_data=True,
license='MIT License', # example license
description='Core functions for councilmatic.org family',
long_description=README,
url='http://councilmatic.org/',
author='DataMade, LLC',
author_email='[email protected]',
install_requires=['requests==2.7.0',
'django-haystack==2.4.0',
'pysolr==3.3.2',
'python-dateutil==2.4.2'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-councilmatic',
version='0.2.5',
packages=['councilmatic_core'],
include_package_data=True,
license='MIT License', # example license
description='Core functions for councilmatic.org family',
long_description=README,
url='http://councilmatic.org/',
author='DataMade, LLC',
author_email='[email protected]',
install_requires=['requests==2.7.0',
'django-haystack==2.4.0',
'pysolr==3.3.2',
'python-dateutil==2.4.2'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| mit | Python |
d9544b310f791493633b9d78cc5b257d041366c5 | Bump up version | Pro-bit/Probit-RedisCeleryScheduler | setup.py | setup.py |
import os
from setuptools import setup, find_packages
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
REQUIREMENTS = ["redis", "redlock-py"]
README = """
probit-scheduler - redis backed scheduler for celery beat. This scheduler was made from https://github.com/SPSCommerce/swiss-chard.git with some modifications.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
setup(
name='probit-scheduler',
version='0.1.4',
license='MIT',
packages=find_packages(),
include_package_data=True,
description='probit scheduler - JSON redis backed scheduler for celery beat.',
long_description=README,
url='https://github.com/Pro-bit/Probit-RedisCeleryScheduler',
author='ProBitDeveloper',
author_email='[email protected]',
install_requires=REQUIREMENTS
)
|
import os
from setuptools import setup, find_packages
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
REQUIREMENTS = ["redis", "redlock-py"]
README = """
probit-scheduler - redis backed scheduler for celery beat. This scheduler was made from https://github.com/SPSCommerce/swiss-chard.git with some modifications.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
setup(
name='probit-scheduler',
version='0.1.3',
license='MIT',
packages=find_packages(),
include_package_data=True,
description='probit scheduler - JSON redis backed scheduler for celery beat.',
long_description=README,
url='https://github.com/Pro-bit/Probit-RedisCeleryScheduler',
author='ProBitDeveloper',
author_email='[email protected]',
install_requires=REQUIREMENTS
)
| mit | Python |
e19826d9da69dbdb704d3d366f864b9317ac2861 | Disable unused django-setuptest. | bitsoffreedom/newspeak | setup.py | setup.py |
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
README = open('README.rst').read()
except:
README = None
try:
REQUIREMENTS = open('requirements.txt').read()
except:
REQUIREMENTS = None
setup(
name='newspeak',
version="0.1",
description='Standalone Django based feed aggregator.',
long_description=README,
install_requires=REQUIREMENTS,
author='Mathijs de Bruin',
author_email='[email protected]',
url='http://github.com/bitsoffreedom/newspeak/',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
entry_points={
'console_scripts': [
'newspeak = newspeak.runner:main',
],
},
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
README = open('README.rst').read()
except:
README = None
try:
REQUIREMENTS = open('requirements.txt').read()
except:
REQUIREMENTS = None
setup(
name='newspeak',
version="0.1",
description='Standalone Django based feed aggregator.',
long_description=README,
install_requires=REQUIREMENTS,
author='Mathijs de Bruin',
author_email='[email protected]',
url='http://github.com/bitsoffreedom/newspeak/',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
test_suite='setuptest.setuptest.SetupTestSuite',
tests_require=(
'django-setuptest',
'argparse', # apparently needed by django-setuptest on python 2.6
),
entry_points={
'console_scripts': [
'newspeak = newspeak.runner:main',
],
},
)
| bsd-3-clause | Python |
105a23c0e07c737b9bbe7face42443a26409df32 | Declare Python 3 support in setup.py. | glorpen/webassets,aconrad/webassets,0x1997/webassets,JDeuce/webassets,wijerasa/webassets,heynemann/webassets,heynemann/webassets,wijerasa/webassets,JDeuce/webassets,scorphus/webassets,heynemann/webassets,aconrad/webassets,glorpen/webassets,john2x/webassets,florianjacob/webassets,glorpen/webassets,aconrad/webassets,0x1997/webassets,florianjacob/webassets,scorphus/webassets,john2x/webassets | setup.py | setup.py |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
from webassets.six.moves import map
from webassets.six.moves import zip
try:
from sphinx.setup_command import BuildDoc
cmdclass = {'build_sphinx': BuildDoc}
except ImportError:
cmdclass = {}
# Figure out the version. This could also be done by importing the
# module, the parsing takes place for historical reasons.
import re
here = os.path.dirname(os.path.abspath(__file__))
version_re = re.compile(
r'__version__ = (\(.*?\))')
fp = open(os.path.join(here, 'src/webassets', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception("Cannot find version in __init__.py")
fp.close()
setup(
name='webassets',
version=".".join(map(str, version)),
description='Media asset management for Python, with glue code for '+\
'various web frameworks',
long_description='Merges, minifies and compresses Javascript and '
'CSS files, supporting a variety of different filters, including '
'YUI, jsmin, jspacker or CSS tidy. Also supports URL rewriting '
'in CSS files.',
author='Michael Elsdoerfer',
author_email='[email protected]',
license='BSD',
url='http://github.com/miracle2k/webassets/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries',
],
entry_points="""[console_scripts]\nwebassets = webassets.script:run\n""",
packages=find_packages('src'),
package_dir={'': 'src'},
cmdclass=cmdclass,
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
from webassets.six.moves import map
from webassets.six.moves import zip
try:
from sphinx.setup_command import BuildDoc
cmdclass = {'build_sphinx': BuildDoc}
except ImportError:
cmdclass = {}
# Figure out the version. This could also be done by importing the
# module, the parsing takes place for historical reasons.
import re
here = os.path.dirname(os.path.abspath(__file__))
version_re = re.compile(
r'__version__ = (\(.*?\))')
fp = open(os.path.join(here, 'src/webassets', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception("Cannot find version in __init__.py")
fp.close()
setup(
name='webassets',
version=".".join(map(str, version)),
description='Media asset management for Python, with glue code for '+\
'various web frameworks',
long_description='Merges, minifies and compresses Javascript and '
'CSS files, supporting a variety of different filters, including '
'YUI, jsmin, jspacker or CSS tidy. Also supports URL rewriting '
'in CSS files.',
author='Michael Elsdoerfer',
author_email='[email protected]',
license='BSD',
url='http://github.com/miracle2k/webassets/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries',
],
entry_points="""[console_scripts]\nwebassets = webassets.script:run\n""",
packages=find_packages('src'),
package_dir={'': 'src'},
cmdclass=cmdclass,
)
|
bsd-2-clause
|
Python
|
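The webassets record above extracts the version by scanning src/webassets/__init__.py with a regular expression instead of importing the package. A minimal standalone sketch of that pattern follows; the file path and the tuple form of __version__ are taken from the record, and this is only an illustration of the technique, not webassets' actual code.
import re
def read_version(init_path="src/webassets/__init__.py"):
    # Scan for a line of the form: __version__ = (0, 8)
    version_re = re.compile(r"__version__ = (\(.*?\))")
    with open(init_path) as fp:
        for line in fp:
            match = version_re.search(line)
            if match:
                return eval(match.group(1))  # returns the version tuple
    raise Exception("Cannot find version in __init__.py")
# ".".join(map(str, read_version())) then yields the dotted version string used in setup().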
aa6bc481529022bb878fcd76903bb0ccb93d7048
|
Fix schema salad pin.
|
dleehr/cwltool,chapmanb/cwltool,dleehr/cwltool,common-workflow-language/cwltool,common-workflow-language/cwltool,chapmanb/cwltool,jeremiahsavage/cwltool,jeremiahsavage/cwltool,dleehr/cwltool,SciDAP/cwltool,chapmanb/cwltool,jeremiahsavage/cwltool,SciDAP/cwltool,chapmanb/cwltool,SciDAP/cwltool,jeremiahsavage/cwltool,common-workflow-language/cwltool,dleehr/cwltool,SciDAP/cwltool
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys
import setuptools.command.egg_info as egg_info_cmd
import shutil
from setuptools import setup, find_packages
SETUP_DIR = os.path.dirname(__file__)
README = os.path.join(SETUP_DIR, 'README.rst')
try:
import gittaggers
tagger = gittaggers.EggInfoFromGit
except ImportError:
tagger = egg_info_cmd.egg_info
setup(name='cwltool',
version='1.0',
description='Common workflow language reference implementation',
long_description=open(README).read(),
author='Common workflow language working group',
author_email='[email protected]',
url="https://github.com/common-workflow-language/common-workflow-language",
download_url="https://github.com/common-workflow-language/common-workflow-language",
license='Apache 2.0',
packages=["cwltool"],
package_data={'cwltool': ['schemas/draft-3/*.yml',
'schemas/draft-3/*.md',
'schemas/draft-3/salad/schema_salad/metaschema/*.yml',
'schemas/draft-3/salad/schema_salad/metaschema/*.md']},
install_requires=[
'requests',
'PyYAML',
'rdflib >= 4.2.0',
'rdflib-jsonld >= 0.3.0',
'shellescape',
'schema_salad == 1.5.20160126190004'
],
test_suite='tests',
tests_require=[],
entry_points={
'console_scripts': [ "cwltool=cwltool.main:main", "cwltest=cwltool.cwltest:main" ]
},
zip_safe=True,
cmdclass={'egg_info': tagger},
)
|
#!/usr/bin/env python
import os
import sys
import setuptools.command.egg_info as egg_info_cmd
import shutil
from setuptools import setup, find_packages
SETUP_DIR = os.path.dirname(__file__)
README = os.path.join(SETUP_DIR, 'README.rst')
try:
import gittaggers
tagger = gittaggers.EggInfoFromGit
except ImportError:
tagger = egg_info_cmd.egg_info
setup(name='cwltool',
version='1.0',
description='Common workflow language reference implementation',
long_description=open(README).read(),
author='Common workflow language working group',
author_email='[email protected]',
url="https://github.com/common-workflow-language/common-workflow-language",
download_url="https://github.com/common-workflow-language/common-workflow-language",
license='Apache 2.0',
packages=["cwltool"],
package_data={'cwltool': ['schemas/draft-3/*.yml',
'schemas/draft-3/*.md',
'schemas/draft-3/salad/schema_salad/metaschema/*.yml',
'schemas/draft-3/salad/schema_salad/metaschema/*.md']},
install_requires=[
'requests',
'PyYAML',
'rdflib >= 4.2.0',
'rdflib-jsonld >= 0.3.0',
'shellescape',
'schema_salad == 1.5.20160126164443'
],
test_suite='tests',
tests_require=[],
entry_points={
'console_scripts': [ "cwltool=cwltool.main:main", "cwltest=cwltool.cwltest:main" ]
},
zip_safe=True,
cmdclass={'egg_info': tagger},
)
|
apache-2.0
|
Python
|
e03d38e2ca78aa82c8791b43beaa044e27fc4424
|
Bump version to 0.2.1
|
byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name='django-controlcenter',
version='0.2.1',
description='Set of widgets to build dashboards for your Django-project.',
long_description='',
url='https://github.com/byashimov/django-controlcenter',
author='Murad Byashimov',
author_email='[email protected]',
packages=find_packages(
exclude=['controlcenter.stylus', 'controlcenter.images']),
include_package_data=True,
license='BSD',
install_requires=['django-pkgconf'],
keywords='django admin dashboard',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Environment :: Web Environment',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
],
)
|
from setuptools import find_packages, setup
setup(
name='django-controlcenter',
version='0.2.0',
description='Set of widgets to build dashboards for your Django-project.',
long_description='',
url='https://github.com/byashimov/django-controlcenter',
author='Murad Byashimov',
author_email='[email protected]',
packages=find_packages(
exclude=['controlcenter.stylus', 'controlcenter.images']),
include_package_data=True,
license='BSD',
install_requires=['django-pkgconf'],
keywords='django admin dashboard',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Environment :: Web Environment',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
],
)
|
bsd-3-clause
|
Python
|
1c02453275d32109028aa3b915e56640404e74fe
|
Update setup.py
|
yunify/qingcloud-sdk-python
|
setup.py
|
setup.py
|
# coding:utf-8
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.version_info < (2, 6):
error = 'ERROR: qingcloud-sdk requires Python Version 2.6 or above.'
print >> sys.stderr, error
sys.exit(1)
setup(
name='qingcloud-sdk',
version='1.2',
description='Software Development Kit for QingCloud.',
long_description=open('README.rst', 'rb').read().decode('utf-8'),
keywords='qingcloud iaas qingstor sdk',
author='Yunify Team',
author_email='[email protected]',
url='https://docs.qingcloud.com/sdk/',
packages=['qingcloud', 'qingcloud.conn', 'qingcloud.iaas', 'qingcloud.iaas.actions',
'qingcloud.misc', 'qingcloud.qingstor'],
package_dir={'qingcloud-sdk': 'qingcloud'},
namespace_packages=['qingcloud'],
include_package_data=True,
install_requires=['future']
)
|
# coding:utf-8
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.version_info < (2, 6):
error = 'ERROR: qingcloud-sdk requires Python Version 2.6 or above.'
print >> sys.stderr, error
sys.exit(1)
setup(
name='qingcloud-sdk',
version='1.2',
description='Software Development Kit for QingCloud.',
long_description=open('README.rst', 'rb').read().decode('utf-8'),
keywords='qingcloud iaas qingstor sdk',
author='Yunify Team',
author_email='[email protected]',
url='https://docs.qingcloud.com/sdk/',
packages=['qingcloud', 'qingcloud.conn', 'qingcloud.iaas',
'qingcloud.misc', 'qingcloud.qingstor'],
package_dir={'qingcloud-sdk': 'qingcloud'},
namespace_packages=['qingcloud'],
include_package_data=True,
install_requires=['future']
)
|
apache-2.0
|
Python
|
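The qingcloud-sdk change above adds the new qingcloud.iaas.actions subpackage to a hand-maintained packages list; with an explicit list, any subpackage that is not named is silently left out of the built distribution. A hedged sketch of the usual alternative, using find_packages so new subpackages are picked up automatically; the exclude patterns are illustrative only, and namespace packages (which this project declares) may need extra care.
from setuptools import setup, find_packages
setup(
    name='qingcloud-sdk',
    # find_packages() walks the tree for packages, so a new subpackage such as
    # qingcloud.iaas.actions would be discovered without editing setup.py by hand.
    packages=find_packages(exclude=['tests', 'tests.*']),
)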
b82e851a74c2a3ac4823723be2f6b0caf88cb7c8
|
use distutils for coming Python 3.12
|
nakagami/CyMySQL
|
setup.py
|
setup.py
|
import sys
try:
from setuptools import setup, Command, Extension
except ImportError:
from distutils.core import setup, Command
from distutils.extension import Extension
try:
from Cython.Build import cythonize
ext_modules = cythonize([
Extension("cymysql.packet", ["cymysql/packet.pyx"]),
Extension("cymysql.charset", ["cymysql/charset.py"]),
Extension("cymysql.converters", ["cymysql/converters.py"]),
Extension("cymysql.connections", ["cymysql/connections.py"]),
Extension("cymysql.cursors", ["cymysql/cursors.py"]),
Extension("cymysql.err", ["cymysql/err.py"]),
Extension("cymysql.times", ["cymysql/times.py"]),
],
compiler_directives={'language_level': str(sys.version_info[0])},
)
except ImportError:
ext_modules = None
class TestCommand(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
'''
Finds all the tests modules in tests/, and runs them.
'''
from cymysql import tests
import unittest
unittest.main(tests, argv=sys.argv[:1])
cmdclass = {'test': TestCommand}
version_tuple = __import__('cymysql').VERSION
if version_tuple[2] is not None:
version = "%d.%d.%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
]
setup(
name="cymysql",
version=version,
url='https://github.com/nakagami/CyMySQL/',
classifiers=classifiers,
keywords=['MySQL'],
author='Yutaka Matsubara',
author_email='[email protected]',
maintainer='Hajime Nakagami',
maintainer_email='[email protected]',
description='Python MySQL Driver using Cython',
long_description=open('README.rst').read(),
license="MIT",
packages=['cymysql', 'cymysql.constants', 'cymysql.tests'],
cmdclass=cmdclass,
ext_modules=ext_modules,
)
|
import sys
from distutils.core import setup, Command
from distutils.extension import Extension
try:
from Cython.Build import cythonize
ext_modules = cythonize([
Extension("cymysql.packet", ["cymysql/packet.pyx"]),
Extension("cymysql.charset", ["cymysql/charset.py"]),
Extension("cymysql.converters", ["cymysql/converters.py"]),
Extension("cymysql.connections", ["cymysql/connections.py"]),
Extension("cymysql.cursors", ["cymysql/cursors.py"]),
Extension("cymysql.err", ["cymysql/err.py"]),
Extension("cymysql.times", ["cymysql/times.py"]),
],
compiler_directives={'language_level': str(sys.version_info[0])},
)
except ImportError:
ext_modules = None
class TestCommand(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
'''
Finds all the tests modules in tests/, and runs them.
'''
from cymysql import tests
import unittest
unittest.main(tests, argv=sys.argv[:1])
cmdclass = {'test': TestCommand}
version_tuple = __import__('cymysql').VERSION
if version_tuple[2] is not None:
version = "%d.%d.%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
]
setup(
name="cymysql",
version=version,
url='https://github.com/nakagami/CyMySQL/',
classifiers=classifiers,
keywords=['MySQL'],
author='Yutaka Matsubara',
author_email='[email protected]',
maintainer='Hajime Nakagami',
maintainer_email='[email protected]',
description='Python MySQL Driver using Cython',
long_description=open('README.rst').read(),
license="MIT",
packages=['cymysql', 'cymysql.constants', 'cymysql.tests'],
cmdclass=cmdclass,
ext_modules=ext_modules,
)
|
mit
|
Python
|
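The CyMySQL change above now prefers setuptools and only falls back to distutils, since distutils was deprecated and is removed from the standard library in Python 3.12. A minimal sketch of that import guard on its own, using only the names that appear in the record:
try:
    # setuptools provides drop-in replacements and keeps working on Python 3.12+
    from setuptools import setup, Command, Extension
except ImportError:
    # older environments without setuptools can still fall back to distutils
    from distutils.core import setup, Command
    from distutils.extension import Extension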
2998e76c9129ef5740ab177343db262c391594a6
|
Bump pytest-spec version.
|
marrow/cache,marrow/cache
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import os
import sys
import codecs
try:
from setuptools.core import setup, find_packages
except ImportError:
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
if sys.version_info < (2, 6):
raise SystemExit("Python 2.6 or later is required.")
elif sys.version_info > (3, 0) and sys.version_info < (3, 2):
raise SystemExit("Python 3.2 or later is required.")
exec(open(os.path.join("marrow", "cache", "release.py")).read())
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
here = os.path.abspath(os.path.dirname(__file__))
tests_require = ['pytest', 'pytest-cov', 'pytest-flakes', 'pytest-cagoule']
setup(
name = "marrow.cache",
version = version,
description = description,
long_description = codecs.open(os.path.join(here, 'README.rst'), 'r', 'utf8').read(),
url = url,
author = author.name,
author_email = author.email,
license = 'MIT',
keywords = '',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
packages = find_packages(exclude=['test', 'script', 'example']),
include_package_data = True,
namespace_packages = ['marrow'],
install_requires = ['mongoengine>=0.8.5', 'marrow.package<2.0', 'wrapt<2.0'],
extras_require = dict(
development = tests_require,
),
tests_require = tests_require,
dependency_links = [
],
zip_safe = False,
cmdclass = dict(
test = PyTest,
)
)
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import os
import sys
import codecs
try:
from setuptools.core import setup, find_packages
except ImportError:
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
if sys.version_info < (2, 6):
raise SystemExit("Python 2.6 or later is required.")
elif sys.version_info > (3, 0) and sys.version_info < (3, 2):
raise SystemExit("Python 3.2 or later is required.")
exec(open(os.path.join("marrow", "cache", "release.py")).read())
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
here = os.path.abspath(os.path.dirname(__file__))
tests_require = ['pytest', 'pytest-cov', 'pytest-flakes', 'pytest-cagoule', 'pytest-spec<=0.2.22']
setup(
name = "marrow.cache",
version = version,
description = description,
long_description = codecs.open(os.path.join(here, 'README.rst'), 'r', 'utf8').read(),
url = url,
author = author.name,
author_email = author.email,
license = 'MIT',
keywords = '',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
packages = find_packages(exclude=['test', 'script', 'example']),
include_package_data = True,
namespace_packages = ['marrow'],
install_requires = ['mongoengine>=0.8.5', 'marrow.package<2.0', 'wrapt<2.0'],
extras_require = dict(
development = tests_require,
),
tests_require = tests_require,
dependency_links = [
'git+https://github.com/illico/pytest-spec.git@feature/py26#egg=pytest-spec-0.2.22'
],
zip_safe = False,
cmdclass = dict(
test = PyTest,
)
)
|
mit
|
Python
|
8866c06ee9131cf0e52c08e0ae9e08db90347590
|
increment version
|
raintank/graphite-raintank,raintank/graphite-raintank,raintank/graphite-raintank
|
setup.py
|
setup.py
|
# coding: utf-8
from setuptools import setup
setup(
name='graphite_raintank',
version='0.2',
url='https://github.com/raintank/graphite_raintank',
license='apache2',
author='Anthony Woods',
author_email='[email protected]',
description=('Raintank backend plugin for graphite_api'),
long_description='',
py_modules=('graphite_raintank',),
zip_safe=False,
include_package_data=True,
platforms='any',
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: System :: Monitoring',
),
install_requires=(
'requests',
'elasticsearch<2.0.0',
'flask',
'graphite_api'
),
)
|
# coding: utf-8
from setuptools import setup
setup(
name='graphite_raintank',
version='0.1',
url='https://github.com/raintank/graphite_raintank',
license='apache2',
author='Anthony Woods',
author_email='[email protected]',
description=('Raintank backend plugin for graphite_api'),
long_description='',
py_modules=('graphite_raintank',),
zip_safe=False,
include_package_data=True,
platforms='any',
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: System :: Monitoring',
),
install_requires=(
'requests',
'elasticsearch<2.0.0',
'flask',
'graphite_api'
),
)
|
apache-2.0
|
Python
|
4744e2df3a99ddca3caae4359985daffb30bb94b
|
Update Flask-Z3950
|
alexandermendes/pybossa-z3950
|
setup.py
|
setup.py
|
# -*- coding: utf8 -*-
"""
pybossa-z3950
-------------
A PyBossa plugin for Z39.50 integration.
"""
import re
import os
from setuptools import setup
version = re.search('^__version__\s*=\s*"(.*)"',
open('pybossa_z3950/__init__.py').read(),
re.M).group(1)
try:
here = os.path.dirname(__file__)
long_description = open(os.path.join(here, 'docs', 'readme.rst')).read()
except:
long_description = ""
requirements = ["Flask-Z3950>=0.2.3"]
setup(
name="pybossa-z3950",
version=version,
author="Alexander Mendes",
author_email="[email protected]",
description="A PyBossa plugin that provides Z39.50 integration.",
license="BSD",
url="https://github.com/alexandermendes/pybossa-z3950",
packages=['pybossa_z3950'],
long_description=long_description,
zip_safe=False,
include_package_data=True,
platforms="any",
install_requires=requirements,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: Z39.50",
],
)
|
# -*- coding: utf8 -*-
"""
pybossa-z3950
-------------
A PyBossa plugin for Z39.50 integration.
"""
import re
import os
from setuptools import setup
version = re.search('^__version__\s*=\s*"(.*)"',
open('pybossa_z3950/__init__.py').read(),
re.M).group(1)
try:
here = os.path.dirname(__file__)
long_description = open(os.path.join(here, 'docs', 'readme.rst')).read()
except:
long_description = ""
requirements = ["Flask-Z3950>=0.2.2"]
setup(
name="pybossa-z3950",
version=version,
author="Alexander Mendes",
author_email="[email protected]",
description="A PyBossa plugin that provides Z39.50 integration.",
license="BSD",
url="https://github.com/alexandermendes/pybossa-z3950",
packages=['pybossa_z3950'],
long_description=long_description,
zip_safe=False,
include_package_data=True,
platforms="any",
install_requires=requirements,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: Z39.50",
],
)
|
bsd-3-clause
|
Python
|
43dc6dc0a9b33de0db1f79f7470d69519192dc1f
|
Put tests_require into extras_require also
|
rwstauner/dear_astrid,rwstauner/dear_astrid
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
try:
import nose.commands
extra_args = dict(
cmdclass={'test': nose.commands.nosetests},
)
except ImportError:
extra_args = dict()
# TODO: would this work? (is the file included in the dist?)
#tests_require = [l.strip() for l in open('test-requirements.txt').readlines()]
tests_require = ['mock']
setup(
name='dear_astrid',
version='0.1.0',
author='Randy Stauner',
author_email='[email protected]',
packages=find_packages(), #['dear_astrid', 'dear_astrid.test'],
#scripts=['bin/dear_astrid.py'],
url='http://github.com/rwstauner/dear_astrid/',
license='MIT',
description='Migrate tasks from Astrid backup xml',
long_description=open('README.rst').read(),
install_requires=[
'pyrtm>=0.4.1',
],
setup_requires=['nose>=1.0'],
tests_require=tests_require,
extras_require={
'test': tests_require,
},
**extra_args
)
|
from setuptools import setup, find_packages
try:
import nose.commands
extra_args = dict(
cmdclass={'test': nose.commands.nosetests},
)
except ImportError:
extra_args = dict()
setup(
name='dear_astrid',
version='0.1.0',
author='Randy Stauner',
author_email='[email protected]',
packages=find_packages(), #['dear_astrid', 'dear_astrid.test'],
#scripts=['bin/dear_astrid.py'],
url='http://github.com/rwstauner/dear_astrid/',
license='MIT',
description='Migrate tasks from Astrid backup xml',
long_description=open('README.rst').read(),
install_requires=[
'pyrtm>=0.4.1',
],
setup_requires=['nose>=1.0'],
tests_require=[
'nose',
'mock',
],
**extra_args
)
|
mit
|
Python
|
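The dear_astrid change above keeps a single tests_require list and exposes it again under extras_require, so the same test-only dependencies can be installed on demand (for example with pip install 'dear_astrid[test]'). A minimal sketch of that shared-list pattern, with a placeholder project name rather than the real one:
from setuptools import setup, find_packages
tests_require = ['mock']  # single source of truth for test-only dependencies
setup(
    name='example-pkg',  # placeholder name, not the real project
    packages=find_packages(),
    tests_require=tests_require,             # picked up by `setup.py test`
    extras_require={'test': tests_require},  # enables `pip install example-pkg[test]`
)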
1c2e79fb67fb2d71e1d8714f47552af3b442fda8
|
bump version to 1.0.6
|
ishiland/python-geosupport
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
raise ImportError(
"setuptools module required, please go to "
"https://pypi.python.org/pypi/setuptools and follow the instructions "
"for installing setuptools"
)
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='python-geosupport',
version='1.0.6',
url='https://github.com/ishiland/python-geosupport',
description='Python bindings for NYC Geosupport Desktop Edition',
long_description=long_description,
long_description_content_type='text/markdown',
author='Ian Shiland, Jeremy Neiman',
author_email='[email protected]',
packages=['geosupport'],
include_package_data=True,
license='MIT',
keywords=['NYC', 'geocoder', 'python-geosupport', 'geosupport'],
classifiers=[
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
test_suite="tests",
extras_require={
'dev': [
'coverage',
'invoke>=1.1.1',
'nose'
]
}
)
|
try:
from setuptools import setup
except ImportError:
raise ImportError(
"setuptools module required, please go to "
"https://pypi.python.org/pypi/setuptools and follow the instructions "
"for installing setuptools"
)
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='python-geosupport',
version='1.0.5',
url='https://github.com/ishiland/python-geosupport',
description='Python bindings for NYC Geosupport Desktop Edition',
long_description=long_description,
long_description_content_type='text/markdown',
author='Ian Shiland, Jeremy Neiman',
author_email='[email protected]',
packages=['geosupport'],
include_package_data=True,
license='MIT',
keywords=['NYC', 'geocoder', 'python-geosupport', 'geosupport'],
classifiers=[
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
test_suite="tests",
extras_require={
'dev': [
'coverage',
'invoke>=1.1.1',
'nose'
]
}
)
|
mit
|
Python
|
797d2ea521b40bbbdab5430f1d08dbb3e5fb1eb1
|
Fix repo URL in setup.py
|
SeismicData/pyasdf,SeismicData/pyasdf
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup script for the pyasdf module.
:copyright:
Lion Krischer ([email protected]), 2014
:license:
BSD 3-Clause ("BSD New" or "BSD Simplified")
"""
import inspect
import os
from setuptools import setup, find_packages
def get_package_data():
"""
Returns a list of all files needed for the installation relative to the
'pyasdf' subfolder.
"""
filenames = []
# The pyasdf root dir.
root_dir = os.path.join(os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe()))), "pyasdf")
# Recursively include all files in these folders:
folders = [os.path.join(root_dir, "tests", "data")]
for folder in folders:
for directory, _, files in os.walk(folder):
for filename in files:
# Exclude hidden files.
if filename.startswith("."):
continue
filenames.append(os.path.relpath(
os.path.join(directory, filename),
root_dir))
return filenames
setup_config = dict(
name="pyasdf",
version="0.0.1a",
description="Module for creating and processing ASDF files.",
author="Lion Krischer",
author_email="[email protected]",
url="https://github.com/SeismicData/pyasdf",
packages=find_packages(),
license="BSD",
platforms="OS Independent",
install_requires=["numpy", "obspy>=0.10.1", "h5py", "colorama", "pytest",
"flake8", "prov"],
extras_require={"mpi": ["mpi4py"]},
package_data={
"pyasdf": get_package_data()},
)
if __name__ == "__main__":
setup(**setup_config)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup script for the pyasdf module.
:copyright:
Lion Krischer ([email protected]), 2014
:license:
BSD 3-Clause ("BSD New" or "BSD Simplified")
"""
import inspect
import os
from setuptools import setup, find_packages
def get_package_data():
"""
Returns a list of all files needed for the installation relative to the
'pyasdf' subfolder.
"""
filenames = []
# The pyasdf root dir.
root_dir = os.path.join(os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe()))), "pyasdf")
# Recursively include all files in these folders:
folders = [os.path.join(root_dir, "tests", "data")]
for folder in folders:
for directory, _, files in os.walk(folder):
for filename in files:
# Exclude hidden files.
if filename.startswith("."):
continue
filenames.append(os.path.relpath(
os.path.join(directory, filename),
root_dir))
return filenames
setup_config = dict(
name="pyasdf",
version="0.0.1a",
description="Module for creating and processing ASDF files.",
author="Lion Krischer",
author_email="[email protected]",
url="http: //github.com/SeismicData/pyasdf",
packages=find_packages(),
license="BSD",
platforms="OS Independent",
install_requires=["numpy", "obspy>=0.10.1", "h5py", "colorama", "pytest",
"flake8", "prov"],
extras_require={"mpi": ["mpi4py"]},
package_data={
"pyasdf": get_package_data()},
)
if __name__ == "__main__":
setup(**setup_config)
|
bsd-3-clause
|
Python
|
e31b93d1fdda810d70d3050c73c6638b29219d12
|
Increase the allowed version of aniso8601 (#1072)
|
graphql-python/graphene,graphql-python/graphene
|
setup.py
|
setup.py
|
import ast
import codecs
import re
import sys
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
_version_re = re.compile(r"VERSION\s+=\s+(.*)")
with open("graphene/__init__.py", "rb") as f:
version = ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1))
path_copy = sys.path[:]
sys.path.append("graphene")
try:
from pyutils.version import get_version
version = get_version(version)
except Exception:
version = ".".join([str(v) for v in version])
sys.path[:] = path_copy
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
tests_require = [
"pytest",
"pytest-benchmark",
"pytest-cov",
"pytest-mock",
"pytest-asyncio",
"snapshottest",
"coveralls",
"promise",
"six",
"mock",
"pytz",
"iso8601",
]
setup(
name="graphene",
version=version,
description="GraphQL Framework for Python",
long_description=codecs.open(
"README.rst", "r", encoding="ascii", errors="replace"
).read(),
url="https://github.com/graphql-python/graphene",
author="Syrus Akbary",
author_email="[email protected]",
license="MIT",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords="api graphql protocol rest relay graphene",
packages=find_packages(exclude=["tests", "tests.*", "examples"]),
install_requires=[
"graphql-core>=3.0.0a0,<4",
"graphql-relay>=3.0.0a0,<4",
"aniso8601>=6,<9",
],
tests_require=tests_require,
extras_require={"test": tests_require},
cmdclass={"test": PyTest},
)
|
import ast
import codecs
import re
import sys
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
_version_re = re.compile(r"VERSION\s+=\s+(.*)")
with open("graphene/__init__.py", "rb") as f:
version = ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1))
path_copy = sys.path[:]
sys.path.append("graphene")
try:
from pyutils.version import get_version
version = get_version(version)
except Exception:
version = ".".join([str(v) for v in version])
sys.path[:] = path_copy
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
tests_require = [
"pytest",
"pytest-benchmark",
"pytest-cov",
"pytest-mock",
"pytest-asyncio",
"snapshottest",
"coveralls",
"promise",
"six",
"mock",
"pytz",
"iso8601",
]
setup(
name="graphene",
version=version,
description="GraphQL Framework for Python",
long_description=codecs.open(
"README.rst", "r", encoding="ascii", errors="replace"
).read(),
url="https://github.com/graphql-python/graphene",
author="Syrus Akbary",
author_email="[email protected]",
license="MIT",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords="api graphql protocol rest relay graphene",
packages=find_packages(exclude=["tests", "tests.*", "examples"]),
install_requires=[
"graphql-core>=3.0.0a0,<4",
"graphql-relay>=3.0.0a0,<4",
"aniso8601>=6,<8",
],
tests_require=tests_require,
extras_require={"test": tests_require},
cmdclass={"test": PyTest},
)
|
mit
|
Python
|
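The graphene change above widens the aniso8601 pin from >=6,<8 to >=6,<9. How a PEP 440 range like that is evaluated can be checked with the packaging library; a small sketch, assuming packaging is installed (it is not a dependency declared anywhere in the record):
from packaging.specifiers import SpecifierSet
spec = SpecifierSet(">=6,<9")
print("8.0.0" in spec)  # True  - newly allowed by the widened upper bound
print("9.0.0" in spec)  # False - still excluded
print("5.1" in spec)    # False - below the lower bound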
9031637554fa0b844f33d208d2527825fdd629fc
|
Update pypi classifiers.
|
dmpayton/django-admin-honeypot,wujuguang/django-admin-honeypot,wujuguang/django-admin-honeypot,dmpayton/django-admin-honeypot
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
from admin_honeypot import __version__, __description__, __license__
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(
name='django-admin-honeypot',
version=__version__,
description=__description__,
long_description=open('./README.rst', 'r').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
keywords='django admin honeypot trap',
maintainer='Derek Payton',
maintainer_email='[email protected]',
url='https://github.com/dmpayton/django-admin-honeypot',
download_url='https://github.com/dmpayton/django-admin-honeypot/tarball/v%s' % __version__,
license=__license__,
include_package_data=True,
packages=find_packages(),
zip_safe=False,
)
|
#!/usr/bin/env python
import sys
from admin_honeypot import __version__, __description__, __license__
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup, find_packages
setup(
name='django-admin-honeypot',
version=__version__,
description=__description__,
long_description=open('./README.rst', 'r').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
keywords='django admin honeypot trap',
maintainer='Derek Payton',
maintainer_email='[email protected]',
url='https://github.com/dmpayton/django-admin-honeypot',
download_url='https://github.com/dmpayton/django-admin-honeypot/tarball/v%s' % __version__,
license=__license__,
include_package_data=True,
packages=find_packages(),
zip_safe=False,
)
|
mit
|
Python
|
a387f0f91bd7fa8a86401de5060b073f1e9b2e82
|
fix packages in setup.py for PyPI
|
tanghaibao/jcvi,sgordon007/jcvi_062915
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from glob import glob
name = "jcvi"
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: Scientific/Engineering :: Bio-Informatics',
]
setup(
name=name,
version='0.4.7',
author='Haibao Tang',
author_email='[email protected]',
package_dir={name: '.'},
packages=[name] + ['.'.join((name, x)) for x in find_packages()],
include_package_data=True,
data_files=[(name + '/data', glob("data/*"))],
classifiers=classifiers,
zip_safe=False,
license='BSD',
url='http://github.com/tanghaibao/jcvi',
description='Python utility libraries on genome assembly, '\
'annotation and comparative genomics',
long_description=open("README.rst").read(),
install_requires=['biopython', 'numpy', 'matplotlib']
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from glob import glob
name = "jcvi"
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: Scientific/Engineering :: Bio-Informatics',
]
setup(
name=name,
version='0.4.7',
author='Haibao Tang',
author_email='[email protected]',
package_dir={name: '.'},
packages=[x for x in find_packages("../") if x.split('.')[0] == name],
include_package_data=True,
data_files=[(name + '/data', glob("data/*"))],
classifiers=classifiers,
zip_safe=False,
license='BSD',
url='http://github.com/tanghaibao/jcvi',
description='Python utility libraries on genome assembly, '\
'annotation and comparative genomics',
long_description=open("README.rst").read(),
install_requires=['biopython', 'numpy', 'matplotlib']
)
|
bsd-2-clause
|
Python
|
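The jcvi fix above builds the packages list as the top-level name plus every result of find_packages() prefixed with it, because package_dir={name: '.'} maps the repository root itself to the jcvi package. A small sketch of what that expression produces, using made-up subpackage names purely for illustration:
name = "jcvi"
# Hypothetical output of find_packages() when run from the repository root:
subpackages = ["formats", "graphics", "graphics.chromosome"]
packages = [name] + ['.'.join((name, x)) for x in subpackages]
print(packages)
# ['jcvi', 'jcvi.formats', 'jcvi.graphics', 'jcvi.graphics.chromosome']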
f428dace08e11cdba34767dea989380fa6d4e423
|
Add long_description
|
unnonouno/mrep
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
requires = [
'mecab-python3',
]
def read(name):
return open(os.path.join(os.path.dirname(__file__), name)).read()
setup(
name='miura',
version='0.1.0',
description='MIURA: pattern matcher for morpheme sequences',
long_description=read('README.rst'),
author='Yuya Unno',
author_email='[email protected]',
url='https://github.com/unnonouno/miura',
packages=['miura',
],
scripts=[
'command/miura',
],
install_requires=requires,
license='MIT',
test_suite='test',
classifiers = [
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'Topic :: Utilities',
],
)
|
#!/usr/bin/env python
from setuptools import setup
requires = [
'mecab-python3',
]
setup(
name='miura',
version='0.1.0',
description='MIURA: pattern matcher for morpheme sequences',
author='Yuya Unno',
author_email='[email protected]',
url='https://github.com/unnonouno/miura',
packages=['miura',
],
scripts=[
'command/miura',
],
install_requires=requires,
license='MIT',
test_suite='test',
classifiers = [
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'Topic :: Utilities',
],
)
|
mit
|
Python
|
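The change above adds a read() helper built on os.path.dirname(__file__), so README.rst is resolved relative to setup.py itself rather than to the current working directory, which matters when the build is invoked from another directory. A minimal sketch of that helper in isolation:
import os
def read(name):
    # Resolve the file next to setup.py, not relative to the caller's cwd.
    here = os.path.dirname(__file__)
    return open(os.path.join(here, name)).read()
# long_description=read('README.rst') then stays correct no matter where the build runs.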
d24daa18023d0d59d70a4328466613f3a03de039
|
add tests_require packages
|
pinax/pinax-comments,pinax/pinax-comments,eldarion/dialogos
|
setup.py
|
setup.py
|
import codecs
from os import path
from setuptools import find_packages, setup
def read(*parts):
filename = path.join(path.dirname(__file__), *parts)
with codecs.open(filename, encoding="utf-8") as fp:
return fp.read()
setup(
author="",
author_email="",
description="",
name="pinax-comments",
long_description=read("README.rst"),
version="0.1",
url="http://github.com/pinax/pinax-comments/",
license="MIT",
install_requires=[
"django-appconf>=1.0.1",
],
packages=find_packages(),
package_data={
"comments": []
},
test_suite="runtests.runtests",
tests_require=[
"django-test-plus>=1.0.11",
"django-appconf>=1.0.1",
"django-user-accounts>=1.3.1",
],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
zip_safe=False
)
|
import codecs
from os import path
from setuptools import find_packages, setup
def read(*parts):
filename = path.join(path.dirname(__file__), *parts)
with codecs.open(filename, encoding="utf-8") as fp:
return fp.read()
setup(
author="",
author_email="",
description="",
name="pinax-comments",
long_description=read("README.rst"),
version="0.1",
url="http://github.com/pinax/pinax-comments/",
license="MIT",
install_requires=[
"django-appconf>=1.0.1"
],
packages=find_packages(),
package_data={
"comments": []
},
test_suite="runtests.runtests",
tests_require=[
],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Libraries :: Python Modules",
],
zip_safe=False
)
|
mit
|
Python
|
a61b79a6d427745f3bf240554b3dd852d8e6ed65
|
Add classifiers
|
swarmer/fridge,swarmer/fridge
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='Fridge',
version='0.1',
py_modules=['fridge'],
description='Persistent JSON-encoded dictionary',
author='Anton Barkovsky',
author_email='[email protected]',
url='http://fridge.readthedocs.org/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Software Development :: Libraries :: Python Modules'])
|
from distutils.core import setup
setup(name='Fridge',
version='0.1',
py_modules=['fridge'],
description='Persistent JSON-encoded dictionary',
author='Anton Barkovsky',
author_email='[email protected]',
url='http://fridge.readthedocs.org/')
|
mit
|
Python
|
3bf4ce1f01d3e67702d91ccf4119ad6d956af99e
|
bump to 0.4.4
|
benoitc/socketpool,mgedmin/socketpool
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -
#
# This file is part of socketpool.
# See the NOTICE for more information.
import os
from setuptools import setup, find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries']
# read long description
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
DATA_FILES = [
('socketpool', ["LICENSE", "MANIFEST.in", "NOTICE", "README.rst",
"THANKS", "UNLICENSE"])
]
setup(name='socketpool',
version='0.4.4',
description = 'Python socket pool',
long_description = long_description,
classifiers = CLASSIFIERS,
license = 'BSD',
url = 'http://github.com/benoitc/socketpool',
packages=find_packages(),
data_files = DATA_FILES)
|
#!/usr/bin/env python
# -*- coding: utf-8 -
#
# This file is part of socketpool.
# See the NOTICE for more information.
import os
from setuptools import setup, find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries']
# read long description
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
DATA_FILES = [
('socketpool', ["LICENSE", "MANIFEST.in", "NOTICE", "README.rst",
"THANKS", "UNLICENSE"])
]
setup(name='socketpool',
version='0.4.3',
description = 'Python socket pool',
long_description = long_description,
classifiers = CLASSIFIERS,
license = 'BSD',
url = 'http://github.com/benoitc/socketpool',
packages=find_packages(),
data_files = DATA_FILES)
|
mit
|
Python
|
590a7b926af1e57d48a087f18556caa4f3e1170c
|
Remove the duplicated filter in PreferenceAdmin
|
kensonman/webframe,kensonman/webframe,kensonman/webframe
|
admin.py
|
admin.py
|
# -*- coding: utf-8 -*-
# File: src/webframe/admin.py
# Date: 2019-11-21 14:55
# Author: Kenson Man <[email protected]>
# Desc: The file provides the Admin-Tools in the webframe module
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _, ugettext
from .models import *
import logging
logger=logging.getLogger('webframe.admin')
class PreferenceChildParentFilter(admin.SimpleListFilter):
title=_('webframe.models.Preference.childParentFilter')
parameter_name='child_parent_filter'
def lookups(self, req, modelAdmin):
rst=[
('parent', _('webframe.models.Preference.childParentFilter.parent')),
('child', _('webframe.models.Preference.childParentFilter.child')),
]
return rst
def queryset(self, req, q):
logger.info('Filtering by PreferenceChildParentFilter: {0}'.format(self.value()))
if self.value() is None:
return q
elif self.value()=='parent':
return q.filter(id__in=Preference.objects.filter(parent__isnull=False).values('parent__id'))
else:
return q.filter(parent__isnull=False)
@admin.register(Preference)
class PreferenceAdmin(admin.ModelAdmin):
list_display=('id', 'name', 'parent', 'reserved', 'lmb', 'lmd')
list_filter=('reserved', PreferenceChildParentFilter, 'tipe',)
search_fields=('name', 'value', 'owner__username')
ordering=('owner__username', 'name', 'value')
|
# -*- coding: utf-8 -*-
# File: src/webframe/admin.py
# Date: 2019-11-21 14:55
# Author: Kenson Man <[email protected]>
# Desc: The file provides the Admin-Tools in the webframe module
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _, ugettext
from .models import *
import logging
logger=logging.getLogger('webframe.admin')
class PreferenceChildParentFilter(admin.SimpleListFilter):
title=_('webframe.models.Preference.childParentFilter')
parameter_name='child_parent_filter'
def lookups(self, req, modelAdmin):
rst=[
('parent', _('webframe.models.Preference.childParentFilter.parent')),
('child', _('webframe.models.Preference.childParentFilter.child')),
]
return rst
def queryset(self, req, q):
logger.info('Filtering by PreferenceChildParentFilter: {0}'.format(self.value()))
if self.value() is None:
return q
elif self.value()=='parent':
return q.filter(id__in=Preference.objects.filter(parent__isnull=False).values('parent__id'))
else:
return q.filter(parent__isnull=False)
@admin.register(Preference)
class PreferenceAdmin(admin.ModelAdmin):
list_display=('id', 'name', 'parent', 'reserved', 'lmb', 'lmd')
list_filter=('reserved', PreferenceChildParentFilter, 'tipe',)
search_fields=('name', 'value', 'owner__username', 'owner__username')
ordering=('owner__username', 'name', 'value')
|
apache-2.0
|
Python
|
83dd154759a430918931072e8e82db413c4f5741
|
Use proper settings
|
chrisseto/waterbutler,kwierman/waterbutler,Ghalko/waterbutler,hmoco/waterbutler,TomBaxter/waterbutler,rafaeldelucena/waterbutler,Johnetordoff/waterbutler,felliott/waterbutler,icereval/waterbutler,rdhyee/waterbutler,CenterForOpenScience/waterbutler,RCOSDP/waterbutler,cosenal/waterbutler
|
tasks.py
|
tasks.py
|
# encoding: utf-8
from invoke import task
from waterbutler import settings
@task
def tornado(port=settings.PORT, address=settings.ADDRESS, debug=settings.DEBUG):
from waterbutler.server import serve
serve(port, address, debug)
|
# encoding: utf-8
from invoke import task
from waterbutler.server import settings
@task
def tornado(port=settings.PORT, address=settings.ADDRESS, debug=settings.DEBUG):
from waterbutler.server import serve
serve(port, address, debug)
|
apache-2.0
|
Python
|
cae6d756294c90b93e5505ce5eec9f93ced83398
|
Improve test task; switch flake8->syntax
|
marshmallow-code/apispec,marshmallow-code/smore
|
tasks.py
|
tasks.py
|
# -*- coding: utf-8 -*-
import os
import sys
import webbrowser
from invoke import task
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task
def test(ctx, watch=False, last_failing=False):
"""Run the tests.
Note: --watch requires pytest-xdist to be installed.
"""
import pytest
syntax(ctx)
args = []
if watch:
args.append('-f')
if last_failing:
args.append('--lf')
args.append('tests')
retcode = pytest.main(args)
sys.exit(retcode)
@task
def syntax(ctx):
"""Run flake8 on codebase."""
ctx.run('pre-commit run --all-files', echo=True)
@task
def watch(ctx):
"""Run tests when a file changes. Requires pytest-xdist."""
import pytest
errcode = pytest.main(['-f'])
sys.exit(errcode)
@task
def clean(ctx):
ctx.run('rm -rf build')
ctx.run('rm -rf dist')
ctx.run('rm -rf apispec.egg-info')
clean_docs(ctx)
print('Cleaned up.')
@task
def clean_docs(ctx):
ctx.run('rm -rf %s' % build_dir)
@task
def browse_docs(ctx):
path = os.path.join(build_dir, 'index.html')
webbrowser.open_new_tab(path)
def build_docs(ctx, browse):
ctx.run('sphinx-build %s %s' % (docs_dir, build_dir), echo=True)
if browse:
browse_docs(ctx)
@task
def docs(ctx, clean=False, browse=False, watch=False):
"""Build the docs."""
if clean:
clean_docs(ctx)
if watch:
watch_docs(ctx, browse=browse)
else:
build_docs(ctx, browse=browse)
@task
def watch_docs(ctx, browse=False):
"""Run build the docs when a file changes."""
try:
import sphinx_autobuild # noqa
except ImportError:
print('ERROR: watch task requires the sphinx_autobuild package.')
print('Install it with:')
print(' pip install sphinx-autobuild')
sys.exit(1)
ctx.run(
'sphinx-autobuild {0} {1} {2} -z apispec'.format(
'--open-browser' if browse else '', docs_dir, build_dir,
), echo=True, pty=True,
)
@task
def readme(ctx, browse=False):
ctx.run('rst2html.py README.rst > README.html')
if browse:
webbrowser.open_new_tab('README.html')
|
# -*- coding: utf-8 -*-
import os
import sys
import webbrowser
from invoke import task
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task
def test(ctx):
flake(ctx)
import pytest
errcode = pytest.main(['tests'])
sys.exit(errcode)
@task
def flake(ctx):
"""Run flake8 on codebase."""
ctx.run('flake8 .', echo=True)
@task
def watch(ctx):
"""Run tests when a file changes. Requires pytest-xdist."""
import pytest
errcode = pytest.main(['-f'])
sys.exit(errcode)
@task
def clean(ctx):
ctx.run('rm -rf build')
ctx.run('rm -rf dist')
ctx.run('rm -rf apispec.egg-info')
clean_docs(ctx)
print('Cleaned up.')
@task
def clean_docs(ctx):
ctx.run('rm -rf %s' % build_dir)
@task
def browse_docs(ctx):
path = os.path.join(build_dir, 'index.html')
webbrowser.open_new_tab(path)
def build_docs(ctx, browse):
ctx.run('sphinx-build %s %s' % (docs_dir, build_dir), echo=True)
if browse:
browse_docs(ctx)
@task
def docs(ctx, clean=False, browse=False, watch=False):
"""Build the docs."""
if clean:
clean_docs(ctx)
if watch:
watch_docs(ctx, browse=browse)
else:
build_docs(ctx, browse=browse)
@task
def watch_docs(ctx, browse=False):
"""Run build the docs when a file changes."""
try:
import sphinx_autobuild # noqa
except ImportError:
print('ERROR: watch task requires the sphinx_autobuild package.')
print('Install it with:')
print(' pip install sphinx-autobuild')
sys.exit(1)
ctx.run('sphinx-autobuild {0} {1} {2} -z apispec'.format(
'--open-browser' if browse else '', docs_dir, build_dir), echo=True, pty=True)
@task
def readme(ctx, browse=False):
ctx.run('rst2html.py README.rst > README.html')
if browse:
webbrowser.open_new_tab('README.html')
|
mit
|
Python
|
1235589dae5cf5dc1a8bf1114f65f0b36bb7bca1
|
Simplify tests
|
rmgorman/django-guardian,rmgorman/django-guardian,benkonrath/django-guardian,benkonrath/django-guardian,benkonrath/django-guardian,lukaszb/django-guardian,lukaszb/django-guardian,rmgorman/django-guardian,lukaszb/django-guardian
|
tests.py
|
tests.py
|
"""
Unit tests runner for ``django-guardian`` based on the bundled example project.
Tests are independent from this example application, but setuptools needs
instructions on how to interpret the ``test`` command when we run::
python setup.py test
"""
import os
import sys
def main():
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "guardian.testapp.testsettings")
import django
from django.core.management import call_command
django.setup()
call_command('test')
sys.exit(0)
if __name__ == '__main__':
main()
|
"""
Unit tests runner for ``django-guardian`` based on the bundled example project.
Tests are independent from this example application, but setuptools needs
instructions on how to interpret the ``test`` command when we run::
python setup.py test
"""
import os
import sys
import django
os.environ["DJANGO_SETTINGS_MODULE"] = 'guardian.testapp.testsettings'
from guardian.testapp import testsettings as settings
def run_tests(settings):
from django.test.utils import get_runner
from utils import show_settings
show_settings(settings, 'tests')
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(interactive=False)
failures = test_runner.run_tests(['guardian'])
return failures
def main():
failures = run_tests(settings)
sys.exit(failures)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
e6f14f8ef1bb0ab247d331b6ef023d35543663be
|
Update tests.
|
dschwertfeger/partitioned-hash-join
|
tests.py
|
tests.py
|
from io import open
import unittest
from partitioned_hash_join import (
build_hash_table,
h1,
is_duplicate,
join,
letters_for_result,
value_for_letter,
LETTERS
)
class PartitionedHashJoinTests(unittest.TestCase):
def test_h1(self):
self.assertEqual(h1('H1234567890'), 123)
def test_is_duplicate(self):
self.assertTrue(is_duplicate(100, 100))
self.assertTrue(is_duplicate(10, 1010))
self.assertFalse(is_duplicate(100, 1010))
def test_join(self):
r = open('r_test_bucket.txt', 'r')
s = open('s_test_bucket.txt', 'r')
hash_table = build_hash_table(r)
result = join(hash_table, s)
self.assertEqual(result.get('9019095166'), 100010001)
def test_value_for_letter(self):
for idx, l in enumerate(LETTERS):
self.assertEqual(10**idx, value_for_letter(l))
def test_letters_for_result(self):
self.assertTrue(x in letters_for_result(100100010) for x in ['F', 'B', 'I'])
if __name__=='__main__':
unittest.main()
|
from io import open
import unittest
from partitioned_hash_join import (
build_hash_table,
h1,
join,
write
)
class PartitionedHashJoinTests(unittest.TestCase):
def test_h1(self):
self.assertEqual(h1('H1234567890'), 12)
def test_join(self):
r = open('r_test_bucket.txt', 'r')
s = open('s_test_bucket.txt', 'r')
hash_table = build_hash_table(r)
result = join(hash_table, s)
self.assertEqual(len(result.get('9019095166')), 3)
def test_create_result_file(self):
results = {488552576: set([u'D488552576\n', u'B488552576\n']),
482241448: set([u'G482241448\n']),
486356299: set([u'B486356299\n',
u'D486356299\n',
u'A486356299\n'])}
write(results)
expected_results = ['D488552576\n',
'B488552576\n',
'G482241448\n',
'B486356299\n',
'D486356299\n',
'A486356299\n']
with open('result.txt') as f:
for line in f:
self.assertTrue(line in expected_results)
if __name__=='__main__':
unittest.main()
|
mit
|
Python
|
2a9f27c46810cb14d25ddb3282c72de4303ee5bd
|
raise KeyError on getitem
|
mgax/kv
|
tests.py
|
tests.py
|
import unittest
class KV(object):
def __len__(self):
return 0
def __getitem__(self, key):
raise KeyError
class KVTest(unittest.TestCase):
def test_new_kv_is_empty(self):
self.assertEqual(len(KV()), 0)
def test_get_missing_value_raises_key_error(self):
with self.assertRaises(KeyError):
KV()['missing']
|
import unittest
class KV(object):
def __len__(self):
return 0
class KVTest(unittest.TestCase):
def test_new_kv_is_empty(self):
self.assertEqual(len(KV()), 0)
|
bsd-2-clause
|
Python
|
561340c241dcbd9021e27dda44675ff8eaed9ad3
|
add unix_socket argument
|
innogames/igcollect
|
src/mysql.py
|
src/mysql.py
|
#!/usr/bin/env python
#
# igcollect - Mysql Status
#
# Copyright (c) 2016, InnoGames GmbH
#
try:
from mysql.connector import connect
except ImportError:
from MySQLdb import connect
from argparse import ArgumentParser
from time import time
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='mysql')
parser.add_argument('--host', default='localhost')
parser.add_argument('--user')
parser.add_argument('--password')
parser.add_argument(
'--unix-socket',
default='/var/run/mysqld/mysqld.sock',
)
return parser.parse_args()
def main():
args = parse_args()
template = args.prefix + '.{}.{} {} ' + str(int(time()))
db = connect(
user=args.user,
passwd=args.password,
host=args.host,
unix_socket=args.unix_socket,
)
cur = db.cursor()
# Check for global status
cur.execute('SHOW GLOBAL STATUS')
for row in cur.fetchall():
if row[1].isdigit():
print(template.format('status', row[0], row[1]))
cur.execute('SHOW VARIABLES')
for row in cur.fetchall():
if row[1].isdigit():
print(template.format('variables', row[0], row[1]))
# Find out how much space we can recover by Optimize
sysdbs = {
'information_schema',
'performance_schema',
'mysql',
'sys',
'test',
}
free = 0
cur.execute('SHOW DATABASES')
for row in cur.fetchall():
if row[0] in sysdbs:
continue
cur.execute(
'SELECT table_name, '
'ROUND(data_free / 1024 / 1024), '
'ROUND((data_length + index_length), 2) '
'FROM information_schema.tables '
'WHERE table_schema = %s',
[row[0]]
)
for value in cur.fetchall():
print(template.format('table_size', value[0], value[2]))
free += value[1]
print(template.format('status', 'optimize_freeable', free))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#
# igcollect - Mysql Status
#
# Copyright (c) 2016, InnoGames GmbH
#
try:
from mysql.connector import connect
except ImportError:
from MySQLdb import connect
from argparse import ArgumentParser
from time import time
def parse_args():
parser = ArgumentParser()
parser.add_argument('--prefix', default='mysql')
parser.add_argument('--host', default='localhost')
parser.add_argument('--user')
parser.add_argument('--password')
return parser.parse_args()
def main():
args = parse_args()
template = args.prefix + '.{}.{} {} ' + str(int(time()))
db = connect(
user=args.user,
passwd=args.password,
host=args.host,
)
cur = db.cursor()
# Check for global status
cur.execute('SHOW GLOBAL STATUS')
for row in cur.fetchall():
if row[1].isdigit():
print(template.format('status', row[0], row[1]))
cur.execute('SHOW VARIABLES')
for row in cur.fetchall():
if row[1].isdigit():
print(template.format('variables', row[0], row[1]))
# Find out how much space we can recover by Optimize
sysdbs = {
'information_schema',
'performance_schema',
'mysql',
'sys',
'test',
}
free = 0
cur.execute('SHOW DATABASES')
for row in cur.fetchall():
if row[0] in sysdbs:
continue
cur.execute(
'SELECT table_name, '
'ROUND(data_free / 1024 / 1024), '
'ROUND((data_length + index_length), 2) '
'FROM information_schema.tables '
'WHERE table_schema = %s',
[row[0]]
)
for value in cur.fetchall():
print(template.format('table_size', value[0], value[2]))
free += value[1]
print(template.format('status', 'optimize_freeable', free))
if __name__ == '__main__':
main()
|
mit
|
Python
|
b33ffb6d15c29697c158743f89c2adf5a2e19e32
|
Update working draft URL.
|
googlearchive/pywebsocket,GoogleChromeLabs/pywebsocket3,google/pywebsocket,google/pywebsocket,googlearchive/pywebsocket,GoogleChromeLabs/pywebsocket3,google/pywebsocket,GoogleChromeLabs/pywebsocket3,googlearchive/pywebsocket
|
src/setup.py
|
src/setup.py
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Set up script for mod_pywebsocket.
"""
from distutils.core import setup
import sys
_PACKAGE_NAME = 'mod_pywebsocket'
if sys.version < '2.3':
print >>sys.stderr, '%s requires Python 2.3 or later.' % _PACKAGE_NAME
sys.exit(1)
setup(author='Yuzo Fujishima',
author_email='[email protected]',
description='Web Socket extension for Apache HTTP Server.',
long_description=(
'mod_pywebsocket is an Apache HTTP Server extension for '
'Web Socket (http://tools.ietf.org/html/'
'draft-ietf-hybi-thewebsocketprotocol). '
'See mod_pywebsocket/__init__.py for more detail.'),
license='See COPYING',
name=_PACKAGE_NAME,
packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'],
url='http://code.google.com/p/pywebsocket/',
version='0.5.2',
)
# vi:sts=4 sw=4 et
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Set up script for mod_pywebsocket.
"""
from distutils.core import setup
import sys
_PACKAGE_NAME = 'mod_pywebsocket'
if sys.version < '2.3':
print >>sys.stderr, '%s requires Python 2.3 or later.' % _PACKAGE_NAME
sys.exit(1)
setup(author='Yuzo Fujishima',
author_email='[email protected]',
description='Web Socket extension for Apache HTTP Server.',
long_description=(
'mod_pywebsocket is an Apache HTTP Server extension for '
'Web Socket (http://tools.ietf.org/html/'
'draft-hixie-thewebsocketprotocol). '
'See mod_pywebsocket/__init__.py for more detail.'),
license='See COPYING',
name=_PACKAGE_NAME,
packages=[_PACKAGE_NAME, _PACKAGE_NAME + '.handshake'],
url='http://code.google.com/p/pywebsocket/',
version='0.5.2',
)
# vi:sts=4 sw=4 et
|
bsd-3-clause
|
Python
|
51084b951d7d878d5400a6bfca1c8da1793b0a17
|
Modify init method of staff to take in staff person type only
|
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
|
src/staff.py
|
src/staff.py
|
from .person import Person
class Staff(Person):
def __init__(self, first_name, last_name, person_id, has_living_space = None, has_office = None):
super(Staff, self).__init__(first_name, last_name, "staff", "N", person_id, has_living_space, has_office)
|
from .person import Person
class Staff(Person):
def __init__(self, first_name, last_name, person_type, person_id, has_living_space = None, has_office = None):
super(Staff, self).__init__(first_name, last_name, person_type, "N", person_id, has_living_space, has_office)
|
mit
|
Python
|
74c294d11c3ba98f497df73fdd5d5061da601975
|
add the -j argument
|
azverkan/scons,azverkan/scons,datalogics-robb/scons,azverkan/scons,datalogics-robb/scons,datalogics-robb/scons,datalogics/scons,datalogics/scons,azverkan/scons,datalogics/scons,datalogics/scons,azverkan/scons,datalogics-robb/scons
|
src/scons.py
|
src/scons.py
|
#!/usr/bin/env python
import getopt
import os.path
import string
import sys
def PrintUsage():
print "Usage: scons [OPTION]... TARGET..."
print "Build TARGET or multiple TARGET(s)"
print " "
print ' -f CONSCRIPT execute CONSCRIPT instead of "SConstruct"'
print " -j N execute N parallel jobs"
print " --help print this message and exit"
try:
opts, targets = getopt.getopt(sys.argv[1:], 'f:j:', ['help'])
except getopt.GetoptError, x:
print x
PrintUsage()
sys.exit()
Scripts = []
num_jobs = 1
for o, a in opts:
if o == '-f': Scripts.append(a)
if o == '-j':
try:
num_jobs = int(a)
except:
PrintUsage()
sys.exit(1)
if num_jobs <= 0:
PrintUsage()
sys.exit(1)
if o == '--help':
PrintUsage()
sys.exit(0)
if not Scripts:
Scripts.append('SConstruct')
# XXX The commented-out code here adds any "scons" subdirs in anything
# along sys.path to sys.path. This was an attempt at setting up things
# so we can import "node.FS" instead of "scons.Node.FS". This doesn't
# quite fit our testing methodology, though, so save it for now until
# the right solutions pops up.
#
#dirlist = []
#for dir in sys.path:
# scons = os.path.join(dir, 'scons')
# if os.path.isdir(scons):
# dirlist = dirlist + [scons]
# dirlist = dirlist + [dir]
#
#sys.path = dirlist
from scons.Node.FS import init, Dir, File, lookup
from scons.Environment import Environment
import scons.Job
from scons.Builder import Builder
init()
def Conscript(filename):
Scripts.append(filename)
while Scripts:
file, Scripts = Scripts[0], Scripts[1:]
execfile(file)
class Task:
"this is here only until the build engine is implemented"
def __init__(self, target):
self.target = target
def execute(self):
self.target.build()
class Taskmaster:
"this is here only until the build engine is implemented"
def __init__(self, targets):
self.targets = targets
self.num_iterated = 0
def next_task(self):
if self.num_iterated == len(self.targets):
return None
else:
current = self.num_iterated
self.num_iterated = self.num_iterated + 1
return Task(self.targets[current])
def is_blocked(self):
return 0
def executed(self, task):
pass
taskmaster = Taskmaster(map(lambda x: lookup(File, x), targets))
jobs = scons.Job.Jobs(num_jobs, taskmaster)
jobs.start()
jobs.wait()
|
#!/usr/bin/env python
import getopt
import os.path
import string
import sys
opts, targets = getopt.getopt(sys.argv[1:], 'f:')
Scripts = []
for o, a in opts:
if o == '-f': Scripts.append(a)
if not Scripts:
Scripts.append('SConstruct')
# XXX The commented-out code here adds any "scons" subdirs in anything
# along sys.path to sys.path. This was an attempt at setting up things
# so we can import "node.FS" instead of "scons.Node.FS". This doesn't
# quite fit our testing methodology, though, so save it for now until
# the right solution pops up.
#
#dirlist = []
#for dir in sys.path:
# scons = os.path.join(dir, 'scons')
# if os.path.isdir(scons):
# dirlist = dirlist + [scons]
# dirlist = dirlist + [dir]
#
#sys.path = dirlist
from scons.Node.FS import init, Dir, File, lookup
from scons.Environment import Environment
init()
def Conscript(filename):
Scripts.append(filename)
while Scripts:
file, Scripts = Scripts[0], Scripts[1:]
execfile(file)
for path in targets:
target = lookup(File, path)
target.build()
|
mit
|
Python
|
194687d9b3809bb2e976c194c2245264c395000a
|
add some doctest cases of anyconfig.tests.common.MaskedImportLoader
|
ssato/python-anyconfig,pmquang/python-anyconfig,ssato/python-anyconfig,pmquang/python-anyconfig
|
anyconfig/tests/common.py
|
anyconfig/tests/common.py
|
#
# Copyright (C) 2011 - 2014 Satoru SATOH <ssato at redhat.com>
#
import imp
import os.path
import sys
import tempfile
def selfdir():
return os.path.dirname(__file__)
def setup_workdir():
return tempfile.mkdtemp(dir="/tmp", prefix="python-anyconfig-tests-")
def cleanup_workdir(workdir):
"""
FIXME: Danger!
"""
os.system("rm -rf " + workdir)
class MaskedImportLoader(object):
"""
Mask specified module[s] and block importing that module / these modules to
raise ImportError on purpose.
see also: http://pymotw.com/2/sys/imports.html
"""
def __init__(self, *modules):
"""
:param modules: A list of names of modules to mask
>>> ms = ["lxml", "yaml", "json"]
>>> mil = MaskedImportLoader(*ms)
>>> mil.masked == ms
True
"""
self.masked = modules
def find_module(self, fullname, path=None):
"""
>>> mil = MaskedImportLoader("lxml", "yaml")
>>> mil.find_module("lxml.etree") is None
True
"""
if fullname in self.masked:
return self
return None
def load_module(self, fullname):
"""
:param fullname: Full name of the module to load
>>> mil = MaskedImportLoader("os.path")
>>> try:
... mil.load_module("os.path")
... except ImportError:
... pass
>>> mil.load_module("os") is not None
>>> mil.load_module("platform") is not None
"""
if fullname in self.masked:
raise ImportError("Could not import %s as it's masked" % fullname)
# Stolen from NoisyMetaImportLoader.load_module.
if fullname in sys.modules:
mod = sys.modules[fullname]
else:
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
# Set a few properties required by PEP 302
mod.__file__ = fullname
mod.__name__ = fullname
mod.__loader__ = self
mod.__package__ = '.'.join(fullname.split('.')[:-1])
return mod
def mask_modules(*modules):
sys.meta_path.append(MaskedImportLoader(*modules))
# vim:sw=4:ts=4:et:
|
#
# Copyright (C) 2011 - 2014 Satoru SATOH <ssato at redhat.com>
#
import imp
import os.path
import sys
import tempfile
def selfdir():
return os.path.dirname(__file__)
def setup_workdir():
return tempfile.mkdtemp(dir="/tmp", prefix="python-anyconfig-tests-")
def cleanup_workdir(workdir):
"""
FIXME: Danger!
"""
os.system("rm -rf " + workdir)
class MaskedImportLoader(object):
"""
Mask specified module[s] and block importing that module / these modules to
raise ImportError on purpose.
see also: http://pymotw.com/2/sys/imports.html
"""
def __init__(self, *modules):
"""
:param modules: A list of names of modules to mask
"""
self.masked = modules
def find_module(self, fullname, path=None):
if fullname in self.masked:
return self
return None
def load_module(self, fullname):
"""
:param fullname: Full name of the module to load
"""
if fullname in self.masked:
raise ImportError("Could not import %s as it's masked" % fullname)
# Stolen from NoisyMetaImportLoader.load_module.
if fullname in sys.modules:
mod = sys.modules[fullname]
else:
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
# Set a few properties required by PEP 302
mod.__file__ = fullname
mod.__name__ = fullname
mod.__loader__ = self
mod.__package__ = '.'.join(fullname.split('.')[:-1])
return mod
def mask_modules(*modules):
sys.meta_path.append(MaskedImportLoader(*modules))
# vim:sw=4:ts=4:et:
|
mit
|
Python
|
73f20bde3e0d66b9b6bd787b0c2a0a581e132faa
|
Add `list` command to show available instances
|
cloudboss/bossimage,cloudboss/bossimage
|
bossimage/cli.py
|
bossimage/cli.py
|
# Copyright 2016 Joseph Wright <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import click
import os
import yaml
import bossimage.core as bc
@click.group()
def main(): pass
@main.command()
@click.argument('instance')
@click.option('-v', '--verbosity', count=True,
help='Verbosity, may be repeated up to 4 times')
def run(instance, verbosity):
config = load_config()
bc.create_working_dir()
instance_info = bc.load_or_create_instance(config, instance)
bc.wait_for_ssh(instance_info['ip'])
bc.run(instance, verbosity)
@main.command()
@click.argument('instance')
def image(instance):
bc.image(instance)
@main.command()
@click.argument('instance')
def delete(instance):
bc.delete(instance)
@main.command('list')
def lst():
platforms = list_of('platforms')
profiles = list_of('profiles')
instances = ['{}-{}'.format(pl, pr) for pl in platforms for pr in profiles]
for i in instances: click.echo(i)
def list_of(key):
config = load_config()
return [k['name'] for k in config[key]]
@bc.cached
def load_config():
try:
with open('.boss.yml') as f:
c = yaml.load(f)
return c
except IOError as e:
click.echo('Error loading .boss.yml: {}'.format(e.strerror))
raise click.Abort()
|
# Copyright 2016 Joseph Wright <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import click
import os
import yaml
import bossimage.core as bc
@click.group()
def main(): pass
@main.command()
@click.argument('instance')
@click.option('-v', '--verbosity', count=True,
help='Verbosity, may be repeated up to 4 times')
def run(instance, verbosity):
config = load_config()
bc.create_working_dir()
instance_info = bc.load_or_create_instance(config, instance)
bc.wait_for_ssh(instance_info['ip'])
bc.run(instance, verbosity)
@main.command()
@click.argument('instance')
def image(instance):
bc.image(instance)
@main.command()
@click.argument('instance')
def delete(instance):
bc.delete(instance)
@bc.cached
def load_config():
try:
with open('.boss.yml') as f:
c = yaml.load(f)
return c
except IOError as e:
click.echo('Error loading .boss.yml: {}'.format(e.strerror))
raise click.Abort()
|
mit
|
Python
|
0f6272aef4fd37ca1b6cf1a0a86ccaab6ff90f82
|
Add year-month parsing to monthfield in admin.
|
datakortet/dkmodelfields
|
dkmodelfields/adminforms/monthfield.py
|
dkmodelfields/adminforms/monthfield.py
|
# -*- coding: utf-8 -*-
"""Admin support code for MonthFields.
"""
from dk import ttcal
from django.forms.fields import CharField
from django.forms import ValidationError
from django.forms.util import flatatt
from django.forms.widgets import TextInput
from django.utils.safestring import mark_safe
class MonthInput(TextInput):
"Month input widget."
def render(self, name, value, attrs=None):
if value is None:
value = u''
final_attrs = self.build_attrs(attrs, type='month', name=name)
if value != u'':
if isinstance(value, (int, long)):
value = ttcal.Month(value)
if isinstance(value, unicode):
parts = value.split('-')
y = int(parts[0], 10)
m = int(parts[1], 10)
value = ttcal.Month(y, m)
assert isinstance(value, ttcal.Month), type(value)
final_attrs['value'] = unicode(value.format("Y-m"))
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class MonthField(CharField):
"Month field widget."
widget = MonthInput
def __init__(self, *args, **kwargs):
super(MonthField, self).__init__(*args, **kwargs)
def _str_to_month(self, sval): # pylint:disable=R0201
# 2008-01
if not isinstance(sval, (str, unicode)):
print "NOT ISINSTANCE:", repr(sval)
if not sval.strip():
return None
parts = sval.split('-')
y = int(parts[0])
m = int(parts[1])
return ttcal.Month(y, m)
def clean(self, value):
super(MonthField, self).clean(value)
try:
return self._str_to_month(value)
except:
raise ValidationError('Invalid month: %r' % value)
def to_python(self, value): # pylint:disable=R0201
"convert value to ttcal.Year()."
try:
return self._str_to_month(value)
except:
raise ValidationError('Invalid month: %r' % value)
|
# -*- coding: utf-8 -*-
"""Admin support code for MonthFields.
"""
from dk import ttcal
from django.forms.fields import CharField
from django.forms import ValidationError
from django.forms.util import flatatt
from django.forms.widgets import TextInput
from django.utils.safestring import mark_safe
class MonthInput(TextInput):
"Month input widget."
def render(self, name, value, attrs=None):
if value is None:
value = u''
final_attrs = self.build_attrs(attrs, type='month', name=name)
if value != u'':
#if isinstance(value, (int, long)):
# value = ttcal.Month(value)
assert isinstance(value, ttcal.Month), type(value)
final_attrs['value'] = unicode(value.format("Y-m"))
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class MonthField(CharField):
"Month field widget."
widget = MonthInput
def __init__(self, *args, **kwargs):
super(MonthField, self).__init__(*args, **kwargs)
def _str_to_month(self, sval): # pylint:disable=R0201
# 2008-01
if not isinstance(sval, (str, unicode)):
print "NOT ISINSTANCE:", repr(sval)
if not sval.strip():
return None
parts = sval.split('-')
y = int(parts[0])
m = int(parts[1])
return ttcal.Month(y, m)
def clean(self, value):
super(MonthField, self).clean(value)
try:
return self._str_to_month(value)
except:
raise ValidationError('Invalid month: %r' % value)
def to_python(self, value): # pylint:disable=R0201
"convert value to ttcal.Year()."
try:
return self._str_to_month(value)
except:
raise ValidationError('Invalid month: %r' % value)
|
mit
|
Python
|
6bd9d8de1066bcd4b63ad41da676cf764b4ff00d
|
Update reference to sv-benchmarks
|
ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec
|
benchexec/tools/sv_benchmarks_util.py
|
benchexec/tools/sv_benchmarks_util.py
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
"""
This module contains some useful functions related to tasks in the sv-benchmarks
repository: https://gitlab.com/sosy-lab/benchmarking/sv-benchmarks
Note the following points before using any function in this util:
1. This is not a part of stable benchexec API.
We do not provide any guarantee of backward compatibility of this module.
2. Out-of-tree modules should not use this util
3. Any function in this util may change at any point in time
"""
import benchexec.tools.template
# Defining constants for data model.
ILP32 = "ILP32"
LP64 = "LP64"
def get_data_model_from_task(task, param_dict):
"""
This function tries to extract tool parameter for data model
depending on the data model in the task.
@param task: An instance of class Task, e.g., with the input files
@param param_dict: Dictionary mapping data model to the tool param value
"""
if isinstance(task.options, dict) and task.options.get("language") == "C":
data_model = task.options.get("data_model")
if data_model:
try:
return param_dict[data_model]
except KeyError:
raise benchexec.tools.template.UnsupportedFeatureException(
f"Unsupported data_model '{data_model}' defined for task '{task}'"
)
return None
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
"""
This module contains some useful functions related to tasks in the sv-benchmarks
repository: https://github.com/sosy-lab/sv-benchmarks
Note the following points before using any function in this util:
1. This is not a part of stable benchexec API.
We do not provide any guarantee of backward compatibility of this module.
2. Out-of-tree modules should not use this util
3. Any function in this util may change at any point in time
"""
import benchexec.tools.template
# Defining constants for data model.
ILP32 = "ILP32"
LP64 = "LP64"
def get_data_model_from_task(task, param_dict):
"""
This function tries to extract tool parameter for data model
depending on the data model in the task.
@param task: An instance of class Task, e.g., with the input files
@param param_dict: Dictionary mapping data model to the tool param value
"""
if isinstance(task.options, dict) and task.options.get("language") == "C":
data_model = task.options.get("data_model")
if data_model:
try:
return param_dict[data_model]
except KeyError:
raise benchexec.tools.template.UnsupportedFeatureException(
f"Unsupported data_model '{data_model}' defined for task '{task}'"
)
return None
|
apache-2.0
|
Python
|
e34d437fb9ede1c5a547bbabe99978207e2a389b
|
Make paths manipulation stuff private
|
samdroid-apps/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,Daksh/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,godiard/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,sugarlabs/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3
|
sugar/env.py
|
sugar/env.py
|
import os
import sys
import pwd
try:
from sugar.__uninstalled__ import *
except ImportError:
from sugar.__installed__ import *
import sugar.setup
def setup():
for path in sugar_python_path:
sys.path.insert(0, path)
if os.environ.has_key('PYTHONPATH'):
old_path = os.environ['PYTHONPATH']
os.environ['PYTHONPATH'] = path + ':' + old_path
else:
os.environ['PYTHONPATH'] = path
for path in sugar_bin_path:
if os.environ.has_key('PATH'):
old_path = os.environ['PATH']
os.environ['PATH'] = path + ':' + old_path
else:
os.environ['PATH'] = path
if sugar_source_dir:
source = os.path.join(sugar_source_dir, 'activities')
runner = os.path.join(sugar_source_dir, 'shell/sugar-activity-factory')
sugar.setup.setup_activities(source, sugar_activities_dir, runner)
bin = os.path.join(sugar_source_dir, 'shell/sugar-presence-service')
sugar.setup.write_service('org.laptop.Presence', bin,
sugar_activities_dir)
def get_profile_path():
if os.environ.has_key('SUGAR_PROFILE'):
profile_id = os.environ['SUGAR_PROFILE']
else:
profile_id = 'default'
path = os.path.expanduser('~/.sugar')
return os.path.join(path, profile_id)
def get_data_dir():
return sugar_data_dir
def get_activities_dir():
return sugar_activities_dir
def get_dbus_config():
return sugar_dbus_config
|
import os
import sys
import pwd
try:
from sugar.__uninstalled__ import *
except ImportError:
from sugar.__installed__ import *
import sugar.setup
def add_to_python_path(path):
sys.path.insert(0, path)
if os.environ.has_key('PYTHONPATH'):
old_path = os.environ['PYTHONPATH']
os.environ['PYTHONPATH'] = path + ':' + old_path
else:
os.environ['PYTHONPATH'] = path
def add_to_bin_path(path):
if os.environ.has_key('PATH'):
old_path = os.environ['PATH']
os.environ['PATH'] = path + ':' + old_path
else:
os.environ['PATH'] = path
def setup():
for path in sugar_python_path:
add_to_python_path(path)
for path in sugar_bin_path:
add_to_bin_path(path)
if sugar_source_dir:
source = os.path.join(sugar_source_dir, 'activities')
runner = os.path.join(sugar_source_dir, 'shell/sugar-activity-factory')
sugar.setup.setup_activities(source, sugar_activities_dir, runner)
bin = os.path.join(sugar_source_dir, 'shell/sugar-presence-service')
sugar.setup.write_service('org.laptop.Presence', bin,
sugar_activities_dir)
def get_profile_path():
if os.environ.has_key('SUGAR_PROFILE'):
profile_id = os.environ['SUGAR_PROFILE']
else:
profile_id = 'default'
path = os.path.expanduser('~/.sugar')
return os.path.join(path, profile_id)
def get_data_dir():
return sugar_data_dir
def get_activities_dir():
return sugar_activities_dir
def get_dbus_config():
return sugar_dbus_config
|
lgpl-2.1
|
Python
|
0e2d9b496ab12d512e56041d9f4ffbadf7fab4ab
|
Remove unused method
|
tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,Daksh/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,sugarlabs/sugar-toolkit,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,ceibal-tatu/sugar-toolkit,Daksh/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,manuq/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,sugarlabs/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit
|
sugar/env.py
|
sugar/env.py
|
# Copyright (C) 2006, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import sys
import pwd
try:
from sugar.__uninstalled__ import *
except ImportError:
from sugar.__installed__ import *
def get_profile_path():
if os.environ.has_key('SUGAR_PROFILE'):
profile_id = os.environ['SUGAR_PROFILE']
else:
profile_id = 'default'
path = os.path.join(os.path.expanduser('~/.sugar'), profile_id)
if not os.path.isdir(path):
try:
os.makedirs(path)
except OSError, exc:
print "Could not create user directory."
return path
def get_data_dir():
return sugar_data_dir
def get_activities_dir():
return sugar_activities_dir
def get_activity_info_dir():
return sugar_activity_info_dir
def get_services_dir():
return sugar_services_dir
def get_dbus_config():
return sugar_dbus_config
def get_shell_bin_dir():
return sugar_shell_bin_dir
# http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html
def get_data_dirs():
if os.environ.has_key('XDG_DATA_DIRS'):
return os.environ['XDG_DATA_DIRS'].split(':')
else:
return [ '/usr/local/share/', '/usr/share/' ]
|
# Copyright (C) 2006, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import sys
import pwd
try:
from sugar.__uninstalled__ import *
except ImportError:
from sugar.__installed__ import *
def get_profile_path():
if os.environ.has_key('SUGAR_PROFILE'):
profile_id = os.environ['SUGAR_PROFILE']
else:
profile_id = 'default'
path = os.path.join(os.path.expanduser('~/.sugar'), profile_id)
if not os.path.isdir(path):
try:
os.makedirs(path)
except OSError, exc:
print "Could not create user directory."
return path
def get_data_dir():
return sugar_data_dir
def get_activities_dir():
return sugar_activities_dir
def get_activity_info_dir():
return sugar_activity_info_dir
def get_services_dir():
return sugar_services_dir
def get_dbus_config():
return sugar_dbus_config
def get_shell_bin_dir():
return sugar_shell_bin_dir
# http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html
def get_data_dirs():
if os.environ.has_key('XDG_DATA_DIRS'):
return os.environ['XDG_DATA_DIRS'].split(':')
else:
return [ '/usr/local/share/', '/usr/share/' ]
_dbus_version = None
def get_dbus_version():
global _dbus_version
if _dbus_version == None:
f = os.popen('dbus-daemon --version')
version_line = f.readline()
if version_line:
splitted_line = version_line.split()
_dbus_version = splitted_line[len(splitted_line) - 1]
f.close()
return _dbus_version
|
lgpl-2.1
|
Python
|
29b0f18a3b7dcc6e0123889c1c845d7511677c96
|
fix indentation
|
terceiro/squad,terceiro/squad,terceiro/squad,terceiro/squad
|
squad/run.py
|
squad/run.py
|
import os
import sys
from squad.version import __version__
from squad.manage import main as manage
__usage__ = """usage: squad [OPTIONS]
Options:
-f, --fast Fast startup: skip potentially slow operations, such as
running database migrations and compiling static assets
-h, --help show this help message and exit
-v, --version show program's version number and exit
ALL other options are passed as-is to gunicorn. See gunicorn(1),
gunicorn3(1), or http://docs.gunicorn.org/ for details.
gunicorn options:\
"""
def usage():
print(__usage__)
sys.stdout.flush()
os.system(r'%s -m gunicorn.app.wsgiapp --help | sed -e "/^\S/d"' % sys.executable)
def main():
argv = sys.argv
fast = False
for f in ['--fast', '-f']:
if f in argv:
argv.remove(f)
fast = True
if '--help' in argv or '-h' in argv:
usage()
return
if '--version' in argv or '-v' in argv:
print('squad (version %s)' % __version__)
return
os.environ.setdefault("ENV", "production")
if not fast:
sys.argv = ['squad-admin', 'migrate']
manage()
sys.argv = ['squad-admin', 'collectstatic', '--no-input', '-v', '0']
manage()
argv = [sys.executable, '-m', 'gunicorn.app.wsgiapp', 'squad.wsgi'] + argv[1:]
os.execv(sys.executable, argv)
if __name__ == "__main__":
main()
|
import os
import sys
from squad.version import __version__
from squad.manage import main as manage
__usage__ = """usage: squad [OPTIONS]
Options:
-f, --fast Fast startup: skip potentially slow operations, such as
running database migrations and compiling static assets
-h, --help show this help message and exit
-v, --version show program's version number and exit
ALL other options are passed as-is to gunicorn. See gunicorn(1),
gunicorn3(1), or http://docs.gunicorn.org/ for details.
gunicorn options:\
"""
def usage():
print(__usage__)
sys.stdout.flush()
os.system(r'%s -m gunicorn.app.wsgiapp --help | sed -e "/^\S/d"' % sys.executable)
def main():
argv = sys.argv
fast = False
for f in ['--fast', '-f']:
if f in argv:
argv.remove(f)
fast = True
if '--help' in argv or '-h' in argv:
usage()
return
if '--version' in argv or '-v' in argv:
print('squad (version %s)' % __version__)
return
os.environ.setdefault("ENV", "production")
if not fast:
sys.argv = ['squad-admin', 'migrate']
manage()
sys.argv = ['squad-admin', 'collectstatic', '--no-input', '-v', '0']
manage()
argv = [sys.executable, '-m', 'gunicorn.app.wsgiapp', 'squad.wsgi'] + argv[1:]
os.execv(sys.executable, argv)
if __name__ == "__main__":
main()
|
agpl-3.0
|
Python
|
0e0096e664997ffa935273ba66b46a1e943a685a
|
add json support to dump_lol
|
zbraniecki/l20n,zbraniecki/fluent.js,mail-apps/l20n.js,l20n/l20n.js,zbraniecki/l20n.js,stasm/python-l20n,Pike/l20n.js,projectfluent/fluent.js,zbraniecki/fluent.js,Pike/l20n.js,stasm/l20n.js,mail-apps/l20n.js,projectfluent/fluent.js,projectfluent/fluent.js,zbraniecki/l20n,Swaven/l20n.js
|
python/tools/dump_lol.py
|
python/tools/dump_lol.py
|
#!/usr/bin/python
import argparse
from l20n.format.lol.parser import Parser
import pyast.dump.raw, pyast.dump.json
def read_file(filename, charset='utf-8', errors='strict'):
with open(filename, 'rb') as f:
return f.read().decode(charset, errors)
def dump_lol(path, t):
source = read_file(path)
p = Parser()
lol = p.parse(source)
if t == 'raw':
print(pyast.dump.raw.dump(lol))
else:
print(pyast.dump.json.dump(lol))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Dump LOL\'s AST.',
prog="dump_lol")
parser.add_argument('path', type=str,
help='path to lol file')
parser.add_argument('--type', '-t',
type=str,
choices=('json', 'raw'),
default='raw',
help='path to lol file')
args = parser.parse_args()
dump_lol(args.path, args.type)
|
#!/usr/bin/python
import argparse
from l20n.format.lol.parser import Parser
import pyast
def read_file(filename, charset='utf-8', errors='strict'):
with open(filename, 'rb') as f:
return f.read().decode(charset, errors)
def dump_lol(path):
source = read_file(path)
p = Parser()
lol = p.parse(source)
print(pyast.dump(lol))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Dump LOL\'s AST.',
prog="dump_lol")
parser.add_argument('path', type=str,
help='path to lol file')
args = parser.parse_args()
dump_lol(args.path)
|
apache-2.0
|
Python
|
9e7137c241684d450e8ec62fc365fd21bd20b38d
|
Fix gunicorn socket path
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
docker/usr/local/etc/gunicorn/pixel.py
|
docker/usr/local/etc/gunicorn/pixel.py
|
# Gunicorn-django settings
bind = ['unix:/app/pixel/run/gunicorn.sock']
graceful_timeout = 90
loglevel = 'error'
name = 'pixel'
python_path = '/app/pixel'
timeout = 90
workers = 3
|
# Gunicorn-django settings
bind = ['unix:/app/run/gunicorn.sock']
graceful_timeout = 90
loglevel = 'error'
name = 'pixel'
python_path = '/app/pixel'
timeout = 90
workers = 3
|
bsd-3-clause
|
Python
|
4d1a33462e73111f2507c4fd1e990af2952ad3df
|
Fix serializer tests
|
novafloss/django-formidable
|
demo/tests/serializers/tests_validations.py
|
demo/tests/serializers/tests_validations.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from formidable.models import Formidable
from formidable.serializers.validation import (
MinLengthSerializer, RegexpSerializer,
ValidationSerializer
)
class ValidationSerializerTest(TestCase):
increment = 0
def setUp(self):
self.form = Formidable.objects.create(
label='test', description='test'
)
self.increment += 1
self.text_field = self.form.fields.create(
type_id='text',
slug='input-text-{}'.format(self.increment),
label='name',
order=1,
)
def test_int_value(self):
data = {
'field_id': self.text_field.id,
'type': 'MINLENGTH',
'value': 5,
}
serializer = MinLengthSerializer(data=data)
self.assertTrue(serializer.is_valid())
def test_non_int_value(self):
data = {
'field_id': self.text_field.id,
'type': 'MINLENGTH',
'value': 'test',
}
serializer = MinLengthSerializer(data=data)
self.assertFalse(serializer.is_valid())
def test_regexp_value(self):
data = {
'field_id': self.text_field.id,
'type': 'REGEXP',
'value': '\w+ly',
}
serializer = RegexpSerializer(data=data)
self.assertTrue(serializer.is_valid())
def test_invalid_regexp_value(self):
data = {
'field_id': self.text_field.id,
'type': 'REGEXP',
'value': '\w+ly(',
}
serializer = RegexpSerializer(data=data)
self.assertFalse(serializer.is_valid())
def test_update_validations(self):
list_serializer = ValidationSerializer(many=True)
self.text_field.validations.create(
type='MINLENGTH',
value='5',
)
list_serializer.update(
self.text_field.validations,
self.text_field,
[{
'type': 'MINLENGTH',
'value': '12'
}],
)
self.assertEquals(self.text_field.validations.count(), 1)
validation = self.text_field.validations.first()
self.assertEquals(validation.value, '12')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from formidable.models import Formidable
from formidable.serializers.validation import (
MinLengthSerializer, RegexpSerializer,
ValidationSerializer
)
class ValidationSerializerTest(TestCase):
def setUp(self):
super(ValidationSerializerTest, self).setUp()
self.form = Formidable.objects.create(
label='test', description='test'
)
self.text = self.form.fields.create(
type_id='text', slug='input-text', label='name',
)
def test_int_value(self):
data = {'field_id': self.text.id, 'value': 5, 'type': 'minlength'}
serializer = MinLengthSerializer(data=data)
self.assertTrue(serializer.is_valid())
def test_non_int_value(self):
data = {'field_id': self.text.id, 'value': 'test', 'type': 'minlength'}
serializer = MinLengthSerializer(data=data)
self.assertFalse(serializer.is_valid())
def test_regexp_value(self):
data = {
'field_id': self.text.id, 'value': '\w+ly', 'type': 'minlength'
}
serializer = RegexpSerializer(data=data)
self.assertTrue(serializer.is_valid())
def test_invalid_regexp_value(self):
data = {
'field_id': self.text.id, 'value': '\w+ly(', 'type': 'minlength'
}
serializer = RegexpSerializer(data=data)
self.assertFalse(serializer.is_valid())
def test_update_validations(self):
list_serializer = ValidationSerializer(many=True)
self.text.validations.create(
value='5', type='minlength'
)
list_serializer.update(
self.text.validations,
[{'type': 'minlength', 'value': '12'}],
self.text
)
self.assertEquals(self.text.validations.count(), 1)
validation = self.text.validations.first()
self.assertEquals(validation.value, '12')
|
mit
|
Python
|
74a9cfe1206e3314890af165e5c8193c687844a0
|
Add files via upload
|
mphor/Tweet-bote
|
post.py
|
post.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 16:23:07 2017
@author: mojod
"""
import random
import tweepy
import time
consumer_key='L3MsyCOoqgSPc4jzZV8wero0d'
consumer_secret='ZCOI3x1f8GZ9c2cJ8kPYyyBW4gRX4MJBbyHijGE1UObnAow6ka'
access_token='3789452353-dmM75KVaDGqIPz6ZtzP8b5Q6VkvzQQo9Sn34ZOZ'
access_token_secret='JvYlzlqM3AHj7IXSMMoIgS0A8auqzI1KyMjKnJeT3gn8w'
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
f = open("speech.txt")
while True:
sentence = f.read().split('.')
status = random.choice(sentence) + "."
api.update_status(status)
time.sleep(3600)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 16:23:07 2017
@author: mojod
"""
import random
import tweepy
consumer_key='L3MsyCOoqgSPc4jzZV8wero0d'
consumer_secret='ZCOI3x1f8GZ9c2cJ8kPYyyBW4gRX4MJBbyHijGE1UObnAow6ka'
access_token='3789452353-dmM75KVaDGqIPz6ZtzP8b5Q6VkvzQQo9Sn34ZOZ'
access_token_secret='JvYlzlqM3AHj7IXSMMoIgS0A8auqzI1KyMjKnJeT3gn8w'
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
f = open("speech.txt")
sentence = f.read().split('.')
status = random.choice(sentence) + "."
print(status)
api.update_status(status)
f.close
|
mit
|
Python
|
85d2c012bfaeeb04fa8dd31cd05a04a8dc43c14e
|
Add tests that have and get of nonterms raise exceptions
|
PatrikValkovic/grammpy
|
tests/grammar_term-nonterm_test/NonterminalsInvalidTest.py
|
tests/grammar_term-nonterm_test/NonterminalsInvalidTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.RawGrammar import RawGrammar as Grammar
from grammpy import Nonterminal
from grammpy.exceptions import NotNonterminalException
class TempClass(Nonterminal):
pass
class NonterminalsInvalidTest(TestCase):
def test_invalidAddNumber(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.add_nonterm(0)
def test_invalidAddString(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.add_nonterm("string")
def test_invalidAddAfterCorrectAdd(self):
gr = Grammar()
gr.add_nonterm(TempClass)
with self.assertRaises(NotNonterminalException):
gr.add_nonterm("asdf")
def test_invalidAddInArray(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.add_nonterm([TempClass, "asdf"])
def test_invalidHaveNumber(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.have_nonterm(0)
def test_invalidHaveString(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.have_nonterm("string")
def test_invalidHaveAfterCorrectAdd(self):
gr = Grammar()
gr.add_nonterm(TempClass)
with self.assertRaises(NotNonterminalException):
gr.have_nonterm("asdf")
def test_invalidHaveInArray(self):
gr = Grammar()
with self.assertRaises(NotNonterminalException):
gr.have_nonterm([TempClass, "asdf"])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.RawGrammar import RawGrammar
class NonterminalsInvalidTest(TestCase):
pass
if __name__ == '__main__':
main()
|
mit
|
Python
|
25737b0d0389d0ccbd12d01f9076a889891f0a22
|
Update XENIFACE and XENVIF
|
benchalmers/win-installer,OwenSmith/win-installer,cheng--zhang/win-installer,cheng--zhang/win-installer,cheng--zhang/win-installer,kostaslamda/win-installer,xenserver/win-installer,kostaslamda/win-installer,xenserver/win-installer,benchalmers/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,benchalmers/win-installer,xenserver/win-installer,xenserver/win-installer,xenserver/win-installer,cheng--zhang/win-installer,benchalmers/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,cheng--zhang/win-installer,kostaslamda/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,benchalmers/win-installer
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/13/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/18/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/7/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/7/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/8/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/33/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/6/artifact/xenvss.tar",
}
all_drivers_signed = False
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/13/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/16/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/7/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/5/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/8/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/33/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/6/artifact/xenvss.tar",
}
all_drivers_signed = False
|
bsd-2-clause
|
Python
|
b6a09c80d349adc91e2a05de8864b75bcb4b71dc
|
Put whqled xenvif #56 into trunk
|
xenserver/win-installer,cheng--zhang/win-installer,OwenSmith/win-installer,xenserver/win-installer,benchalmers/win-installer,benchalmers/win-installer,benchalmers/win-installer,xenserver/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,kostaslamda/win-installer,benchalmers/win-installer,cheng--zhang/win-installer,cheng--zhang/win-installer,cheng--zhang/win-installer,OwenSmith/win-installer,benchalmers/win-installer,kostaslamda/win-installer,xenserver/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,xenserver/win-installer,cheng--zhang/win-installer,OwenSmith/win-installer
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
secureserver = r'\\10.80.13.10\distfiles\distfiles\WindowsBuilds'
localserver = r'\\camos.uk.xensource.com\build\windowsbuilds\WindowsBuilds'
build_tar_source_files = {
"xenbus" : r'standard-lcm\10\xenbus-7-2-0-47.tar',
"xenvif" : r'xenvif.git.whql\56\xenvif-7-2-0-56.tar',
"xennet" : r'standard-lcm\13\xennet-7-2-0-14.tar',
"xeniface" : r'standard-lcm\12\xeniface-7-2-0-14.tar',
"xenvbd" : r'standard-lcm\14\xenvbd-7-2-0-40.tar',
"xenguestagent" : r'standard-lcm\11\xenguestagent-34.tar',
"xenvss" : r'standard-lcm\16\xenvss-7.tar',
}
all_drivers_signed = True
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
secureserver = r'\\10.80.13.10\distfiles\distfiles\WindowsBuilds'
localserver = r'\\camos.uk.xensource.com\build\windowsbuilds\WindowsBuilds'
build_tar_source_files = {
"xenbus" : r'standard-lcm\10\xenbus-7-2-0-47.tar',
"xenvif" : r'xenvif.git\56\xenvif.tar',
"xennet" : r'standard-lcm\13\xennet-7-2-0-14.tar',
"xeniface" : r'standard-lcm\12\xeniface-7-2-0-14.tar',
"xenvbd" : r'standard-lcm\14\xenvbd-7-2-0-40.tar',
"xenguestagent" : r'standard-lcm\11\xenguestagent-34.tar',
"xenvss" : r'standard-lcm\16\xenvss-7.tar',
}
all_drivers_signed = False
|
bsd-2-clause
|
Python
|
5d749f1d3e69ce233bd5ac81b39e535c0d02a954
|
Move back to last merged tools versions, to overcome buildnumber issue
|
xenserver/win-installer,OwenSmith/win-installer,benchalmers/win-installer,cheng--zhang/win-installer,OwenSmith/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,kostaslamda/win-installer,cheng--zhang/win-installer,cheng--zhang/win-installer,xenserver/win-installer,benchalmers/win-installer,OwenSmith/win-installer,benchalmers/win-installer,cheng--zhang/win-installer,kostaslamda/win-installer,benchalmers/win-installer,cheng--zhang/win-installer,OwenSmith/win-installer,xenserver/win-installer,xenserver/win-installer,benchalmers/win-installer,xenserver/win-installer,OwenSmith/win-installer
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/18/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/24/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/12/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/14/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/9/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/34/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/7/artifact/xenvss.tar",
}
all_drivers_signed = False
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/35/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/37/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/14/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/14/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/18/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/34/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/7/artifact/xenvss.tar",
}
all_drivers_signed = False
|
bsd-2-clause
|
Python
|
516c18a74f1b606b03ab07091cb0004e75c0a49b
|
Fix kate plugin
|
leepa/isort,AbsoluteMSTR/isort,wimglenn/isort,tomviner/isort,jmullan/isort,adamchainz/isort,adamchainz/isort,graingert/isort,pahko/isort,leepa/isort,PyCQA/isort,aequitas/isort,tomviner/isort,gforcada/isort,jmullan/isort,jamesbroadhead/isort,pahko/isort,levic/isort,wimglenn/isort,gforcada/isort,AbsoluteMSTR/isort,jamesbroadhead/isort,PyCQA/isort,graingert/isort,levic/isort,aequitas/isort
|
kate_plugin.py
|
kate_plugin.py
|
"""
isort/kate_plugin.py
Provides a simple kate plugin that enables the use of isort to sort Python imports
in the currently open kate file.
Copyright (C) 2013 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from isort import SortImports
from PyKDE4.ktexteditor import KTextEditor
import kate
@kate.action(text="Sort Imports", shortcut="Ctrl+[", menu="Python")
def sortImports():
document = kate.activeDocument()
document.setText(SortImports(file_contents=document.text()).output)
document.activeView().setCursorPosition(KTextEditor.Cursor(0, 0))
|
"""
isort/kate_plugin.py
Provides a simple kate plugin that enables the use of isort to sort Python imports
in the currently open kate file.
Copyright (C) 2013 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from PyKDE4.ktexteditor import KTextEditor
import kate
from isort import SortImports
@kate.action(text="Sort Imports", shortcut="Ctrl+[", menu="Python")
def sortImports():
document = kate.activeDocument()
document.setText(SortImports(file_contents=document.text()).output)
document.activeView().setCursorPosition(KTextEditor.Cursor(0, 0))
|
mit
|
Python
|
f2dfbfbee1cd87f2e6f499b78eae1a8ca39dd529
|
create a category form
|
alpsayin/django-qanda,alpsayin/django-qanda,alpsayin/django-qanda,alpsayin/django-qanda
|
qanda/qanda_app/forms.py
|
qanda/qanda_app/forms.py
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply, Category
from django import forms
from django.conf import settings
from django.utils.translation import ugettext as _
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 16, 'class':'span8'}),
# 'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class QuestionCloseForm(forms.Form):
message = forms.CharField(max_length=511, widget=Textarea(attrs={'rows': 10, 'id':'questionCloseForm',}), initial=_("This question is closed by the administrators. ")+getattr(settings, 'ROOT_URL', 'http://127.0.0.1:8000/qanda'))
class CategoryForm(ModelForm):
class Meta:
model = Category
fields = ('name', 'about', )
widgets = {
'name' : TextInput(attrs={'size': 180, 'class':'span6', 'id':'addCategoryName',}),
'about' : Textarea(attrs={'rows': 6, 'cols':80, 'class':'span6', 'id':'addCategoryAbout',}),
}
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply
from django import forms
from django.conf import settings
from django.utils.translation import ugettext as _
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 16, 'class':'span8'}),
# 'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class QuestionCloseForm(forms.Form):
message = forms.CharField(max_length=511, widget=Textarea(attrs={'rows': 10, 'id':'questionCloseForm',}), initial=_("This question is closed by the administrators. ")+getattr(settings, 'ROOT_URL', 'http://127.0.0.1:8000/qanda'))
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
mit
|
Python
|
5d69fa2a169274c65bfd047199a2df9c88f188e3
|
use the taggit widget in question form
|
alpsayin/django-qanda,alpsayin/django-qanda,alpsayin/django-qanda,alpsayin/django-qanda
|
qanda/qanda_app/forms.py
|
qanda/qanda_app/forms.py
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply
from django import forms
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 20, 'class':'span8'}),
# 'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply
from django import forms
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 20, 'class':'span8'}),
'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
mit
|
Python
|
174a374a685829ede49236f820122b442b9ec920
|
Fix taichi_dynamic example (#4767)
|
yuanming-hu/taichi,yuanming-hu/taichi,yuanming-hu/taichi,yuanming-hu/taichi
|
python/taichi/examples/features/sparse/taichi_dynamic.py
|
python/taichi/examples/features/sparse/taichi_dynamic.py
|
import taichi as ti
ti.init()
x = ti.field(ti.i32)
l = ti.field(ti.i32)
n = 16
ti.root.dense(ti.i, n).dynamic(ti.j, n).place(x)
ti.root.dense(ti.i, n).place(l)
@ti.kernel
def make_lists():
for i in range(n):
for j in range(i):
ti.append(x.parent(), i, j * j)
l[i] = ti.length(x.parent(), i)
make_lists()
for i in range(n):
assert l[i] == i
for j in range(n):
assert x[i, j] == (j * j if j < i else 0)
|
import taichi as ti
x = ti.field(ti.i32)
l = ti.field(ti.i32)
n = 16
ti.init()
ti.root.dense(ti.i, n).dynamic(ti.j, n).place(x)
ti.root.dense(ti.i, n).place(l)
@ti.kernel
def make_lists():
for i in range(n):
for j in range(i):
ti.append(x.parent(), i, j * j)
l[i] = ti.length(x.parent(), i)
make_lists()
for i in range(n):
assert l[i] == i
for j in range(n):
assert x[i, j] == (j * j if j < i else 0)
|
apache-2.0
|
Python
|
d57161b9449faa1218e4dab55fe4b2bd6f0c3436
|
Remove unused code and get rid of flake8 errors
|
swesterveld/udacity-nd004-p4-conference-organization-app,swesterveld/udacity-nd004-p4-conference-organization-app,swesterveld/udacity-nd004-p4-conference-organization-app
|
utils.py
|
utils.py
|
import json
import os
import time
from google.appengine.api import urlfetch
def getUserId(user, id_type="email"):
if id_type == "email":
return user.email()
if id_type == "oauth":
"""A workaround implementation for getting userid."""
auth = os.getenv('HTTP_AUTHORIZATION')
bearer, token = auth.split()
token_type = 'id_token'
if 'OAUTH_USER_ID' in os.environ:
token_type = 'access_token'
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% (token_type, token))
user = {}
wait = 1
for i in range(3):
resp = urlfetch.fetch(url)
if resp.status_code == 200:
user = json.loads(resp.content)
break
elif resp.status_code == 400 and 'invalid_token' in resp.content:
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% ('access_token', token))
else:
time.sleep(wait)
wait = wait + i
return user.get('user_id', '')
|
import json
import os
import time
import uuid
from google.appengine.api import urlfetch
from models import Profile
def getUserId(user, id_type="email"):
if id_type == "email":
return user.email()
if id_type == "oauth":
"""A workaround implementation for getting userid."""
auth = os.getenv('HTTP_AUTHORIZATION')
bearer, token = auth.split()
token_type = 'id_token'
if 'OAUTH_USER_ID' in os.environ:
token_type = 'access_token'
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% (token_type, token))
user = {}
wait = 1
for i in range(3):
resp = urlfetch.fetch(url)
if resp.status_code == 200:
user = json.loads(resp.content)
break
elif resp.status_code == 400 and 'invalid_token' in resp.content:
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% ('access_token', token))
else:
time.sleep(wait)
wait = wait + i
return user.get('user_id', '')
if id_type == "custom":
# implement your own user_id creation and getting algorythm
# this is just a sample that queries datastore for an existing profile
# and generates an id if profile does not exist for an email
profile = Conference.query(Conference.mainEmail == user.email())
if profile:
return profile.id()
else:
return str(uuid.uuid1().get_hex())
|
apache-2.0
|
Python
|
f6d7707abcd80524857386d96495cc79795cd5d5
|
use htmlparser to get a word meaning in yahoo dictionary
|
breeze101792/list_vocabulary,breeze101792/list_vocabulary
|
ydict.py
|
ydict.py
|
import urllib.request
from html.parser import HTMLParser
class DictParser(HTMLParser):
def __init__(self):
super().__init__()
self.content = False
# self.query_string = None
self.li_counter = 0
self.ignore_flag = False
def handle_starttag(self, tag, attrs):
if self.content and tag == "li" and len(attrs) == 0:
self.li_counter += 1
elif len(attrs) == 0:
pass
elif tag == "li" and attrs[0][1] == "first":
print("first" + self.li_counter.__str__())
self.li_counter = 0
self.content = True
elif tag == "li" and attrs[0][1] == "last":
print("last" + self.li_counter.__str__())
self.content = False
elif tag == "span" and len(attrs) > 2:
if attrs[1][1] == "iconStyle":
self.ignore_flag = True
pass
# if self.content and (tag == "a" or tag == "span" or tag == "b"):
# print("Start tag: " + tag)
# for a in attrs:
# print(", " + str(a))
# print("Encountered a start tag:", tag)
def handle_endtag(self, tag):
pass
# if self.content and tag == "li" and len(attrs) == 0:
# self.li_counter -= 1
# print("Encountered an end tag :", tag)
def handle_data(self, data):
if self.content == False or self.ignore_flag:
if self.ignore_flag:
self.ignore_flag = False
elif self.li_counter == 0:
print("query word:" + data)
elif self.li_counter == 1:
print("content:" + data)
elif self.li_counter == 2:
print("forms:" + data)
elif self.li_counter == 3:
print("sym:" + data)
# if data == self.query_string:
# self.content = True
# if self.content and (self.lasttag == "a" or self.lasttag == "span" or self.lasttag == "b"):
# print("data :", data)
# def set_query_str(self, query_str):
# self.query_string = query_str
class ydict:
def __init__(self):
self.url = "http://tw.dictionary.search.yahoo.com/search?p="
def search(self, word):
opener = urllib.request.FancyURLopener({})
f = opener.open(self.url+word)
content = f.read()
dp = DictParser()
# dp.set_query_str("test")
dp.feed(content.decode('UTF-8'))
# print(content.decode('UTF-8'))
|
import urllib.request
from html.parser import HTMLParser
class DictParser(HTMLParser):
# def __init__(self):
# super.__init__()
def handle_starttag(self, tag, attrs):
print("Encountered a start tag:", tag)
def handle_endtag(self, tag):
print("Encountered an end tag :", tag)
def handle_data(self, data):
print("Encountered some data :", data)
class ydict:
def __init__(self):
self.url = "http://tw.dictionary.search.yahoo.com/search?p="
def search(self, word):
opener = urllib.request.FancyURLopener({})
f = opener.open(self.url+word)
content = f.read()
dp = DictParser()
dp.feed(content.decode('UTF-8'))
# print(content.decode('UTF-8'))
|
agpl-3.0
|
Python
|
0545539a6d3df83af57f973a82cff2961cbe32ec
|
Test db login
|
tamasgal/km3pipe,tamasgal/km3pipe
|
km3pipe/tests/test_db.py
|
km3pipe/tests/test_db.py
|
# coding=utf-8
# Filename: test_core.py
# pylint: disable=C0111,E1003,R0904,C0103,R0201,C0102
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase, MagicMock
from km3pipe.db import DBManager, DOMContainer
from km3pipe.logger import logging
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "[email protected]"
__status__ = "Development"
DET_ID = 'det_id1'
JSON_DOMS = [{'DOMId': 1, 'Floor': 10, 'CLBUPI': '100', 'DetOID': DET_ID},
{'DOMId': 2, 'Floor': 20, 'CLBUPI': '200', 'DetOID': DET_ID},
{'DOMId': 3, 'Floor': 30, 'CLBUPI': '300', 'DetOID': DET_ID},
{'DOMId': 4, 'Floor': 40, 'CLBUPI': '400', 'DetOID': 'det_id2'}]
log = logging.getLogger('db')
class TestDBManager(TestCase):
def test_login_called_on_init_when_credentials_are_provided(self):
user = 'user'
pwd = 'god'
DBManager.login = MagicMock()
db = DBManager(username=user, password=pwd)
self.assertEqual(1, DBManager.login.call_count)
self.assertTupleEqual((user, pwd), DBManager.login.call_args[0])
class TestDOMContainer(TestCase):
def test_init(self):
DOMContainer(None)
def setUp(self):
self.dc = DOMContainer(JSON_DOMS)
def test_ids_returns_dom_ids(self):
self.assertListEqual([1, 2, 3], self.dc.ids(DET_ID))
def test_json_list_lookup(self):
lookup = self.dc._json_list_lookup('DOMId', 1, 'Floor', DET_ID)
self.assertEqual(10, lookup)
def test_clbupi2floor(self):
self.assertEqual(10, self.dc.clbupi2floor('100', DET_ID))
self.assertEqual(20, self.dc.clbupi2floor('200', DET_ID))
self.assertEqual(30, self.dc.clbupi2floor('300', DET_ID))
def test_clbupi2domid(self):
self.assertEqual(1, self.dc.clbupi2domid('100', DET_ID))
self.assertEqual(2, self.dc.clbupi2domid('200', DET_ID))
self.assertEqual(3, self.dc.clbupi2domid('300', DET_ID))
|
# coding=utf-8
# Filename: test_core.py
# pylint: disable=C0111,E1003,R0904,C0103,R0201,C0102
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase
from km3pipe.db import DOMContainer
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "[email protected]"
__status__ = "Development"
DET_ID = 'det_id1'
JSON_DOMS = [{'DOMId': 1, 'Floor': 10, 'CLBUPI': '100', 'DetOID': DET_ID},
{'DOMId': 2, 'Floor': 20, 'CLBUPI': '200', 'DetOID': DET_ID},
{'DOMId': 3, 'Floor': 30, 'CLBUPI': '300', 'DetOID': DET_ID},
{'DOMId': 4, 'Floor': 40, 'CLBUPI': '400', 'DetOID': 'det_id2'}]
class TestDBManager(TestCase):
pass
class TestDOMContainer(TestCase):
def test_init(self):
DOMContainer(None)
def setUp(self):
self.dc = DOMContainer(JSON_DOMS)
def test_ids_returns_dom_ids(self):
self.assertListEqual([1, 2, 3], self.dc.ids(DET_ID))
def test_json_list_lookup(self):
lookup = self.dc._json_list_lookup('DOMId', 1, 'Floor', DET_ID)
self.assertEqual(10, lookup)
def test_clbupi2floor(self):
self.assertEqual(10, self.dc.clbupi2floor('100', DET_ID))
self.assertEqual(20, self.dc.clbupi2floor('200', DET_ID))
self.assertEqual(30, self.dc.clbupi2floor('300', DET_ID))
def test_clbupi2domid(self):
self.assertEqual(1, self.dc.clbupi2domid('100', DET_ID))
self.assertEqual(2, self.dc.clbupi2domid('200', DET_ID))
self.assertEqual(3, self.dc.clbupi2domid('300', DET_ID))
|
mit
|
Python
|
ffbc39b4eeb4a3e4850f83faa13c1ddf616d2328
|
Add mail to administrators
|
morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto
|
tools/wcloud/wcloud/utils.py
|
tools/wcloud/wcloud/utils.py
|
import sys
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
DEFAULT_EMAIL_HOST = 'mail.deusto.es'
EMAILS_SENT = []
def send_email(app, body_text, subject, from_email, to_email, body_html=None):
email_host = app.config.get('EMAIL_HOST', DEFAULT_EMAIL_HOST)
if isinstance(to_email, basestring):
to_email = [ to_email ]
if app.config.get('TESTING', False) or app.config.get('DEBUG', False):
print "Faking request (%s, %s)" %( app.config.get('TESTING', False), app.config.get('DEBUG', False))
sys.stdout.flush()
EMAILS_SENT.append(body_html)
else:
print "Sending mail using %s" % email_host
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = ', '.join(to_email)
part1 = MIMEText(body_text, 'text')
msg.attach(part1)
if body_text is not None:
part2 = MIMEText(body_html, 'html')
msg.attach(part2)
total_to_email = []
total_to_email.extend(to_email)
total_to_email.extend(app.config['ADMINISTRATORS'])
total_to_email.append(from_email)
s = smtplib.SMTP(email_host)
s.sendmail(from_email, tuple(total_to_email), msg.as_string())
print "Mail sent using %s" % email_host
sys.stdout.flush()
if __name__ == '__main__':
class Fake(): pass
fake_app = Fake()
fake_app.config = {}
send_email(fake_app, "Hi there. This is a test", "Test", "[email protected]", "[email protected]", """<b>Esto es negrita</b>""")
|
import sys
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
DEFAULT_EMAIL_HOST = 'mail.deusto.es'
EMAILS_SENT = []
def send_email(app, body_text, subject, from_email, to_email, body_html=None):
email_host = app.config.get('EMAIL_HOST', DEFAULT_EMAIL_HOST)
if app.config.get('TESTING', False) or app.config.get('DEBUG', False):
print "Faking request (%s, %s)" %( app.config.get('TESTING', False), app.config.get('DEBUG', False))
sys.stdout.flush()
EMAILS_SENT.append(body_html)
else:
print "Sending mail using %s" % email_host
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = to_email
part1 = MIMEText(body_text, 'text')
msg.attach(part1)
if body_text is not None:
part2 = MIMEText(body_html, 'html')
msg.attach(part2)
s = smtplib.SMTP(email_host)
s.sendmail(from_email, (to_email, from_email), msg.as_string())
print "Mail sent using %s" % email_host
sys.stdout.flush()
if __name__ == '__main__':
class Fake(): pass
fake_app = Fake()
fake_app.config = {}
send_email(fake_app, "Hi there. This is a test", "Test", "[email protected]", "[email protected]", """<b>Esto es negrita</b>""")
|
bsd-2-clause
|
Python
|
ae3f9fbcf2bedba6798460569b10260c9acaa1bf
|
fix url to match filter
|
rrooij/sitewatcher_telegram
|
watcher/tweakerswatcher.py
|
watcher/tweakerswatcher.py
|
import requests
import json
import os.path
from watcher.watcher import Watcher
class TweakersWatcher(Watcher):
watcher_name = 'Tweakers Pricewatch'
filename = 'site_tweakers.txt'
def parse_site(self):
url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&dayOffset=1&minRelativePriceDrop=0.4&maxRelativePriceDrop=1&minAbsolutePriceDrop=30&maxAbsolutePriceDrop=&minCurrentPrice=0&maxCurrentPrice=&minPrices=3&minViews=0&of=absolutePriceDrop&od=desc&output=json'
request = requests.get(url)
json_object = json.loads(request.text)
return json_object['data']['html']
def check_price_error(self):
url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgbgGWcS4uKUvNKwBJKVhAxMKcYqATMw2KogZ4JWCosM7W8GKwrvygltcgtMzUnRclKKRHDtloA'
message_text = 'Mogelijke prijsfout, check: {0}'.format(url)
html = self.parse_site()
if not os.path.isfile(self.filename):
self.write_to_file(self.filename, html)
exit(0)
else:
with open(self.filename, 'r') as f:
file_content = f.read()
if file_content != html:
self.send_telegram(self.watcher_name, message_text)
self.write_to_file(self.filename, html)
|
import requests
import json
import os.path
from watcher.watcher import Watcher
class TweakersWatcher(Watcher):
watcher_name = 'Tweakers Pricewatch'
filename = 'site_tweakers.txt'
def parse_site(self):
url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&dayOffset=1&minRelativePriceDrop=0.4&maxRelativePriceDrop=1&minAbsolutePriceDrop=30&maxAbsolutePriceDrop=&minCurrentPrice=0&maxCurrentPrice=&minPrices=3&minViews=0&of=absolutePriceDrop&od=desc&output=json'
request = requests.get(url)
json_object = json.loads(request.text)
return json_object['data']['html']
def check_price_error(self):
url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA'
message_text = 'Mogelijke prijsfout, check: {0}'.format(url)
html = self.parse_site()
if not os.path.isfile(self.filename):
self.write_to_file(self.filename, html)
exit(0)
else:
with open(self.filename, 'r') as f:
file_content = f.read()
if file_content != html:
self.send_telegram(self.watcher_name, message_text)
self.write_to_file(self.filename, html)
|
agpl-3.0
|
Python
|
d3effa1b80c8d56c98451f335b8099b72fa1f61b
|
Remove orderdict
|
anthonysandrin/kafka-utils,anthonysandrin/kafka-utils,Yelp/kafka-utils,Yelp/kafka-utils
|
yelp_kafka_tool/kafka_cluster_manager/cluster_info/util.py
|
yelp_kafka_tool/kafka_cluster_manager/cluster_info/util.py
|
from collections import Counter
def get_partitions_per_broker(brokers):
"""Return partition count for each broker."""
return dict(
(broker, len(broker.partitions))
for broker in brokers
)
def get_leaders_per_broker(brokers, partitions):
"""Return count for each broker the number of times
it is assigned as preferred leader.
"""
leaders_per_broker = dict(
(broker, 0)
for broker in brokers
)
for partition in partitions:
leaders_per_broker[partition.leader] += 1
return leaders_per_broker
def get_per_topic_partitions_count(broker):
"""Return partition-count of each topic on given broker."""
return Counter((partition.topic for partition in broker.partitions))
def get_optimal_metrics(total_elements, total_groups):
"""Return optimal count and extra-elements allowed based on base
total count of elements and groups.
"""
opt_element_cnt = total_elements // total_groups
extra_elements_allowed_cnt = total_elements % total_groups
evenly_distribute = bool(not extra_elements_allowed_cnt)
return opt_element_cnt, extra_elements_allowed_cnt, evenly_distribute
|
from collections import Counter, OrderedDict
def get_partitions_per_broker(brokers):
"""Return partition count for each broker."""
return dict(
(broker, len(broker.partitions))
for broker in brokers
)
def get_leaders_per_broker(brokers, partitions):
"""Return count for each broker the number of times
it is assigned as preferred leader.
"""
leaders_per_broker = dict(
(broker, 0)
for broker in brokers
)
for partition in partitions:
leaders_per_broker[partition.leader] += 1
return leaders_per_broker
def get_per_topic_partitions_count(broker):
"""Return partition-count of each topic on given broker."""
return Counter((partition.topic for partition in broker.partitions))
def get_optimal_metrics(total_elements, total_groups):
"""Return optimal count and extra-elements allowed based on base
total count of elements and groups.
"""
opt_element_cnt = total_elements // total_groups
extra_elements_allowed_cnt = total_elements % total_groups
evenly_distribute = bool(not extra_elements_allowed_cnt)
return opt_element_cnt, extra_elements_allowed_cnt, evenly_distribute
|
apache-2.0
|
Python
|
b0212d5489b10956976365c862470e338c45509a
|
Test twisted and cares resolvers in netutil_test.
|
lujinda/tornado,mr-ping/tornado,nbargnesi/tornado,lsanotes/tornado,ListFranz/tornado,bdarnell/tornado,cyrilMargaria/tornado,ms7s/tornado,z-fork/tornado,insflow/tornado,akalipetis/tornado,lilydjwg/tornado,ListFranz/tornado,hzruandd/tornado,wsyzxcn/tornado,tornadoweb/tornado,coderhaoxin/tornado,eklitzke/tornado,kevinge314gh/tornado,fengsp/tornado,jonashagstedt/tornado,mivade/tornado,jsjohnst/tornado,gwillem/tornado,allenl203/tornado,coderhaoxin/tornado,bywbilly/tornado,kaushik94/tornado,arthurdarcet/tornado,InverseLina/tornado,liqueur/tornado,Aaron1992/tornado,sunjeammy/tornado,ifduyue/tornado,ifduyue/tornado,ydaniv/tornado,bufferx/tornado,johan--/tornado,QuanZag/tornado,drewmiller/tornado,ms7s/tornado,whip112/tornado,zhuochenKIDD/tornado,shashankbassi92/tornado,fengsp/tornado,Fydot/tornado,ifduyue/tornado,yuezhonghua/tornado,SuminAndrew/tornado,zguangyu/tornado,frtmelody/tornado,chenxiaba/tornado,mehmetkose/tornado,elijah513/tornado,Lancher/tornado,hhru/tornado,jsjohnst/tornado,VShangxiao/tornado,takeshineshiro/tornado,nordaux/tornado,MjAbuz/tornado,ColorFuzzy/tornado,ydaniv/tornado,ajdavis/tornado,mlyundin/tornado,importcjj/tornado,ovidiucp/tornado,eXcomm/tornado,mehmetkose/tornado,elelianghh/tornado,fengshao0907/tornado,ymero/tornado,wujuguang/tornado,Windsooon/tornado,Drooids/tornado,andyaguiar/tornado,obsh/tornado,jehiah/tornado,mlyundin/tornado,jarrahwu/tornado,insflow/tornado,erichuang1994/tornado,0x73/tornado,leekchan/tornado_test,AlphaStaxLLC/tornado,zguangyu/tornado,sevenguin/tornado,xinyu7/tornado,VShangxiao/tornado,304471720/tornado,lujinda/tornado,wxhzk/tornado-1,kaushik94/tornado,lujinda/tornado,chenxiaba/tornado,legnaleurc/tornado,bywbilly/tornado,yangkf1985/tornado,Acidburn0zzz/tornado,ajdavis/tornado,yuezhonghua/tornado,wechasing/tornado,0x73/tornado,fengsp/tornado,jonashagstedt/tornado,Windsooon/tornado,bywbilly/tornado,BencoLee/tornado,bdarnell/tornado,kaushik94/tornado,jparise/tornado,chenxiaba/tornado,dsseter/tornado,ydaniv/tornado,z-fork/tornado,ListFranz/tornado,nbargnesi/tornado,tornadoweb/tornado,insflow/tornado,obsh/tornado,fengshao0907/tornado,shaohung001/tornado,jonashagstedt/tornado,Acidburn0zzz/tornado,shashankbassi92/tornado,Lancher/tornado,bywbilly/tornado,QuanZag/tornado,chenxiaba/tornado,Acidburn0zzz/tornado,mivade/tornado,ListFranz/tornado,AlphaStaxLLC/tornado,kippandrew/tornado,wujuguang/tornado,cyrilMargaria/tornado,jsjohnst/tornado,johan--/tornado,shashankbassi92/tornado,gitchs/tornado,kaushik94/tornado,InverseLina/tornado,fengsp/tornado,elijah513/tornado,allenl203/tornado,Drooids/tornado,MjAbuz/tornado,bufferx/tornado,yuyangit/tornado,Drooids/tornado,shaohung001/tornado,shaohung001/tornado,BencoLee/tornado,kevinge314gh/tornado,sxfmol/tornado,Fydot/tornado,ovidiucp/tornado,hzruandd/tornado,kangbiao/tornado,ubear/tornado,icejoywoo/tornado,Geoion/tornado,yuezhonghua/tornado,whip112/tornado,chenxiaba/tornado,dongpinglai/my_tornado,Geoion/tornado,Callwoola/tornado,hhru/tornado,elelianghh/tornado,sxfmol/tornado,sunjeammy/tornado,Polyconseil/tornado,noxiouz/tornado,lsanotes/tornado,shaohung001/tornado,ColorFuzzy/tornado,kippandrew/tornado,jparise/tornado,frtmelody/tornado,jsjohnst/tornado,futurechallenger/tornado,Polyconseil/tornado,cyrusin/tornado,allenl203/tornado,yuezhonghua/tornado,NoyaInRain/tornado,coderhaoxin/tornado,johan--/tornado,fengshao0907/tornado,lilydjwg/tornado,ColorFuzzy/tornado,lsanotes/tornado,ZhuPeng/tornado,icejoywoo/tornado,obsh/tornado,xinyu7/tornado,zhuochenKIDD/tornado,jarrahwu/tornado,erichuang1994/tornado,importcjj/tornado,codecov/tor
nado,dsseter/tornado,johan--/tornado,bywbilly/tornado,wsyzxcn/tornado,ZhuPeng/tornado,elelianghh/tornado,icejoywoo/tornado,eXcomm/tornado,zhuochenKIDD/tornado,jonashagstedt/tornado,zguangyu/tornado,importcjj/tornado,mehmetkose/tornado,andyaguiar/tornado,Callwoola/tornado,BencoLee/tornado,wujuguang/tornado,Aaron1992/tornado,ms7s/tornado,Geoion/tornado,z-fork/tornado,Callwoola/tornado,codecov/tornado,SuminAndrew/tornado,gitchs/tornado,wxhzk/tornado-1,arthurdarcet/tornado,xinyu7/tornado,andyaguiar/tornado,dongpinglai/my_tornado,anandology/tornado,ListFranz/tornado,Batterfii/tornado,ifduyue/tornado,leekchan/tornado_test,liqueur/tornado,0xkag/tornado,kangbiao/tornado,ubear/tornado,QuanZag/tornado,VShangxiao/tornado,akalipetis/tornado,futurechallenger/tornado,sunjeammy/tornado,yangkf1985/tornado,kevinge314gh/tornado,dongpinglai/my_tornado,allenl203/tornado,leekchan/tornado_test,kevinge314gh/tornado,z-fork/tornado,elijah513/tornado,Snamint/tornado,wujuguang/tornado,lsanotes/tornado,yangkf1985/tornado,gwillem/tornado,johan--/tornado,jparise/tornado,304471720/tornado,Aaron1992/tornado,eXcomm/tornado,ymero/tornado,0x73/tornado,ms7s/tornado,z-fork/tornado,allenl203/tornado,hhru/tornado,shashankbassi92/tornado,fengshao0907/tornado,gitchs/tornado,0x73/tornado,insflow/tornado,yuyangit/tornado,icejoywoo/tornado,andyaguiar/tornado,wxhzk/tornado-1,eXcomm/tornado,pombredanne/tornado,ZhuPeng/tornado,ubear/tornado,fengshao0907/tornado,wxhzk/tornado-1,AlphaStaxLLC/tornado,elelianghh/tornado,liqueur/tornado,tianyk/tornado-research,codeb2cc/tornado,elijah513/tornado,lujinda/tornado,eklitzke/tornado,xinyu7/tornado,arthurdarcet/tornado,Acidburn0zzz/tornado,ajdavis/tornado,gwillem/tornado,0xkag/tornado,sxfmol/tornado,cyrusin/tornado,mr-ping/tornado,ifduyue/tornado,ubear/tornado,Lancher/tornado,Snamint/tornado,mlyundin/tornado,cyrilMargaria/tornado,dsseter/tornado,SuminAndrew/tornado,lsanotes/tornado,legnaleurc/tornado,eXcomm/tornado,InverseLina/tornado,leekchan/tornado_test,wsyzxcn/tornado,bdarnell/tornado,jarrahwu/tornado,sunjeammy/tornado,mlyundin/tornado,ZhuPeng/tornado,zhuochenKIDD/tornado,noxiouz/tornado,lilydjwg/tornado,ms7s/tornado,whip112/tornado,ajdavis/tornado,nordaux/tornado,jsjohnst/tornado,tianyk/tornado-research,sxfmol/tornado,cyrusin/tornado,kangbiao/tornado,chenxiaba/tornado,ovidiucp/tornado,coderhaoxin/tornado,MjAbuz/tornado,yuezhonghua/tornado,yuyangit/tornado,jampp/tornado,BencoLee/tornado,Windsooon/tornado,Snamint/tornado,bdarnell/tornado,noxiouz/tornado,dsseter/tornado,yangkf1985/tornado,wsyzxcn/tornado,kippandrew/tornado,hhru/tornado,mr-ping/tornado,cyrusin/tornado,pombredanne/tornado,kippandrew/tornado,Aaron1992/tornado,mehmetkose/tornado,anandology/tornado,jarrahwu/tornado,whip112/tornado,noxiouz/tornado,LTD-Beget/tornado,codeb2cc/tornado,erichuang1994/tornado,leekchan/tornado_test,MjAbuz/tornado,sevenguin/tornado,Acidburn0zzz/tornado,0xkag/tornado,Lancher/tornado,NoyaInRain/tornado,zguangyu/tornado,johan--/tornado,jsjohnst/tornado,gitchs/tornado,SuminAndrew/tornado,zhuochenKIDD/tornado,djt5019/tornado,zguangyu/tornado,jehiah/tornado,AlphaStaxLLC/tornado,Snamint/tornado,cyrusin/tornado,bufferx/tornado,tianyk/tornado-research,codeb2cc/tornado,kevinge314gh/tornado,yangkf1985/tornado,nephics/tornado,elijah513/tornado,cyrilMargaria/tornado,LTD-Beget/tornado,wxhzk/tornado-1,felixonmars/tornado,ydaniv/tornado,yangkf1985/tornado,jparise/tornado,LTD-Beget/tornado,sevenguin/tornado,takeshineshiro/tornado,jarrahwu/tornado,tornadoweb/tornado,jonashagstedt/tornado,gwillem/tornado,arthurdarcet/tornado,Lan
cher/tornado,mehmetkose/tornado,anandology/tornado,insflow/tornado,jampp/tornado,Windsooon/tornado,304471720/tornado,mehmetkose/tornado,cyrilMargaria/tornado,jparise/tornado,hhru/tornado,mivade/tornado,dongpinglai/my_tornado,shashankbassi92/tornado,Geoion/tornado,eXcomm/tornado,pombredanne/tornado,lilydjwg/tornado,akalipetis/tornado,icejoywoo/tornado,icejoywoo/tornado,mivade/tornado,legnaleurc/tornado,ColorFuzzy/tornado,yuyangit/tornado,QuanZag/tornado,wsyzxcn/tornado,VShangxiao/tornado,tianyk/tornado-research,futurechallenger/tornado,304471720/tornado,frtmelody/tornado,wechasing/tornado,codecov/tornado,andyaguiar/tornado,VShangxiao/tornado,AlphaStaxLLC/tornado,sxfmol/tornado,jampp/tornado,wsyzxcn/tornado,BencoLee/tornado,noxiouz/tornado,Callwoola/tornado,sunjeammy/tornado,hzruandd/tornado,legnaleurc/tornado,ZhuPeng/tornado,kangbiao/tornado,liqueur/tornado,fengsp/tornado,kangbiao/tornado,yuyangit/tornado,ovidiucp/tornado,Aaron1992/tornado,akalipetis/tornado,zguangyu/tornado,QuanZag/tornado,Batterfii/tornado,Drooids/tornado,lujinda/tornado,coderhaoxin/tornado,wsyzxcn/tornado,Callwoola/tornado,SuminAndrew/tornado,0x73/tornado,AlphaStaxLLC/tornado,nephics/tornado,eklitzke/tornado,mr-ping/tornado,xinyu7/tornado,jampp/tornado,elelianghh/tornado,nephics/tornado,drewmiller/tornado,erichuang1994/tornado,Windsooon/tornado,mlyundin/tornado,djt5019/tornado,andyaguiar/tornado,anjan-srivastava/tornado,ydaniv/tornado,Batterfii/tornado,kaushik94/tornado,tianyk/tornado-research,futurechallenger/tornado,felixonmars/tornado,Batterfii/tornado,304471720/tornado,gwillem/tornado,xinyu7/tornado,erichuang1994/tornado,304471720/tornado,ubear/tornado,whip112/tornado,dongpinglai/my_tornado,hzruandd/tornado,elelianghh/tornado,noxiouz/tornado,bufferx/tornado,z-fork/tornado,pombredanne/tornado,InverseLina/tornado,Batterfii/tornado,Snamint/tornado,fengsp/tornado,whip112/tornado,wechasing/tornado,gitchs/tornado,jehiah/tornado,nordaux/tornado,NoyaInRain/tornado,ovidiucp/tornado,codeb2cc/tornado,drewmiller/tornado,Polyconseil/tornado,wechasing/tornado,kevinge314gh/tornado,felixonmars/tornado,obsh/tornado,akalipetis/tornado,lujinda/tornado,pombredanne/tornado,felixonmars/tornado,legnaleurc/tornado,ovidiucp/tornado,liqueur/tornado,ColorFuzzy/tornado,sevenguin/tornado,cyrilMargaria/tornado,Snamint/tornado,nbargnesi/tornado,takeshineshiro/tornado,gwillem/tornado,arthurdarcet/tornado,nbargnesi/tornado,ymero/tornado,obsh/tornado,akalipetis/tornado,eklitzke/tornado,ZhuPeng/tornado,anjan-srivastava/tornado,hzruandd/tornado,takeshineshiro/tornado,NoyaInRain/tornado,nephics/tornado,mlyundin/tornado,elijah513/tornado,Geoion/tornado,erichuang1994/tornado,mivade/tornado,LTD-Beget/tornado,kippandrew/tornado,ajdavis/tornado,drewmiller/tornado,BencoLee/tornado,jehiah/tornado,Fydot/tornado,jehiah/tornado,Drooids/tornado,kippandrew/tornado,VShangxiao/tornado,NoyaInRain/tornado,nordaux/tornado,lsanotes/tornado,wujuguang/tornado,MjAbuz/tornado,zhuochenKIDD/tornado,jparise/tornado,Fydot/tornado,futurechallenger/tornado,Geoion/tornado,frtmelody/tornado,insflow/tornado,mr-ping/tornado,ymero/tornado,Acidburn0zzz/tornado,anjan-srivastava/tornado,ymero/tornado,NoyaInRain/tornado,anandology/tornado,importcjj/tornado,sxfmol/tornado,eklitzke/tornado,pombredanne/tornado,bdarnell/tornado,anandology/tornado,anjan-srivastava/tornado,jarrahwu/tornado,takeshineshiro/tornado,0xkag/tornado,MjAbuz/tornado,Polyconseil/tornado,felixonmars/tornado,codeb2cc/tornado,shaohung001/tornado,fengshao0907/tornado,ColorFuzzy/tornado,LTD-Beget/tornado,anandology/tornado,obsh
/tornado,coderhaoxin/tornado,dongpinglai/my_tornado,Polyconseil/tornado,importcjj/tornado,hzruandd/tornado,Polyconseil/tornado,shaohung001/tornado,wechasing/tornado,0xkag/tornado,ydaniv/tornado,arthurdarcet/tornado,codecov/tornado,Batterfii/tornado,codeb2cc/tornado,nephics/tornado,Callwoola/tornado,Drooids/tornado,nordaux/tornado,InverseLina/tornado,futurechallenger/tornado,wxhzk/tornado-1,LTD-Beget/tornado,djt5019/tornado,dsseter/tornado,drewmiller/tornado,gitchs/tornado,liqueur/tornado,frtmelody/tornado,bywbilly/tornado,drewmiller/tornado,wechasing/tornado,sevenguin/tornado,tornadoweb/tornado,anjan-srivastava/tornado,takeshineshiro/tornado,jampp/tornado,dsseter/tornado,Fydot/tornado,nbargnesi/tornado,Fydot/tornado,ListFranz/tornado,jampp/tornado,frtmelody/tornado,InverseLina/tornado,djt5019/tornado,bufferx/tornado,shashankbassi92/tornado,ubear/tornado,djt5019/tornado,cyrusin/tornado,nbargnesi/tornado,Windsooon/tornado,djt5019/tornado,yuezhonghua/tornado,ymero/tornado,importcjj/tornado,anjan-srivastava/tornado,QuanZag/tornado,ms7s/tornado,mr-ping/tornado,kangbiao/tornado,sevenguin/tornado
|
tornado/test/netutil_test.py
|
tornado/test/netutil_test.py
|
from __future__ import absolute_import, division, print_function, with_statement
import socket
from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip
from tornado.testing import AsyncTestCase, gen_test
from tornado.test.util import unittest
try:
from concurrent import futures
except ImportError:
futures = None
try:
import pycares
except ImportError:
pycares = None
else:
from tornado.platform.caresresolver import CaresResolver
try:
import twisted
except ImportError:
twisted = None
else:
from tornado.platform.twisted import TwistedResolver
class _ResolverTestMixin(object):
def test_localhost(self):
self.resolver.resolve('localhost', 80, callback=self.stop)
future = self.wait()
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
future.result())
@gen_test
def test_future_interface(self):
addrinfo = yield self.resolver.resolve('localhost', 80,
socket.AF_UNSPEC)
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
addrinfo)
class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(BlockingResolverTest, self).setUp()
self.resolver = BlockingResolver(io_loop=self.io_loop)
@unittest.skipIf(futures is None, "futures module not present")
class ThreadedResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(ThreadedResolverTest, self).setUp()
self.resolver = ThreadedResolver(io_loop=self.io_loop)
def tearDown(self):
self.resolver.executor.shutdown()
super(ThreadedResolverTest, self).tearDown()
@unittest.skipIf(pycares is None, "pycares module not present")
class CaresResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(CaresResolverTest, self).setUp()
self.resolver = CaresResolver(io_loop=self.io_loop)
@unittest.skipIf(twisted is None, "twisted module not present")
class TwistedResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(TwistedResolverTest, self).setUp()
self.resolver = TwistedResolver(io_loop=self.io_loop)
class IsValidIPTest(unittest.TestCase):
def test_is_valid_ip(self):
self.assertTrue(is_valid_ip('127.0.0.1'))
self.assertTrue(is_valid_ip('4.4.4.4'))
self.assertTrue(is_valid_ip('::1'))
self.assertTrue(is_valid_ip('2620:0:1cfe:face:b00c::3'))
self.assertTrue(not is_valid_ip('www.google.com'))
self.assertTrue(not is_valid_ip('localhost'))
self.assertTrue(not is_valid_ip('4.4.4.4<'))
self.assertTrue(not is_valid_ip(' 127.0.0.1'))
|
from __future__ import absolute_import, division, print_function, with_statement
import socket
from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip
from tornado.testing import AsyncTestCase, gen_test
from tornado.test.util import unittest
try:
from concurrent import futures
except ImportError:
futures = None
class _ResolverTestMixin(object):
def test_localhost(self):
self.resolver.resolve('localhost', 80, callback=self.stop)
future = self.wait()
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
future.result())
@gen_test
def test_future_interface(self):
addrinfo = yield self.resolver.resolve('localhost', 80,
socket.AF_UNSPEC)
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
addrinfo)
class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(BlockingResolverTest, self).setUp()
self.resolver = BlockingResolver(io_loop=self.io_loop)
@unittest.skipIf(futures is None, "futures module not present")
class ThreadedResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(ThreadedResolverTest, self).setUp()
self.resolver = ThreadedResolver(io_loop=self.io_loop)
def tearDown(self):
self.resolver.executor.shutdown()
super(ThreadedResolverTest, self).tearDown()
class IsValidIPTest(unittest.TestCase):
def test_is_valid_ip(self):
self.assertTrue(is_valid_ip('127.0.0.1'))
self.assertTrue(is_valid_ip('4.4.4.4'))
self.assertTrue(is_valid_ip('::1'))
self.assertTrue(is_valid_ip('2620:0:1cfe:face:b00c::3'))
self.assertTrue(not is_valid_ip('www.google.com'))
self.assertTrue(not is_valid_ip('localhost'))
self.assertTrue(not is_valid_ip('4.4.4.4<'))
self.assertTrue(not is_valid_ip(' 127.0.0.1'))
|
apache-2.0
|
Python
|
738fc28922e0807bd292c8257ac251f5f743c237
|
Fix pep8 errors.
|
chrneumann/kotti_dkbase
|
kotti_dkbase/__init__.py
|
kotti_dkbase/__init__.py
|
from pyramid.httpexceptions import HTTPError
from pyramid.httpexceptions import HTTPNotFound
from kotti_dkbase.views import error_view
from kotti_dkbase.views import exception_decorator
def includeme(config):
config.include('pyramid_zcml')
config.load_zcml('configure.zcml')
config.add_view(
error_view,
context=HTTPNotFound,
renderer='kotti_dkbase:templates/view/error-404.pt',
)
config.add_view(
error_view,
context=HTTPError,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_view(
error_view,
decorator=exception_decorator,
context=Exception,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_static_view('static-kotti_dkbase', 'kotti_dkbase:static')
config.override_asset('kotti', 'kotti_dkbase:kotti-overrides/')
|
from pyramid.httpexceptions import HTTPError
from pyramid.httpexceptions import HTTPNotFound
from kotti_dkbase.views import error_view
from kotti_dkbase.views import exception_decorator
def includeme(config):
config.include('pyramid_zcml')
config.load_zcml('configure.zcml')
config.add_view(
error_view,
context=HTTPNotFound,
renderer='kotti_dkbase:templates/view/error-404.pt',
)
config.add_view(
error_view,
context=HTTPError,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_view(
error_view,
decorator=exception_decorator,
context=Exception,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_static_view('static-kotti_dkbase', 'kotti_dkbase:static')
config.override_asset('kotti', 'kotti_dkbase:kotti-overrides/')
|
bsd-2-clause
|
Python
|
9a861757011e2f8ba17bc30b0e874d087f5afd7b
|
Bump version to 6.0.1b1
|
platformio/platformio-core,platformio/platformio-core
|
platformio/__init__.py
|
platformio/__init__.py
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 0, "1b1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 0, "1a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
|
apache-2.0
|
Python
|
491d7eca2137613978a7d88ad74fcdda9dcb5e5c
|
add find_packages to setup.py
|
msupino/alerta-contrib,msupino/alerta-contrib,alerta/alerta-contrib,alerta/alerta-contrib,alerta/alerta-contrib
|
plugins/geoip/setup.py
|
plugins/geoip/setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
version = '0.1.0'
setup(
name="alerta-geoip",
version=version,
description='Alerta plugin for GeoIP Lookup',
url='https://github.com/alerta/alerta-contrib',
license='Apache License 2.0',
author='Nick Satterly',
author_email='[email protected]',
packages=find_packages(),
py_modules=['geoip'],
install_requires=[
'requests',
'alerta-server'
],
include_package_data=True,
zip_safe=False,
entry_points={
'alerta.plugins': [
'geoip = geoip:GeoLocation'
]
}
)
|
#!/usr/bin/env python
import setuptools
version = '0.1.0'
setuptools.setup(
name="alerta-geoip",
version=version,
description='Alerta plugin for GeoIP Lookup',
url='https://github.com/alerta/alerta-contrib',
license='Apache License 2.0',
author='Nick Satterly',
author_email='[email protected]',
py_modules=['geoip'],
install_requires=[
'requests',
'alerta-server'
],
include_package_data=True,
zip_safe=False,
entry_points={
'alerta.plugins': [
'geoip = geoip:GeoLocation'
]
}
)
|
mit
|
Python
|
e3b71c58a409239845588ed9f20970243db45dba
|
add delay to slow balls movement in pygame1_sample
|
joeryan/interact-python
|
pong/pygame1_sample.py
|
pong/pygame1_sample.py
|
import sys, pygame
import time
pygame.init()
size = width, height = 640, 480
speed = [1, 1]
black = 0, 0, 0
screen = pygame.display.set_mode(size)
ball = pygame.image.load("ball.gif")
ballrect = ball.get_rect()
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT: sys.exit()
ballrect = ballrect.move(speed)
time.sleep(0.001)
if ballrect.left < 0 or ballrect.right > width:
speed[0] = -speed[0]
if ballrect.top < 0 or ballrect.bottom > height:
speed[1] = -speed[1]
screen.fill(black)
screen.blit(ball, ballrect)
pygame.display.flip()
|
import sys, pygame
pygame.init()
size = width, height = 640, 480
speed = [2, 2]
black = 0, 0, 0
screen = pygame.display.set_mode(size)
ball = pygame.image.load("ball.gif")
ballrect = ball.get_rect()
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT: sys.exit()
ballrect = ballrect.move(speed)
if ballrect.left < 0 or ballrect.right > width:
speed[0] = -speed[0]
if ballrect.top < 0 or ballrect.bottom > height:
speed[1] = -speed[1]
screen.fill(black)
screen.blit(ball, ballrect)
pygame.display.flip()
|
mit
|
Python
|
126e6be2dd7b61809656ada1adfe3c64cbe24c47
|
Add couchbase/spock to branch merge set.
|
daverigby/kv_engine,daverigby/kv_engine,daverigby/kv_engine,daverigby/kv_engine
|
engines/ep/scripts/unmerged-commits.py
|
engines/ep/scripts/unmerged-commits.py
|
#!/usr/bin/env python2.7
# Script to show which commit(s) are not yet merged between our release branches.
from __future__ import print_function
import subprocess
import sys
class bcolors:
"""Define ANSI color codes, if we're running under a TTY."""
if sys.stdout.isatty():
HEADER = '\033[36m'
WARNING = '\033[33m'
ENDC = '\033[0m'
else:
HEADER = ''
WARNING = ''
ENDC = ''
# Branches to check for unmerged patches. Each toplevel element is a series
# of branches (ordered by ancestory) which should be merged into each other.
# i.e. the oldest supported branch to the newest, which is the order
# patches should be merged.
branches = (('couchbase/watson_ep',
'couchbase/spock'),
('couchbase/watson_mc',
'couchbase/spock'),
('couchbase/spock',
'couchbase/master'))
total_unmerged = 0
for series in branches:
for downstream, upstream in zip(series, series[1:]):
commits = subprocess.check_output(['git', 'cherry', '-v',
upstream, downstream])
count = len(commits.splitlines())
total_unmerged += count
if count > 0:
print((bcolors.HEADER +
"{} commits in '{}' not present in '{}':" +
bcolors.ENDC).format(count, downstream, upstream))
print(commits)
if total_unmerged:
print((bcolors.WARNING + "Total of {} commits outstanding" +
bcolors.ENDC).format(total_unmerged))
sys.exit(total_unmerged)
|
#!/usr/bin/env python2.7
# Script to show which commit(s) are not yet merged between our release branches.
from __future__ import print_function
import subprocess
import sys
class bcolors:
"""Define ANSI color codes, if we're running under a TTY."""
if sys.stdout.isatty():
HEADER = '\033[36m'
WARNING = '\033[33m'
ENDC = '\033[0m'
else:
HEADER = ''
WARNING = ''
ENDC = ''
# Branches to check for unmerged patches. Each toplevel element is a series
# of branches (ordered by ancestory) which should be merged into each other.
# i.e. the oldest supported branch to the newest, which is the order
# patches should be merged.
branches = (('couchbase/watson_ep',
'couchbase/master'),
('couchbase/watson_mc',
'couchbase/master'))
total_unmerged = 0
for series in branches:
for downstream, upstream in zip(series, series[1:]):
commits = subprocess.check_output(['git', 'cherry', '-v',
upstream, downstream])
count = len(commits.splitlines())
total_unmerged += count
if count > 0:
print((bcolors.HEADER +
"{} commits in '{}' not present in '{}':" +
bcolors.ENDC).format(count, downstream, upstream))
print(commits)
if total_unmerged:
print((bcolors.WARNING + "Total of {} commits outstanding" +
bcolors.ENDC).format(total_unmerged))
sys.exit(total_unmerged)
|
bsd-3-clause
|
Python
|
c94960b8c42ab46331cf1f5b76c2c2f4deb33b9d
|
fix KeyError on small word set
|
baverman/typetrainer,baverman/typetrainer
|
typetrainer/tutors/common.py
|
typetrainer/tutors/common.py
|
import random
import collections
import itertools
from typetrainer.generator import make_char_chain, generate_word
class Filler(object):
def __init__(self, words, make_lengths_seq):
self.dist = {}
self.first, self.other, self.word_chars = make_char_chain(words, 3, self.dist)
self.lengths = list(make_lengths_seq(words))
self.old_generated = collections.deque([], 100)
pos = random.randint(0, len(self.lengths) - 1)
left = itertools.islice(self.lengths, pos, None)
right = itertools.islice(self.lengths, 0, pos)
self.liter = itertools.cycle(itertools.chain(left, right))
def __iter__(self):
skip_to_word = False
while True:
t, l = self.liter.next()
if skip_to_word:
while t != 'w':
t, l = self.liter.next()
skip_to_word = False
if t == 'w':
word = None
for _ in range(50):
try:
word = generate_word(self.first, self.other, l, 3)
except KeyError:
break
if word not in self.old_generated:
break
if not word:
skip_to_word = True
continue
self.old_generated.append(word)
yield word
else:
yield l
def change_distribution(self, seq, prob_factor, replace=False):
if replace:
self.dist.clear()
self.dist[seq] = prob_factor
self.reset_parts()
def reset_distribution(self):
self.dist.clear()
self.reset_parts()
def reset_parts(self):
for p in self.other.values():
p.reset()
for p in self.first.values():
p.reset()
def strip_non_word_chars(self, string):
result = ''
for c in string:
if c in self.word_chars:
result += c
return result
|
import random
import collections
import itertools
from typetrainer.generator import make_char_chain, generate_word
class Filler(object):
def __init__(self, words, make_lengths_seq):
self.dist = {}
self.first, self.other, self.word_chars = make_char_chain(words, 3, self.dist)
self.lengths = list(make_lengths_seq(words))
self.old_generated = collections.deque([], 100)
pos = random.randint(0, len(self.lengths) - 1)
left = itertools.islice(self.lengths, pos, None)
right = itertools.islice(self.lengths, 0, pos)
self.liter = itertools.cycle(itertools.chain(left, right))
def __iter__(self):
while True:
t, l = self.liter.next()
if t == 'w':
for _ in range(50):
word = generate_word(self.first, self.other, l, 3)
if word not in self.old_generated:
break
else:
continue
self.old_generated.append(word)
yield word
else:
yield l
def change_distribution(self, seq, prob_factor, replace=False):
if replace:
self.dist.clear()
self.dist[seq] = prob_factor
self.reset_parts()
def reset_distribution(self):
self.dist.clear()
self.reset_parts()
def reset_parts(self):
for p in self.other.values():
p.reset()
for p in self.first.values():
p.reset()
def strip_non_word_chars(self, string):
result = ''
for c in string:
if c in self.word_chars:
result += c
return result
|
mit
|
Python
|
078e409d3c09e9ec0699ea95a2786c2342474bba
|
Return timestamp as a float in JSON.
|
openxc/web-logging-example,openxc/web-logging-example
|
views.py
|
views.py
|
import json
from collections import deque
from flask import request, render_template
from flask import current_app as app, abort
from util import make_status_response, generate_filename, jsonify
RECORDS_QUEUE = deque(maxlen=100)
def _prime_records_queue(q):
filename = generate_filename(app.config)
try:
with open(filename, 'r') as trace_file:
for line in trace_file:
if len(RECORDS_QUEUE) == RECORDS_QUEUE.maxlen:
break
timestamp, record = line.split(':', 1)
record = _massage_record(json.loads(record), float(timestamp))
RECORDS_QUEUE.append(record)
except IOError:
app.logger.warn("No active trace file found at %s" % filename)
def _massage_record(record, timestamp):
record['timestamp'] = int(timestamp * 1000)
return record
def add_record():
if not request.json:
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
records = request.json.get('records', None)
if records is None or not hasattr(records, '__iter__'):
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
with open(generate_filename(app.config), 'a') as trace_file:
for record in records:
timestamp = record.pop('timestamp')
trace_file.write("%s: %s\r\n" % (timestamp, json.dumps(record)))
record = _massage_record(record, timestamp)
RECORDS_QUEUE.append(record)
return make_status_response(201)
def show_records():
_prime_records_queue(RECORDS_QUEUE)
return jsonify(records=list(RECORDS_QUEUE))
def visualization():
return render_template('visualization.html')
|
import json
from collections import deque
from flask import request, render_template
from flask import current_app as app, abort
from util import make_status_response, generate_filename, jsonify
RECORDS_QUEUE = deque(maxlen=100)
def _prime_records_queue(q):
with open(generate_filename(app.config), 'r') as trace_file:
for line in trace_file:
if len(RECORDS_QUEUE) == RECORDS_QUEUE.maxlen:
break
timestamp, record = line.split(':', 1)
record = json.loads(record)
record['timestamp'] = timestamp
RECORDS_QUEUE.append(record)
def add_record():
if not request.json:
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
records = request.json.get('records', None)
if records is None or not hasattr(records, '__iter__'):
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
with open(generate_filename(app.config), 'a') as trace_file:
for record in records:
timestamp = record.pop('timestamp')
trace_file.write("%s: %s\r\n" % (timestamp, json.dumps(record)))
record['timestamp'] = timestamp
RECORDS_QUEUE.append(record)
return make_status_response(201)
def show_records():
_prime_records_queue(RECORDS_QUEUE)
return jsonify(records=list(RECORDS_QUEUE))
def visualization():
return render_template('visualization.html')
|
bsd-3-clause
|
Python
|
2a7ed7c2d6f37c3b6965ad92b21cecc0a4abd91a
|
Add first verion to upload via BioBlend
|
pajanne/galaxy-kickstart,pajanne/galaxy-kickstart
|
upload_datasets_to_galaxy.py
|
upload_datasets_to_galaxy.py
|
#!/usr/bin/python3
import argparse
from bioblend.galaxy import GalaxyInstance
import configparser
import os
def upload_datasets_to_galaxy():
# Arguments initialization
parser = argparse.ArgumentParser(description="Script to upload a folder into"
"Galaxy Data Libraries")
parser.add_argument('--folder', help='Folder to add in Data Libraries of Galaxy')
args = parser.parse_args()
# Fetch arguments
folder_path = args.folder
# Launch config
config = configparser.ConfigParser()
config.read('config.ini')
galaxy_config = config['Galaxy']
gi = GalaxyInstance(url='http://127.0.0.1:8080', key='5e8cc5748922c598c1aa6ec9e605780f')
name_folder_test = '160802_D00281L_0127_C9NPBANXX'
path_folder_test = './test-data/staging/' + name_folder_test
path_to_fastq_folder_test = os.path.join(path_folder_test, 'fastq')
# TODO: Make a loop which execute the following, for each directory found
libs_folder = gi.libraries.get_libraries(name=name_folder_test)
# TODO: Check the library does already exist
# Create the library with the name equal to the folder name
# and description 'Library' + folder_name
dict_library_test = gi.libraries.create_library(name_folder_test,
description=' '.join(['Library', name_folder_test]),
synopsis=None)
# Upload the data in the library just created
list_of_files = '\n'.join(os.listdir(path_to_fastq_folder_test))
unknow_return = gi.libraries.upload_from_galaxy_filesystem(
library_id=dict_library_test.get('id'),
filesystem_paths=list_of_files,
file_type='auto',
link_data_only='link_to_files',
)
print(unknow_return)
# TODO: Check if no new files, else upload them
# print("Already there! Skipping {0}".format(name_folder_test))
#print(gi.histories.get_histories())
if __name__ == "__main__":
upload_datasets_to_galaxy()
|
#!/usr/bin/python3
import argparse
# from bioblend.galaxy import GalaxyInstance
import configparser
def upload_datasets_to_galaxy():
# Arguments initialization
parser = argparse.ArgumentParser(description="Script to upload a folder into"
"Galaxy Data Libraries")
parser.add_argument('--folder', help='Folder to add in Data Libraries of Galaxy')
args = parser.parse_args()
# Fetch arguments
folder_path = args.folder
# Launch config
config = configparser.ConfigParser()
config.read('config.ini')
galaxy_config = config['Galaxy']
# gi = GalaxyInstance(url=galaxy_config['url'], key=galaxy_config['api-key'])
# print(gi.histories.get_histories())
if __name__ == "__main__":
upload_datasets_to_galaxy()
|
mit
|
Python
|
1f4006ba9831f47a7ccc3fa0f8f9fbbb44b0c217
|
fix plot_matplotlib_hist2d.py covariance matrix
|
ndawe/rootpy,kreczko/rootpy,kreczko/rootpy,rootpy/rootpy,ndawe/rootpy,rootpy/rootpy,kreczko/rootpy,rootpy/rootpy,ndawe/rootpy
|
examples/plotting/plot_matplotlib_hist2d.py
|
examples/plotting/plot_matplotlib_hist2d.py
|
#!/usr/bin/env python
"""
========================================
Plot a 2D ROOT histogram with matplotlib
========================================
This example demonstrates how a 2D ROOT histogram can be displayed with
matplotlib.
"""
print __doc__
import ROOT
from matplotlib import pyplot as plt
from rootpy.plotting import root2matplotlib as rplt
from rootpy.plotting import Hist2D
import numpy as np
a = Hist2D(100, -3, 3, 100, 0, 6)
a.fill_array(np.random.multivariate_normal(
mean=(0, 3),
cov=[[1, .5], [.5, 1]],
size=(1E6,)))
fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(15, 5))
ax1.set_title('hist2d')
rplt.hist2d(a, axes=ax1)
ax2.set_title('imshow')
im = rplt.imshow(a, axes=ax2)
ax3.set_title('contour')
rplt.contour(a, axes=ax3)
fig.subplots_adjust(right=0.8)
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
fig.colorbar(im, cax=cbar_ax)
if not ROOT.gROOT.IsBatch():
plt.show()
|
#!/usr/bin/env python
"""
========================================
Plot a 2D ROOT histogram with matplotlib
========================================
This example demonstrates how a 2D ROOT histogram can be displayed with
matplotlib.
"""
print __doc__
import ROOT
from matplotlib import pyplot as plt
from rootpy.plotting import root2matplotlib as rplt
from rootpy.plotting import Hist2D
import numpy as np
a = Hist2D(100, -3, 3, 100, 0, 6)
a.fill_array(np.random.multivariate_normal(
mean=(0, 3),
cov=np.arange(4).reshape(2, 2),
size=(1E6,)))
fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(15, 5))
ax1.set_title('hist2d')
rplt.hist2d(a, axes=ax1)
ax2.set_title('imshow')
im = rplt.imshow(a, axes=ax2)
ax3.set_title('contour')
rplt.contour(a, axes=ax3)
fig.subplots_adjust(right=0.8)
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
fig.colorbar(im, cax=cbar_ax)
if not ROOT.gROOT.IsBatch():
plt.show()
|
bsd-3-clause
|
Python
|
0e7a6f58bc740479a616c973c5973bd255501004
|
Update feedback_tags.py
|
bitmazk/django-feedback-form,bitmazk/django-feedback-form
|
feedback_form/templatetags/feedback_tags.py
|
feedback_form/templatetags/feedback_tags.py
|
"""Template tags and filters for the ``feedback_form`` app."""
from django import template
from ..app_settings import * # NOQA
from ..forms import FeedbackForm
register = template.Library()
@register.inclusion_tag('feedback_form/partials/form.html', takes_context=True)
def feedback_form(context):
"""Template tag to render a feedback form."""
user = None
url = None
if context.get('request'):
url = context['request'].path
if context['request'].user.is_authenticated():
user = context['request'].user
return {
'form': FeedbackForm(url=url, user=user),
'background_color': FEEDBACK_FORM_COLOR,
'text_color': FEEDBACK_FORM_TEXTCOLOR,
'text': FEEDBACK_FORM_TEXT,
}
|
"""Template tags and filters for the ``feedback_form`` app."""
from django import template
from ..app_settings import * # NOQA
from ..forms import FeedbackForm
register = template.Library()
@register.inclusion_tag('feedback_form/partials/form.html', takes_context=True)
def feedback_form(context):
"""Template tag to render a feedback form."""
user = None
if context['request'].user.is_authenticated():
user = context['request'].user
return {
'form': FeedbackForm(url=context['request'].path, user=user),
'background_color': FEEDBACK_FORM_COLOR,
'text_color': FEEDBACK_FORM_TEXTCOLOR,
'text': FEEDBACK_FORM_TEXT,
}
|
mit
|
Python
|
426dd82e9b2a7c2de2b6ba9091ad67057ffe9f5f
|
Create db, if there isn't one.
|
punchagan/statiki,punchagan/statiki
|
statiki.wsgi
|
statiki.wsgi
|
import os
from os.path import abspath, dirname
import sys
#active the python virtualenv for this application
HOME = os.environ['HOME']
activate_this = '%s/.virtualenvs/statiki/bin/activate_this.py' % HOME
execfile(activate_this, dict(__file__=activate_this))
# Add the source directory to the path
HERE = dirname(abspath(__file__))
sys.path.insert(0, HERE)
from statiki import db, app as application
db.create_all()
|
import os
from os.path import abspath, dirname
import sys
#active the python virtualenv for this application
HOME = os.environ['HOME']
activate_this = '%s/.virtualenvs/statiki/bin/activate_this.py' % HOME
execfile(activate_this, dict(__file__=activate_this))
# Add the source directory to the path
HERE = dirname(abspath(__file__))
sys.path.insert(0, HERE)
from statiki import app as application
|
mit
|
Python
|
78bfcf1561597113a91f7449642085a392c20429
|
use doctype instead of service name to send email
|
vjFaLk/frappe,vjFaLk/frappe,vjFaLk/frappe,vjFaLk/frappe
|
frappe/integrations/offsite_backup_utils.py
|
frappe/integrations/offsite_backup_utils.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import glob
import os
from frappe.utils import split_emails, get_backups_path
def send_email(success, service_name, doctype, email_field, error_status=None):
recipients = get_recipients(doctype, email_field)
if not recipients:
frappe.log_error("No Email Recipient found for {0}".format(service_name),
"{0}: Failed to send backup status email".format(service_name))
return
if success:
if not frappe.db.get_value(doctype, None, "send_email_for_successful_backup"):
return
subject = "Backup Upload Successful"
message = """
<h3>Backup Uploaded Successfully!</h3>
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {0} bucket. So relax!</p>""".format(service_name)
else:
subject = "[Warning] Backup Upload Failed"
message = """
<h3>Backup Upload Failed!</h3>
<p>Oops, your automated backup to {0} failed.</p>
<p>Error message: {1}</p>
<p>Please contact your system manager for more information.</p>""".format(service_name, error_status)
frappe.sendmail(recipients=recipients, subject=subject, message=message)
def get_recipients(doctype, email_field):
if not frappe.db:
frappe.connect()
return split_emails(frappe.db.get_value(doctype, None, email_field))
def get_latest_backup_file(with_files=False):
def get_latest(file_ext):
file_list = glob.glob(os.path.join(get_backups_path(), file_ext))
return max(file_list, key=os.path.getctime)
latest_file = get_latest('*.sql.gz')
if with_files:
latest_public_file_bak = get_latest('*-files.tar')
latest_private_file_bak = get_latest('*-private-files.tar')
return latest_file, latest_public_file_bak, latest_private_file_bak
return latest_file
def get_file_size(file_path, unit):
if not unit:
unit = 'MB'
file_size = os.path.getsize(file_path)
memory_size_unit_mapper = {'KB': 1, 'MB': 2, 'GB': 3, 'TB': 4}
i = 0
while i < memory_size_unit_mapper[unit]:
file_size = file_size / 1000.0
i += 1
return file_size
def validate_file_size():
frappe.flags.create_new_backup = True
latest_file = get_latest_backup_file()
file_size = get_file_size(latest_file, unit='GB')
if file_size > 1:
frappe.flags.create_new_backup = False
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import glob
import os
from frappe.utils import split_emails, get_backups_path
def send_email(success, service_name, doctype, email_field, error_status=None):
recipients = get_recipients(service_name, email_field)
if not recipients:
frappe.log_error("No Email Recipient found for {0}".format(service_name),
"{0}: Failed to send backup status email".format(service_name))
return
if success:
if not frappe.db.get_value(doctype, None, "send_email_for_successful_backup"):
return
subject = "Backup Upload Successful"
message = """
<h3>Backup Uploaded Successfully!</h3>
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {0} bucket. So relax!</p>""".format(service_name)
else:
subject = "[Warning] Backup Upload Failed"
message = """
<h3>Backup Upload Failed!</h3>
<p>Oops, your automated backup to {0} failed.</p>
<p>Error message: {1}</p>
<p>Please contact your system manager for more information.</p>""".format(service_name, error_status)
frappe.sendmail(recipients=recipients, subject=subject, message=message)
def get_recipients(service_name, email_field):
if not frappe.db:
frappe.connect()
return split_emails(frappe.db.get_value(service_name, None, email_field))
def get_latest_backup_file(with_files=False):
def get_latest(file_ext):
file_list = glob.glob(os.path.join(get_backups_path(), file_ext))
return max(file_list, key=os.path.getctime)
latest_file = get_latest('*.sql.gz')
if with_files:
latest_public_file_bak = get_latest('*-files.tar')
latest_private_file_bak = get_latest('*-private-files.tar')
return latest_file, latest_public_file_bak, latest_private_file_bak
return latest_file
def get_file_size(file_path, unit):
if not unit:
unit = 'MB'
file_size = os.path.getsize(file_path)
memory_size_unit_mapper = {'KB': 1, 'MB': 2, 'GB': 3, 'TB': 4}
i = 0
while i < memory_size_unit_mapper[unit]:
file_size = file_size / 1000.0
i += 1
return file_size
def validate_file_size():
frappe.flags.create_new_backup = True
latest_file = get_latest_backup_file()
file_size = get_file_size(latest_file, unit='GB')
if file_size > 1:
frappe.flags.create_new_backup = False
|
mit
|
Python
|
5659ae2668edb934f422e15edb81b1977da9b2c2
|
clean up
|
dkohreidze/seo-auto-internal-links
|
sail.py
|
sail.py
|
#!/usr/bin/python
# David Kohreidze
import csv
import os
import re
with open('keywords.csv', 'rU') as csvf:
reader = csv.reader(csvf)
links = {rows[0]:rows[1] for rows in reader}
for f in os.listdir('.'):
if os.path.isfile(f):
if f.endswith(".txt"):
s = open(f).read()
print "Processing %s.." %f
for i in links:
s = re.sub(r'\b'+i+r'\b', '<a href="%s">%s</a>' %(links[i],i), s, 1)
f = open(f, 'w')
f.write(s)
f.close()
print "Complete."
|
#!/usr/bin/python
# David Kohreidze
import csv
import os
import re
with open('keywords.csv', 'rU') as csvf:
reader = csv.reader(csvf)
links = {rows[0]:rows[1] for rows in reader} # builds dictionary from file
for f in os.listdir('.'): # for every file in the current directory
if os.path.isfile(f): # must be a file
if f.endswith(".txt"): # must be a text file
s = open(f).read() # read file
print "Processing %s.." %f
for i in links:
s = re.sub(r'\b'+i+r'\b', '<a href="%s">%s</a>'%(links[i],i), s, 1)
f = open(f, 'w')
f.write(s)
f.close()
print "Complete."
|
mit
|
Python
|
03c221d7ac1ca955b41577d525bd40b6188045ea
|
Clarify comment.
|
earcanal/dotprobe,earcanal/dotprobe,earcanal/dotprobe
|
size.py
|
size.py
|
#!/usr/bin/python
# calculate the number of pixels for a stimulus
# fixed: viewer distance, vertical resolution, visual angle
# argv[1] = vertical screen height (cm)
from math import atan2, degrees
import sys
if sys.argv[1]:
h = float(sys.argv[1])
else:
h = 21.5 # Dell laptop
h = 20.6 # Macbook Pro
h = 28.7 # Dell monitor
d = 60 # distance between monitor and participant in cm
r = 768 # vertical resolution of monitor
size_in_px = 0 # stimulus size in pixels
size_in_deg = 0
target_degrees = 2
# calculate the number of degrees that correspond to a single pixel. This will
# generally be a very small value, something like 0.03.
deg_per_px = degrees(atan2(.5*h, d)) / (.5*r)
print '%s degrees correspond to a single pixel' % deg_per_px
# calculate the size of the stimulus in degrees
while size_in_deg < target_degrees:
size_in_px += 1
size_in_deg = size_in_px * deg_per_px
print 'The size of the stimulus is %s pixels and %s visual degrees' % (size_in_px, size_in_deg)
|
#!/usr/bin/python
# calculate the number of pixels for a stimulus
# fixed: viewer distance, vertical resolution, visual angle
# argv[1] = vertical screen height
from math import atan2, degrees
import sys
if sys.argv[1]:
h = float(sys.argv[1])
else:
h = 21.5 # Dell laptop
h = 20.6 # Macbook Pro
h = 28.7 # Dell monitor
d = 60 # distance between monitor and participant in cm
r = 768 # vertical resolution of monitor
size_in_px = 0 # stimulus size in pixels
size_in_deg = 0
target_degrees = 2
# calculate the number of degrees that correspond to a single pixel. This will
# generally be a very small value, something like 0.03.
deg_per_px = degrees(atan2(.5*h, d)) / (.5*r)
print '%s degrees correspond to a single pixel' % deg_per_px
# calculate the size of the stimulus in degrees
while size_in_deg < target_degrees:
size_in_px += 1
size_in_deg = size_in_px * deg_per_px
print 'The size of the stimulus is %s pixels and %s visual degrees' % (size_in_px, size_in_deg)
|
cc0-1.0
|
Python
|
3972f861fae155b84bc344810b0e5a1c8cbb418c
|
Fix SMBC next page XPath
|
J-CPelletier/WebComicToCBZ,J-CPelletier/webcomix,J-CPelletier/webcomix
|
webcomix/supported_comics.py
|
webcomix/supported_comics.py
|
supported_comics = {
"xkcd": ("http://xkcd.com/1/", "//a[@rel='next']/@href", "//div[@id='comic']//img/@src"),
"Nedroid": ("http://nedroid.com/2005/09/2210-whee/", "//div[@class='nav-next']/a/@href", "//div[@id='comic']/img/@src"),
"JL8": ("http://limbero.org/jl8/1", "//a[text()='>']/@href", "//img/@src"),
"SMBC": ("http://www.smbc-comics.com/comic/2002-09-05", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src"),
"Blindsprings": ("http://www.blindsprings.com/comic/blindsprings-cover-book-one", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src"),
"TheAbominableCharlesChristopher": ("http://abominable.cc/post/44164796353/episode-one", "//span[@class='next_post']//@href", "//div[@class='photo']//img/@src"),
"GuildedAge": ("http://guildedage.net/comic/chapter-1-cover/", "//a[@class='navi comic-nav-next navi-next']/@href", "//div[@id='comic']//img/@src"),
"TalesOfElysium": ("http://ssp-comics.com/comics/toe/?page=1", "//a[button/@id='nextButton']/@href", "//div[@id='ImageComicContainer']//img/@src"),
"AmazingSuperPowers": ("http://www.amazingsuperpowers.com/2007/09/heredity/", "//a[@class='navi navi-next']/@href", "//div[@class='comicpane']/img/@src"),
"Gunshow": ("http://gunshowcomic.com/1", "(//span[@class='snavb'])[4]/a/@href", "//img[@class='strip']/@src"),
"Lackadaisy": ("http://www.lackadaisycats.com/comic.php?comicid=1", "//div[@class='next']/a/@href", "//div[@id='content']/img/@src"),
"WildeLife": ("http://www.wildelifecomic.com/comic/1", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src")
}
|
supported_comics = {
"xkcd": ("http://xkcd.com/1/", "//a[@rel='next']/@href", "//div[@id='comic']//img/@src"),
"Nedroid": ("http://nedroid.com/2005/09/2210-whee/", "//div[@class='nav-next']/a/@href", "//div[@id='comic']/img/@src"),
"JL8": ("http://limbero.org/jl8/1", "//a[text()='>']/@href", "//img/@src"),
"SMBC": ("http://www.smbc-comics.com/comic/2002-09-05", "//a[@class='next']/@href", "//img[@id='cc-comic']/@src"),
"Blindsprings": ("http://www.blindsprings.com/comic/blindsprings-cover-book-one", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src"),
"TheAbominableCharlesChristopher": ("http://abominable.cc/post/44164796353/episode-one", "//span[@class='next_post']//@href", "//div[@class='photo']//img/@src"),
"GuildedAge": ("http://guildedage.net/comic/chapter-1-cover/", "//a[@class='navi comic-nav-next navi-next']/@href", "//div[@id='comic']//img/@src"),
"TalesOfElysium": ("http://ssp-comics.com/comics/toe/?page=1", "//a[button/@id='nextButton']/@href", "//div[@id='ImageComicContainer']//img/@src"),
"AmazingSuperPowers": ("http://www.amazingsuperpowers.com/2007/09/heredity/", "//a[@class='navi navi-next']/@href", "//div[@class='comicpane']/img/@src"),
"Gunshow": ("http://gunshowcomic.com/1", "(//span[@class='snavb'])[4]/a/@href", "//img[@class='strip']/@src"),
"Lackadaisy": ("http://www.lackadaisycats.com/comic.php?comicid=1", "//div[@class='next']/a/@href", "//div[@id='content']/img/@src"),
"WildeLife": ("http://www.wildelifecomic.com/comic/1", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src")
}
|
mit
|
Python
|
c0ebc5d757e71c06a8ca3597bf92d496aa0dd5ee
|
update test child age
|
vanesa/kid-o,vanesa/kid-o,vanesa/kid-o,vanesa/kid-o
|
test.py
|
test.py
|
import os
import unittest
import tempfile
import json
from app import app
from app.models import db, Child, User
from datetime import datetime
class ChildViewTestCase(unittest.TestCase):
def test_child_view(self):
first_name = "Martha"
last_name = "Sosa"
birth_date= datetime.strptime("2009-02-02", "%Y-%m-%d")
test_child_view = Child(first_name=first_name, last_name=last_name, birth_date=birth_date)
self.assertEqual(test_child_view.age, 7)
class AuthTestCase(unittest.TestCase):
def setUp(self):
self.client = app.test_client()
def test_can_login(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="[email protected]",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Log Out", response.get_data())
db.session.delete(user)
db.session.commit()
def test_can_sign_out(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="[email protected]",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
response = self.client.get('/logout', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Login", response.get_data())
db.session.delete(user)
db.session.commit()
if __name__ == "__main__":
app.config['TESTING'] = True
app.testing = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/travis_ci_test'
app.config['WTF_CSRF_ENABLED'] = False
db.init_app(app)
db.create_all()
unittest.main()
|
import os
import unittest
import tempfile
import json
from app import app
from app.models import db, Child, User
from datetime import datetime
class ChildViewTestCase(unittest.TestCase):
def test_child_view(self):
first_name = "Martha"
last_name = "Sosa"
birth_date= datetime.strptime("2009-02-02", "%Y-%m-%d")
test_child_view = Child(first_name=first_name, last_name=last_name, birth_date=birth_date)
self.assertEqual(test_child_view.age, 6)
class AuthTestCase(unittest.TestCase):
def setUp(self):
self.client = app.test_client()
def test_can_login(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="[email protected]",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Log Out", response.get_data())
db.session.delete(user)
db.session.commit()
def test_can_sign_out(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="[email protected]",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
response = self.client.get('/logout', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Login", response.get_data())
db.session.delete(user)
db.session.commit()
if __name__ == "__main__":
app.config['TESTING'] = True
app.testing = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/travis_ci_test'
app.config['WTF_CSRF_ENABLED'] = False
db.init_app(app)
db.create_all()
unittest.main()
|
bsd-3-clause
|
Python
|
e890ac9ef00193beac77b757c62911553cebf656
|
Change save path to local path
|
adampiskorski/lpr_poc
|
test.py
|
test.py
|
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', 'img.jpg')
|
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
|
mit
|
Python
|
89b1bfaad82f1e19df51b189b65ce940983d0da1
|
comment out cfl in tests since it seems to be broken.
|
reticulatingspline/Scores,cottongin/Scores
|
test.py
|
test.py
|
###
# Copyright (c) 2012-2014, spline
# All rights reserved.
###
from supybot.test import *
class ScoresTestCase(PluginTestCase):
plugins = ('Scores',)
def testScores(self):
# cfb, cfl, d1bb, golf, mlb, nascar, nba, ncb, ncw, nfl, nhl, racing, tennis, and wnba
conf.supybot.plugins.Scores.disableANSI.setValue('True')
self.assertNotError('cfb')
# self.assertNotError('cfl')
self.assertNotError('d1bb')
self.assertNotError('golf')
self.assertNotError('mlb')
self.assertNotError('nascar')
self.assertNotError('nba')
self.assertNotError('ncb')
self.assertNotError('ncw')
self.assertNotError('nfl')
self.assertNotError('nhl')
self.assertNotError('racing f1')
self.assertNotError('tennis')
self.assertNotError('wnba')
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
###
# Copyright (c) 2012-2014, spline
# All rights reserved.
###
from supybot.test import *
class ScoresTestCase(PluginTestCase):
plugins = ('Scores',)
def testScores(self):
# cfb, cfl, d1bb, golf, mlb, nascar, nba, ncb, ncw, nfl, nhl, racing, tennis, and wnba
conf.supybot.plugins.Scores.disableANSI.setValue('True')
self.assertNotError('cfb')
self.assertNotError('cfl')
self.assertNotError('d1bb')
self.assertNotError('golf')
self.assertNotError('mlb')
self.assertNotError('nascar')
self.assertNotError('nba')
self.assertNotError('ncb')
self.assertNotError('ncw')
self.assertNotError('nfl')
self.assertNotError('nhl')
self.assertNotError('racing f1')
self.assertNotError('tennis')
self.assertNotError('wnba')
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
mit
|
Python
|
ac8d6210b1e48e7ce1131412b45d23846b7c73d2
|
Fix to minor style issue
|
panoplyio/panoply-python-sdk
|
test.py
|
test.py
|
import time
import panoply
KEY = "panoply/2g866xw4oaqt1emi"
SECRET = "MmM0NWNvc2wwYmJ4ZDJ0OS84MmY3MzQ4NC02MDIzLTQyN2QtODdkMS0yY2I0NTAzNDk0NDQvMDM3MzM1OTk5NTYyL3VzLWVhc3QtMQ==" # noqa
sdk = panoply.SDK(KEY, SECRET)
sdk.write('roi-test', {'hello': 1})
print sdk.qurl
time.sleep(5)
|
import time
import panoply
KEY = "panoply/2g866xw4oaqt1emi"
SECRET = "MmM0NWNvc2wwYmJ4ZDJ0OS84MmY3MzQ4NC02MDIzLTQyN2QtODdkMS0yY2I0NTAzNDk0NDQvMDM3MzM1OTk5NTYyL3VzLWVhc3QtMQ==" # noqa
sdk = panoply.SDK(KEY, SECRET)
sdk.write('roi-test', {'hello': 1})
print sdk.qurl
time.sleep(5)
|
mit
|
Python
|
5e089a1b155071bb9f009657320c9c12418f517d
|
debug travis
|
scienceopen/histutils,scienceopen/histutils
|
test.py
|
test.py
|
#!/usr/bin/env python
from numpy import array,nan,uint16,int64
from numpy.testing import assert_allclose
from datetime import datetime
#
try:
from .airMass import airmass
from .rawDMCreader import goRead
from .plotSolarElev import compsolar
except Exception as e:
print(e)
from airMass import airmass
from rawDMCreader import goRead
from plotSolarElev import compsolar
def test_airmass():
theta=[-1.,38.]
Irr,M,I0 = airmass(theta,datetime(2015,7,1,0,0,0))
assert_allclose(Irr,[nan, 805.13538427])
assert_allclose(M,[nan, 1.62045712])
def test_rawread():
bigfn='test/testframes.DMCdata'
framestoplay=(1,2,1) #this is (start,stop,step) so (1,2,1) means read only the second frame in the file
testframe, testind,finf = goRead(bigfn,(512,512),(1,1),framestoplay,verbose=1)
#these are both tested by goRead
#finf = getDMCparam(bigfn,(512,512),(1,1),None,verbose=2)
#with open(bigfn,'rb') as f:
# testframe,testind = getDMCframe(f,iFrm=1,finf=finf,verbose=2)
#test a handful of pixels
assert testind.dtype == int64
assert testframe.dtype == uint16
assert testind == 710731
assert (testframe[0,:5,0] == array([642, 1321, 935, 980, 1114])).all()
assert (testframe[0,-5:,-1] == array([2086, 1795, 2272, 1929, 1914])).all()
def test_plotsolar():
Irr,sunel = compsolar('pfisr',(None,None,None),
datetime(2015,7,1,0,0,0), 1, False)
assert_allclose(Irr[[6,14,6],[2,125,174]], [nan, 216.436431, 405.966392])
assert_allclose(sunel[[6,14,6],[2,125,174]], [-33.736906, 4.438728, 9.068415])
if __name__ == '__main__':
test_airmass()
test_rawread()
test_plotsolar()
|
#!/usr/bin/env python
from numpy import array,nan,uint16,int64
from numpy.testing import assert_allclose
from datetime import datetime
#
try:
from .airMass import airmass
from .rawDMCreader import goRead
from .plotSolarElev import compsolar
except:
from airMass import airmass
from rawDMCreader import goRead
from plotSolarElev import compsolar
def test_airmass():
theta=[-1.,38.]
Irr,M,I0 = airmass(theta,datetime(2015,7,1,0,0,0))
assert_allclose(Irr,[nan, 805.13538427])
assert_allclose(M,[nan, 1.62045712])
def test_rawread():
bigfn='test/testframes.DMCdata'
framestoplay=(1,2,1) #this is (start,stop,step) so (1,2,1) means read only the second frame in the file
testframe, testind,finf = goRead(bigfn,(512,512),(1,1),framestoplay,verbose=1)
#these are both tested by goRead
#finf = getDMCparam(bigfn,(512,512),(1,1),None,verbose=2)
#with open(bigfn,'rb') as f:
# testframe,testind = getDMCframe(f,iFrm=1,finf=finf,verbose=2)
#test a handful of pixels
assert testind.dtype == int64
assert testframe.dtype == uint16
assert testind == 710731
assert (testframe[0,:5,0] == array([642, 1321, 935, 980, 1114])).all()
assert (testframe[0,-5:,-1] == array([2086, 1795, 2272, 1929, 1914])).all()
def test_plotsolar():
Irr,sunel = compsolar('pfisr',(None,None,None),
datetime(2015,7,1,0,0,0), 1, False)
assert_allclose(Irr[[6,14,6],[2,125,174]], [nan, 216.436431, 405.966392])
assert_allclose(sunel[[6,14,6],[2,125,174]], [-33.736906, 4.438728, 9.068415])
if __name__ == '__main__':
test_airmass()
test_rawread()
test_plotsolar()
|
mit
|
Python
|
e859119ba7c898c9c5a1e3c9a719050461abc249
|
test installed package
|
tsadm/desktop,tsadm/desktop,tsadm/desktop,tsadm/desktop
|
test.py
|
test.py
|
#!/usr/bin/env python3
import sys
from os import path
from unittest import TestLoader, TextTestRunner
print("Python {}".format(sys.version))
if not '--test-installed' in sys.argv:
libdir = path.join(path.abspath(path.curdir), 'lib')
sys.path.insert(0, libdir)
from tsdesktop import version
version.println()
ldr = TestLoader()
suite = ldr.discover('tsdesktop', '*_test.py')
verbose = 1
if '-v' in sys.argv: verbose = 2
rnr = TextTestRunner(verbosity=verbose)
rst = rnr.run(suite)
sys.exit(len(rst.errors))
|
#!/usr/bin/env python3
import sys
from os import path
from unittest import TestLoader, TextTestRunner
print("Python {}".format(sys.version))
libdir = path.join(path.abspath(path.curdir), 'lib')
sys.path.insert(0, libdir)
from tsdesktop import version
version.println()
ldr = TestLoader()
suite = ldr.discover('tsdesktop', '*_test.py')
verbose = 1
if '-v' in sys.argv: verbose = 2
rnr = TextTestRunner(verbosity=verbose)
rst = rnr.run(suite)
sys.exit(len(rst.errors))
|
bsd-3-clause
|
Python
|
10a78f1d5cfb38c14c7e5434fdd5258fdf41a351
|
Fix failing tests (oops)
|
rhargreaves/zx-spec
|
test.py
|
test.py
|
#!/usr/bin/env python
import os
import subprocess
import time
import glob
import unittest
class TestPasses(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-passes.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec: The TDD Framework')
def test_indicators_show_tests_passed(self):
self.assertRegexpMatches(self.output, '\.' * 4)
def test_all_tests_pass(self):
self.assertRegexpMatches(self.output, 'Pass: 5, Fail: 0, Total: 5')
@classmethod
def tearDownClass(self):
clean()
class TestFailures(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-failures.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec: The TDD Framework')
def test_shows_failed_tests(self):
self.assertRegexpMatches(self.output, 'assert_fail')
self.assertRegexpMatches(self.output, 'assert_a_equals')
self.assertRegexpMatches(self.output, 'assert_a_not_equals')
self.assertRegexpMatches(self.output, 'assert_a_is_zero')
self.assertRegexpMatches(self.output, 'assert_a_is_not_zero')
def test_all_tests_failed(self):
self.assertRegexpMatches(self.output, 'Pass: 0, Fail: 5, Total: 5')
@classmethod
def tearDownClass(self):
clean()
def clean():
for f in glob.glob("printout.*"):
os.remove(f)
def run_zx_spec(tape):
ZX_SPEC_OUTPUT_FILE = "printout.txt"
proc = subprocess.Popen([
"fuse",
"--tape", tape,
"--auto-load",
"--no-autosave-settings"])
wait_count = 0
while not os.path.exists(ZX_SPEC_OUTPUT_FILE):
time.sleep(0.1)
wait_count += 1
if wait_count == 20:
raise 'Output file not produced in time'
time.sleep(10)
proc.kill()
with open(ZX_SPEC_OUTPUT_FILE, 'r') as f:
return f.read()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
#!/usr/bin/env python
import os
import subprocess
import time
import glob
import unittest
class TestPasses(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-passes.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec - The TDD Framework')
def test_indicators_show_tests_passed(self):
self.assertRegexpMatches(self.output, '\.' * 4)
def test_all_tests_pass(self):
self.assertRegexpMatches(self.output, 'Pass: 5, Fail: 0, Total: 5')
@classmethod
def tearDownClass(self):
clean()
class TestFailures(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-failures.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec - The TDD Framework')
def test_shows_failed_tests(self):
self.assertRegexpMatches(self.output, 'assert_fail fails')
self.assertRegexpMatches(self.output, 'assert_a_equals fails')
self.assertRegexpMatches(self.output, 'assert_a_not_equals fails')
self.assertRegexpMatches(self.output, 'assert_a_is_zero fails')
self.assertRegexpMatches(self.output, 'assert_a_is_not_zero fails')
def test_all_tests_failed(self):
self.assertRegexpMatches(self.output, 'Pass: 0, Fail: 5, Total: 5')
@classmethod
def tearDownClass(self):
clean()
def clean():
for f in glob.glob("printout.*"):
os.remove(f)
def run_zx_spec(tape):
ZX_SPEC_OUTPUT_FILE = "printout.txt"
proc = subprocess.Popen([
"fuse",
"--tape", tape,
"--auto-load",
"--no-autosave-settings"])
wait_count = 0
while not os.path.exists(ZX_SPEC_OUTPUT_FILE):
time.sleep(0.1)
wait_count += 1
if wait_count == 20:
raise 'Output file not produced in time'
time.sleep(10)
proc.kill()
with open(ZX_SPEC_OUTPUT_FILE, 'r') as f:
return f.read()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
mit
|
Python
|
8638e02de720954ed33098ec88a044dee38302f6
|
test ...
|
ryanrhymes/scandex
|
test.py
|
test.py
|
#!/usr/bin/env python
import os
import socket
import sys
def test():
print "hello"
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
server_address = '/var/run/docker.sock'
sock.connect(server_address)
pass
if __name__=="__main__":
test()
sys.exit(0)
|
#!/usr/bin/env python
import os
import socket
import sys
def test():
print "hello"
pass
if __name__=="__main__":
test()
sys.exit(0)
|
mit
|
Python
|