Dataset schema (column name, feature type, min-max string length or number of classes):

    commit          stringlengths    40-40
    old_file        stringlengths    4-264
    new_file        stringlengths    4-264
    old_contents    stringlengths    0-3.26k
    new_contents    stringlengths    1-4.43k
    subject         stringlengths    15-624
    message         stringlengths    15-4.7k
    lang            stringclasses    3 values
    license         stringclasses    13 values
    repos           stringlengths    5-91.5k
3ca4199db1a3bc4bd9a408ff2b86fc20ee477e41
setup.py
setup.py
old_contents:

#!/usr/bin/python

import distutils
from setuptools import setup, Extension

long_desc = """This is a C extension module for Python which implements extended attributes manipulation. It is a wrapper on top of the attr C library - see attr(5)."""
version = "0.5.1"
author = "Iustin Pop"
author_email = "[email protected]"
macros = [
    ("_XATTR_VERSION", '"%s"' % version),
    ("_XATTR_AUTHOR", '"%s"' % author),
    ("_XATTR_EMAIL", '"%s"' % author_email),
]

setup(name = "pyxattr",
      version = version,
      description = "Filesystem extended attributes for python",
      long_description = long_desc,
      author = author,
      author_email = author_email,
      url = "http://pyxattr.k1024.org/",
      license = "LGPL",
      ext_modules = [Extension("xattr", ["xattr.c"],
                               libraries=["attr"],
                               define_macros=macros)],
      test_suite = "test",
      )

new_contents:

#!/usr/bin/python

import distutils
from setuptools import setup, Extension

long_desc = """This is a C extension module for Python which implements extended attributes manipulation. It is a wrapper on top of the attr C library - see attr(5)."""
version = "0.5.1"
author = "Iustin Pop"
author_email = "[email protected]"
macros = [
    ("_XATTR_VERSION", '"%s"' % version),
    ("_XATTR_AUTHOR", '"%s"' % author),
    ("_XATTR_EMAIL", '"%s"' % author_email),
]

setup(name = "pyxattr",
      version = version,
      description = "Filesystem extended attributes for python",
      long_description = long_desc,
      author = author,
      author_email = author_email,
      url = "http://pyxattr.k1024.org/",
      download_url = "https://github.com/iustin/pyxattr/downloads",
      license = "LGPL",
      ext_modules = [Extension("xattr", ["xattr.c"],
                               libraries=["attr"],
                               define_macros=macros)],
      test_suite = "test",
      )

Add a download_url for pypi
Add a download_url for pypi
Python
lgpl-2.1
iustin/pyxattr,iustin/pyxattr
2ef360762cf807806417fbd505319165716e4591
setup.py
setup.py
old_contents:

#!/usr/bin/env python

# Copyright (c) 2014, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>

from auto_version import calculate_version, build_py_copy_version


def configuration(parent_package='', top_path=None):
    import numpy
    from distutils.errors import DistutilsError
    if numpy.__dict__.get('quaternion') is not None:
        raise DistutilsError('The target NumPy already has a quaternion type')
    from numpy.distutils.misc_util import Configuration
    # if(os.environ.get('THIS_IS_TRAVIS') is not None):
    #     print("This appears to be Travis!")
    #     compile_args = ['-O3']
    # else:
    #     compile_args = ['-ffast-math', '-O3']
    compile_args = ['-O3']
    config = Configuration('quaternion', parent_package, top_path)
    config.add_extension('numpy_quaternion',
                         ['quaternion.c', 'numpy_quaternion.c'],
                         depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
                         extra_compile_args=compile_args, )
    return config


if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(configuration=configuration,
          version=calculate_version(),
          cmdclass={'build_py': build_py_copy_version},)

new_contents:

#!/usr/bin/env python

# Copyright (c) 2014, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>

from auto_version import calculate_version, build_py_copy_version


def configuration(parent_package='', top_path=None):
    import numpy
    from distutils.errors import DistutilsError
    if numpy.__dict__.get('quaternion') is not None:
        raise DistutilsError('The target NumPy already has a quaternion type')
    from numpy.distutils.misc_util import Configuration
    # if(os.environ.get('THIS_IS_TRAVIS') is not None):
    #     print("This appears to be Travis!")
    #     compile_args = ['-O3']
    # else:
    #     compile_args = ['-ffast-math', '-O3']
    compile_args = ['-ffast-math', '-O3']
    config = Configuration('quaternion', parent_package, top_path)
    config.add_extension('numpy_quaternion',
                         ['quaternion.c', 'numpy_quaternion.c'],
                         depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
                         extra_compile_args=compile_args, )
    return config


if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(configuration=configuration,
          version=calculate_version(),
          cmdclass={'build_py': build_py_copy_version},)

Add fast-math back to compiler options, now that anaconda can handle it
Add fast-math back to compiler options, now that anaconda can handle it Closes #13 See https://github.com/ContinuumIO/anaconda-issues/issues/182
Python
mit
moble/quaternion,moble/quaternion
8b00632dd6659b9b3c3f792564a81c7b47e0da2c
setup.py
setup.py
import sys import os.path as op from setuptools import setup from distutils.extension import Extension exts = [] if sys.platform == 'darwin': exts.append(Extension( '_send2trash_osx', [op.join('modules', 'send2trash_osx.c')], extra_link_args=['-framework', 'CoreServices'], )) if sys.platform == 'win32': exts.append(Extension( '_send2trash_win', [op.join('modules', 'send2trash_win.c')], extra_link_args = ['shell32.lib'], )) setup( name='Send2Trash', version='1.0.0', author='Hardcoded Software', author_email='[email protected]', packages=['send2trash'], scripts=[], ext_modules = exts, url='http://hg.hardcoded.net/send2trash/', license='LICENSE', description='Send file to trash natively under Mac OS X, Windows and Linux.', )
import sys import os.path as op from setuptools import setup from distutils.extension import Extension exts = [] if sys.platform == 'darwin': exts.append(Extension( '_send2trash_osx', [op.join('modules', 'send2trash_osx.c')], extra_link_args=['-framework', 'CoreServices'], )) if sys.platform == 'win32': exts.append(Extension( '_send2trash_win', [op.join('modules', 'send2trash_win.c')], extra_link_args = ['shell32.lib'], )) setup( name='Send2Trash', version='1.0.0', author='Hardcoded Software', author_email='[email protected]', packages=['send2trash'], scripts=[], ext_modules = exts, url='http://hg.hardcoded.net/send2trash/', license='LICENSE', description='Send file to trash natively under Mac OS X, Windows and Linux.', zip_safe=False, )
Set zip_safe to False, as it causes problems when creating executables for Windows of apps using it.
Set zip_safe to False, as it causes problems when creating executables for Windows of apps using it.
Python
bsd-3-clause
hsoft/send2trash
045417a97251dbb3af2f75e6c0872586acf1f0c4
setup.py
setup.py
from setuptools import setup, find_packages version = '1.0' setup( name='tn.bulletino', version=version, description='', classifiers=[ "Framework :: Plone", "Programming Language :: Python", ], keywords='', author='TN Tecnologia e Negocios', author_email='[email protected]', url='http://www.tecnologiaenegocios.com.br', packages=find_packages('src'), package_dir={'': 'src'}, namespace_packages=['tn'], include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'collective.autopermission', 'five.globalrequest', 'Plone', 'plone.app.z3cform', 'plone.directives.form', 'tn.plonebehavior.template', 'tn.plonehtmlimagecache', 'tn.plonehtmlpage', 'tn.plonemailing', 'tn.plonestyledpage', ], extras_require={ 'test': [ 'stubydoo', ] }, entry_points=""" [z3c.autoinclude.plugin] target = plone """, )
from setuptools import setup, find_packages version = '1.0' setup( name='tn.bulletino', version=version, description='', classifiers=[ "Framework :: Plone", "Programming Language :: Python", ], keywords='', author='TN Tecnologia e Negocios', author_email='[email protected]', url='http://www.tecnologiaenegocios.com.br', packages=find_packages('src'), package_dir={'': 'src'}, namespace_packages=['tn'], include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'collective.autopermission', 'five.globalrequest', 'Plone', 'plone.app.z3cform', 'plone.api', 'plone.directives.form', 'tn.plonebehavior.template', 'tn.plonehtmlimagecache', 'tn.plonehtmlpage', 'tn.plonemailing', 'tn.plonestyledpage', ], extras_require={ 'test': [ 'stubydoo', ] }, entry_points=""" [z3c.autoinclude.plugin] target = plone """, )
Add plone.api as a dependency
Add plone.api as a dependency
Python
bsd-3-clause
tecnologiaenegocios/tn.bulletino
4f561976b28a81d233fc12903252a56a5de4f84e
setup.py
setup.py
from setuptools import ( setup, find_packages, ) #from os import path #here = path.abspath(path.dirname(__file__)) #with open(path.join(here, "README.md")) as f: # long_description = f.read() long_description = "stuff will go here eventually" setup( name="py_types", version="0.1.0a", description="Gradual typing for python 3.", long_description=long_description, url="https://github.com/zekna/py-types", author="Zach Nelson", author_email="[email protected]", license="MIT", classifiers=[ "Develpoment Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development :: Tools", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", ], keywords="type checking development schema", packages=find_packages(exclude=["tests*"]), install_requires=[], extras_require={}, package_data={}, data_files=[], entry_points={}, )
from setuptools import ( setup, find_packages, ) from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, "README.md")) as rdme: with open(path.join(here, "CHANGELOG.md")) as chlog: readme = rdme.read() changes = chlog.read() long_description = readme + "\nCHANGELOG\n--------------------------------------\n" + changes setup( name="py_types", version="0.1.0a", description="Gradual typing for python 3.", long_description=long_description, url="https://github.com/zekna/py-types", author="Zach Nelson", author_email="[email protected]", license="MIT", classifiers=[ "Develpoment Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development :: Tools", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", ], keywords="type checking development schema", packages=find_packages(exclude=["tests*"]), install_requires=[], extras_require={}, package_data={}, data_files=[], entry_points={}, )
Change long description to be README and CHANGELOG
Change long description to be README and CHANGELOG
Python
mit
zekna/py-types
1c678846cf612c83bf4c9a680dc4a6c3a524bd3e
setup.py
setup.py
old_contents:

#!/usr/bin/env python3

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth",
                             [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")],
                             cython_c_in_temp = 1)]
)

new_contents:

#!/usr/bin/env python

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth",
                             [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")],
                             cython_c_in_temp = 1)]
)

Unify the python binaries being invoked in the various scripts.
Unify the python binaries being invoked in the various scripts.
Python
lgpl-2.1
vapoursynth/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth
22e2d980d900f30901cf6f2ef5f167ddec62e9a7
setup.py
setup.py
old_contents:

import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name = "UCLDC Deep Harvester",
    version = "0.0.1",
    description = ("deep harvester code for the UCLDC project"),
    long_description=read('README.md'),
    author='Barbara Hui',
    author_email='[email protected]',
    dependency_links=[
        'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
        'https://github.com/barbarahui/nuxeo-calisphere/archive/master.zip#egg=UCLDC-Deep-Harvester'
    ],
    install_requires=[
        'boto',
        'pynux',
        'python-magic',
        'UCLDC-Deep-Harvester'
    ],
    packages=['deepharvest'],
    test_suite='tests'
)

new_contents:

import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name = "UCLDC Deep Harvester",
    version = "0.0.1",
    description = ("deep harvester code for the UCLDC project"),
    long_description=read('README.md'),
    author='Barbara Hui',
    author_email='[email protected]',
    dependency_links=[
        'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
    ],
    install_requires=[
        'boto',
        'pynux',
        'python-magic',
    ],
    packages=['deepharvest'],
    test_suite='tests'
)

Remove this package as a dependency for itself!
Remove this package as a dependency for itself!
Python
bsd-3-clause
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
1d9349255fa29b8c29c7d916a5750a8cd0da8f78
setup.py
setup.py
from setuptools import setup description = """ Full featured redis cache backend for Django. """ setup( name = "django-redis", url = "https://github.com/niwibe/django-redis", author = "Andrei Antoukh", author_email = "[email protected]", version='3.5.2', packages = [ "redis_cache", "redis_cache.client" ], description = description.strip(), install_requires=[ 'redis>=2.7.0', ], zip_safe=False, include_package_data = True, package_data = { '': ['*.html'], }, classifiers = [ "Development Status :: 5 - Production/Stable", "Operating System :: OS Independent", "Environment :: Web Environment", "Framework :: Django", "License :: OSI Approved :: BSD License", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Software Development :: Libraries", "Topic :: Utilities", ], )
from setuptools import setup description = """ Full featured redis cache backend for Django. """ setup( name = "django-redis", url = "https://github.com/niwibe/django-redis", author = "Andrei Antoukh", author_email = "[email protected]", version='3.5.2', packages = [ "redis_cache", "redis_cache.client" ], description = description.strip(), install_requires=[ 'redis>=2.9.0', ], zip_safe=False, include_package_data = True, package_data = { '': ['*.html'], }, classifiers = [ "Development Status :: 5 - Production/Stable", "Operating System :: OS Independent", "Environment :: Web Environment", "Framework :: Django", "License :: OSI Approved :: BSD License", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Software Development :: Libraries", "Topic :: Utilities", ], )
Set redis-py >= 2.9.0 requirement.
Set redis-py >= 2.9.0 requirement.
Python
bsd-3-clause
smahs/django-redis,yanheng/django-redis,lucius-feng/django-redis,zl352773277/django-redis,GetAmbassador/django-redis
9ed88cba879168a7b9ba550668e7f7a617b4e789
setup.py
setup.py
old_contents:

# -*- coding: utf-8 -*-
from setuptools import find_packages, setup

from valohai_cli import __version__

setup(
    name='valohai-cli',
    version=__version__,
    entry_points={'console_scripts': ['vh=valohai_cli.cli:cli']},
    author='Valohai',
    author_email='[email protected]',
    license='MIT',
    install_requires=[
        'click>=6.0',
        'six>=1.10.0',
        'valohai-yaml>=0.8',
        'requests[security]>=2.0.0',
        'requests-toolbelt>=0.7.1',
    ],
    packages=find_packages(include=('valohai_cli*',)),
)

new_contents:

# -*- coding: utf-8 -*-
from setuptools import find_packages, setup

from valohai_cli import __version__

setup(
    name='valohai-cli',
    version=__version__,
    entry_points={'console_scripts': ['vh=valohai_cli.cli:cli']},
    author='Valohai',
    author_email='[email protected]',
    license='MIT',
    install_requires=[
        'click>=7.0',
        'six>=1.10.0',
        'valohai-yaml>=0.8',
        'requests[security]>=2.0.0',
        'requests-toolbelt>=0.7.1',
    ],
    packages=find_packages(include=('valohai_cli*',)),
)

Correct click version requirement to >= 7.0
Correct click version requirement to >= 7.0 We need the show_envvar kwarg since 1a0f77b33150c02648652e793974f0312a17e7d7 which was added in pallets/click#710 and released in Click 7.0.
Python
mit
valohai/valohai-cli
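Editor's note: the message above pins click>=7.0 because the CLI relies on the show_envvar keyword that Click only gained in version 7.0. The short sketch below illustrates what that keyword does; the command, option name and environment variable are hypothetical and not taken from valohai-cli.

import click

@click.command()
@click.option('--token', envvar='EXAMPLE_TOKEN', show_envvar=True,
              help='API token.')  # show_envvar requires Click >= 7.0
def main(token):
    click.echo('token is set' if token else 'token is missing')

if __name__ == '__main__':
    main()

With show_envvar=True, running the script with --help lists EXAMPLE_TOKEN in the option's help entry, which is the behaviour the pinned version provides.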
3a67b514968f0c002f049ce8e34710412ca39904
setup.py
setup.py
old_contents:

from setuptools import setup, find_packages

import os

scriptnames = ['runElegant.sh', 'sddsprintdata.sh', 'renametolower.sh', 'file2lower.sh', 'lte2json', 'json2lte', 'latticeviewer', 'lv']

def readme():
    with open('README.rst') as f:
        return f.read()

requiredpackages = ['pyrpn', 'h5py', 'numpy', 'matplotlib', 'pyepics']

setup(
    name = "beamline",
    version = "1.3.6",
    description = "online model package for electron accelerator",
    long_description = readme() + '\n\n',
    author = "Tong Zhang",
    author_email = "[email protected]",
    platforms = ["Linux"],
    license = "MIT",
    packages = find_packages(),
    url = "http://archman.github.io/beamline/",
    scripts = [os.path.join('scripts',sn) for sn in scriptnames],
    requires = requiredpackages,
    extras_require = {'sdds': ['sddswhl']},
)

new_contents:

from setuptools import setup, find_packages

import os

scriptnames = ['runElegant.sh', 'sddsprintdata.sh', 'renametolower.sh', 'file2lower.sh', 'lte2json', 'json2lte', 'latticeviewer', 'lv']

def readme():
    with open('README.rst') as f:
        return f.read()

requiredpackages = ['pyrpn', 'h5py', 'numpy', 'matplotlib', 'pyepics', 'wxPython']

setup(
    name = "beamline",
    version = "1.3.6",
    description = "online model package for electron accelerator",
    long_description = readme() + '\n\n',
    author = "Tong Zhang",
    author_email = "[email protected]",
    platforms = ["Linux"],
    license = "MIT",
    packages = find_packages(),
    url = "http://archman.github.io/beamline/",
    scripts = [os.path.join('scripts',sn) for sn in scriptnames],
    requires = requiredpackages,
    install_requires = requiredpackages,
    extras_require = {'sdds': ['sddswhl']},
)

Add wxPython and use install_requires
Add wxPython and use install_requires * Add wxPython as a depenency * Use `install_requires` so dependencies are installed if one installs this package
Python
mit
Archman/beamline
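Editor's note: the reasoning in the message above is the key point of this row. The distutils-era requires keyword only ends up in the package metadata, while setuptools' install_requires is what pip actually resolves and installs. A minimal sketch of the distinction, with a hypothetical package name:

from setuptools import setup

deps = ['numpy', 'h5py']

setup(
    name='example-pkg',        # hypothetical name, for illustration only
    version='0.1.0',
    requires=deps,             # written to metadata; ignored at install time
    install_requires=deps,     # pulled in automatically when the package is installed
)

Because pip acts only on install_requires, adding it (as the commit does) is what makes the listed dependencies install along with the package.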
7627b8759ab08df562048ec1fa94fe9d69d01374
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict==1.1') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, )
#!/usr/bin/env python from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict>=1.1') try: import importlib except ImportError: required.append('importlib>=1.0.2') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, )
Add importlib if not included
Add importlib if not included
Python
bsd-3-clause
tadpol/exoline,azdle/exoline,asolz/exoline,danslimmon/exoline,tadpol/exoline,asolz/exoline,azdle/exoline,danslimmon/exoline
48f664721fc866871a17b459eb22e5641b311067
setup.py
setup.py
old_contents:

#!/usr/bin/env python3

from setuptools import setup

setup(
    name='sht',
    version='1.0',
    description='A fast spherical harmonic transform implementation',
    author='Praveen Venkatesh',
    url='https://github.com/praveenv253/sht',
    packages=['sht', ],
    install_requires=['numpy', 'scipy', ],
    setup_requires=['pytest-runner', ],
    tests_require=['pytest', ],
    license='MIT',
)

new_contents:

#!/usr/bin/env python3

from setuptools import setup

setup(
    name='sht',
    version='1.1',
    description='A fast spherical harmonic transform implementation',
    author='Praveen Venkatesh',
    url='https://github.com/praveenv253/sht',
    packages=['sht', ],
    install_requires=['numpy', 'scipy', ],
    setup_requires=['pytest-runner', ],
    tests_require=['pytest', ],
    license='MIT',
)

Update version number to 1.1.
Update version number to 1.1.
Python
mit
praveenv253/sht,praveenv253/sht
160ad684262b654ce4f1e6ca2fc97a06f79ec6c6
setup.py
setup.py
# coding: utf-8 import sys from setuptools import setup PY_VERSION = sys.version_info[0], sys.version_info[1] requirements = [ 'requests>=1.0', 'python-dateutil>=2.1', 'six>=1.2.0', ] if PY_VERSION == (2, 6): requirements.append('argparse') setup( name='pyuploadcare', version='1.2.12', description='Python library for Uploadcare.com', long_description=( open('README.rst').read() + '\n\n' + open('HISTORY.rst').read() ), author='Uploadcare LLC', author_email='[email protected]', url='https://github.com/uploadcare/pyuploadcare', packages=['pyuploadcare', 'pyuploadcare.dj'], package_data={ 'pyuploadcare.dj': [ 'static/uploadcare/assets/uploaders/*.js', ] }, entry_points={ 'console_scripts': [ 'ucare = pyuploadcare.ucare_cli:main', ], }, install_requires=requirements, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], )
# coding: utf-8 import sys from setuptools import setup PY_VERSION = sys.version_info[0], sys.version_info[1] requirements = [ 'requests>=1.0', 'python-dateutil>=2.1', 'six>=1.2.0', ] if PY_VERSION == (2, 6): requirements.append('argparse') if PY_VERSION < (3, 0): long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read() else: long_description = open('README.rst', encoding='utf-8').read() + '\n\n' + open('HISTORY.rst', encoding='utf-8').read() setup( name='pyuploadcare', version='1.2.12', description='Python library for Uploadcare.com', long_description=(long_description), author='Uploadcare LLC', author_email='[email protected]', url='https://github.com/uploadcare/pyuploadcare', packages=['pyuploadcare', 'pyuploadcare.dj'], package_data={ 'pyuploadcare.dj': [ 'static/uploadcare/assets/uploaders/*.js', ] }, entry_points={ 'console_scripts': [ 'ucare = pyuploadcare.ucare_cli:main', ], }, install_requires=requirements, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], )
Fix encoding issue when installing with pip3
Fix encoding issue when installing with pip3
Python
mit
uploadcare/pyuploadcare
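Editor's note: the fix in this row branches on the interpreter version because the builtin open() only accepts an encoding argument on Python 3. As an aside, io.open takes encoding on Python 2.6+ as well, so a version-independent variant could look like the sketch below (same file names as in the row; this is an alternative, not the project's actual code):

import io

def read(path):
    # io.open accepts an encoding argument on both Python 2 and Python 3
    with io.open(path, encoding='utf-8') as fobj:
        return fobj.read()

long_description = read('README.rst') + '\n\n' + read('HISTORY.rst')

The PY_VERSION branch shown in the row achieves the same result; this is only a note on an equivalent, branch-free option.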
2901988d46a644c70ba12409c06e0bcb3bfc0eff
onadata/apps/restservice/services/kpi_hook.py
onadata/apps/restservice/services/kpi_hook.py
# coding: utf-8 import logging import re import requests from django.conf import settings from onadata.apps.restservice.RestServiceInterface import RestServiceInterface from onadata.apps.logger.models import Instance class ServiceDefinition(RestServiceInterface): id = 'kpi_hook' verbose_name = 'KPI Hook POST' def send(self, endpoint, data): # Will be used internally by KPI to fetch data with KoBoCatBackend post_data = { 'instance_id': data.get('instance_id') } headers = {'Content-Type': 'application/json'} # Verify if endpoint starts with `/assets/` before sending # the request to KPI pattern = r'{}'.format(settings.KPI_HOOK_ENDPOINT_PATTERN.replace( '{asset_uid}', '[^/]*')) # Match v2 and v1 endpoints. if re.match(pattern, endpoint) or re.match(pattern[7:], endpoint): # Build the url in the service to avoid saving hardcoded # domain name in the DB url = f'{settings.KOBOFORM_INTERNAL_URL}{endpoint}' response = requests.post(url, headers=headers, json=post_data) response.raise_for_status() # Save successful Instance.objects.filter(pk=data.get('instance_id')).update( posted_to_kpi=True ) else: logging.warning( f'This endpoint: `{endpoint}` is not valid for `KPI Hook`' )
# coding: utf-8 import logging import re import requests from django.conf import settings from onadata.apps.restservice.RestServiceInterface import RestServiceInterface from onadata.apps.logger.models import Instance class ServiceDefinition(RestServiceInterface): id = 'kpi_hook' verbose_name = 'KPI Hook POST' def send(self, endpoint, data): # Will be used internally by KPI to fetch data with KoBoCatBackend post_data = { 'submission_id': data.get('instance_id') } headers = {'Content-Type': 'application/json'} # Verify if endpoint starts with `/assets/` before sending # the request to KPI pattern = r'{}'.format(settings.KPI_HOOK_ENDPOINT_PATTERN.replace( '{asset_uid}', '[^/]*')) # Match v2 and v1 endpoints. if re.match(pattern, endpoint) or re.match(pattern[7:], endpoint): # Build the url in the service to avoid saving hardcoded # domain name in the DB url = f'{settings.KOBOFORM_INTERNAL_URL}{endpoint}' response = requests.post(url, headers=headers, json=post_data) response.raise_for_status() # Save successful Instance.objects.filter(pk=data.get('instance_id')).update( posted_to_kpi=True ) else: logging.warning( f'This endpoint: `{endpoint}` is not valid for `KPI Hook`' )
Use "submission_id" instead of "instance_id" parameter to send to KPI for RESTservice
Use "submission_id" instead of "instance_id" parameter to send to KPI for RESTservice
Python
bsd-2-clause
kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat
9f485a55227406c3cfbfb3154ec8d0f2cad8ae67
publisher/build_paper.py
publisher/build_paper.py
#!/usr/bin/env python import docutils.core as dc from writer import writer import os.path import sys import glob preamble = r''' % These preamble commands are from build_paper.py % PDF Standard Fonts \usepackage{mathptmx} \usepackage[scaled=.90]{helvet} \usepackage{courier} % Make verbatim environment smaller \makeatletter \g@addto@macro\@verbatim\footnotesize \makeatother \renewcommand{\quote}{} ''' settings = {'documentclass': 'IEEEtran', 'use_verbatim_when_possible': True, 'use_latex_citations': True, 'latex_preamble': preamble} if len(sys.argv) != 2: print "Usage: build_paper.py paper_directory" sys.exit(-1) path = sys.argv[1] if not os.path.isdir(path): print("Cannot open directory: %s" % path) sys.exit(-1) rst = glob.glob(os.path.join(path, '*.rst'))[0] content = open(rst, 'r').read() content = ''' .. role:: math(raw) :format: latex ''' + content tex = dc.publish_string(source=content, writer=writer, settings_overrides=settings) out = open('/tmp/paper.tex', 'w') out.write(tex) out.close()
#!/usr/bin/env python import docutils.core as dc from writer import writer import os.path import sys import glob preamble = r''' % These preamble commands are from build_paper.py % PDF Standard Fonts \usepackage{mathptmx} \usepackage[scaled=.90]{helvet} \usepackage{courier} % Make verbatim environment smaller \makeatletter \g@addto@macro\@verbatim\footnotesize \makeatother \renewcommand{\quote}{} ''' settings = {'documentclass': 'IEEEtran', 'use_verbatim_when_possible': True, 'use_latex_citations': True, 'latex_preamble': preamble, 'documentoptions': 'letterpaper,compsoc,twoside'} if len(sys.argv) != 2: print "Usage: build_paper.py paper_directory" sys.exit(-1) path = sys.argv[1] if not os.path.isdir(path): print("Cannot open directory: %s" % path) sys.exit(-1) rst = glob.glob(os.path.join(path, '*.rst'))[0] content = open(rst, 'r').read() content = ''' .. role:: math(raw) :format: latex ''' + content tex = dc.publish_string(source=content, writer=writer, settings_overrides=settings) out = open('/tmp/paper.tex', 'w') out.write(tex) out.close()
Use IEEE computer society layout to improve looks.
Use IEEE computer society layout to improve looks.
Python
bsd-2-clause
Stewori/euroscipy_proceedings,helgee/euroscipy_proceedings,juhasch/euroscipy_proceedings,mwcraig/scipy_proceedings,sbenthall/scipy_proceedings,helgee/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,chendaniely/scipy_proceedings,dotsdl/scipy_proceedings,mikaem/euroscipy_proceedings,mjklemm/euroscipy_proceedings,Stewori/euroscipy_proceedings,katyhuff/scipy_proceedings,Stewori/euroscipy_proceedings,katyhuff/scipy_proceedings,mikaem/euroscipy_proceedings,mikaem/euroscipy_proceedings,helgee/euroscipy_proceedings,michaelpacer/scipy_proceedings,katyhuff/scipy_proceedings,michaelpacer/scipy_proceedings,mwcraig/scipy_proceedings,dotsdl/scipy_proceedings,euroscipy/euroscipy_proceedings,juhasch/euroscipy_proceedings,springcoil/euroscipy_proceedings,euroscipy/euroscipy_proceedings,springcoil/euroscipy_proceedings,chendaniely/scipy_proceedings,springcoil/euroscipy_proceedings,juhasch/euroscipy_proceedings,euroscipy/euroscipy_proceedings,sbenthall/scipy_proceedings,michaelpacer/scipy_proceedings,SepidehAlassi/euroscipy_proceedings,dotsdl/scipy_proceedings,mjklemm/euroscipy_proceedings,mwcraig/scipy_proceedings,mjklemm/euroscipy_proceedings,sbenthall/scipy_proceedings,chendaniely/scipy_proceedings
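Editor's note: the change in this row threads a 'documentoptions' value through docutils' settings_overrides; with the latex2e writer those options become the option list of the generated \documentclass line. A self-contained sketch of that mechanism, using the stock writer_name='latex' rather than the project's custom writer module:

import docutils.core as dc

rst = "Title\n=====\n\nHello, IEEE.\n"
tex = dc.publish_string(
    source=rst,
    writer_name='latex',
    settings_overrides={
        'documentclass': 'IEEEtran',
        'documentoptions': 'letterpaper,compsoc,twoside',
        'output_encoding': 'unicode',   # return str instead of bytes
    },
)
# The options appear verbatim in the \documentclass line of the LaTeX source.
print('letterpaper,compsoc,twoside' in tex)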
efe15dae9d57fe6e18d722057c1cf48bd855c28e
py2app/recipes/pyside.py
py2app/recipes/pyside.py
import pkg_resources import glob import os def check(cmd, mf): name = 'PySide' m = mf.findNode(name) if m is None or m.filename is None: return None from PySide import QtCore plugin_dir = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.PluginsPath) resources = [pkg_resources.resource_filename('py2app', 'recipes/qt.conf')] for item in cmd.qt_plugins: if '/' not in item: item = item + '/*' if '*' in item: for path in glob.glob(os.path.join(plugin_dir, item)): resources.append((os.path.dirname('qt_plugins' + path[len(plugin_dir):]), [path])) else: resources.append((os.path.dirname(os.path.join('qt_plugins', item)), [os.path.join(plugin_dir, item)])) # PySide dumps some of its shared files # into /usr/lib, which is a system location # and those files are therefore not included # into the app bundle by default. from macholib.util import NOT_SYSTEM_FILES NOT_SYSTEM_FILES import sys for fn in os.listdir('/usr/lib'): add=False if fn.startswith('libpyside-python'): add=True elif fn.startswith('libshiboken-python'): add=True if add: NOT_SYSTEM_FILES.append(os.path.join('/usr/lib', fn)) return dict(resources=resources)
import pkg_resources import glob import os def check(cmd, mf): name = 'PySide' m = mf.findNode(name) if m is None or m.filename is None: return None from PySide import QtCore plugin_dir = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.PluginsPath) resources = [pkg_resources.resource_filename('py2app', 'recipes/qt.conf')] for item in cmd.qt_plugins: if '/' not in item: item = item + '/*' if '*' in item: for path in glob.glob(os.path.join(plugin_dir, item)): resources.append((os.path.dirname('qt_plugins' + path[len(plugin_dir):]), [path])) else: resources.append((os.path.dirname(os.path.join('qt_plugins', item)), [os.path.join(plugin_dir, item)])) # PySide dumps some of its shared files # into /usr/lib, which is a system location # and those files are therefore not included # into the app bundle by default. from macholib.util import NOT_SYSTEM_FILES NOT_SYSTEM_FILES import sys for fn in os.listdir('/usr/lib'): add=False if fn.startswith('libpyside-python'): add=True elif fn.startswith('libshiboken-python'): add=True if add: NOT_SYSTEM_FILES.append(os.path.join('/usr/lib', fn)) return dict(resources=resources)
Fix incorrect indentation messing up PySide
Fix incorrect indentation messing up PySide
Python
mit
metachris/py2app,metachris/py2app,metachris/py2app,metachris/py2app
cb2c937fa16590a7431f450c0fc79cc68dd9984c
readthedocs/cdn/purge.py
readthedocs/cdn/purge.py
old_contents:

import logging

from django.conf import settings

log = logging.getLogger(__name__)

CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')


def purge(files):
    log.error("CDN not configured, can't purge files")


if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
    if CDN_SERVICE == 'maxcdn':
        from maxcdn import MaxCDN as cdn_service
        api = cdn_service(CDN_USERNAME, CDN_KEY, CDN_SECET)

        def purge(files):
            return api.purge(CDN_ID, files)

new_contents:

import logging

from django.conf import settings

log = logging.getLogger(__name__)

CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')


def purge(files):
    log.error("CDN not configured, can't purge files")


if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
    if CDN_SERVICE == 'maxcdn':
        from maxcdn import MaxCDN
        api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)

        def purge(files):
            return api.purge(CDN_ID, files)

Clean up bad logic to make it slightly less bad
Clean up bad logic to make it slightly less bad
Python
mit
sid-kap/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,laplaceliu/readthedocs.org,hach-que/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,techtonik/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,mhils/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,davidfischer/readthedocs.org,clarkperkins/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,GovReady/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,GovReady/readthedocs.org,espdev/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,Tazer/readthedocs.org,VishvajitP/readthedocs.org,fujita-shintaro/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,royalwang/readthedocs.org,LukasBoersma/readthedocs.org,wijerasa/readthedocs.org,sid-kap/readthedocs.org,gjtorikian/readthedocs.org,emawind84/readthedocs.org,kenwang76/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,tddv/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,davidfischer/readthedocs.org,LukasBoersma/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,istresearch/readthedocs.org,emawind84/readthedocs.org,SteveViss/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,VishvajitP/readthedocs.org,stevepiercy/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,gjtorikian/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,rtfd/readthedocs.org,laplaceliu/readthedocs.org,soulshake/readthedocs.org,rtfd/readthedocs.org,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org
,laplaceliu/readthedocs.org,techtonik/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,atsuyim/readthedocs.org
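Editor's note: the module in the row above swaps the purge function definition at import time depending on which settings are present. A sketch of the same behaviour folded into a single function; the settings names and maxcdn calls are the ones already used in the row, while the reorganisation itself is mine and not the project's:

import logging

from django.conf import settings

log = logging.getLogger(__name__)


def purge(files):
    """Purge files from the CDN, or log an error if it is not configured."""
    if not (settings.CDN_USERNAME and settings.CDN_KEY
            and settings.CDN_SECET and settings.CDN_ID):
        log.error("CDN not configured, can't purge files")
        return None
    if settings.CDN_SERVICE == 'maxcdn':
        from maxcdn import MaxCDN
        api = MaxCDN(settings.CDN_USERNAME, settings.CDN_KEY, settings.CDN_SECET)
        return api.purge(settings.CDN_ID, files)
    return None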
552afcd33d890d2798b52919c0b4c0d146b7d914
make_ids.py
make_ids.py
#!/usr/bin/env python import csv import json import os import sys def format_entities_as_list(entities): for i, entity in enumerate(entities, 1): yield (unicode(i), json.dumps(entity["terms"])) def generate_entities(fobj): termsets_seen = set() for line in fobj: entity = json.loads(line) termset = tuple(entity["terms"]) if termset not in termsets_seen: termsets_seen.add(termset) yield entity def load_entities_from_file(infile, outfile): if os.path.exists(outfile): raise RuntimeError("Output file %r already exists" % outfile) with open(infile) as in_fobj: with open(outfile, "wb") as out_fobj: writer = csv.writer(out_fobj) for row in format_entities_as_list(generate_entities(in_fobj)): writer.writerow(row) if __name__ == '__main__': load_entities_from_file(sys.argv[1], sys.argv[2])
#!/usr/bin/env python import csv import json import os import sys def format_entities_as_list(entities): """Format entities read from an iterator as lists. :param entities: An iterator yielding entities as dicts: eg {"terms": ["Fred"]} Yield a sequence of entites formatted as lists containing string values. Also allocates identifier numbers. Sequences are formatted as json. eg: ["1", '["Fred"]'] The resulting sequence is ideal for conversion to CSV. """ for i, entity in enumerate(entities, 1): yield (unicode(i), json.dumps(entity["terms"])) def generate_entities(fobj): """Generate entities by reading from a file object. :param fobj: File object to read from. Each line in the file should represent an entity. Yields a sequence of dicts representing entities, where the dicts will contain at the least a "terms" object. """ termsets_seen = set() for line in fobj: entity = json.loads(line) termset = tuple(entity["terms"]) if termset not in termsets_seen: termsets_seen.add(termset) yield entity def convert_entities_from_file_to_csv(infile, outfile): """Convert entities from a file to CSV format. :param infile: The file name to read entities from. Formatted as jsonlines (http://jsonlines.org/) - one line per entity. :param outfile: The file name to write entities to as CSV. """ if os.path.exists(outfile): raise RuntimeError("Output file %r already exists" % outfile) with open(infile) as in_fobj: with open(outfile, "wb") as out_fobj: writer = csv.writer(out_fobj) for row in format_entities_as_list(generate_entities(in_fobj)): writer.writerow(row) if __name__ == '__main__': convert_entities_from_file_to_csv(sys.argv[1], sys.argv[2])
Add docstrings to all functions
Add docstrings to all functions
Python
mit
alphagov/entity-manager
5fc7dccdb61eefed40361385166330e285eab85f
a11y_tests/test_course_enrollment_demographics_axs.py
a11y_tests/test_course_enrollment_demographics_axs.py
from bok_choy.web_app_test import WebAppTest from bok_choy.promise import EmptyPromise from a11y_tests.pages import CourseEnrollmentDemographicsAgePage from a11y_tests.mixins import CoursePageTestsMixin _multiprocess_can_split_ = True class CourseEnrollmentDemographicsAgeTests(CoursePageTestsMixin, WebAppTest): """ A test for the accessibility of the CourseEnrollmentDemographicsAgePage. """ def setUp(self): super(CourseEnrollmentDemographicsAgeTests, self).setUp() self.page = CourseEnrollmentDemographicsAgePage(self.browser) def test_a11y(self): # Log in and navigate to page self.login() self.page.visit() self.page.a11y_audit.config.set_rules({ "ignore": [ 'color-contrast', # TODO: AN-6010, AN-6011 'skip-link', # TODO: AN-6185 'link-href', # TODO: AN-6186 'icon-aria-hidden', # TODO: AN-6187 ], }) # Wait for the datatable to finish loading ready_promise = EmptyPromise( lambda: 'Loading' not in self.q(css='div.section-data-table').text, "Page finished loading" ).fulfill() # Check the page for accessibility errors report = self.page.a11y_audit.check_for_accessibility_errors()
from bok_choy.web_app_test import WebAppTest from bok_choy.promise import EmptyPromise from a11y_tests.pages import CourseEnrollmentDemographicsAgePage from a11y_tests.mixins import CoursePageTestsMixin _multiprocess_can_split_ = True class CourseEnrollmentDemographicsAgeTests(CoursePageTestsMixin, WebAppTest): """ A test for the accessibility of the CourseEnrollmentDemographicsAgePage. """ def setUp(self): super(CourseEnrollmentDemographicsAgeTests, self).setUp() self.page = CourseEnrollmentDemographicsAgePage(self.browser) def test_a11y(self): # Log in and navigate to page self.login() self.page.visit() self.page.a11y_audit.config.set_rules({ "ignore": [ 'color-contrast', # TODO: AN-6010, AN-6011 'skip-link', # TODO: AN-6185 'link-href', # TODO: AN-6186 'icon-aria-hidden', # TODO: AN-6187 ], }) # Wait for the datatable to finish loading ready_promise = EmptyPromise( lambda: 'Loading' not in self.page.q(css='div.section-data-table').text, "Page finished loading" ).fulfill() # Check the page for accessibility errors report = self.page.a11y_audit.check_for_accessibility_errors()
Fix bok-choy page query call
Fix bok-choy page query call
Python
agpl-3.0
edx/edx-analytics-dashboard,edx/edx-analytics-dashboard,Stanford-Online/edx-analytics-dashboard,edx/edx-analytics-dashboard,edx/edx-analytics-dashboard,Stanford-Online/edx-analytics-dashboard,Stanford-Online/edx-analytics-dashboard,Stanford-Online/edx-analytics-dashboard
25a2aeda1b041afbfbd1de09f784e0b7a3732215
IPython/nbconvert/exporters/python.py
IPython/nbconvert/exporters/python.py
""" Python exporter which exports Notebook code into a PY file. """ #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import Unicode from .exporter import TemplateExporter #----------------------------------------------------------------------------- # Classes #----------------------------------------------------------------------------- class PythonExporter(TemplateExporter): """ Exports a Python code file. """ file_extension = Unicode( 'py', config=True, help="Extension of the file that should be written to disk")
""" Python exporter which exports Notebook code into a PY file. """ #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.utils.traitlets import Unicode from .templateexporter import TemplateExporter #----------------------------------------------------------------------------- # Classes #----------------------------------------------------------------------------- class PythonExporter(TemplateExporter): """ Exports a Python code file. """ file_extension = Unicode( 'py', config=True, help="Extension of the file that should be written to disk")
Rebase changes made by hand
Rebase changes made by hand
Python
bsd-3-clause
SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets
4663fdb44628238997ecc5adbb0f0193c99efc6c
script/lib/config.py
script/lib/config.py
#!/usr/bin/env python import platform import sys BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' LIBCHROMIUMCONTENT_COMMIT = '26dd65a62e35aa98b25c10cbfc00f1a621fd4c4b' ARCH = { 'cygwin': '32bit', 'darwin': '64bit', 'linux2': platform.architecture()[0], 'win32': '32bit', }[sys.platform] DIST_ARCH = { '32bit': 'ia32', '64bit': 'x64', }[ARCH] TARGET_PLATFORM = { 'cygwin': 'win32', 'darwin': 'darwin', 'linux2': 'linux', 'win32': 'win32', }[sys.platform] verbose_mode = False def enable_verbose_mode(): print 'Running in verbose mode' global verbose_mode verbose_mode = True def is_verbose_mode(): return verbose_mode
#!/usr/bin/env python import platform import sys BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' LIBCHROMIUMCONTENT_COMMIT = 'c01b10faf0d478e48f537210ec263fabd551578d' ARCH = { 'cygwin': '32bit', 'darwin': '64bit', 'linux2': platform.architecture()[0], 'win32': '32bit', }[sys.platform] DIST_ARCH = { '32bit': 'ia32', '64bit': 'x64', }[ARCH] TARGET_PLATFORM = { 'cygwin': 'win32', 'darwin': 'darwin', 'linux2': 'linux', 'win32': 'win32', }[sys.platform] verbose_mode = False def enable_verbose_mode(): print 'Running in verbose mode' global verbose_mode verbose_mode = True def is_verbose_mode(): return verbose_mode
Update libchromiumcontent to disable zygote process
Update libchromiumcontent to disable zygote process
Python
mit
Jonekee/electron,Faiz7412/electron,cos2004/electron,mubassirhayat/electron,smczk/electron,fomojola/electron,Jonekee/electron,IonicaBizauKitchen/electron,howmuchcomputer/electron,Floato/electron,bruce/electron,joaomoreno/atom-shell,tomashanacek/electron,ervinb/electron,gbn972/electron,Zagorakiss/electron,rhencke/electron,tomashanacek/electron,biblerule/UMCTelnetHub,beni55/electron,astoilkov/electron,deed02392/electron,coderhaoxin/electron,dkfiresky/electron,bruce/electron,shaundunne/electron,oiledCode/electron,systembugtj/electron,brave/muon,lzpfmh/electron,coderhaoxin/electron,Jonekee/electron,miniak/electron,eriser/electron,shaundunne/electron,brenca/electron,howmuchcomputer/electron,cqqccqc/electron,bbondy/electron,RobertJGabriel/electron,pandoraui/electron,anko/electron,destan/electron,eriser/electron,simonfork/electron,kcrt/electron,egoist/electron,jiaz/electron,gamedevsam/electron,twolfson/electron,bwiggs/electron,leethomas/electron,iftekeriba/electron,shockone/electron,adamjgray/electron,edulan/electron,jaanus/electron,michaelchiche/electron,evgenyzinoviev/electron,howmuchcomputer/electron,xiruibing/electron,jannishuebl/electron,hokein/atom-shell,baiwyc119/electron,nicobot/electron,leftstick/electron,preco21/electron,MaxWhere/electron,destan/electron,michaelchiche/electron,jiaz/electron,LadyNaggaga/electron,bitemyapp/electron,aaron-goshine/electron,brave/muon,deed02392/electron,Evercoder/electron,vaginessa/electron,Jacobichou/electron,LadyNaggaga/electron,Andrey-Pavlov/electron,gabriel/electron,gbn972/electron,shennushi/electron,Rokt33r/electron,neutrous/electron,xiruibing/electron,electron/electron,soulteary/electron,setzer777/electron,jtburke/electron,smczk/electron,RIAEvangelist/electron,leethomas/electron,bwiggs/electron,yan-foto/electron,shennushi/electron,yan-foto/electron,jiaz/electron,bbondy/electron,posix4e/electron,ervinb/electron,Neron-X5/electron,michaelchiche/electron,meowlab/electron,aaron-goshine/electron,greyhwndz/electron,fomojola/electron,carsonmcdonald/electron,matiasinsaurralde/electron,MaxGraey/electron,vHanda/electron,noikiy/electron,synaptek/electron,felixrieseberg/electron,mattotodd/electron,medixdev/electron,JussMee15/electron,tinydew4/electron,adamjgray/electron,jacksondc/electron,leolujuyi/electron,rsvip/electron,voidbridge/electron,leethomas/electron,dahal/electron,trankmichael/electron,adamjgray/electron,aaron-goshine/electron,leolujuyi/electron,faizalpribadi/electron,jjz/electron,systembugtj/electron,astoilkov/electron,tinydew4/electron,trankmichael/electron,rreimann/electron,twolfson/electron,aichingm/electron,jsutcodes/electron,davazp/electron,pirafrank/electron,rhencke/electron,kcrt/electron,jaanus/electron,jlhbaseball15/electron,carsonmcdonald/electron,preco21/electron,RobertJGabriel/electron,gabrielPeart/electron,rreimann/electron,cqqccqc/electron,shaundunne/electron,gerhardberger/electron,dkfiresky/electron,yalexx/electron,joneit/electron,fffej/electron,ankitaggarwal011/electron,leolujuyi/electron,bobwol/electron,jlord/electron,dongjoon-hyun/electron,aliib/electron,brenca/electron,dahal/electron,shennushi/electron,Neron-X5/electron,michaelchiche/electron,saronwei/electron,Evercoder/electron,anko/electron,mattotodd/electron,nicholasess/electron,JussMee15/electron,synaptek/electron,chrisswk/electron,pirafrank/electron,Evercoder/electron,stevemao/electron,mhkeller/electron,mjaniszew/electron,egoist/electron,Neron-X5/electron,carsonmcdonald/electron,nekuz0r/electron,brave/muon,bpasero/electron,wolfflow/electron,gerhardberger/electron,icattlecoder/electr
on,kokdemo/electron,IonicaBizauKitchen/electron,eriser/electron,coderhaoxin/electron,bright-sparks/electron,sircharleswatson/electron,cqqccqc/electron,baiwyc119/electron,thompsonemerson/electron,hokein/atom-shell,Andrey-Pavlov/electron,leethomas/electron,trigrass2/electron,icattlecoder/electron,bbondy/electron,SufianHassan/electron,simonfork/electron,arusakov/electron,jjz/electron,jannishuebl/electron,DivyaKMenon/electron,kostia/electron,pirafrank/electron,tylergibson/electron,bbondy/electron,pirafrank/electron,rhencke/electron,bitemyapp/electron,kazupon/electron,jtburke/electron,digideskio/electron,takashi/electron,joneit/electron,simongregory/electron,anko/electron,astoilkov/electron,Andrey-Pavlov/electron,jsutcodes/electron,hokein/atom-shell,John-Lin/electron,Jonekee/electron,kokdemo/electron,shiftkey/electron,stevemao/electron,timruffles/electron,aecca/electron,MaxWhere/electron,medixdev/electron,nicobot/electron,minggo/electron,posix4e/electron,Ivshti/electron,tonyganch/electron,nicholasess/electron,cqqccqc/electron,aichingm/electron,jaanus/electron,preco21/electron,natgolov/electron,howmuchcomputer/electron,Evercoder/electron,pandoraui/electron,jhen0409/electron,sshiting/electron,minggo/electron,Andrey-Pavlov/electron,tonyganch/electron,John-Lin/electron,Faiz7412/electron,neutrous/electron,miniak/electron,evgenyzinoviev/electron,xiruibing/electron,dongjoon-hyun/electron,greyhwndz/electron,shaundunne/electron,nicholasess/electron,brenca/electron,jjz/electron,Faiz7412/electron,minggo/electron,leftstick/electron,trigrass2/electron,stevemao/electron,hokein/atom-shell,meowlab/electron,tomashanacek/electron,jiaz/electron,webmechanicx/electron,christian-bromann/electron,tincan24/electron,zhakui/electron,tylergibson/electron,thompsonemerson/electron,kcrt/electron,sircharleswatson/electron,setzer777/electron,ianscrivener/electron,aliib/electron,neutrous/electron,arturts/electron,darwin/electron,astoilkov/electron,leftstick/electron,GoooIce/electron,John-Lin/electron,jaanus/electron,bpasero/electron,fabien-d/electron,brave/muon,biblerule/UMCTelnetHub,farmisen/electron,ianscrivener/electron,evgenyzinoviev/electron,farmisen/electron,stevekinney/electron,saronwei/electron,arturts/electron,aaron-goshine/electron,voidbridge/electron,fabien-d/electron,roadev/electron,jonatasfreitasv/electron,rhencke/electron,RIAEvangelist/electron,ervinb/electron,the-ress/electron,wan-qy/electron,ianscrivener/electron,posix4e/electron,natgolov/electron,kcrt/electron,electron/electron,wolfflow/electron,shaundunne/electron,timruffles/electron,mhkeller/electron,tincan24/electron,fritx/electron,chriskdon/electron,noikiy/electron,gerhardberger/electron,biblerule/UMCTelnetHub,joaomoreno/atom-shell,saronwei/electron,gabrielPeart/electron,egoist/electron,brave/electron,systembugtj/electron,bruce/electron,vaginessa/electron,jonatasfreitasv/electron,christian-bromann/electron,cos2004/electron,fffej/electron,anko/electron,aichingm/electron,rreimann/electron,lzpfmh/electron,jhen0409/electron,cos2004/electron,takashi/electron,rhencke/electron,tincan24/electron,brenca/electron,tomashanacek/electron,nekuz0r/electron,chrisswk/electron,brenca/electron,meowlab/electron,chriskdon/electron,gabrielPeart/electron,edulan/electron,yan-foto/electron,davazp/electron,eric-seekas/electron,Faiz7412/electron,benweissmann/electron,chriskdon/electron,robinvandernoord/electron,jacksondc/electron,wan-qy/electron,bobwol/electron,davazp/electron,Floato/electron,MaxGraey/electron,leethomas/electron,JesselJohn/electron,JesselJohn/electron,twolfson/electr
on,gabriel/electron,christian-bromann/electron,gabrielPeart/electron,gabriel/electron,GoooIce/electron,Rokt33r/electron,thompsonemerson/electron,simonfork/electron,natgolov/electron,webmechanicx/electron,electron/electron,Andrey-Pavlov/electron,coderhaoxin/electron,Gerhut/electron,aichingm/electron,IonicaBizauKitchen/electron,sky7sea/electron,felixrieseberg/electron,xfstudio/electron,etiktin/electron,Gerhut/electron,DivyaKMenon/electron,timruffles/electron,thompsonemerson/electron,gamedevsam/electron,shockone/electron,stevemao/electron,oiledCode/electron,Rokt33r/electron,trankmichael/electron,seanchas116/electron,bitemyapp/electron,Zagorakiss/electron,sircharleswatson/electron,abhishekgahlot/electron,adcentury/electron,brave/electron,subblue/electron,leolujuyi/electron,matiasinsaurralde/electron,LadyNaggaga/electron,bwiggs/electron,fabien-d/electron,neutrous/electron,brave/electron,arusakov/electron,adcentury/electron,sky7sea/electron,pandoraui/electron,minggo/electron,dongjoon-hyun/electron,sky7sea/electron,tincan24/electron,fritx/electron,medixdev/electron,joneit/electron,ianscrivener/electron,mirrh/electron,yalexx/electron,tomashanacek/electron,soulteary/electron,sshiting/electron,leolujuyi/electron,tincan24/electron,baiwyc119/electron,saronwei/electron,coderhaoxin/electron,trankmichael/electron,yalexx/electron,pombredanne/electron,benweissmann/electron,bright-sparks/electron,chriskdon/electron,leftstick/electron,bobwol/electron,jonatasfreitasv/electron,Gerhut/electron,shiftkey/electron,pombredanne/electron,vipulroxx/electron,mrwizard82d1/electron,JesselJohn/electron,tonyganch/electron,mrwizard82d1/electron,jiaz/electron,thomsonreuters/electron,davazp/electron,renaesop/electron,fomojola/electron,preco21/electron,jonatasfreitasv/electron,kostia/electron,aliib/electron,darwin/electron,gbn972/electron,saronwei/electron,mjaniszew/electron,zhakui/electron,mubassirhayat/electron,John-Lin/electron,MaxWhere/electron,joneit/electron,aichingm/electron,bruce/electron,icattlecoder/electron,mhkeller/electron,Floato/electron,aliib/electron,trankmichael/electron,greyhwndz/electron,jlord/electron,bwiggs/electron,deed02392/electron,dongjoon-hyun/electron,fomojola/electron,egoist/electron,trankmichael/electron,shockone/electron,RIAEvangelist/electron,Floato/electron,jlhbaseball15/electron,kenmozi/electron,vaginessa/electron,GoooIce/electron,gerhardberger/electron,vHanda/electron,jlhbaseball15/electron,noikiy/electron,bpasero/electron,etiktin/electron,Ivshti/electron,tinydew4/electron,yalexx/electron,the-ress/electron,GoooIce/electron,subblue/electron,ankitaggarwal011/electron,adamjgray/electron,vipulroxx/electron,hokein/atom-shell,matiasinsaurralde/electron,xfstudio/electron,takashi/electron,brave/electron,ianscrivener/electron,smczk/electron,dahal/electron,shiftkey/electron,leolujuyi/electron,evgenyzinoviev/electron,shiftkey/electron,egoist/electron,kenmozi/electron,systembugtj/electron,simongregory/electron,webmechanicx/electron,d-salas/electron,joneit/electron,fffej/electron,vipulroxx/electron,SufianHassan/electron,SufianHassan/electron,brenca/electron,aecca/electron,tylergibson/electron,leethomas/electron,egoist/electron,jannishuebl/electron,lzpfmh/electron,bobwol/electron,the-ress/electron,Neron-X5/electron,gabrielPeart/electron,ankitaggarwal011/electron,greyhwndz/electron,simongregory/electron,yan-foto/electron,dkfiresky/electron,rreimann/electron,faizalpribadi/electron,smczk/electron,nicholasess/electron,nekuz0r/electron,minggo/electron,sshiting/electron,beni55/electron,trigrass2/electron,mrwizard8
2d1/electron,Floato/electron,mattdesl/electron,pombredanne/electron,roadev/electron,arusakov/electron,MaxWhere/electron,micalan/electron,mattotodd/electron,jlhbaseball15/electron,bpasero/electron,baiwyc119/electron,chrisswk/electron,baiwyc119/electron,subblue/electron,vaginessa/electron,Zagorakiss/electron,electron/electron,MaxGraey/electron,robinvandernoord/electron,thingsinjars/electron,roadev/electron,joaomoreno/atom-shell,robinvandernoord/electron,arturts/electron,iftekeriba/electron,nicobot/electron,seanchas116/electron,destan/electron,adcentury/electron,synaptek/electron,micalan/electron,tylergibson/electron,kazupon/electron,Jonekee/electron,beni55/electron,jjz/electron,bobwol/electron,brave/electron,rajatsingla28/electron,kazupon/electron,cqqccqc/electron,felixrieseberg/electron,sircharleswatson/electron,nekuz0r/electron,IonicaBizauKitchen/electron,wan-qy/electron,tonyganch/electron,smczk/electron,dongjoon-hyun/electron,jsutcodes/electron,wan-qy/electron,thomsonreuters/electron,GoooIce/electron,stevekinney/electron,biblerule/UMCTelnetHub,jannishuebl/electron,xiruibing/electron,tylergibson/electron,jiaz/electron,joaomoreno/atom-shell,timruffles/electron,kazupon/electron,digideskio/electron,jlord/electron,digideskio/electron,Floato/electron,wan-qy/electron,gamedevsam/electron,mirrh/electron,bwiggs/electron,bright-sparks/electron,IonicaBizauKitchen/electron,jhen0409/electron,sircharleswatson/electron,vHanda/electron,kostia/electron,oiledCode/electron,edulan/electron,arusakov/electron,JussMee15/electron,rajatsingla28/electron,jsutcodes/electron,bright-sparks/electron,faizalpribadi/electron,xfstudio/electron,leftstick/electron,eric-seekas/electron,biblerule/UMCTelnetHub,kenmozi/electron,gamedevsam/electron,carsonmcdonald/electron,MaxGraey/electron,mhkeller/electron,jjz/electron,rreimann/electron,IonicaBizauKitchen/electron,John-Lin/electron,ervinb/electron,meowlab/electron,Gerhut/electron,jtburke/electron,simongregory/electron,miniak/electron,the-ress/electron,astoilkov/electron,setzer777/electron,darwin/electron,aecca/electron,gamedevsam/electron,darwin/electron,takashi/electron,etiktin/electron,Neron-X5/electron,brave/muon,renaesop/electron,mattotodd/electron,jcblw/electron,trigrass2/electron,fireball-x/atom-shell,gabriel/electron,pandoraui/electron,medixdev/electron,lrlna/electron,abhishekgahlot/electron,bitemyapp/electron,iftekeriba/electron,jaanus/electron,eric-seekas/electron,posix4e/electron,xiruibing/electron,simongregory/electron,fireball-x/atom-shell,mrwizard82d1/electron,Jonekee/electron,bright-sparks/electron,thomsonreuters/electron,SufianHassan/electron,leftstick/electron,eric-seekas/electron,aichingm/electron,renaesop/electron,fritx/electron,deed02392/electron,voidbridge/electron,roadev/electron,noikiy/electron,thomsonreuters/electron,chrisswk/electron,pandoraui/electron,RIAEvangelist/electron,mjaniszew/electron,stevekinney/electron,digideskio/electron,takashi/electron,zhakui/electron,Zagorakiss/electron,xfstudio/electron,subblue/electron,aaron-goshine/electron,fffej/electron,pombredanne/electron,cos2004/electron,Gerhut/electron,thingsinjars/electron,gabrielPeart/electron,electron/electron,jtburke/electron,eric-seekas/electron,kokdemo/electron,shennushi/electron,dahal/electron,mrwizard82d1/electron,michaelchiche/electron,pirafrank/electron,roadev/electron,deed02392/electron,icattlecoder/electron,fritx/electron,the-ress/electron,stevemao/electron,kostia/electron,neutrous/electron,darwin/electron,Rokt33r/electron,preco21/electron,miniak/electron,soulteary/electron,howmuchcompu
ter/electron,LadyNaggaga/electron,webmechanicx/electron,RobertJGabriel/electron,d-salas/electron,Evercoder/electron,simonfork/electron,kokdemo/electron,vipulroxx/electron,carsonmcdonald/electron,d-salas/electron,bwiggs/electron,DivyaKMenon/electron,felixrieseberg/electron,lrlna/electron,edulan/electron,tincan24/electron,mjaniszew/electron,pombredanne/electron,zhakui/electron,fffej/electron,christian-bromann/electron,kazupon/electron,jaanus/electron,ianscrivener/electron,thomsonreuters/electron,sshiting/electron,electron/electron,MaxWhere/electron,robinvandernoord/electron,rreimann/electron,wan-qy/electron,voidbridge/electron,subblue/electron,sky7sea/electron,kenmozi/electron,brave/electron,setzer777/electron,tinydew4/electron,rsvip/electron,kenmozi/electron,synaptek/electron,matiasinsaurralde/electron,oiledCode/electron,voidbridge/electron,gabriel/electron,destan/electron,digideskio/electron,gerhardberger/electron,abhishekgahlot/electron,renaesop/electron,michaelchiche/electron,matiasinsaurralde/electron,rhencke/electron,bitemyapp/electron,kostia/electron,jlhbaseball15/electron,shiftkey/electron,mhkeller/electron,joaomoreno/atom-shell,dkfiresky/electron,gabriel/electron,pirafrank/electron,mattdesl/electron,shaundunne/electron,mubassirhayat/electron,yalexx/electron,cos2004/electron,jhen0409/electron,davazp/electron,fireball-x/atom-shell,GoooIce/electron,mirrh/electron,Faiz7412/electron,vipulroxx/electron,webmechanicx/electron,DivyaKMenon/electron,jsutcodes/electron,lzpfmh/electron,jlord/electron,baiwyc119/electron,mattdesl/electron,nekuz0r/electron,subblue/electron,vipulroxx/electron,nicobot/electron,eriser/electron,mirrh/electron,the-ress/electron,nicholasess/electron,BionicClick/electron,BionicClick/electron,wolfflow/electron,tylergibson/electron,faizalpribadi/electron,pandoraui/electron,Evercoder/electron,RIAEvangelist/electron,adcentury/electron,thomsonreuters/electron,eric-seekas/electron,micalan/electron,eriser/electron,destan/electron,benweissmann/electron,rajatsingla28/electron,RobertJGabriel/electron,farmisen/electron,mhkeller/electron,davazp/electron,Jacobichou/electron,bitemyapp/electron,mirrh/electron,twolfson/electron,jacksondc/electron,oiledCode/electron,benweissmann/electron,roadev/electron,ankitaggarwal011/electron,jsutcodes/electron,kokdemo/electron,thingsinjars/electron,lrlna/electron,vaginessa/electron,farmisen/electron,adcentury/electron,icattlecoder/electron,neutrous/electron,JussMee15/electron,nekuz0r/electron,bpasero/electron,d-salas/electron,shiftkey/electron,brave/muon,soulteary/electron,webmechanicx/electron,Ivshti/electron,thompsonemerson/electron,LadyNaggaga/electron,wolfflow/electron,JesselJohn/electron,trigrass2/electron,jacksondc/electron,vaginessa/electron,vHanda/electron,smczk/electron,vHanda/electron,xiruibing/electron,mattdesl/electron,electron/electron,BionicClick/electron,BionicClick/electron,jacksondc/electron,ankitaggarwal011/electron,RobertJGabriel/electron,chrisswk/electron,adamjgray/electron,jcblw/electron,Ivshti/electron,Jacobichou/electron,JussMee15/electron,cqqccqc/electron,yan-foto/electron,jonatasfreitasv/electron,coderhaoxin/electron,rajatsingla28/electron,bruce/electron,faizalpribadi/electron,adamjgray/electron,simongregory/electron,iftekeriba/electron,kostia/electron,fomojola/electron,mjaniszew/electron,jcblw/electron,bbondy/electron,jlord/electron,abhishekgahlot/electron,BionicClick/electron,jjz/electron,benweissmann/electron,beni55/electron,posix4e/electron,zhakui/electron,jhen0409/electron,jhen0409/electron,aecca/electron,medixdev/electron
,bobwol/electron,MaxWhere/electron,bruce/electron,ervinb/electron,meowlab/electron,LadyNaggaga/electron,MaxGraey/electron,pombredanne/electron,joaomoreno/atom-shell,fireball-x/atom-shell,JesselJohn/electron,eriser/electron,gamedevsam/electron,rsvip/electron,arturts/electron,seanchas116/electron,Rokt33r/electron,Jacobichou/electron,Neron-X5/electron,voidbridge/electron,fritx/electron,jtburke/electron,dkfiresky/electron,farmisen/electron,Zagorakiss/electron,rajatsingla28/electron,kokdemo/electron,mirrh/electron,shockone/electron,SufianHassan/electron,noikiy/electron,synaptek/electron,zhakui/electron,anko/electron,aecca/electron,howmuchcomputer/electron,twolfson/electron,thingsinjars/electron,systembugtj/electron,robinvandernoord/electron,saronwei/electron,lrlna/electron,dongjoon-hyun/electron,Andrey-Pavlov/electron,timruffles/electron,lrlna/electron,tomashanacek/electron,greyhwndz/electron,natgolov/electron,minggo/electron,RobertJGabriel/electron,beni55/electron,etiktin/electron,oiledCode/electron,micalan/electron,gerhardberger/electron,sshiting/electron,tonyganch/electron,aecca/electron,gbn972/electron,DivyaKMenon/electron,xfstudio/electron,mjaniszew/electron,thingsinjars/electron,fffej/electron,sshiting/electron,lzpfmh/electron,robinvandernoord/electron,Ivshti/electron,Jacobichou/electron,gbn972/electron,medixdev/electron,kcrt/electron,thingsinjars/electron,dkfiresky/electron,adcentury/electron,anko/electron,icattlecoder/electron,kcrt/electron,tinydew4/electron,natgolov/electron,evgenyzinoviev/electron,benweissmann/electron,abhishekgahlot/electron,miniak/electron,jannishuebl/electron,JesselJohn/electron,destan/electron,Gerhut/electron,Jacobichou/electron,mattdesl/electron,miniak/electron,d-salas/electron,aliib/electron,twolfson/electron,christian-bromann/electron,dahal/electron,edulan/electron,bright-sparks/electron,trigrass2/electron,soulteary/electron,yalexx/electron,jannishuebl/electron,kazupon/electron,fomojola/electron,JussMee15/electron,rajatsingla28/electron,mattotodd/electron,bpasero/electron,chriskdon/electron,DivyaKMenon/electron,micalan/electron,stevemao/electron,mattotodd/electron,shennushi/electron,digideskio/electron,arusakov/electron,tinydew4/electron,aliib/electron,gerhardberger/electron,seanchas116/electron,farmisen/electron,natgolov/electron,SufianHassan/electron,jcblw/electron,simonfork/electron,mattdesl/electron,synaptek/electron,biblerule/UMCTelnetHub,nicobot/electron,iftekeriba/electron,deed02392/electron,arturts/electron,renaesop/electron,greyhwndz/electron,seanchas116/electron,jcblw/electron,joneit/electron,John-Lin/electron,setzer777/electron,the-ress/electron,nicobot/electron,aaron-goshine/electron,renaesop/electron,felixrieseberg/electron,ankitaggarwal011/electron,faizalpribadi/electron,Rokt33r/electron,chriskdon/electron,simonfork/electron,christian-bromann/electron,mubassirhayat/electron,soulteary/electron,fritx/electron,shockone/electron,iftekeriba/electron,jacksondc/electron,arturts/electron,stevekinney/electron,takashi/electron,stevekinney/electron,rsvip/electron,dahal/electron,noikiy/electron,cos2004/electron,abhishekgahlot/electron,bpasero/electron,micalan/electron,BionicClick/electron,fireball-x/atom-shell,lzpfmh/electron,matiasinsaurralde/electron,shennushi/electron,d-salas/electron,shockone/electron,tonyganch/electron,beni55/electron,Zagorakiss/electron,etiktin/electron,mubassirhayat/electron,arusakov/electron,wolfflow/electron,sky7sea/electron,posix4e/electron,mrwizard82d1/electron,thompsonemerson/electron,ervinb/electron,rsvip/electron,fabien-d/electro
n,sky7sea/electron,edulan/electron,stevekinney/electron,systembugtj/electron,jonatasfreitasv/electron,seanchas116/electron,xfstudio/electron,meowlab/electron,wolfflow/electron,jcblw/electron,setzer777/electron,kenmozi/electron,jtburke/electron,fabien-d/electron,lrlna/electron,carsonmcdonald/electron,etiktin/electron,preco21/electron,vHanda/electron,bbondy/electron,astoilkov/electron,felixrieseberg/electron,sircharleswatson/electron,jlhbaseball15/electron,gbn972/electron,yan-foto/electron,evgenyzinoviev/electron,nicholasess/electron,RIAEvangelist/electron
6869d5edd706d95c8cadbd1945b29fdd3bfecd6b
blaze/datashape/unification.py
blaze/datashape/unification.py
""" Unification is a generalization of Numpy broadcasting. In Numpy we two arrays and broadcast them to yield similar shaped arrays. In Blaze we take two arrays with more complex datashapes and unify the types prescribed by more complicated pattern matching on the types. """ from numpy import promote_types from coretypes import Fixed, Range, TypeVar, Record, \ CType, Enum, top, dynamic class Incommensurable(Exception): def __init__(self, space, dim): self.space = space self.dim = dim def __str__(self): return "No way of unifying (%s) (%s)" % ( self.space, self.dim ) def unify(a, b): """ Unification of Datashapes. """ ta = type(a) tb = type(b) # -- # Unification over BlazeT has two zeros if ta == top or tb == top: return top if ta == dynamic or tb == dynamic: return top # -- if (ta,tb) == (Fixed, Fixed): if a.val == b.val: return Fixed(a.val) else: return Enum(a.val, b.val) # -- if (ta,tb) == (TypeVar, Fixed): return TypeVar('x0') if (ta,tb) == (Fixed, TypeVar): return TypeVar('x0') # -- if (ta,tb) == (Record, Record): c = a.d.items() + b.d.items() return Record(**dict(c)) # -- if (ta,tb) == (Fixed, Range): return Range(min(a.val, b.lower), max(a.val, b.upper)) if (ta,tb) == (Range, Fixed): return Range(min(a.lower, b.val), max(a.val, b.val)) if (ta,tb) == (Range, Range): return Range(min(a.lower, b.lower), max(b.upper, b.upper)) # -- #if (ta,tb) == (Union, Union): #return Union(a.parameters + b.parameters) # -- if (ta,tb) == (CType, CType): return CType.from_str(promote_types(a.name, b.name).name) raise Incommensurable(a,b)
""" Unification is a generalization of Numpy broadcasting. In Numpy we two arrays and broadcast them to yield similar shaped arrays. In Blaze we take two arrays with more complex datashapes and unify the types prescribed by more complicated pattern matching on the types. """ from numpy import promote_types from blaze.datashape.coretypes import TypeVar from blaze.expr.typeinference import infer class Incommensurable(TypeError): pass def unify(sig, concrete=True): """ Unification of Datashapes. """ resolved = infer(sig) if all(not isinstance(a, TypeVar) for a in resolved): return resolved
Remove very old type unifier, for robust one
Remove very old type unifier, for robust one
Python
bsd-2-clause
seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core
e00dc2a5725faeb3b11c6aac0d9ed0be0a55d33f
OIPA/iati/parser/schema_validators.py
OIPA/iati/parser/schema_validators.py
import os import os.path from lxml import etree from common.util import findnth_occurence_in_string def validate(iati_parser, xml_etree): base = os.path.dirname(os.path.abspath(__file__)) location = base + "/../schemas/" + iati_parser.VERSION \ + "/iati-activities-schema.xsd" xsd_data = open(location) xmlschema_doc = etree.parse(xsd_data) xsd_data.close() xmlschema = etree.XMLSchema(xmlschema_doc) xml_errors = None try: xmlschema.assertValid(xml_etree) except etree.DocumentInvalid as xml_errors: pass if xml_errors: for error in xml_errors.error_log: element = error.message[ (findnth_occurence_in_string( error.message, '\'', 0 ) + 1):findnth_occurence_in_string( error.message, '\'', 1 ) ] attribute = '-' if 'attribute' in error.message: attribute = error.message[ (findnth_occurence_in_string( error.message, '\'', 2 ) + 1):findnth_occurence_in_string( error.message, '\'', 3 ) ] iati_parser.append_error( 'XsdValidationError', element, attribute, error.message.split(':')[0], error.line, error.message.split(':')[1], 'unkown for XSD validation errors')
import os import os.path from lxml import etree from common.util import findnth_occurence_in_string def validate(iati_parser, xml_etree): base = os.path.dirname(os.path.abspath(__file__)) location = base + "/../schemas/" + iati_parser.VERSION \ + "/iati-activities-schema.xsd" xsd_data = open(location) xmlschema_doc = etree.parse(xsd_data) xsd_data.close() xmlschema = etree.XMLSchema(xmlschema_doc) xml_errors = None try: xmlschema.assertValid(xml_etree) except etree.DocumentInvalid as e: xml_errors = e pass if xml_errors: for error in xml_errors.error_log: element = error.message[ (findnth_occurence_in_string( error.message, '\'', 0 ) + 1):findnth_occurence_in_string( error.message, '\'', 1 ) ] attribute = '-' if 'attribute' in error.message: attribute = error.message[ (findnth_occurence_in_string( error.message, '\'', 2 ) + 1):findnth_occurence_in_string( error.message, '\'', 3 ) ] iati_parser.append_error( 'XsdValidationError', element, attribute, error.message.split(':')[0], error.line, error.message.split(':')[1], 'unkown for XSD validation errors')
Fix another bug related to logging dataset errors
Fix another bug related to logging dataset errors OIPA-612 / #589
Python
agpl-3.0
openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA
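The fix recorded above hinges on Python 3 exception scoping: the name bound by `except ... as` is unbound again when the except block ends, so the earlier `xml_errors = None` assignment does not survive and the later `if xml_errors:` check breaks exactly when validation errors occur. A minimal, self-contained sketch of the pattern (hypothetical names, not the OIPA code):

# Sketch of Python 3 "except ... as" scoping: the bound name is deleted when
# the except block exits, so copy the exception to an outer variable first.
class ValidationError(Exception):
    pass


def validate(value):
    if value < 0:
        raise ValidationError("negative value")


caught = None  # outer binding that survives the except block
try:
    validate(-1)
except ValidationError as exc:
    caught = exc  # keep a reference before "exc" is unbound

if caught is not None:
    print("validation failed:", caught)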
b5e5a18bfb6071189d96f64ba9d86f91fc48fd66
template_utils/templatetags/generic_markup.py
template_utils/templatetags/generic_markup.py
""" Filters for converting plain text to HTML and enhancing the typographic appeal of text on the Web. """ from django.conf import settings from django.template import Library from template_utils.markup import formatter def apply_markup(value, arg=None): """ Applies text-to-HTML conversion. Takes an optional argument to specify the name of a filter to use. """ if arg is not None: return formatter(value, filter_name=arg) return formatter(value) def smartypants(value): """ Applies SmartyPants to a piece of text, applying typographic niceties. Requires the Python SmartyPants library to be installed; see http://web.chad.org/projects/smartypants.py/ """ try: from smartypants import smartyPants except ImportError: if settings.DEBUG: raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported") return value else: return smartyPants(value) register = Library() register.filter(apply_markup)
""" Filters for converting plain text to HTML and enhancing the typographic appeal of text on the Web. """ from django.conf import settings from django.template import Library from template_utils.markup import formatter def apply_markup(value, arg=None): """ Applies text-to-HTML conversion. Takes an optional argument to specify the name of a filter to use. """ if arg is not None: return formatter(value, filter_name=arg) return formatter(value) def smartypants(value): """ Applies SmartyPants to a piece of text, applying typographic niceties. Requires the Python SmartyPants library to be installed; see http://web.chad.org/projects/smartypants.py/ """ try: from smartypants import smartyPants except ImportError: if settings.DEBUG: raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported") return value else: return smartyPants(value) register = Library() register.filter(apply_markup) register.filter(smartypants)
Enable the SmartyPants filter; need to document it later
Enable the SmartyPants filter; need to document it later
Python
bsd-3-clause
dongpoliu/django-template-utils
8c9739572aa679cb6d55cb31737bff6d304db2d1
openstack/tests/functional/network/v2/test_extension.py
openstack/tests/functional/network/v2/test_extension.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import six

from openstack.tests.functional import base


class TestExtension(base.BaseFunctionalTest):

    def test_list_and_find(self):
        extensions = list(self.conn.network.extensions())
        self.assertGreater(len(extensions), 0)

        for ext in extensions:
            self.assertIsInstance(ext.name, six.string_types)
            self.assertIsInstance(ext.alias, six.string_types)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import six

from openstack.tests.functional import base


class TestExtension(base.BaseFunctionalTest):

    def test_list(self):
        extensions = list(self.conn.network.extensions())
        self.assertGreater(len(extensions), 0)

        for ext in extensions:
            self.assertIsInstance(ext.name, six.string_types)
            self.assertIsInstance(ext.alias, six.string_types)

    def test_find(self):
        extension = self.conn.network.find_extension('external-net')
        self.assertEqual('Neutron external network', extension.name)
Add a functional test for find_extension
Add a functional test for find_extension Change-Id: I351a1c1529beb3cae799650e1e57364b3521d00c
Python
apache-2.0
briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk,dudymas/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk
d44a338e704732b9e3e7cb935eb2c9b38d2cfa06
api/drive.py
api/drive.py
# -*- encoding:utf8 -*- import httplib2 from flask import Blueprint, redirect, request, Response, abort from model.oauth import OAuth from model.utils import Utils drive = Blueprint('drive', __name__, url_prefix='/drive') @drive.route("/auth", methods=['GET']) def hookauth(): flow = OAuth().get_flow() if not flow: abort(500) auth_uri = flow.step1_get_authorize_url() return redirect(auth_uri) @drive.route("/callback", methods=['GET']) def callback(): try: code = request.args['code'] except: abort(400) flow = OAuth().get_flow() credentials = flow.step2_exchange(code) http = httplib2.Http() credentials.authorize(http) dic = {"response": "success"} return Response(Utils().dump_json(dic), mimetype='application/json')
# -*- encoding:utf8 -*- import httplib2 from flask import Blueprint, redirect, request, Response, abort from model.cache import Cache from model.oauth import OAuth from model.utils import Utils drive = Blueprint('drive', __name__, url_prefix='/drive') @drive.route("/auth", methods=['GET']) def hookauth(): flow = OAuth().get_flow() if not flow: abort(500) auth_uri = flow.step1_get_authorize_url() return redirect(auth_uri) @drive.route("/callback", methods=['GET']) def callback(): try: code = request.args['code'] except: abort(400) flow = OAuth().get_flow() credentials = flow.step2_exchange(code) http = httplib2.Http() credentials.authorize(http) dic = {"response": "success"} return Response(Utils().dump_json(dic), mimetype='application/json') @drive.route("/webhook", methods=['POST']) def webhook(): document_id = request.json.get('id') if not document_id: abort(400) return Cache().clear(document_id) dic = {"response": "success", "document_id": document_id} return Response(Utils().dump_json(dic), mimetype='application/json')
Introduce cache clear logic through GoogleDrive webhook endpoint.
Introduce cache clear logic through GoogleDrive webhook endpoint.
Python
mit
supistar/Botnyan
e81b920ad19872306d6e18bc9f21c296bb2fd6ab
danceschool/backups/management/commands/backup_now.py
danceschool/backups/management/commands/backup_now.py
from django.core.management.base import BaseCommand from django.core.management import call_command from django.conf import settings from django.utils import timezone import logging import os from danceschool.core.constants import getConstant # Define logger for this file logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Perform a backup of the site database, using configuration options from site settings.' def handle(self, *args, **options): backup_folder = getattr(settings,'BACKUP_LOCATION','/backup') if not os.path.isdir(backup_folder): logger.error( 'Backup failed because destination folder does not exist; ' + 'BACKUP_LOCATION must be updated in project settings.py.' ) return None backup_loc = os.path.join(backup_folder,'%s%s.json' % (getConstant('backups__filePrefix'), timezone.now().strftime('%Y%m%d'))) if not getConstant('backups__enableDataBackups'): logger.info('Aborting backup because backups are not enabled in global settings.') return None logger.info('Beginning JSON backup to file %s.' % backup_loc) with open(backup_loc,'w') as f: try: call_command('dumpdata',indent=1,format='json',natural_foreign=True,stdout=f) logger.info('Backup completed.') except: logger.error('Backup to file %s failed.' % backup_loc)
from django.core.management.base import BaseCommand from django.core.management import call_command from django.conf import settings from django.utils import timezone import logging import os from danceschool.core.constants import getConstant # Define logger for this file logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Perform a backup of the site database, using configuration options from site settings.' def handle(self, *args, **options): backup_folder = getattr(settings,'BACKUP_LOCATION','/backup') if not os.path.isdir(backup_folder): logger.error( 'Backup failed because destination folder does not exist; ' + 'BACKUP_LOCATION must be updated in project settings.py.' ) return None backup_loc = os.path.join(backup_folder,'%s%s.json' % (getConstant('backups__filePrefix'), timezone.now().strftime('%Y%m%d%H%M%S'))) if not getConstant('backups__enableDataBackups'): logger.info('Aborting backup because backups are not enabled in global settings.') return None logger.info('Beginning JSON backup to file %s.' % backup_loc) with open(backup_loc,'w') as f: try: call_command('dumpdata',indent=1,format='json',natural_foreign=True,stdout=f) logger.info('Backup completed.') except: logger.error('Backup to file %s failed.' % backup_loc)
Change timestamp format (important for hourly backups).
Change timestamp format (important for hourly backups).
Python
bsd-3-clause
django-danceschool/django-danceschool,django-danceschool/django-danceschool,django-danceschool/django-danceschool
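The reasoning behind the commit above is that a date-only filename can only name one backup per day; once backups run hourly, later runs overwrite earlier ones. A small illustration of the two `strftime` formats (hypothetical filenames, not the danceschool paths):

from datetime import datetime

run_1 = datetime(2023, 5, 1, 1, 0, 0)
run_2 = datetime(2023, 5, 1, 13, 0, 0)

# Date-only names collide, so a second backup on the same day is overwritten.
print(run_1.strftime('backup_%Y%m%d.json') == run_2.strftime('backup_%Y%m%d.json'))  # True

# Including the time keeps every run distinct.
print(run_1.strftime('backup_%Y%m%d%H%M%S.json'))  # backup_20230501010000.json
print(run_2.strftime('backup_%Y%m%d%H%M%S.json'))  # backup_20230501130000.json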
c1d6e066ea622cc3fa7cec33cb77aa12e43a6519
avocado/exporters/_html.py
avocado/exporters/_html.py
from django.template import Context
from django.template.loader import get_template

from _base import BaseExporter


class HTMLExporter(BaseExporter):
    preferred_formats = ('html', 'string')

    def write(self, iterable, buff=None, template=None):
        if not buff and not template:
            raise Exception('Either a file-like object or template must be supplied')

        generator = self.read(iterable)

        if buff:
            for row in generator:
                buff.write(row)
            return buff

        context = Context({'rows': generator})

        if isinstance(template, basestring):
            template = get_template(template)

        return template.render(context)
from django.template import Context
from django.template.loader import get_template

from _base import BaseExporter


class HTMLExporter(BaseExporter):
    preferred_formats = ('html', 'string')

    def write(self, iterable, buff=None, template=None):
        if not buff and not template:
            raise Exception('Either a file-like object or template must be supplied')

        generator = self.read(iterable)

        if buff:
            for row in generator:
                for item in row:
                    buff.write(item)
            return buff

        context = Context({'rows': generator})

        if isinstance(template, basestring):
            template = get_template(template)

        return template.render(context)
Fix missing row iteration in HTMLExporter
Fix missing row iteration in HTMLExporter
Python
bsd-2-clause
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
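The bug fixed in the record above is easy to reproduce outside Django: each row produced by the reader is itself an iterable of string fragments, so the buffer has to receive the individual items rather than the row object. A standalone sketch with made-up data (not the avocado reader):

from io import StringIO

# Each "row" is an iterable of already-rendered fragments.
rows = [['<tr>', '<td>a</td>', '</tr>'], ['<tr>', '<td>b</td>', '</tr>']]

buff = StringIO()
for row in rows:
    for item in row:  # writing `row` itself would fail: write() expects a string
        buff.write(item)

print(buff.getvalue())  # <tr><td>a</td></tr><tr><td>b</td></tr>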
323167f22c3176366cf2f90ce2ec314ee2c49c8f
moa/factory_registers.py
moa/factory_registers.py
from kivy.factory import Factory

r = Factory.register

r('StageTreeNode', module='moa.render.treerender')
r('StageSimpleDisplay', module='moa.render.stage_simple')

# --------------------- devices -----------------------------
r('Device', module='moa.device')
r('DigitalChannel', module='moa.device.digital')
r('DigitalPort', module='moa.device.digital')
r('ButtonChannel', module='moa.device.digital')
r('ButtonPort', module='moa.device.digital')
r('AnalogChannel', module='moa.device.analog')
r('AnalogPort', module='moa.device.analog')
r('NumericPropertyChannel', module='moa.device.analog')
r('NumericPropertyPort', module='moa.device.analog')

# ---------------------- stages --------------------------------
r('MoaStage', module='moa.stage')
r('Delay', module='moa.stage.delay')
r('GateStage', module='moa.stage.gate')
r('DigitalGateStage', module='moa.stage.gate')
r('AnalogGateStage', module='moa.stage.gate')
from kivy.factory import Factory

r = Factory.register

r('StageTreeNode', module='moa.render.treerender')
r('StageSimpleDisplay', module='moa.render.stage_simple')

# --------------------- devices -----------------------------
r('Device', module='moa.device.__init__')
r('DigitalChannel', module='moa.device.digital')
r('DigitalPort', module='moa.device.digital')
r('ButtonChannel', module='moa.device.digital')
r('ButtonPort', module='moa.device.digital')
r('AnalogChannel', module='moa.device.analog')
r('AnalogPort', module='moa.device.analog')
r('NumericPropertyChannel', module='moa.device.analog')
r('NumericPropertyPort', module='moa.device.analog')

# ---------------------- stages --------------------------------
r('MoaStage', module='moa.stage.__init__')
r('Delay', module='moa.stage.delay')
r('GateStage', module='moa.stage.gate')
r('DigitalGateStage', module='moa.stage.gate')
r('AnalogGateStage', module='moa.stage.gate')
Use __init__ for factory imports.
Use __init__ for factory imports.
Python
mit
matham/moa
a9be23f6e3b45b766b770b60e3a2a318e6fd7e71
tests/script/test_no_silent_add_and_commit.py
tests/script/test_no_silent_add_and_commit.py
import pytest pytestmark = pytest.mark.slow version_file_content = """ major = 0 minor = 2 patch = 0 """ config_file_content = """ __config_version__ = 1 GLOBALS = { 'serializer': '{{major}}.{{minor}}.{{patch}}', } FILES = ["VERSION"] VERSION = ['major', 'minor', 'patch'] VCS = { 'name': 'git', } """ def test_update_major(test_environment): test_environment.ensure_file_is_present("VERSION", "0.2.0") test_environment.ensure_file_is_present( "punch_version.py", version_file_content ) test_environment.ensure_file_is_present( "punch_config.py", config_file_content ) test_environment.output(["git", "init"]) test_environment.output(["git", "add", "punch_config.py"]) test_environment.output(["git", "commit", "-m", "some message"]) test_environment.ensure_file_is_present("untracked_file") test_environment.call(["punch", "--part", "minor"]) out = test_environment.output( ["git", "ls-tree", "-r", "master", "--name-only"] ) assert "untracked_file" not in out
import pytest pytestmark = pytest.mark.slow version_file_content = """ major = 0 minor = 2 patch = 0 """ config_file_content = """ __config_version__ = 1 GLOBALS = { 'serializer': '{{major}}.{{minor}}.{{patch}}', } FILES = ["VERSION"] VERSION = ['major', 'minor', 'patch'] VCS = { 'name': 'git', } """ def test_check_no_silent_addition_happens(test_environment): test_environment.ensure_file_is_present("VERSION", "0.2.0") test_environment.ensure_file_is_present( "punch_version.py", version_file_content ) test_environment.ensure_file_is_present( "punch_config.py", config_file_content ) test_environment.output(["git", "init"]) test_environment.output(["git", "add", "punch_config.py"]) test_environment.output(["git", "commit", "-m", "some message"]) test_environment.ensure_file_is_present("untracked_file") test_environment.call(["punch", "--part", "minor"]) out = test_environment.output( ["git", "ls-tree", "-r", "master", "--name-only"] ) assert "untracked_file" not in out
Test name changed to reflect behaviour
Test name changed to reflect behaviour
Python
isc
lgiordani/punch
c958615b7dd6548418117046e6ca06b657465ee5
benchmarker/modules/problems/cnn2d_toy/pytorch.py
benchmarker/modules/problems/cnn2d_toy/pytorch.py
import torch
import torch.nn as nn
import torch.nn.functional as F

from ..helpers_torch import Net4Both


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=2)
        self.conv2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=2)
        # TODO: make sure we check cnt_classes
        self.dense1 = nn.Linear(1577088, 2)

    def __call__(self, x):
        h = x
        h = self.conv1(h)
        h = F.relu(h)
        h = self.conv2(h)
        h = F.relu(h)
        h = torch.flatten(h, 1)
        h = self.dense1(h)
        return h


def get_kernel(params, unparsed_args=None):
    net = Net()
    return Net4Both(params, net, lambda t1: F.softmax(t1, dim=1))
import torch
import torch.nn as nn
import torch.nn.functional as F

from ..helpers_torch import Net4Both


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=2)
        self.conv2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=2)
        # TODO: make sure we check cnt_classes
        self.dense1 = nn.Linear(1577088, 2)

    def __call__(self, x):
        h = x
        h = self.conv1(h)
        h = F.relu(h)
        h = self.conv2(h)
        h = F.relu(h)
        h = torch.flatten(h, 1)
        h = self.dense1(h)
        return h


def get_kernel(params, unparsed_args=None):
    net = Net()
    return Net4Both(params, net, lambda t1: F.softmax(t1, dim=-1))
Fix softmax dim (I hope!!!)
Fix softmax dim (I hope!!!)
Python
mpl-2.0
undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker
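For the (batch, classes) logits this toy model produces, `dim=1` and `dim=-1` select the same axis, so the change above is about robustness rather than numerics: `dim=-1` keeps normalizing over the class axis even if an extra leading dimension shows up. A small sketch, assuming plain 2-D logits:

import torch
import torch.nn.functional as F

logits = torch.randn(4, 2)  # (batch, classes)

# For 2-D logits the last axis is axis 1, so both calls agree.
assert torch.allclose(F.softmax(logits, dim=1), F.softmax(logits, dim=-1))

# Every row sums to 1 over the class axis.
print(F.softmax(logits, dim=-1).sum(dim=-1))  # ~ tensor([1., 1., 1., 1.])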
d98b891d882ca916984586631b5ba09c52652a74
app/__init__.py
app/__init__.py
from flask import Flask
from flask.ext.bower import Bower
from flask.ext.pymongo import PyMongo

from config import Config

app = Flask(__name__)
app.config.from_object(Config)

# Register bower
Bower(app)

# Create mongodb client
mongo = PyMongo(app)

from .report.views import index, report
from flask import Flask
from flask_bower import Bower
from flask_pymongo import PyMongo

from config import Config

app = Flask(__name__)
app.config.from_object(Config)

# Register bower
Bower(app)

# Create mongodb client
mongo = PyMongo(app)

from .report.views import index, report
Resolve the deprecated flask ext imports
Resolve the deprecated flask ext imports
Python
mit
mingrammer/pyreportcard,mingrammer/pyreportcard
8ebec493b086525d23bbe4110c9d277c9b9b8301
src/sentry/tsdb/dummy.py
src/sentry/tsdb/dummy.py
""" sentry.tsdb.dummy ~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from sentry.tsdb.base import BaseTSDB class DummyTSDB(BaseTSDB): """ A no-op time-series storage. """ def incr(self, model, key, timestamp=None, count=1): pass def get_range(self, model, keys, start, end, rollup=None): return dict((k, []) for k in keys)
""" sentry.tsdb.dummy ~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from sentry.tsdb.base import BaseTSDB class DummyTSDB(BaseTSDB): """ A no-op time-series storage. """ def incr(self, model, key, timestamp=None, count=1): pass def get_range(self, model, keys, start, end, rollup=None): return dict((k, []) for k in keys) def record(self, model, key, values, timestamp=None): pass def get_distinct_counts_series(self, model, keys, start, end=None, rollup=None): return {k: [] for k in keys} def get_distinct_counts_totals(self, model, keys, start, end=None, rollup=None): return {k: 0 for k in keys}
Add support for DummyTSDB backend.
Add support for DummyTSDB backend.
Python
bsd-3-clause
daevaorn/sentry,gencer/sentry,mvaled/sentry,BuildingLink/sentry,daevaorn/sentry,beeftornado/sentry,jean/sentry,JackDanger/sentry,JamesMura/sentry,zenefits/sentry,jean/sentry,jean/sentry,ifduyue/sentry,mvaled/sentry,gencer/sentry,BayanGroup/sentry,imankulov/sentry,nicholasserra/sentry,JamesMura/sentry,beeftornado/sentry,fotinakis/sentry,alexm92/sentry,mvaled/sentry,mitsuhiko/sentry,alexm92/sentry,looker/sentry,ifduyue/sentry,BayanGroup/sentry,zenefits/sentry,BuildingLink/sentry,zenefits/sentry,gencer/sentry,looker/sentry,BuildingLink/sentry,imankulov/sentry,mvaled/sentry,nicholasserra/sentry,JamesMura/sentry,looker/sentry,JackDanger/sentry,ifduyue/sentry,JamesMura/sentry,mitsuhiko/sentry,looker/sentry,BayanGroup/sentry,imankulov/sentry,zenefits/sentry,jean/sentry,fotinakis/sentry,gencer/sentry,jean/sentry,zenefits/sentry,BuildingLink/sentry,fotinakis/sentry,fotinakis/sentry,daevaorn/sentry,JackDanger/sentry,daevaorn/sentry,looker/sentry,nicholasserra/sentry,BuildingLink/sentry,mvaled/sentry,ifduyue/sentry,alexm92/sentry,ifduyue/sentry,gencer/sentry,JamesMura/sentry,mvaled/sentry,beeftornado/sentry
0498778db28fd2e2272b48fb84a99eece7b662ff
autocorrect.py
autocorrect.py
# Open list of correcly-spelled words.
wordFile = open("words.txt")

threshold = 8
listOfWords = input().split()
index = 0


def lev(a, b):
    if min(len(a), len(b)) == 0:
        return max(len(a), len(b))
    else:
        return min(lev(a[:-1], b) + 1,
                   lev(a, b[:-1]) + 1,
                   lev(a[:-1], b[:-1]) + int(not a == b))


for x in listOfWords:
    replacement = (x, threshold + 1)
    for word in wordFile:
        x = x.lower()
        word = word[:-1].lower()
        if x == word:
            replacement = (x, 0)
            break  # Some words may actually be spelled correctly!
        d = lev(x, word)
        if (d < threshold) and (replacement[1] > d):
            replacement = (word, d)
    listOfWords[index] = replacement[0]
    index += 1

print(*listOfWords)
# Open list of correcly-spelled words.
wordFile = open("words.txt")

threshold = 8
listOfWords = input().split()
index = 0


# Compute Levenshtein distance
def lev(a, b):
    if min(len(a), len(b)) == 0:
        return max(len(a), len(b))
    elif len(a) == len(b):
        # Use Hamming Distance (special case)
        return sum(x != y for x, y in zip(a, b))
    else:
        return min(lev(a[:-1], b) + 1,
                   lev(a, b[:-1]) + 1,
                   lev(a[:-1], b[:-1]) + int(not a[-1] == b[-1]))


for x in listOfWords:
    replacement = (x, threshold + 1)
    for word in wordFile:
        x = x.lower()
        word = word[:-1].lower()
        if x == word:
            replacement = (x, 0)
            break  # Some words may actually be spelled correctly!
        d = lev(x, word)
        if (d < threshold) and (replacement[1] > d):
            replacement = (word, d)
    listOfWords[index] = replacement[0]
    index += 1
    wordFile.seek(0)

print(*listOfWords)
Use Hamming distance for efficiency
Use Hamming distance for efficiency Hamming distance is faster when strings are of same length (Hamming is a special case of Levenshtein).
Python
mit
jmanuel1/spellingbee
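The commit message above leans on two facts worth spelling out: Hamming distance only applies to equal-length strings, and on those strings it is an upper bound on (not always equal to) Levenshtein distance, while being much cheaper to compute. A self-contained check using an iterative Levenshtein (not the recursive one from the file):

def hamming(a, b):
    # Defined only for equal-length strings: count positions that differ.
    assert len(a) == len(b)
    return sum(x != y for x, y in zip(a, b))


def levenshtein(a, b):
    # Standard dynamic-programming edit distance, O(len(a) * len(b)).
    prev = list(range(len(b) + 1))
    for i, x in enumerate(a, 1):
        curr = [i]
        for j, y in enumerate(b, 1):
            curr.append(min(prev[j] + 1,              # deletion
                            curr[j - 1] + 1,          # insertion
                            prev[j - 1] + (x != y)))  # substitution
        prev = curr
    return prev[-1]


print(hamming("karolin", "kathrin"), levenshtein("karolin", "kathrin"))  # 3 3
print(hamming("flaw", "lawn"), levenshtein("flaw", "lawn"))              # 4 2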
76c44154ca1bc2eeb4e24cc820338c36960b1b5c
caniuse/test/test_caniuse.py
caniuse/test/test_caniuse.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest from caniuse.main import check def test_package_name_has_been_used(): assert 'Sorry' in check('requests') assert 'Sorry' in check('flask') assert 'Sorry' in check('pip') def test_package_name_has_not_been_used(): assert 'Congratulation' in check('this_package_name_has_not_been_used') assert 'Congratulation' in check('you_will_never_use_this_package_name') assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import pytest from click.testing import CliRunner from caniuse.main import check from caniuse.cli import cli class TestAPI(): def test_package_name_has_been_used(self): assert 'Sorry' in check('requests') assert 'Sorry' in check('flask') assert 'Sorry' in check('pip') def test_package_name_has_not_been_used(self): assert 'Congratulation' in check('this_package_name_has_not_been_used') assert 'Congratulation' in \ check('you_will_never_use_this_package_name') assert 'Congratulation' in \ check('I_suck_and_my_tests_are_order_dependent') class TestCLI(): def test_package_name_has_been_used(self): runner = CliRunner() result_one = runner.invoke(cli, ['requests']) assert 'Sorry' in result_one.output result_two = runner.invoke(cli, ['flask']) assert 'Sorry' in result_two.output result_three = runner.invoke(cli, ['pip']) assert 'Sorry' in result_three.output def test_package_name_has_not_been_used(self): runner = CliRunner() result_one = runner.invoke( cli, ['this_package_name_has_not_been_used']) assert 'Congratulation' in result_one.output result_two = runner.invoke( cli, ['you_will_never_use_this_package_name']) assert 'Congratulation' in result_two.output result_three = runner.invoke( cli, ['I_suck_and_my_tests_are_order_dependent']) assert 'Congratulation' in result_three.output
Add tests for cli.py to improve code coverage
Add tests for cli.py to improve code coverage
Python
mit
lord63/caniuse
429bd22a98895252dfb993d770c9b3060fef0fe3
tests/runalldoctests.py
tests/runalldoctests.py
import doctest
import glob

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass

testfiles = glob.glob('*.txt')

for file in testfiles:
    doctest.testfile(file)
import doctest
import getopt
import glob
import sys

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass


def run(pattern):
    if pattern is None:
        testfiles = glob.glob('*.txt')
    else:
        testfiles = glob.glob(pattern)
    for file in testfiles:
        doctest.testfile(file)


if __name__ == "__main__":
    try:
        opts, args = getopt.getopt(sys.argv[1:], "t:v")
    except getopt.GetoptError:
        print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
        sys.exit(2)
    pattern = None
    for o, a in opts:
        if o == '-t':
            pattern = a
    run(pattern)
Add option to pick single test file from the runner
Add option to pick single test file from the runner
Python
bsd-3-clause
datagovuk/OWSLib,kwilcox/OWSLib,QuLogic/OWSLib,KeyproOy/OWSLib,tomkralidis/OWSLib,menegon/OWSLib,datagovuk/OWSLib,datagovuk/OWSLib,dblodgett-usgs/OWSLib,ocefpaf/OWSLib,mbertrand/OWSLib,gfusca/OWSLib,jaygoldfinch/OWSLib,daf/OWSLib,JuergenWeichand/OWSLib,bird-house/OWSLib,geographika/OWSLib,kalxas/OWSLib,Jenselme/OWSLib,robmcmullen/OWSLib,geopython/OWSLib,jachym/OWSLib,daf/OWSLib,daf/OWSLib,b-cube/OWSLib,jaygoldfinch/OWSLib
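The `-t` option added above is plain `getopt` parsing; the runner falls back to every `*.txt` file when no pattern is given. A small, self-contained sketch of the same option handling (the glob pattern here is made up):

import getopt


def parse_pattern(argv):
    # Mirrors the "-t GLOB_PATTERN" handling: return the glob, or None.
    opts, _args = getopt.getopt(argv, "t:v")
    pattern = None
    for flag, value in opts:
        if flag == "-t":
            pattern = value
    return pattern


print(parse_pattern(["-t", "wms_*.txt"]))  # wms_*.txt
print(parse_pattern([]))                   # None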
459546a9cedb8e9cf3bee67edb4a76d37874f03b
tests/test_athletics.py
tests/test_athletics.py
from nose.tools import ok_, eq_

from pennathletics.athletes import get_roster, get_player


class TestAthletics():

    def test_roster(self):
        ok_(get_roster("m-baskbl", 2015) != [])

    def test_player_empty(self):
        ok_(get_player("m-baskbl", 2014) != [])

    def test_player_number(self):
        eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")
from nose.tools import ok_, eq_

from pennathletics.athletes import get_roster, get_player


class TestAthletics():

    def test_roster(self):
        ok_(get_roster("m-baskbl", 2015) != [])

    def test_player_empty(self):
        ok_(get_player("m-baskbl", 2014) != [])

    def test_player_number(self):
        eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")

    def test_player_hometown(self):
        player = get_player("m-baskbl", 2012, homeTown="Belfast, Ireland")[0]
        eq_(player.weight, '210 lbs')

    def test_player_softball(self):
        # 19 players on the 2013 softball team
        eq_(len(get_roster("w-softbl", 2013)), 19)
Add a few more tests for variety
Add a few more tests for variety
Python
mit
pennlabs/pennathletics
921225181fc1d0242d61226c7b10663ddba1a1a2
indra/tests/test_rlimsp.py
indra/tests/test_rlimsp.py
from indra.sources import rlimsp


def test_simple_usage():
    stmts = rlimsp.process_pmc('PMC3717945')
from indra.sources import rlimsp


def test_simple_usage():
    rp = rlimsp.process_pmc('PMC3717945')
    stmts = rp.statements
    assert len(stmts) == 6, len(stmts)


def test_ungrounded_usage():
    rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
    assert len(rp.statements)
Update test and add test for ungrounded endpoint.
Update test and add test for ungrounded endpoint.
Python
bsd-2-clause
johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,bgyori/indra,pvtodorov/indra
c461c57a90804558a30f3980b2608497a43c06a7
nipy/testing/__init__.py
nipy/testing/__init__.py
"""The testing directory contains a small set of imaging files to be used for doctests only. More thorough tests and example data will be stored in a nipy-data-suite to be created later and downloaded separately. Examples -------- >>> from nipy.testing import funcfile >>> from nipy.io.api import load_image >>> img = load_image(funcfile) >>> img.shape (17, 21, 3, 20) """ import os #__all__ = ['funcfile', 'anatfile'] # Discover directory path filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) funcfile = os.path.join(basedir, 'functional.nii.gz') anatfile = os.path.join(basedir, 'anatomical.nii.gz') from numpy.testing import * import decorators as dec from nose.tools import assert_true, assert_false
"""The testing directory contains a small set of imaging files to be used for doctests only. More thorough tests and example data will be stored in a nipy data packages that you can download separately - see :mod:`nipy.utils.data` .. note: We use the ``nose`` testing framework for tests. Nose is a dependency for the tests, but should not be a dependency for running the algorithms in the NIPY library. This file should import without nose being present on the python path. Examples -------- >>> from nipy.testing import funcfile >>> from nipy.io.api import load_image >>> img = load_image(funcfile) >>> img.shape (17, 21, 3, 20) """ import os #__all__ = ['funcfile', 'anatfile'] # Discover directory path filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) funcfile = os.path.join(basedir, 'functional.nii.gz') anatfile = os.path.join(basedir, 'anatomical.nii.gz') from numpy.testing import * import decorators as dec # Allow failed import of nose if not now running tests try: from nose.tools import assert_true, assert_false except ImportError: pass
Allow failed nose import without breaking nipy import
Allow failed nose import without breaking nipy import
Python
bsd-3-clause
bthirion/nipy,nipy/nipy-labs,alexis-roche/register,arokem/nipy,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/niseg,alexis-roche/nipy,alexis-roche/register,nipy/nipy-labs,nipy/nireg,nipy/nireg,alexis-roche/register,alexis-roche/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nireg,bthirion/nipy,arokem/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nipy,alexis-roche/nipy
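The guarded import in the record above is the general optional-dependency idiom: importing the package should never fail just because a test-only dependency such as nose is missing. A minimal sketch of the idiom with a hypothetical fallback (the nipy code simply passes on ImportError):

try:
    from nose.tools import assert_true, assert_false  # test-only dependency
except ImportError:
    # Hypothetical fallback so the names stay importable without nose.
    def assert_true(expr):
        assert expr

    def assert_false(expr):
        assert not expr

assert_true(1 + 1 == 2)
assert_false(1 + 1 == 3)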
04fbd65f90a3ce821fed76377ce7858ae0dd56ee
masters/master.chromium.webrtc/master_builders_cfg.py
masters/master.chromium.webrtc/master_builders_cfg.py
# Copyright (c) 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from buildbot.changes.filter import ChangeFilter from buildbot.schedulers.basic import SingleBranchScheduler from master.factory import annotator_factory m_annotator = annotator_factory.AnnotatorFactory() def Update(c): c['schedulers'].append( SingleBranchScheduler(name='chromium_scheduler', change_filter=ChangeFilter(project='chromium', branch='master'), treeStableTimer=60, builderNames=[ 'Win Builder', 'Mac Builder', 'Linux Builder', ]), ) specs = [ {'name': 'Win Builder', 'category': 'win'}, {'name': 'WinXP Tester', 'category': 'win'}, {'name': 'Win7 Tester', 'category': 'win'}, {'name': 'Win8 Tester', 'category': 'win'}, {'name': 'Win10 Tester', 'category': 'win'}, {'name': 'Mac Builder', 'category': 'mac'}, {'name': 'Mac Tester', 'category': 'mac'}, {'name': 'Linux Builder', 'recipe': 'chromium', 'category': 'linux'}, {'name': 'Linux Tester', 'recipe': 'chromium', 'category': 'linux'}, ] c['builders'].extend([ { 'name': spec['name'], 'factory': m_annotator.BaseFactory(spec.get('recipe', 'webrtc/chromium')), 'category': spec['category'], 'notify_on_missing': True, } for spec in specs ])
# Copyright (c) 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from buildbot.changes.filter import ChangeFilter from buildbot.schedulers.basic import SingleBranchScheduler from master.factory import annotator_factory m_annotator = annotator_factory.AnnotatorFactory() def Update(c): c['schedulers'].append( SingleBranchScheduler(name='chromium_scheduler', change_filter=ChangeFilter(project='chromium', branch='master'), treeStableTimer=60, builderNames=[ 'Win Builder', 'Mac Builder', 'Linux Builder', ]), ) specs = [ {'name': 'Win Builder', 'category': 'win'}, {'name': 'WinXP Tester', 'category': 'win'}, {'name': 'Win7 Tester', 'category': 'win'}, {'name': 'Win8 Tester', 'category': 'win'}, {'name': 'Win10 Tester', 'category': 'win'}, {'name': 'Mac Builder', 'category': 'mac'}, {'name': 'Mac Tester', 'category': 'mac'}, {'name': 'Linux Builder', 'category': 'linux'}, {'name': 'Linux Tester', 'category': 'linux'}, ] c['builders'].extend([ { 'name': spec['name'], 'factory': m_annotator.BaseFactory('chromium'), 'category': spec['category'], 'notify_on_missing': True, } for spec in specs ])
Switch remaining chromium.webrtc builders to chromium recipe.
WebRTC: Switch remaining chromium.webrtc builders to chromium recipe. Linux was switched in https://codereview.chromium.org/1508933002/ This switches the rest over to the chromium recipe. BUG=538259 [email protected] Review URL: https://codereview.chromium.org/1510853002 . git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@297886 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
45e86667311f4c9b79d90a3f86e71ffc072b1219
oneflow/landing/admin.py
oneflow/landing/admin.py
# -*- coding: utf-8 -*- from django.contrib import admin from django.conf import settings from .models import LandingContent TRUNCATE_LENGTH = 50 content_fields_names = tuple(('content_' + code) for code, lang in settings.LANGUAGES) content_fields_displays = tuple((field + '_display') for field in content_fields_names) class LandingContentAdmin(admin.ModelAdmin): list_display = ('name', ) + content_fields_displays #list_display_links = ('name') #list_filter = (HasTranslationFilter(lang) # for lang, lang_name in settings.LANGUAGES) ordering = ('name',) search_fields = ('name', ) + content_fields_names def truncated(cls, field_name): def wrapped(self, obj): value = getattr(obj, field_name) return value[:TRUNCATE_LENGTH] + (value[TRUNCATE_LENGTH:] and u'…') wrapped.short_description = cls._meta.get_field_by_name( field_name)[0].verbose_name wrapped.admin_order_field = field_name return wrapped for attr, attr_name in zip(content_fields_names, content_fields_displays): setattr(LandingContentAdmin, attr_name, truncated(LandingContent, attr)) admin.site.register(LandingContent, LandingContentAdmin)
# -*- coding: utf-8 -*- from django.contrib import admin from django.conf import settings from .models import LandingContent from sparks.django.admin import truncate_field content_fields_names = tuple(('content_' + code) for code, lang in settings.LANGUAGES) content_fields_displays = tuple((field + '_display') for field in content_fields_names) class LandingContentAdmin(admin.ModelAdmin): # #list_display_links = ('name') #list_filter = (HasTranslationFilter(lang) # for lang, lang_name in settings.LANGUAGES) # list_display = ('name', ) + content_fields_displays ordering = ('name',) search_fields = ('name', ) + content_fields_names for attr, attr_name in zip(content_fields_names, content_fields_displays): setattr(LandingContentAdmin, attr_name, truncate_field(LandingContent, attr)) admin.site.register(LandingContent, LandingContentAdmin)
Move the `truncate_field` pseudo-decorator to sparks (which just released 1.17).
Move the `truncate_field` pseudo-decorator to sparks (which just released 1.17).
Python
agpl-3.0
WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow
b7e657134c21b62e78453b11f0745e0048e346bf
examples/simple_distribution.py
examples/simple_distribution.py
import sys
import time
from random import shuffle

from vania.fair_distributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    users = ['user1', 'user2']
    tasks = ['task1', 'task2']
    preferences = [
        [1, 2],
        [2, 1],
    ]

    # Run solver
    start_time = time.time()
    distributor = FairDistributor(users, tasks, preferences)
    output = distributor.distribute(output='problem.lp')
    elapsed_time = time.time() - start_time

    # Output
    print(output)


if __name__ == '__main__':
    main()
import sys
import time
from random import shuffle

from vania.fair_distributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    users = ['user1', 'user2']
    tasks = ['task1', 'task2']
    preferences = [
        [1, 2],
        [2, 1],
    ]

    # Run solver
    distributor = FairDistributor(users, tasks, preferences)
    output = distributor.distribute(output='problem.lp')

    # Output
    print(output)


if __name__ == '__main__':
    main()
Remove time metrics from the simple example
Remove time metrics from the simple example
Python
mit
Hackathonners/vania
a6e2c0fc837b17321e2979cb12ba2d0e69603eac
orderedmodel/__init__.py
orderedmodel/__init__.py
__all__ = ['OrderedModel', 'OrderedModelAdmin']

from models import OrderedModel
from admin import OrderedModelAdmin
from .models import OrderedModel
from .admin import OrderedModelAdmin

__all__ = ['OrderedModel', 'OrderedModelAdmin']

try:
    from django.conf import settings
except ImportError:
    pass
else:
    if 'mptt' in settings.INSTALLED_APPS:
        from .mptt_models import OrderableMPTTModel
        from .mptt_admin import OrderedMPTTModelAdmin

        __all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
Make it easy to import OrderableMPTTModel and OrderedMPTTModelAdmin from the orderedmodel module
Python
bsd-3-clause
MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel
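The `else:` branch above only publishes the MPTT classes when django-mptt is listed in `INSTALLED_APPS`. The same feature-gated `__all__` idea can be sketched without Django at all (module and function names here are hypothetical):

# Feature-gated exports: only advertise names whose optional dependency is
# importable. "json" stands in for an optional package such as django-mptt.
__all__ = ['always_available']


def always_available():
    return "base feature"


try:
    import json
except ImportError:
    pass
else:
    def dumps_feature(obj):
        return json.dumps(obj)

    __all__ += ['dumps_feature']

print(__all__)  # ['always_available', 'dumps_feature'] when json is importable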
163cfea2a0c5e7d96dd870aa540c95a2ffa139f9
appstats/filters.py
appstats/filters.py
# encoding: utf-8 import json def json_filter(value): return json.dumps(value) def count_filter(value): if value is None: return "" count = float(value) base = 1000 prefixes = [ ('K'), ('M'), ('G'), ('T'), ('P'), ('E'), ('Z'), ('Y') ] if count < base: return '%.1f' % count else: for i, prefix in enumerate(prefixes): unit = base ** (i + 2) if count < unit: return '%.1f %s' % ((base * count / unit), prefix) return '%.1f %s' % ((base * count / unit), prefix) def time_filter(value): if value is None: return "" time = float(value) # Transform secs into ms time = value * 1000 if time < 1000: return '%.1f ms' % time else: time /= 1000 if time < 60: return '%.1f s' % time else: time /= 60 if time < 60: return '%.1f m' % time else: time /= 60 if time < 24: return '%.1f h' % time else: time /= 24 return'%.1f d' % time def default_filter(value): if value is None: return "" return value
# encoding: utf-8 import json def json_filter(value): return json.dumps(value) def count_filter(value): if value is None: return "" count = float(value) base = 1000 prefixes = [ ('K'), ('M'), ('G'), ('T'), ('P'), ('E'), ('Z'), ('Y') ] if count < base: return '%.1f' % count else: for i, prefix in enumerate(prefixes): unit = base ** (i + 2) if count < unit: return '%.1f %s' % ((base * count / unit), prefix) return '%.1f %s' % ((base * count / unit), prefix) def time_filter(value): if value is None: return "" # Transform secs into ms time = float(value) * 1000 if time < 1000: return '%.1f ms' % time else: time /= 1000 if time < 60: return '%.1f s' % time else: time /= 60 if time < 60: return '%.1f m' % time else: time /= 60 if time < 24: return '%.1f h' % time else: time /= 24 return'%.1f d' % time def default_filter(value): if value is None: return "" return value
Join two lines in one
Join two lines in one
Python
mit
uvNikita/appstats,uvNikita/appstats,uvNikita/appstats
fc94d60066692e6e8dc496bb854039bb66af3311
scout.py
scout.py
# Python does not require explicit interfaces, # but I believe that code which does is more # maintainable. Thus I include this explicit # interface for Problems. class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" def search(problem): print "Searching..." if (__name__ == '__main__'): problem = Problem(); search(problem)
# Python does not require explicit interfaces, # but I believe that code which does is more # maintainable. Thus I include this explicit # interface for Problems. class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" class SquareProblem(Problem): def __init__(self, size): self.size = size def getStartState(self): return (0, 0) def getEndState(self): return (self.size, self.size) def isValidState(self, state): return 0 <= state[0] <= self.size and 0 <= state[1] <= self.size def getSuccessors(self, state): return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]] def getStringRepr(self, state): return "(%d, %d)" % state def search(problem): print "Searching..." if (__name__ == '__main__'): problem = SquareProblem(2); search(problem)
Add a simple problem for testing
Add a simple problem for testing
Python
mit
SpexGuy/Scout
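The scout.py record above adds SquareProblem as a simple test fixture while search() still only prints a message. The following is a hedged sketch of one way a breadth-first search could traverse that Problem interface; it is illustrative only and not the project's eventual search implementation.

from collections import deque

def bfs(problem):
    start, goal = problem.getStartState(), problem.getEndState()
    frontier, seen = deque([start]), {start}
    while frontier:
        state = frontier.popleft()
        if state == goal:
            return state
        for nxt in problem.getSuccessors(state):
            if problem.isValidState(nxt) and nxt not in seen:
                seen.add(nxt)
                frontier.append(nxt)
    return None

# With the SquareProblem(2) defined in the record, bfs would reach and return (2, 2).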
928290ac4659c5da387b6bd511818b31535eb09e
setup.py
setup.py
# coding=utf-8 from setuptools import setup, find_packages long_description = open('README.md').read() VERSION = "0.1.10" setup( name="PyTrustNFe", version=VERSION, author="Danimar Ribeiro", author_email='[email protected]', keywords=['nfe', 'mdf-e'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Plugins', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Lesser General Public License v2 or \ later (LGPLv2+)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], packages=find_packages(exclude=['*test*']), package_data={'pytrustnfe': ['nfe/templates/*xml', 'nfse/paulistana/templates/*xml']}, url='https://github.com/danimaribeiro/PyTrustNFe', license='LGPL-v2.1+', description='PyTrustNFe Γ© uma biblioteca para envio de NF-e', long_description=long_description, install_requires=[ 'Jinja2 >= 2.8', 'signxml >= 2.0.0', ], test_suite='nose.collector', tests_require=[ 'nose', 'mock', ], )
# coding=utf-8 from setuptools import setup, find_packages VERSION = "0.1.11" setup( name="PyTrustNFe", version=VERSION, author="Danimar Ribeiro", author_email='[email protected]', keywords=['nfe', 'mdf-e'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Plugins', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Lesser General Public License v2 or \ later (LGPLv2+)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ], packages=find_packages(exclude=['*test*']), package_data={'pytrustnfe': ['nfe/templates/*xml', 'nfse/paulistana/templates/*xml']}, url='https://github.com/danimaribeiro/PyTrustNFe', license='LGPL-v2.1+', description='PyTrustNFe Γ© uma biblioteca para envio de NF-e', long_description=open('README.md', 'r').read(), install_requires=[ 'Jinja2 >= 2.8', 'signxml >= 2.0.0', ], test_suite='nose.collector', tests_require=[ 'nose', 'mock', ], )
FIX - No such file or directory: 'README.md'
FIX - No such file or directory: 'README.md'
Python
agpl-3.0
danimaribeiro/PyTrustNFe
5801bf2644b26fc93ade4651ec9b2cc4c58d25ec
setup.py
setup.py
"""Rachiopy setup script.""" from setuptools import find_packages, setup version = "0.2.0" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], )
"""Rachiopy setup script.""" from setuptools import find_packages, setup VERSION = "0.2.0" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], )
Convert to using requests to resolve ssl errors
Convert to using requests to resolve ssl errors
Python
mit
rfverbruggen/rachiopy
b07b4194528e08526be60b04413e40eb64d313d8
setup.py
setup.py
import versioneer from setuptools import setup setup( name='domain_events', version=versioneer.get_version(), cmdclass = versioneer.get_cmdclass(), desription='Ableton Domain Events via Rabbitmq', author='the Ableton web team', author_email='[email protected]', license='MIT', packages=['domain_events'], install_requires=["pika >= 0.10.0"], zip_safe=False, )
import versioneer from setuptools import setup setup( name='domain_events', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='Send and receive domain events via RabbitMQ', author='Ableton AG', author_email='[email protected]', url='https://github.com/AbletonAG/domain-events', license='MIT', packages=['domain_events'], install_requires=["pika >= 0.10.0"], zip_safe=False, )
Fix project description and add repository URL
Fix project description and add repository URL
Python
mit
AbletonAG/domain-events
4f722f2574740e79bda114fcd27d0f81ee6ce102
setup.py
setup.py
import os try: from setuptools import setup except ImportError: from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() required = ['requests>=0.11.2', 'requests-oauth2>=0.2.1'] setup( name='basecampx', version='0.1.7', author='Rimvydas Naktinis', author_email='[email protected]', description=('Wrapper for Basecamp Next API.'), license="MIT", keywords="basecamp bcx api", url='https://github.com/nous-consulting/basecamp-next', packages=['basecampx'], install_requires=required, long_description=read('README.rst'), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7' ], )
import os try: from setuptools import setup except ImportError: from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() required = ['requests>=0.11.2', 'requests-oauth2>=0.2.0'] setup( name='basecampx', version='0.1.7', author='Rimvydas Naktinis', author_email='[email protected]', description=('Wrapper for Basecamp Next API.'), license="MIT", keywords="basecamp bcx api", url='https://github.com/nous-consulting/basecamp-next', packages=['basecampx'], install_requires=required, long_description=read('README.rst'), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7' ], )
Use the correct Requests OAuth lib version (previously we used a patched lib).
Use the correct Requests OAuth lib version (previously we used a patched lib).
Python
mit
nous-consulting/basecamp-next
7caf008f5442baff92cd820d3fd3a059293a3e5d
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup(name='icalendar', version='0.10', description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
#!/usr/bin/env python from distutils.core import setup f = open('version.txt', 'r') version = f.read().strip() f.close() setup(name='icalendar', version=version, description='iCalendar support module', package_dir = {'': 'src'}, packages=['icalendar'], )
Tweak so that version information is picked up from version.txt.
Tweak so that version information is picked up from version.txt. git-svn-id: aa2e0347f72f9208cad9c7a63777f32311fef72e@11576 fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
Python
lgpl-2.1
greut/iCalendar,ryba-xek/iCalendar,offby1/icalendar
17594ab5b3d591ae8c45c30834fefbe49644cb5f
setup.py
setup.py
import setuptools setuptools.setup( name='sprockets.handlers.status', version='0.1.2', description='A small handler for reporting application status', long_description=open('test-requirements.txt', 'r').read(), url='https://github.com/sprockets/sprockets.handlers.status', author='AWeber Communications', author_email='[email protected]', license='BSD', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules' ], packages=['sprockets', 'sprockets.handlers', 'sprockets.handlers.status'], package_data={'': ['LICENSE', 'README.md']}, include_package_data=True, install_requires=['tornado'], namespace_packages=['sprockets', 'sprockets.handlers'], zip_safe=False)
import codecs import setuptools setuptools.setup( name='sprockets.handlers.status', version='0.1.2', description='A small handler for reporting application status', long_description=codecs.open('README.rst', 'r', 'utf8').read(), url='https://github.com/sprockets/sprockets.handlers.status', author='AWeber Communications', author_email='[email protected]', license='BSD', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules' ], packages=['sprockets', 'sprockets.handlers', 'sprockets.handlers.status'], package_data={'': ['LICENSE', 'README.md']}, include_package_data=True, install_requires=['tornado'], namespace_packages=['sprockets', 'sprockets.handlers'], zip_safe=False)
Read the readme instead of the test requirements
Read the readme instead of the test requirements
Python
bsd-3-clause
sprockets/sprockets.handlers.status
24b112885d7611fca4186cd97bdee97ea2f934c3
setup.py
setup.py
from setuptools import setup, find_packages setup( name = 'annotator', version = '0.7.3', packages = find_packages(), install_requires = [ 'Flask==0.8', 'pyes==0.16.0', 'PyJWT==0.1.4', 'nose==1.1.2', 'mock==0.8.0' ], # metadata for upload to PyPI author = 'Rufus Pollock and Nick Stenning (Open Knowledge Foundation)', author_email = '[email protected]', description = 'Inline web annotation application and middleware using javascript and WSGI', long_description = """Inline javascript-based web annotation library. \ Package includeds a database-backed annotation store \ with RESTFul (WSGI-powered) web-interface.""", license = 'MIT', keywords = 'annotation web javascript', url = 'http://okfnlabs.org/annotator/', download_url = 'https://github.com/okfn/annotator-store', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python' ], )
from setuptools import setup, find_packages setup( name = 'annotator', version = '0.7.3', packages = find_packages(), install_requires = [ 'Flask==0.8', 'pyes==0.16.0', 'PyJWT==0.1.4', 'iso8601==0.1.4', 'nose==1.1.2', 'mock==0.8.0' ], # metadata for upload to PyPI author = 'Rufus Pollock and Nick Stenning (Open Knowledge Foundation)', author_email = '[email protected]', description = 'Inline web annotation application and middleware using javascript and WSGI', long_description = """Inline javascript-based web annotation library. \ Package includeds a database-backed annotation store \ with RESTFul (WSGI-powered) web-interface.""", license = 'MIT', keywords = 'annotation web javascript', url = 'http://okfnlabs.org/annotator/', download_url = 'https://github.com/okfn/annotator-store', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python' ], )
Fix missing dep on iso8601
Fix missing dep on iso8601
Python
mit
nobita-isc/annotator-store,nobita-isc/annotator-store,ningyifan/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,happybelly/annotator-store,openannotation/annotator-store
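For the annotator record above, the newly pinned iso8601 package is the kind of runtime dependency used for timestamp parsing. The call below is a hedged, stand-alone example with a made-up timestamp, not code from annotator-store.

import iso8601

dt = iso8601.parse_date("2013-02-01T10:00:00Z")
print(dt.isoformat())  # 2013-02-01T10:00:00+00:00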
1963d144362a66ca39ffdb909163ef4301a8048d
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages with open('README.rst') as readme_file: README = readme_file.read() install_requires = [ 'click==6.6', 'botocore>=1.5.40,<2.0.0', 'typing==3.5.3.0', 'six>=1.10.0,<2.0.0', 'pip>=9,<10' ] setup( name='chalice', version='1.0.0', description="Microframework", long_description=README, author="James Saryerwinnie", author_email='[email protected]', url='https://github.com/jamesls/chalice', packages=find_packages(exclude=['tests']), install_requires=install_requires, license="Apache License 2.0", package_data={'chalice': ['*.json']}, include_package_data=True, zip_safe=False, keywords='chalice', entry_points={ 'console_scripts': [ 'chalice = chalice.cli:main', ] }, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', ], )
#!/usr/bin/env python from setuptools import setup, find_packages with open('README.rst') as readme_file: README = readme_file.read() install_requires = [ 'click==6.6', 'botocore>=1.5.40,<2.0.0', 'typing==3.5.3.0', 'six>=1.10.0,<2.0.0', 'pip>=9,<10' ] setup( name='chalice', version='1.0.0', description="Microframework", long_description=README, author="James Saryerwinnie", author_email='[email protected]', url='https://github.com/jamesls/chalice', packages=find_packages(exclude=['tests']), install_requires=install_requires, license="Apache License 2.0", package_data={'chalice': ['*.json']}, include_package_data=True, zip_safe=False, keywords='chalice', entry_points={ 'console_scripts': [ 'chalice = chalice.cli:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', ], )
Change status from Beta to Production/Stable
Change status from Beta to Production/Stable
Python
apache-2.0
awslabs/chalice
8646a5a111993dc58c322d3c6154b2d6197fdb06
setup.py
setup.py
from setuptools import setup setup( name='flask_flaskwork', description='A Flask plugin to talk with the Flaskwork Chrome extension.', version='0.1.12', license='BSD', author='Tim Radke', author_email='[email protected]', py_modules=['flask_flaskwork'], zip_safe=False, install_requires=[ 'Flask', 'sqlalchemy', 'sqlparse' ] )
from setuptools import setup from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='flask_flaskwork', description='A Flask plugin to talk with the Flaskwork Chrome extension.', version='0.1.12', license='BSD', author='Tim Radke', author_email='[email protected]', py_modules=['flask_flaskwork'], zip_safe=False, long_description=long_description, long_description_content_type='text/markdown', install_requires=[ 'Flask', 'sqlalchemy', 'sqlparse' ] )
Add description from README to pypi
Add description from README to pypi
Python
mit
countach74/flask_flaskwork
cb4421529e9564f110b84f590f14057eda8746c8
setup.py
setup.py
from setuptools import setup from setuptools.command.install import install as _install class install(_install): def run(self): _install.run(self) setup( cmdclass = { 'install' : install }, name = 'hydra', version = '0.1', author = 'tatsy', author_email = '[email protected]', url = 'https://github.com/tatsy/hydra.git', description = 'Python HDR image processing library.', license = 'MIT', classifiers = [ 'Development Status :: 1 - Planning', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], packages = [ 'hydra', 'hydra.core', 'hydra.gen', 'hydra.io', 'hydra.filters', 'hydra.tonemap' ] )
from setuptools import setup from setuptools.command.install import install as _install class install(_install): def run(self): _install.run(self) setup( cmdclass = { 'install' : install }, name = 'hydra', version = '0.1', author = 'tatsy', author_email = '[email protected]', url = 'https://github.com/tatsy/hydra.git', description = 'Python HDR image processing library.', license = 'MIT', classifiers = [ 'Development Status :: 1 - Planning', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ], packages = [ 'hydra', 'hydra.core', 'hydra.eo', 'hydra.filters', 'hydra.gen', 'hydra.io', 'hydra.tonemap' ] )
Add eo to installed packages.
Add eo to installed packages.
Python
mit
tatsy/hydra
3165cbdd418a38f72f2b638797b692589452528c
setup.py
setup.py
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian==0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='debexpo', version="", #description='', #author='', #author_email='', #url='', install_requires=[ "Pylons>=1.0", "SQLAlchemy>=0.6", "Webhelpers>=0.6.1", "Babel>=0.9.6", "ZSI", "python-debian>=0.1.16", "soaplib==0.8.1"], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', package_data={'debexpo': ['i18n/*/LC_MESSAGES/*.mo']}, message_extractors = {'debexpo': [ ('**.py', 'python', None), ('templates/**.mako', 'mako', None), ('public/**', 'ignore', None)]}, entry_points=""" [paste.app_factory] main = debexpo.config.middleware:make_app [paste.app_install] main = pylons.util:PylonsInstaller [console_scripts] debexpo-importer = debexpo.scripts.debexpo_importer:main debexpo-user-importer = debexpo.scripts.user_importer:main """, )
Make the python-debian library dependency a bit more sane
Make the python-debian library dependency a bit more sane
Python
mit
jonnylamb/debexpo,jonnylamb/debexpo,jadonk/debexpo,jonnylamb/debexpo,jadonk/debexpo,swvist/Debexpo,swvist/Debexpo,swvist/Debexpo,jadonk/debexpo
8421166d2d374113e0c9cff92075250269daee76
setup.py
setup.py
import sys sys.path.insert(0, 'src') try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='ibmiotf', version="0.2.7", author='David Parker', author_email='[email protected]', package_dir={'': 'src'}, packages=['ibmiotf', 'ibmiotf.codecs'], package_data={'ibmiotf': ['*.pem']}, url='https://github.com/ibm-watson-iot/iot-python', license=open('LICENSE').read(), description='IBM Watson IoT Platform Client for Python', long_description=open('README.rst').read(), install_requires=[ "iso8601 >= 0.1.10", "paho-mqtt >= 1.1", "pytz >= 2014.7", "requests >= 2.5.0", "requests_toolbelt >= 0.7.0" ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
import sys sys.path.insert(0, 'src') try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='ibmiotf', version="0.2.7", author='David Parker', author_email='[email protected]', package_dir={'': 'src'}, packages=['ibmiotf', 'ibmiotf.codecs'], package_data={'ibmiotf': ['*.pem']}, url='https://github.com/ibm-watson-iot/iot-python', license=open('LICENSE').read(), description='Python Client for IBM Watson IoT Platform', long_description=open('README.rst').read(), install_requires=[ "iso8601 >= 0.1.10", "paho-mqtt >= 1.1", "pytz >= 2014.7", "requests >= 2.5.0", "requests_toolbelt >= 0.7.0", "dicttoxml >= 1.7.4", "xmltodict >= 0.10.2" ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Add xmltodict and dicttoxml to install_requires
Add xmltodict and dicttoxml to install_requires
Python
epl-1.0
ibm-watson-iot/iot-python,ibm-messaging/iot-python,Lokesh-K-Haralakatta/iot-python,ibm-watson-iot/iot-python
9d180976b27213cf2c59e34a5aefec6335a1deca
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages # Match releases to redis-py versions __version__ = '2.7.2' # Jenkins will replace __build__ with a unique value. __build__ = '' setup(name='mockredis', version=__version__ + __build__, description='Mock for redis-py', url='http://www.github.com/locationlabs/mockredis', license='Apache2', packages=find_packages(exclude=['*.tests']), setup_requires=[ 'nose==1.2.1' ], install_requires=[ ], tests_require=[ 'redis>=2.7.2' ], test_suite='mockredis.tests', )
#!/usr/bin/env python from setuptools import setup, find_packages # Match releases to redis-py versions __version__ = '2.7.2' # Jenkins will replace __build__ with a unique value. __build__ = '' setup(name='mockredis', version=__version__ + __build__, description='Mock for redis-py', url='http://www.github.com/locationlabs/mockredis', license='Apache2', packages=find_packages(exclude=['*.tests']), setup_requires=[ 'nose==1.2.1' ], install_requires=[ 'bintrees==1.0.1' ], tests_require=[ 'redis>=2.7.2' ], test_suite='mockredis.tests', )
Use bintrees to implement sorted sets.
Use bintrees to implement sorted sets.
Python
apache-2.0
matejkloska/mockredis,locationlabs/mockredis,optimizely/mockredis,yossigo/mockredis,path/mockredis
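The mockredis commit above moves sorted sets onto bintrees. As background, here is a tiny stdlib-only sketch of the sorted-set semantics being modelled (members ordered by score, then member); it deliberately avoids the bintrees API and is not how mockredis implements it.

class TinySortedSet:
    def __init__(self):
        self._scores = {}  # member -> score

    def zadd(self, member, score):
        self._scores[member] = float(score)

    def zrange(self):
        # Redis orders by (score, member); a balanced tree keeps this cheap,
        # the sorted() call here is purely for illustration.
        return sorted(self._scores, key=lambda m: (self._scores[m], m))

zs = TinySortedSet()
zs.zadd("a", 3); zs.zadd("b", 1); zs.zadd("c", 2)
print(zs.zrange())  # ['b', 'c', 'a']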
4b451e7c2e399baac311727de407db9138e97e56
setup.py
setup.py
from skbuild import setup setup( name="avogadrolibs", version="0.0.8", description="", author='Kitware', license="BSD", packages=['avogadro'], cmake_args=[ '-DUSE_SPGLIB:BOOL=FALSE', '-DUSE_OPENGL:BOOL=FALSE', '-DUSE_QT:BOOL=FALSE', '-DUSE_MMTF:BOOL=FALSE', '-DUSE_PYTHON:BOOL=TRUE', '-DUSE_MOLEQUEUE:BOOL=FALSE', '-DUSE_HDF5:BOOL=FALSE', '-DUSE_LIBARCHIVE:BOOL=FALSE', '-DUSE_LIBMSYM:BOOL=FALSE' ] )
from skbuild import setup setup( name="avogadro", version="0.0.8", description="", author='Kitware', license="BSD", packages=['avogadro'], cmake_args=[ '-DUSE_SPGLIB:BOOL=FALSE', '-DUSE_OPENGL:BOOL=FALSE', '-DUSE_QT:BOOL=FALSE', '-DUSE_MMTF:BOOL=FALSE', '-DUSE_PYTHON:BOOL=TRUE', '-DUSE_MOLEQUEUE:BOOL=FALSE', '-DUSE_HDF5:BOOL=FALSE', '-DUSE_LIBARCHIVE:BOOL=FALSE', '-DUSE_LIBMSYM:BOOL=FALSE' ] )
Rename distribution avogadrolibs => avogadro
Rename distribution avogadrolibs => avogadro Signed-off-by: Chris Harris <[email protected]>
Python
bsd-3-clause
ghutchis/avogadrolibs,OpenChemistry/avogadrolibs,OpenChemistry/avogadrolibs,OpenChemistry/avogadrolibs,ghutchis/avogadrolibs,ghutchis/avogadrolibs,ghutchis/avogadrolibs,OpenChemistry/avogadrolibs,OpenChemistry/avogadrolibs,ghutchis/avogadrolibs
8597b77de45621292801a51f6a72a678a19dee57
setup.py
setup.py
#!/usr/bin/env python import subprocess from setuptools import setup, find_packages import os def git_version(): def _minimal_ext_cmd(cmd): # construct minimal environment env = {} for k in ['SYSTEMROOT', 'PATH']: v = os.environ.get(k) if v is not None: env[k] = v # LANGUAGE is used on win32 env['LANGUAGE'] = 'C' env['LANG'] = 'C' env['LC_ALL'] = 'C' out = subprocess.Popen( cmd, stdout=subprocess.PIPE, env=env).communicate()[0] return out try: out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD']) GIT_REVISION = out.strip().decode('ascii') except OSError: GIT_REVISION = "" return GIT_REVISION def getVersion(version, release=True): if os.path.exists('.git'): _git_version = git_version()[:7] else: _git_version = '' if release: return version else: return version + '-dev.' + _git_version setup(name='pymks', version=getVersion('0.2.1', release=True), description='Materials Knowledge Systems in Python (PyMKS)', author='David Brough, Daniel Wheeler', author_email='[email protected]', url='http://pymks.org', packages=find_packages(), package_data={'': ['tests/*.py']}, )
#!/usr/bin/env python import subprocess from setuptools import setup, find_packages import os def git_version(): def _minimal_ext_cmd(cmd): # construct minimal environment env = {} for k in ['SYSTEMROOT', 'PATH']: v = os.environ.get(k) if v is not None: env[k] = v # LANGUAGE is used on win32 env['LANGUAGE'] = 'C' env['LANG'] = 'C' env['LC_ALL'] = 'C' out = subprocess.Popen( cmd, stdout=subprocess.PIPE, env=env).communicate()[0] return out try: out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD']) GIT_REVISION = out.strip().decode('ascii') except OSError: GIT_REVISION = "" return GIT_REVISION def getVersion(version, release=True): if os.path.exists('.git'): _git_version = git_version()[:7] else: _git_version = '' if release: return version else: return version + '-dev.' + _git_version setup(name='pymks', version=getVersion('0.2.3', release=True), description='Materials Knowledge Systems in Python (PyMKS)', author='David Brough, Daniel Wheeler', author_email='[email protected]', url='http://pymks.org', packages=find_packages(), package_data={'': ['tests/*.py']}, )
Update release number to 0.2.3
Update release number to 0.2.3
Python
mit
awhite40/pymks,davidbrough1/pymks,fredhohman/pymks,davidbrough1/pymks,XinyiGong/pymks
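The pymks setup.py above derives development versions from git rev-parse HEAD. Below is a hedged, self-contained sketch of the same idea using a short hash; the --short flag and the fallback behaviour are illustrative choices, not the project's exact code.

import subprocess

def short_git_revision(default=""):
    try:
        out = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
        return out.strip().decode("ascii")
    except (OSError, subprocess.CalledProcessError):
        return default

print(short_git_revision() or "not a git checkout")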
0574705dcbc473805aee35b482a41bdef060b0c9
setup.py
setup.py
from distutils.core import setup import py2pack with open('README') as f: README = f.read() setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = README, author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
from distutils.core import setup import py2pack setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = open('README').read(), author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
Load the README file traditionally; the with-statement is not supported by older Python releases.
Load the README file traditionally; the with-statement is not supported by older Python releases.
Python
apache-2.0
saschpe/py2pack,toabctl/py2pack
6d0307c7d145b02f7659efbed164833983cf1fcc
setup.py
setup.py
#!/usr/bin/env python import sys from setuptools import setup, find_packages try: import multiprocessing # NOQA except ImportError: pass install_requires = ['mock'] lint_requires = ['pep8', 'pyflakes'] tests_require = ['nose'] if sys.version_info < (2, 7): tests_require.append('unittest2') setup_requires = [] if 'nosetests' in sys.argv[1:]: setup_requires.append('nose') setup( name='exam', version='0.10.3', author='Jeff Pollard', author_email='[email protected]', url='https://github.com/fluxx/exam', description='Helpers for better testing.', license='MIT', packages=find_packages(), install_requires=install_requires, tests_require=tests_require, setup_requires=setup_requires, extras_require={ 'test': tests_require, 'all': install_requires + tests_require, 'docs': ['sphinx'] + tests_require, 'lint': lint_requires }, zip_safe=False, test_suite='nose.collector', )
#!/usr/bin/env python import sys from setuptools import setup, find_packages try: import multiprocessing # NOQA except ImportError: pass install_requires = ['mock'] lint_requires = ['pep8', 'pyflakes'] tests_require = ['nose'] if sys.version_info < (2, 7): tests_require.append('unittest2') setup_requires = [] if 'nosetests' in sys.argv[1:]: setup_requires.append('nose') setup( name='exam', version='0.10.3', author='Jeff Pollard', author_email='[email protected]', url='https://github.com/fluxx/exam', description='Helpers for better testing.', license='MIT', packages=find_packages(), install_requires=install_requires, tests_require=tests_require, setup_requires=setup_requires, extras_require={ 'test': tests_require, 'all': install_requires + tests_require, 'docs': ['sphinx'] + tests_require, 'lint': lint_requires }, zip_safe=False, test_suite='nose.collector', classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", ], )
Document the supported versions of Python
Document the supported versions of Python
Python
mit
Fluxx/exam,Fluxx/exam,gterzian/exam,gterzian/exam
6bece40a1a0c8977c6211234e5aa4e64ad5b01a2
linguine/ops/StanfordCoreNLP.py
linguine/ops/StanfordCoreNLP.py
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: proc = None """ When the JSON segments return from the CoreNLP library, they separate the data acquired from each word into their own element. For readability's sake, it would be nice to pair all of the information for a given word with that word, making a list of words with their part of speech tags """ def jsonCleanup(self, data, analysisTypes): for corpus in data: res = StanfordCoreNLP.proc.parse_doc(corpus.contents) print(str(res)); for sentence in res["sentences"]: words = [] for index, token in enumerate(sentence["tokens"]): word = {} word["token"] = sentence["tokens"][index] for atype in analysisTypes: word[atype] = sentence[atype][index] words.append(word) return words def __init__(self, analysisType): self.analysisType = analysisType if StanfordCoreNLP.proc == None: StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')]) def run(self, data): return self.jsonCleanup(data, self.analysisType)
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: proc = None """ When the JSON segments return from the CoreNLP library, they separate the data acquired from each word into their own element. For readability's sake, it would be nice to pair all of the information for a given word with that word, making a list of words with their part of speech tags """ def jsonCleanup(self, data, analysisTypes): for corpus in data: res = StanfordCoreNLP.proc.parse_doc(corpus.contents) words = [] for sentence in res["sentences"]: for index, token in enumerate(sentence["tokens"]): word = {} word["token"] = sentence["tokens"][index] for atype in analysisTypes: word[atype] = sentence[atype][index] words.append(word) return words def __init__(self, analysisType): self.analysisType = analysisType if StanfordCoreNLP.proc == None: StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')]) def run(self, data): return self.jsonCleanup(data, self.analysisType)
Return entire corpus from corenlp analysis
Return entire corpus from corenlp analysis
Python
mit
Pastafarians/linguine-python,rigatoni/linguine-python
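The jsonCleanup change above hinges on pairing CoreNLP's parallel per-sentence arrays into one dict per token. Here is a hedged sketch of that pairing, with a made-up sentence dict standing in for real CoreNLP output.

sentence = {
    "tokens": ["Dogs", "bark"],
    "pos": ["NNS", "VBP"],
    "lemmas": ["dog", "bark"],
}
analysis_types = ["pos", "lemmas"]
words = []
for index, token in enumerate(sentence["tokens"]):
    word = {"token": token}
    for atype in analysis_types:
        word[atype] = sentence[atype][index]
    words.append(word)
print(words)  # [{'token': 'Dogs', 'pos': 'NNS', 'lemmas': 'dog'}, {'token': 'bark', ...}]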
468a66c0945ce9e78fb5da8a6a628ce581949759
livinglots_usercontent/views.py
livinglots_usercontent/views.py
from django.views.generic import CreateView from braces.views import FormValidMessageMixin from livinglots_genericviews import AddGenericMixin class AddContentView(FormValidMessageMixin, AddGenericMixin, CreateView): def _get_content_name(self): return self.form_class._meta.model._meta.object_name def get_form_valid_message(self): return '%s added successfully.' % self._get_content_name() def get_success_url(self): return self.get_content_object().get_absolute_url() def get_template_names(self): return [ 'livinglots/usercontent/add_%s.html' % self._get_content_name().lower(), ] def form_valid(self, form): """ Save the content and notify participants who are following the target lot. """ self.object = form.save() # NB: Notifications are sent to followers using a descendant of # NotifyParticipantsOnCreationForm return super(AddContentView, self).form_valid(form)
from django.views.generic import CreateView from braces.views import FormValidMessageMixin from livinglots_genericviews import AddGenericMixin class AddContentView(FormValidMessageMixin, AddGenericMixin, CreateView): def _get_content_name(self): return self.form_class._meta.model._meta.object_name def get_form_valid_message(self): return '%s added successfully.' % self._get_content_name() def get_initial(self): initial = super(AddContentView, self).get_initial() user = self.request.user # If user has name, set that for them try: initial['added_by_name'] = user.first_name or user.username except AttributeError: pass return initial def get_success_url(self): return self.get_content_object().get_absolute_url() def get_template_names(self): return [ 'livinglots/usercontent/add_%s.html' % self._get_content_name().lower(), ] def form_valid(self, form): """ Save the content and notify participants who are following the target lot. """ self.object = form.save() # NB: Notifications are sent to followers using a descendant of # NotifyParticipantsOnCreationForm return super(AddContentView, self).form_valid(form)
Set added_by_name if we can
Set added_by_name if we can
Python
agpl-3.0
596acres/django-livinglots-usercontent,596acres/django-livinglots-usercontent
77c69f592fe35ac4e3087366da084b7a73f21ee6
setup.py
setup.py
from setuptools import setup, find_packages setup( name='panoptescli', version='1.1.1', url='https://github.com/zooniverse/panoptes-cli', author='Adam McMaster', author_email='[email protected]', description=( 'A command-line client for Panoptes, the API behind the Zooniverse' ), packages=find_packages(), include_package_data=True, install_requires=[ 'Click>=6.7,<7.1', 'PyYAML>=5.1,<5.2', 'panoptes-client>=1.0,<2.0', 'humanize>=0.5.1,<0.6', 'pathvalidate>=0.29.0,<0.30', ], entry_points=''' [console_scripts] panoptes=panoptes_cli.scripts.panoptes:cli ''', )
from setuptools import setup, find_packages setup( name='panoptescli', version='1.1.1', url='https://github.com/zooniverse/panoptes-cli', author='Adam McMaster', author_email='[email protected]', description=( 'A command-line client for Panoptes, the API behind the Zooniverse' ), packages=find_packages(), include_package_data=True, install_requires=[ 'Click>=6.7,<7.1', 'PyYAML>=5.1,<5.3', 'panoptes-client>=1.0,<2.0', 'humanize>=0.5.1,<0.6', 'pathvalidate>=0.29.0,<0.30', ], entry_points=''' [console_scripts] panoptes=panoptes_cli.scripts.panoptes:cli ''', )
Update pyyaml requirement from <5.2,>=5.1 to >=5.1,<5.3
Update pyyaml requirement from <5.2,>=5.1 to >=5.1,<5.3 Updates the requirements on [pyyaml](https://github.com/yaml/pyyaml) to permit the latest version. - [Release notes](https://github.com/yaml/pyyaml/releases) - [Changelog](https://github.com/yaml/pyyaml/blob/master/CHANGES) - [Commits](https://github.com/yaml/pyyaml/compare/5.1...5.2) Signed-off-by: dependabot-preview[bot] <[email protected]>
Python
apache-2.0
zooniverse/panoptes-cli
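To see what the panoptescli bound change (from <5.2 to <5.3) admits, the hedged sketch below uses the third-party packaging library; that library is an assumption made here for illustration and is not a dependency listed in the record.

from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_spec = SpecifierSet(">=5.1,<5.2")
new_spec = SpecifierSet(">=5.1,<5.3")
for candidate in ("5.1.2", "5.2"):
    v = Version(candidate)
    print(candidate, v in old_spec, v in new_spec)
# 5.1.2 satisfies both ranges; 5.2 only satisfies the widened one.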
3b692018632fef7e632086ed2ef5a980ad6f6c2f
setup.py
setup.py
#-*- coding: utf-8 -*- from setuptools import setup version = "1.0.post1" setup( name = "django-easy-pjax", version = version, description = "Easy PJAX for Django.", license = "BSD", author = "Filip Wasilewski", author_email = "[email protected]", url = "https://github.com/nigma/django-easy-pjax", download_url='https://github.com/nigma/django-easy-pjax/zipball/master', long_description = open("README.rst").read(), packages = ["easy_pjax"], include_package_data=True, classifiers = ( "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Libraries :: Python Modules" ), tests_require=[ "django>=1.4,<1.5", ], zip_safe = False )
#-*- coding: utf-8 -*- from setuptools import setup version = "1.0.post1" setup( name = "django-easy-pjax", version = version, description = "Easy PJAX for Django.", license = "BSD", author = "Filip Wasilewski", author_email = "[email protected]", url = "https://github.com/nigma/django-easy-pjax", download_url='https://github.com/nigma/django-easy-pjax/zipball/master', long_description = open("README.rst").read(), packages = ["easy_pjax"], include_package_data=True, classifiers = ( "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Software Development :: Libraries :: Python Modules" ), tests_require=[ "django>=1.4,<1.6", ], zip_safe = False )
Update trove classifiers and django test version requirements
Update trove classifiers and django test version requirements
Python
bsd-3-clause
Kondou-ger/django-easy-pjax,nigma/django-easy-pjax,nigma/django-easy-pjax,Kondou-ger/django-easy-pjax,Kondou-ger/django-easy-pjax,nigma/django-easy-pjax
c470da4fcf5bec84c255aa4514f6fd764781eb1a
setup.py
setup.py
from distutils.core import setup ext_files = ["pyreBloom/bloom.c"] kwargs = {} try: from Cython.Distutils import build_ext from Cython.Distutils import Extension print "Building from Cython" ext_files.append("pyreBloom/pyreBloom.pyx") kwargs['cmdclass'] = {'build_ext': build_ext} except ImportError: from distutils.core import Extension ext_files.append("pyreBloom/pyreBloom.c") print "Building from C" ext_modules = [Extension("pyreBloom", ext_files, libraries=['hiredis'])] setup( name = 'pyreBloom', version = "1.0.1", author = "Dan Lecocq", author_email = "[email protected]", license = "MIT License", ext_modules = ext_modules, classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: C', 'Programming Language :: Cython', 'Topic :: Software Development :: Libraries :: Python Modules', ], **kwargs )
from distutils.core import setup ext_files = ["pyreBloom/bloom.c"] kwargs = {} try: from Cython.Distutils import build_ext from Cython.Distutils import Extension print "Building from Cython" ext_files.append("pyreBloom/pyreBloom.pyx") kwargs['cmdclass'] = {'build_ext': build_ext} except ImportError: from distutils.core import Extension ext_files.append("pyreBloom/pyreBloom.c") print "Building from C" ext_modules = [Extension("pyreBloom", ext_files, libraries=['hiredis'], library_dirs=['/usr/local/lib'], include_dirs=['/usr/local/include'])] setup( name = 'pyreBloom', version = "1.0.1", author = "Dan Lecocq", author_email = "[email protected]", license = "MIT License", ext_modules = ext_modules, classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: C', 'Programming Language :: Cython', 'Topic :: Software Development :: Libraries :: Python Modules', ], **kwargs )
Fix build with newer dependencies.
Fix build with newer dependencies.
Python
mit
seomoz/pyreBloom,seomoz/pyreBloom,seomoz/pyreBloom
34eadaca8901706fd51dc12df5c63cf4c966249e
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-bitfield', version='1.9.0', author='DISQUS', author_email='[email protected]', url='https://github.com/disqus/django-bitfield', description='BitField in Django', packages=find_packages(), zip_safe=False, install_requires=[ 'Django>=1.10', 'six', ], extras_require={ 'tests': [ 'flake8', 'mysqlclient', 'psycopg2>=2.3', 'pytest-django', ], }, include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'Topic :: Software Development', "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", ], )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-bitfield', version='1.9.0', author='DISQUS', author_email='[email protected]', url='https://github.com/disqus/django-bitfield', description='BitField in Django', packages=find_packages(), zip_safe=False, install_requires=[ 'Django>=1.8', 'six', ], extras_require={ 'tests': [ 'flake8', 'mysqlclient', 'psycopg2>=2.3', 'pytest-django', ], }, include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'Topic :: Software Development', "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", ], )
Set Django requirement to the last LTS
Set Django requirement to the last LTS
Python
apache-2.0
disqus/django-bitfield,Elec/django-bitfield,joshowen/django-bitfield
fddc1198d54a8a868bd8b97ed7318feeb00f6725
setup.py
setup.py
from setuptools import setup VERSION = '0.2.8' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
from setuptools import setup VERSION = '0.2.9' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
Add keywords and bump to 0.2.9
Add keywords and bump to 0.2.9
Python
mit
filwaitman/jinja2-standalone-compiler
7fe0a7d03a13834f390180907265b8f83a978385
setup.py
setup.py
from setuptools import setup, find_packages setup( name='django-facebook', version='0.1', description='Replace Django Authentication with Facebook', long_description=open('README.md').read(), author='Aidan Lister', author_email='[email protected]', url='http://github.com/pythonforfacebook/django-facebook', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'django>=1.2.7', 'facebook-sdk>=0.2.0,==dev', ], dependency_links=[ 'https://github.com/pythonforfacebook/facebook-sdk/tarball/master#egg=facebook-sdk-dev', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ] )
from setuptools import setup, find_packages setup( name='django-facebook', version='0.1', description='Replace Django Authentication with Facebook', long_description=open('README.md').read(), author='Aidan Lister', author_email='[email protected]', url='http://github.com/pythonforfacebook/django-facebook', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'django>=1.5', 'facebook-sdk==0.4.0', ], dependency_links=[ 'https://github.com/pythonforfacebook/facebook-sdk/tarball/master#egg=facebook-sdk-dev', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', ] )
Update 'install_requires' to the right versions
Update 'install_requires' to the right versions
Python
mit
tino/django-facebook2,tino/django-facebook2
8b4d4ace387d2d366ae03ef14883942908167ad4
setup.py
setup.py
from setuptools import setup, find_packages setup( name='stix2-matcher', version="0.1.0", packages=find_packages(), description='Match STIX content against STIX patterns', install_requires=[ "antlr4-python2-runtime==4.7 ; python_version < '3'", "antlr4-python3-runtime==4.7 ; python_version >= '3'", 'typing ; python_version<"3.5" and python_version>="3"', "enum34 ; python_version ~= '3.3.0'", "python-dateutil", "six", "stix2-patterns>=0.4.1", ], tests_require=[ "pytest>=2.9.2" ], entry_points={ 'console_scripts': [ 'stix2-matcher = stix2matcher.matcher:main', ], }, classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
from setuptools import setup, find_packages setup( name='stix2-matcher', version="0.1.0", packages=find_packages(), description='Match STIX content against STIX patterns', install_requires=[ "antlr4-python2-runtime==4.7 ; python_version < '3'", "antlr4-python3-runtime==4.7 ; python_version >= '3'", 'typing ; python_version<"3.5" and python_version>="3"', "enum34 ; python_version ~= '3.3.0'", "python-dateutil", "six", "stix2-patterns>=0.5.0", ], tests_require=[ "pytest>=2.9.2" ], entry_points={ 'console_scripts': [ 'stix2-matcher = stix2matcher.matcher:main', ], }, classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
Bump required version of stix2-patterns.
Bump required version of stix2-patterns.
Python
bsd-3-clause
chisholm/cti-pattern-matcher,oasis-open/cti-pattern-matcher
25df3a8b74e9e7f03d6239fcb5f2afaa38d4b4ee
setup.py
setup.py
from setuptools import setup, find_packages with open('README.rst') as f: long_description = f.read() setup( name='jiradoc', version='0.1', description='A small Python module to parse JIRAdoc markup files and insert them into JIRA', long_description=long_description, url='https://github.com/lucianovdveekens/jiradoc', author='Luciano van der Veekens', author_email='[email protected]', packages=find_packages(), install_requires=['ply', 'jira', 'pyyaml', 'appdirs'], package_data={ 'jiradoc': ['data/test.jiradoc'] }, entry_points={ 'console_scripts': [ 'jiradoc=jiradoc.__main__:main', ], }, )
from setuptools import setup, find_packages with open('README.rst') as f: long_description = f.read() setup( name='jiradoc', version='0.1', description='A small Python module to parse JIRAdoc markup files and insert them into JIRA', long_description=long_description, url='https://github.com/lucianovdveekens/jiradoc', author='Luciano van der Veekens', author_email='[email protected]', packages=find_packages(), install_requires=['ply', 'jira', 'pyyaml', 'appdirs'], package_data={ 'jiradoc': ['data/test.jiradoc', 'data/sample_config.yml'] }, entry_points={ 'console_scripts': [ 'jiradoc=jiradoc.__main__:main', ], }, )
Add sample config to the package data
Add sample config to the package data
Python
mit
lucianovdveekens/jiradoc
a15ace7fdabbfd9943fe388e4177627f09894f4b
slack.py
slack.py
import httplib import urllib import json class Slack: def __init__(self, webhook_path, url='hooks.slack.com', channel='#sigbridge', username="sb-bot", icon=":satellite:"): self.web_hook_url = url self.webhook_path = webhook_path self.channel = channel self.username = username self.icon = icon def send(self, message): if message: conn = httplib.HTTPSConnection(self.web_hook_url) payload = { "channel": self.channel, "username": self.username, "icon_emoji": self.icon, "text": message } conn.request( "POST", self.webhook_path, urllib.urlencode({ 'payload': json.dumps(payload) }), {"Content-type": "application/x-www-form-urlencoded"} ) return conn.getresponse() if __name__ == '__main__': slack = Slack("/services/T2GLAPJHM/B4H2LRVS9/7fSoJ9VIrY5v5E0TQvML5kgC") slack.send("Hi there, I'm a robot added by Jay!! Reporting from SigBridge.")
import httplib import urllib import json class Slack: def __init__(self, webhook_path, url='hooks.slack.com', channel='#test_bed', username="sb-bot", icon=":satellite:"): self.web_hook_url = url self.webhook_path = webhook_path self.channel = channel self.username = username self.icon = icon def send(self, message): if message: conn = httplib.HTTPSConnection(self.web_hook_url) payload = { "channel": self.channel, "username": self.username, "icon_emoji": self.icon, "text": message } conn.request( "POST", self.webhook_path, urllib.urlencode({ 'payload': json.dumps(payload) }), {"Content-type": "application/x-www-form-urlencoded"} ) return conn.getresponse() if __name__ == '__main__': slack = Slack("/services/T2GLAPJHM/B4H2LRVS9/7fSoJ9VIrY5v5E0TQvML5kgC") slack.send("Hi there, I'm a robot added by Jay!! Reporting from SigBridge.")
Change default channel to test_bed
Change default channel to test_bed
Python
mit
nafooesi/sigbridge
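The slack.py record above posts a form-encoded JSON payload with httplib. For comparison, here is a hedged sketch of the same webhook call made with requests; the URL is a placeholder and this is not code from the sigbridge repository.

import requests

def send_to_webhook(webhook_url, text, channel="#test_bed", username="sb-bot"):
    # Slack-style incoming webhooks accept a JSON body with channel/username/text.
    payload = {"channel": channel, "username": username, "text": text}
    response = requests.post(webhook_url, json=payload, timeout=10)
    return response.status_code

# send_to_webhook("https://hooks.slack.com/services/XXX/YYY/ZZZ", "hello from a sketch")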
23f59f95ea3e7d6504e03949a1400be452166d17
buildPy2app.py
buildPy2app.py
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
Update py2app script for Qt 5.11
Update py2app script for Qt 5.11
Python
apache-2.0
NeverDecaf/syncplay,alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay,NeverDecaf/syncplay
1d5175beedeed2a2ae335a41380280a2ed39901b
lambda/control/commands.py
lambda/control/commands.py
from __future__ import print_function import shlex from traceback import format_exception from obj import Obj import click from click.testing import CliRunner runner = CliRunner() @click.group(name='') @click.argument('user', required=True) @click.pass_context def command(ctx, user, **kwargs): ctx.obj = Obj(user=user) @command.command() @click.pass_context def about(ctx, **kwargs): click.echo('This is the about command.') @command.command() @click.pass_context def echo(ctx, **kwargs): click.echo('This is the echo command. You are {}.'.format(ctx.obj.user)) def run(user, cmd): result = runner.invoke(command, [user,] + shlex.split(cmd)) print('run result: {}'.format(result)) if not result.output: print('Exception: {}\nTraceback:\n {}'.format(result.exception, ''.join(format_exception(*result.exc_info)))) return 'Internal error.' return result.output # Import files with subcommands here--we don't use them directly, but we need # to make sure they're loaded, since that's when they add their commands to # our command object. import list_commands
from __future__ import print_function import shlex from traceback import format_exception from obj import Obj import click from click.testing import CliRunner runner = CliRunner() @click.group(name='') @click.argument('user', required=True) @click.pass_context def command(ctx, user, **kwargs): ctx.obj = Obj(user=user) @command.command() @click.pass_context def about(ctx, **kwargs): click.echo('This is the about command.') @command.command() @click.argument('stuff', nargs=-1, required=False) @click.pass_context def echo(ctx, stuff, **kwargs): click.echo('This is the echo command. You are {}.'.format(ctx.obj.user)) if stuff: click.echo(' '.join(stuff)) else: click.echo('[no parameters]') def run(user, cmd): result = runner.invoke(command, [user,] + shlex.split(cmd)) print('run result: {}'.format(result)) if not result.output: print('Exception: {}\nTraceback:\n {}'.format(result.exception, ''.join(format_exception(*result.exc_info)))) return 'Internal error.' return result.output # Import files with subcommands here--we don't use them directly, but we need # to make sure they're loaded, since that's when they add their commands to # our command object. import list_commands
Make the echo command actually echo all its parameters.
Make the echo command actually echo all its parameters.
Python
mit
ilg/LambdaMLM
f80c11efb4bcbca6d20cdbbc1a552ebb04aa8302
api/config/settings/production.py
api/config/settings/production.py
import os import dj_database_url from .base import * # BASE_NAME and BASE_DOMAIN are intentionally unset # They are only needed to seed data in staging and local BASE_URL = "https://voterengagement.com" ############################################################################### # Core SECRET_KEY = os.environ['SECRET_KEY'] ALLOWED_HOSTS = [ '127.0.0.1', 'localhost', # TODO: Prevent access from herokuapp.com when domain is registered # '.voterengagement.com', '.herokuapp.com', ] ############################################################################### # Static files STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' ############################################################################### # Database DATABASES = {} DATABASES['default'] = dj_database_url.config()
import os import dj_database_url from .base import * # BASE_NAME and BASE_DOMAIN are intentionally unset # They are only needed to seed data in staging and local BASE_URL = "https://voterengagement.com" ############################################################################### # Core SECRET_KEY = os.environ['SECRET_KEY'] ALLOWED_HOSTS = [ '127.0.0.1', 'localhost', '.citizenlabs.org', ] ############################################################################### # Static files STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' ############################################################################### # Database DATABASES = {} DATABASES['default'] = dj_database_url.config()
Allow citizenlabs.org as a host
Allow citizenlabs.org as a host
Python
mit
citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement
a8218a1c20ea48a3392ef9e6d898a73eb9642d9c
ui/repository/browse.py
ui/repository/browse.py
from django.shortcuts import render_to_response from django.template import RequestContext from registry.models import ResourceCollection from django.http import HttpResponse, HttpResponseBadRequest import json def browse(req): # Find all the collections that do not have parents top = ResourceCollection.objects.filter(parents__isnull=True) # Find the closure (all children) for each top-level collection result = [ col.jsonClosure(req.user) for col in top ] collections = json.dumps(result) return render_to_response('repository/browse.jade', {'collections': collections}, context_instance=RequestContext(req)) def populateCollection(req, collectionId): # Find this collection try: col = ResourceCollection.objects.get(collection_id=collectionId) except ResourceCollection.DoesNotExist: return HttpResponseBadRequest('There is no collection with the given ID: %s' % collectionId) # Find the children for this collection result = col.jsonClosure(req.user) collections = json.dumps(result) return HttpResponse(collections, content_type="application/json")
from django.shortcuts import render_to_response from django.template import RequestContext from registry.models import ResourceCollection from django.http import HttpResponse, HttpResponseBadRequest import json def browse(req): # Find all the collections that do not have parents top = ResourceCollection.objects.filter(parents="Top") # Find the closure (all children) for each top-level collection result = [ col.jsonClosure(req.user) for col in top ] collections = json.dumps(result) return render_to_response('repository/browse.jade', {'collections': collections}, context_instance=RequestContext(req)) def populateCollection(req, collectionId): # Find this collection try: col = ResourceCollection.objects.get(collection_id=collectionId) except ResourceCollection.DoesNotExist: return HttpResponseBadRequest('There is no collection with the given ID: %s' % collectionId) # Find the children for this collection result = col.jsonClosure(req.user) collections = json.dumps(result) return HttpResponse(collections, content_type="application/json")
Adjust parent filter on collections. Now top-level collections should specify 'top' as their parent instead of being null. This helps get rid of the problem of collections ending up being top-level when removed from their old parent
Adjust parent filter on collections. Now top-level collections should specify 'top' as their parent instead of being null. This helps get rid of the problem of collections ending up being top-level when removed from their old parent
Python
bsd-3-clause
usgin/nrrc-repository,usgin/nrrc-repository,usgin/metadata-repository,usgin/metadata-repository
142022516f310aeb58f3560031b2266f39a0f2e5
erpnext_ebay/tasks.py
erpnext_ebay/tasks.py
# -*- coding: utf-8 -*- """Scheduled tasks to be run by erpnext_ebay""" from frappe.utils.background_jobs import enqueue def all(): pass def hourly(): enqueue('erpnext_ebay.sync_orders.sync', queue='long', job_name='Sync eBay Orders') def daily(): enqueue('erpnext_ebay.ebay_active_listings.update_ebay_data', queue='long', job_name='Update eBay Data', multiple_error_sites=['UK']) enqueue('erpnext_ebay.ebay_categories.category_sync', queue='long', job_name='eBay Category Sync') def weekly(): pass def monthly(): pass
# -*- coding: utf-8 -*- """Scheduled tasks to be run by erpnext_ebay""" from frappe.utils.background_jobs import enqueue def all(): pass def hourly(): pass def daily(): enqueue('erpnext_ebay.ebay_categories.category_sync', queue='long', job_name='eBay Category Sync') def weekly(): pass def monthly(): pass
Remove sync_orders and update_ebay_listings from hooks scheduler
fix(hooks): Remove sync_orders and update_ebay_listings from hooks scheduler
Python
mit
bglazier/erpnext_ebay,bglazier/erpnext_ebay
4eba105663ba8d0323559b095055b3f89521ea07
demo/ubergui.py
demo/ubergui.py
#!/usr/bin/env python import sys import Pyro import Tkinter, tkMessageBox from VisionEgg.PyroApps.UberClientGUI import client_list, AppWindow # You can add your own controllers and GUIs to client_list try: app_window = AppWindow(client_list=client_list) except Pyro.errors.ProtocolError, x: if str(x) == 'connection failed': # Can't find UberServer running on network try: tkMessageBox.showerror("Can't find UberServer","Can't find UberServer running on Pyro network.") sys.exit(1) except: raise # Can't find UberServer running on network else: raise except Pyro.errors.PyroError, x: if str(x) in ["Name Server not responding","connection failed"]: try: tkMessageBox.showerror("Can't find Pyro Name Server","Can't find Pyro Name Server on network.") sys.exit(1) except: raise # Can't find Pyro Name Server on network else: raise app_window.winfo_toplevel().wm_iconbitmap() app_window.pack(expand=1,fill=Tkinter.BOTH) app_window.winfo_toplevel().title("Vision Egg") app_window.winfo_toplevel().minsize(1,1) app_window.mainloop()
#!/usr/bin/env python import sys import Pyro import Tkinter, tkMessageBox from VisionEgg.PyroApps.UberClientGUI import client_list, AppWindow # You can add your own controllers and GUIs to client_list try: app_window = AppWindow(client_list=client_list) except Pyro.errors.PyroError, x: uber_server_error = 0 if isinstance(x, Pyro.errors.ProtocolError) and str(x) == 'connection failed': # Can't find UberServer running on network uber_server_error = 1 if isinstance(x, Pyro.errors.NamingError) and str(x) == 'name not found': # Can't find UberServer running on network uber_server_error = 1 if uber_server_error: tkMessageBox.showerror("Can't find UberServer","Can't find UberServer running on Pyro network.") sys.exit(1) elif str(x) in ["Name Server not responding","connection failed"]: try: tkMessageBox.showerror("Can't find Pyro Name Server","Can't find Pyro Name Server on network.") sys.exit(1) except: raise # Can't find Pyro Name Server on network else: raise app_window.winfo_toplevel().wm_iconbitmap() app_window.pack(expand=1,fill=Tkinter.BOTH) app_window.winfo_toplevel().title("Vision Egg") app_window.winfo_toplevel().minsize(1,1) app_window.mainloop()
Update errors for other versions of Pyro
Minor: Update errors for other versions of Pyro git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@775 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7
Python
lgpl-2.1
visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg
a9e69025db7bf3c2f3cdf241f7c9b60a1e78ca58
tests/base.py
tests/base.py
import unittest2 from sparts.vservice import VService class BaseSpartsTestCase(unittest2.TestCase): def assertNotNone(self, o, msg=''): self.assertTrue(o is not None, msg) class SingleTaskTestCase(BaseSpartsTestCase): TASK = None def setUp(self): self.assertNotNone(self.TASK) class TestService(VService): TASKS=[self.TASK] ap = TestService._makeArgumentParser() ns = ap.parse_args(['--level', 'DEBUG']) self.service = TestService(ns) self.runloop = self.service.startBG() self.task = self.service.requireTask(self.TASK.__name__) def tearDown(self): self.service.stop() self.runloop.join()
import unittest2 import logging from sparts.vservice import VService class BaseSpartsTestCase(unittest2.TestCase): def assertNotNone(self, o, msg=''): self.assertTrue(o is not None, msg) def assertNotEmpty(self, o, msg=''): self.assertTrue(len(o) > 0, msg) @classmethod def setUpClass(cls): cls.logger = logging.getLogger('sparts.%s' % cls.__name__) super(BaseSpartsTestCase, cls).setUpClass() class MultiTaskTestCase(BaseSpartsTestCase): def requireTask(self, task_name): self.assertNotNone(self.service) return self.service.requireTask(task_name) TASKS = [] def setUp(self): self.assertNotEmpty(self.TASKS) class TestService(VService): TASKS=self.TASKS ap = TestService._makeArgumentParser() ns = ap.parse_args(['--level', 'DEBUG']) self.service = TestService(ns) self.runloop = self.service.startBG() for t in self.TASKS: self.service.requireTask(t.__name__) def tearDown(self): self.service.stop() self.runloop.join() class SingleTaskTestCase(MultiTaskTestCase): TASK = None @classmethod def setUpClass(cls): if cls.TASK: cls.TASKS = [cls.TASK] super(SingleTaskTestCase, cls).setUpClass() def setUp(self): self.assertNotNone(self.TASK) MultiTaskTestCase.setUp(self) self.task = self.service.requireTask(self.TASK.__name__)
Support unittests that require multiple tasks
Support unittests that require multiple tasks
Python
bsd-3-clause
pshuff/sparts,facebook/sparts,bboozzoo/sparts,fmoo/sparts,facebook/sparts,djipko/sparts,fmoo/sparts,pshuff/sparts,djipko/sparts,bboozzoo/sparts
83b060b573bee654708e5fbb41c9e3b2913e4d9c
generatechangedfilelist.py
generatechangedfilelist.py
import sys import os import commands import fnmatch import re import subprocess, shlex def cmdsplit(args): if os.sep == '\\': args = args.replace('\\', '\\\\') return shlex.split(args) def main(): md5dir = os.path.abspath(sys.argv[1]) list_file = os.path.abspath(sys.argv[2]) prelist = os.path.join(md5dir,"temp","server.md5") postlist = os.path.join(md5dir,"temp","server_reobf.md5") cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist) process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1) difflist,_= process.communicate() with open(list_file, 'w') as fh: fh.write(difflist) if __name__ == '__main__': main()
import sys import os import commands import fnmatch import re import subprocess, shlex mcp_root = os.path.abspath(sys.argv[1]) sys.path.append(os.path.join(mcp_root,"runtime")) from filehandling.srgshandler import parse_srg def cmdsplit(args): if os.sep == '\\': args = args.replace('\\', '\\\\') return shlex.split(args) def main(): list_file = os.path.abspath(sys.argv[2]) prelist = os.path.join(mcp_root,"temp","server.md5") postlist = os.path.join(mcp_root,"temp","server_reobf.md5") cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist) process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1) difflist,_= process.communicate() srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg") classes=dict() for row in srg_data['CL']: classes[row['deobf_name']] = row['obf_name'] with open(list_file, 'w') as fh: for diff in difflist: (clazz,md5)=diff.strip().split() if clazz in classes: clazz=classes[clazz] fh.write("%s\n" %(clazz)) if __name__ == '__main__': main()
Tweak file list script to print obf names
Tweak file list script to print obf names
Python
lgpl-2.1
MinecraftForge/FML,aerospark/FML,aerospark/FML,aerospark/FML
154ebba6f46acac1816e96993619b45ade314ba8
dthm4kaiako/config/__init__.py
dthm4kaiako/config/__init__.py
"""Configuration for Django system.""" __version__ = "0.8.0" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.8.1" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
Increment version number to 0.8.1
Increment version number to 0.8.1
Python
mit
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
139e6acc19040d89f304875c533513c9651f2906
budget_proj/budget_app/filters.py
budget_proj/budget_app/filters.py
from django.db.models import CharField from django_filters import rest_framework as filters from . import models class DefaultFilterMeta: """ Set our default Filter configurations to DRY up the FilterSet Meta classes. """ # Let us filter by all fields except id exclude = ('id',) # We prefer case insensitive matching on CharFields filter_overrides = { CharField: { 'filter_class': filters.CharFilter, 'extra': lambda f: { 'lookup_expr': 'iexact', }, }, } class OcrbFilter(filters.FilterSet): class Meta(DefaultFilterMeta): model = models.OCRB class KpmFilter(filters.FilterSet): class Meta(DefaultFilterMeta): model = models.KPM class BudgetHistoryFilter(filters.FilterSet): class Meta(DefaultFilterMeta): model = models.BudgetHistory class LookupCodeFilter(filters.FilterSet): class Meta(DefaultFilterMeta): model = models.LookupCode
from django.db.models import CharField from django_filters import rest_framework as filters from . import models class CustomFilterBase(filters.FilterSet): """ Extends Filterset to populate help_text from the associated model field. Works with swagger but not the builtin docs. """ @classmethod def filter_for_field(cls, f, name, lookup_expr): result = super().filter_for_field(f, name, lookup_expr) if 'help_text' not in result.extra: result.extra['help_text'] = f.help_text return result class DefaultFilterMeta: """ Defaults for: - enable filtering by all model fields except `id` - ignoring upper/lowercase when on CharFields """ # Let us filter by all fields except id exclude = ('id',) # We prefer case insensitive matching on CharFields filter_overrides = { CharField: { 'filter_class': filters.CharFilter, 'extra': lambda f: { 'lookup_expr': 'iexact', }, }, } class OcrbFilter(CustomFilterBase): class Meta(DefaultFilterMeta): model = models.OCRB class KpmFilter(CustomFilterBase): class Meta(DefaultFilterMeta): model = models.KPM class BudgetHistoryFilter(CustomFilterBase): class Meta(DefaultFilterMeta): model = models.BudgetHistory class LookupCodeFilter(CustomFilterBase): class Meta(DefaultFilterMeta): model = models.LookupCode
Upgrade Filters fields to use docs from model fields
Upgrade Filters fields to use docs from model fields
Python
mit
jimtyhurst/team-budget,hackoregon/team-budget,hackoregon/team-budget,hackoregon/team-budget,jimtyhurst/team-budget,jimtyhurst/team-budget
891ca8ee117f462a1648e954b756f1d29a5f527c
tests/test_errors.py
tests/test_errors.py
"""Tests for errors.py""" import aiohttp def test_bad_status_line1(): err = aiohttp.BadStatusLine(b'') assert str(err) == "b''" def test_bad_status_line2(): err = aiohttp.BadStatusLine('Test') assert str(err) == 'Test'
"""Tests for errors.py""" import aiohttp def test_bad_status_line1(): err = aiohttp.BadStatusLine(b'') assert str(err) == "b''" def test_bad_status_line2(): err = aiohttp.BadStatusLine('Test') assert str(err) == 'Test' def test_fingerprint_mismatch(): err = aiohttp.FingerprintMismatch('exp', 'got', 'host', 8888) expected = '<FingerprintMismatch expected=exp got=got host=host port=8888>' assert expected == repr(err)
Add a test for FingerprintMismatch repr
Add a test for FingerprintMismatch repr
Python
apache-2.0
jettify/aiohttp,esaezgil/aiohttp,z2v/aiohttp,arthurdarcet/aiohttp,pfreixes/aiohttp,z2v/aiohttp,mind1master/aiohttp,KeepSafe/aiohttp,mind1master/aiohttp,juliatem/aiohttp,hellysmile/aiohttp,esaezgil/aiohttp,esaezgil/aiohttp,arthurdarcet/aiohttp,panda73111/aiohttp,pfreixes/aiohttp,z2v/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,moden-py/aiohttp,singulared/aiohttp,AraHaanOrg/aiohttp,KeepSafe/aiohttp,arthurdarcet/aiohttp,hellysmile/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,jettify/aiohttp,panda73111/aiohttp,alex-eri/aiohttp-1,moden-py/aiohttp,playpauseandstop/aiohttp,jettify/aiohttp,KeepSafe/aiohttp,rutsky/aiohttp,juliatem/aiohttp,AraHaanOrg/aiohttp,mind1master/aiohttp,rutsky/aiohttp,panda73111/aiohttp,Eyepea/aiohttp,moden-py/aiohttp,rutsky/aiohttp
4bcc0aae53def04e16e87499b1321256ff35a7c1
pyconll/__init__.py
pyconll/__init__.py
""" A library whose purpose is to provide a low level layer between the CoNLL format and python code. """ __all__ = ['exception', 'load', 'tree', 'unit', 'util'] from .load import load_from_string, load_from_file, load_from_url, \ iter_from_string, iter_from_file, iter_from_url
""" A library whose purpose is to provide a low level layer between the CoNLL format and python code. """ __all__ = ['conllable', 'exception', 'load', 'tree', 'unit', 'util'] from .load import load_from_string, load_from_file, load_from_url, \ iter_from_string, iter_from_file, iter_from_url
Add conllable to the __all__ list.
Add conllable to the __all__ list.
Python
mit
pyconll/pyconll,pyconll/pyconll
48dcf46b048c94437b957616a363f9f64447b8da
pyinfra/__init__.py
pyinfra/__init__.py
''' Welcome to pyinfra. ''' import logging # Global flag set True by `pyinfra_cli.__main__` is_cli = False # Global pyinfra logger logger = logging.getLogger('pyinfra') # Setup package level version from .version import __version__ # noqa # Trigger pseudo_* creation from . import pseudo_modules # noqa # Trigger fact index creation from . import facts # noqa # Trigger module imports from . import operations # noqa # pragma: no cover # Initialise base classes - this sets the pseudo modules to point at the underlying # class objects (Host, etc), which makes ipython/etc work as expected. pseudo_modules.init_base_classes()
''' Welcome to pyinfra. ''' import logging # Global flag set True by `pyinfra_cli.__main__` is_cli = False # Global pyinfra logger logger = logging.getLogger('pyinfra') # Setup package level version from .version import __version__ # noqa # Trigger pseudo_* creation from . import pseudo_modules # noqa # Initialise base classes - this sets the pseudo modules to point at the underlying # class objects (Host, etc), which makes ipython/etc work as expected. pseudo_modules.init_base_classes() # TODO: remove these! They trigger an import and index of every operation/fact. This # is not ideal and explicit imports are much better. from . import facts # noqa from . import operations # noqa
Add todo to remove import of all operations/facts when loading pyinfra.
Add todo to remove import of all operations/facts when loading pyinfra.
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
e056dc3581785fe34123189cccd9901e1e9afe71
pylatex/__init__.py
pylatex/__init__.py
# flake8: noqa """ A library for creating Latex files. .. :copyright: (c) 2014 by Jelte Fennema. :license: MIT, see License for more details. """ from .document import Document from .math import Math, VectorName, Matrix from .package import Package from .section import Section, Subsection, Subsubsection from .table import Table, MultiColumn, MultiRow, Tabular from .pgfplots import TikZ, Axis, Plot from .graphics import Figure, SubFigure, MatplotlibFigure from .lists import Enumerate, Itemize, Description from .quantities import Quantity from .base_classes import Command
# flake8: noqa """ A library for creating Latex files. .. :copyright: (c) 2014 by Jelte Fennema. :license: MIT, see License for more details. """ from .document import Document from .math import Math, VectorName, Matrix from .package import Package from .section import Section, Subsection, Subsubsection from .table import Table, MultiColumn, MultiRow, Tabular, Tabu, LongTable, \ LongTabu from .pgfplots import TikZ, Axis, Plot from .graphics import Figure, SubFigure, MatplotlibFigure from .lists import Enumerate, Itemize, Description from .quantities import Quantity from .base_classes import Command
Add Tabu, LongTable and LongTabu global import
Add Tabu, LongTable and LongTabu global import
Python
mit
sebastianhaas/PyLaTeX,sebastianhaas/PyLaTeX,votti/PyLaTeX,ovaskevich/PyLaTeX,JelteF/PyLaTeX,bjodah/PyLaTeX,votti/PyLaTeX,jendas1/PyLaTeX,bjodah/PyLaTeX,jendas1/PyLaTeX,JelteF/PyLaTeX,ovaskevich/PyLaTeX
117e4f59720de9d13ddb4eaa439915addb616f1d
tests/cli/test_pinout.py
tests/cli/test_pinout.py
from __future__ import ( unicode_literals, absolute_import, print_function, division, ) str = type('') import pytest import gpiozero.cli.pinout as pinout def test_args_incorrect(): with pytest.raises(SystemExit) as ex: pinout.parse_args(['--nonexistentarg']) assert ex.value.code == 2 def test_args_color(): args = pinout.parse_args([]) assert args.color is None args = pinout.parse_args(['--color']) assert args.color is True args = pinout.parse_args(['--monochrome']) assert args.color is False def test_args_revision(): args = pinout.parse_args(['--revision', '000d']) assert args.revision == '000d' def test_help(capsys): with pytest.raises(SystemExit) as ex: pinout.parse_args(['--help']) out, err = capsys.readouterr() assert 'GPIO pinout' in out assert ex.value.code == 0
from __future__ import ( unicode_literals, absolute_import, print_function, division, ) str = type('') import pytest from gpiozero.cli import pinout def test_args_incorrect(): with pytest.raises(SystemExit) as ex: pinout.parse_args(['--nonexistentarg']) assert ex.value.code == 2 def test_args_color(): args = pinout.parse_args([]) assert args.color is None args = pinout.parse_args(['--color']) assert args.color is True args = pinout.parse_args(['--monochrome']) assert args.color is False def test_args_revision(): args = pinout.parse_args(['--revision', '000d']) assert args.revision == '000d' def test_help(capsys): with pytest.raises(SystemExit) as ex: pinout.parse_args(['--help']) out, err = capsys.readouterr() assert 'GPIO pinout' in out assert ex.value.code == 0
Use 'from' to import rather than renaming
Use 'from' to import rather than renaming
Python
bsd-3-clause
waveform80/gpio-zero,MrHarcombe/python-gpiozero,RPi-Distro/python-gpiozero
d814c9c131f2c2957173302f7c4c1cbf2b719b45
check_rfc_header.py
check_rfc_header.py
#!/usr/bin/env python # -*- encoding: utf-8 import os from travistooling import ROOT def get_rfc_readmes(repo): rfcs_dir = os.path.join(repo, 'docs', 'rfcs') for root, _, filenames in os.walk(rfcs_dir): for f in filenames: if f == 'README.md': yield os.path.join(root, f) print('*** Checking RFC headers') for f in get_rfc_readmes(ROOT): print('*** Checking header for %s' % os.path.relpath(f, start=ROOT)) filename = os.path.basename(os.path.dirname(f)) number, name = filename.split('-', 1) contents = open(f).read() header = contents.splitlines()[:3] assert header[0].startswith('# RFC %03d: ' % int(number)) assert header[1] == '' print(f, name) print(header)
#!/usr/bin/env python # -*- encoding: utf-8 import datetime as dt import os from travistooling import git, ROOT def get_rfc_readmes(repo): rfcs_dir = os.path.join(repo, 'docs', 'rfcs') for root, _, filenames in os.walk(rfcs_dir): for f in filenames: if f == 'README.md': yield os.path.join(root, f) if __name__ == '__main__': print('*** Checking RFC headers') for f in get_rfc_readmes(ROOT): print('*** Checking header for %s' % os.path.relpath(f, start=ROOT)) filename = os.path.basename(os.path.dirname(f)) number, name = filename.split('-', 1) contents = open(f).read() header = contents.splitlines()[:3] update_timestamp = git('log', '-1', '--format=%ct', f) last_updated = dt.datetime.fromtimestamp(int(update_timestamp)) assert header[0].startswith('# RFC %03d: ' % int(number)) assert header[1] == '' expected_date_str = '**Last updated: %s.**' % last_updated.strftime('%d %B %Y') assert header[2] == expected_date_str, (header[2], expected_date_str)
Check update dates in the RFC headers
Check update dates in the RFC headers
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
d29410b39af1165ba520e7ecad7e6e9c36a7fd2f
test/test_basic.py
test/test_basic.py
#!/usr/bin/env python3 #coding=UTF-8 import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker as Key Key = Key() def test_google_average(): result = searchcolor.google_average('Death', 10, Key.api(), Key.cse()) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255
#!/usr/bin/env python3 #coding=UTF-8 import os import sys #installed import pytest #local sys.path.append(os.path.split(os.path.split(__file__)[0])[0]) import searchcolor from api_keys import GoogleKeyLocker from api_keys import BingKeyLocker from api_keys import MSCSKeyLocker GKL = GoogleKeyLocker() BKL = BingKeyLocker() MSCSKL = MSCSKeyLocker() def test_google_average(): result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_bing_average(): result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255 def test_mscs_average(): result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8) assert result.get('name') == 'Death' assert result.get('red') >= 0 and result.get('red') <= 255 assert result.get('green') >= 0 and result.get('green') <= 255 assert result.get('blue') >= 0 and result.get('blue') <= 255
Add tests for bing and mscs
Add tests for bing and mscs
Python
mit
Tathorack/searchcolor,Tathorack/searchcolor
3751daef539d26b6909f142949293c20da3f8fe5
test/test_sleep.py
test/test_sleep.py
""" Tests for POSIX-compatible `sleep`. https://pubs.opengroup.org/onlinepubs/9699919799/utilities/sleep.html """ import time from helpers import check_version, run def test_version(): """Check that we're using Boreutil's implementation.""" assert check_version("sleep") def test_missing_args(): """No args => error of the form `sleep: ...`""" assert run(["sleep"]).stderr.startswith("sleep:") assert run(["sleep"]).returncode > 0 def test_extra_args(): """Too many args => error of the form `sleep: ...`""" assert run(["sleep", "a", "b"]).stderr.startswith("sleep:") assert run(["sleep", "a", "b"]).returncode > 0 def test_help(): """Passing -h or --help should print help text.""" assert run(["sleep", "-h"]).stdout.split(' ')[0] == 'Usage:' assert run(["sleep", "--help"]).stdout.split(' ')[0] == 'Usage:' assert run(["sleep", "-h"]).returncode > 0 assert run(["sleep", "--help"]).returncode > 0 def test_main(): """Running `sleep 1` should run successfully.""" start = time.time() ret = run(["sleep", "1"]) end = time.time() assert len(ret.stdout) == 0 assert len(ret.stderr) == 0 assert ret.returncode == 0 assert (end - start) >= 2.0
""" Tests for POSIX-compatible `sleep`. https://pubs.opengroup.org/onlinepubs/9699919799/utilities/sleep.html """ import time from helpers import check_version, run def test_version(): """Check that we're using Boreutil's implementation.""" assert check_version("sleep") def test_missing_args(): """No args => error of the form `sleep: ...`""" assert run(["sleep"]).stderr.startswith("sleep:") assert run(["sleep"]).returncode > 0 def test_extra_args(): """Too many args => error of the form `sleep: ...`""" assert run(["sleep", "a", "b"]).stderr.startswith("sleep:") assert run(["sleep", "a", "b"]).returncode > 0 def test_help(): """Passing -h or --help should print help text.""" assert run(["sleep", "-h"]).stdout.split(' ')[0] == 'Usage:' assert run(["sleep", "--help"]).stdout.split(' ')[0] == 'Usage:' assert run(["sleep", "-h"]).returncode > 0 assert run(["sleep", "--help"]).returncode > 0 def test_main(): """Running `sleep 1` should run successfully.""" start = time.time() ret = run(["sleep", "1"]) end = time.time() assert len(ret.stdout) == 0 assert len(ret.stderr) == 0 assert ret.returncode == 0 assert (end - start) >= 1.0
Fix `sleep` test. How did this pass locally before?!
Fix `sleep` test. How did this pass locally before?!
Python
isc
duckinator/boreutils,duckinator/boreutils
01f3aaf8c0b2351ea41b854142263f2d52c03239
comics/comics/perrybiblefellowship.py
comics/comics/perrybiblefellowship.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2019-06-12" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): page = self.parse_page(entry.link) images = page.root.xpath("//div[@id='comic']/img") crawler_images = [] for image in images: title = entry.title crawler_images.append(CrawlerImage(image.get("src"), title))
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2019-06-12" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): page = self.parse_page(entry.link) images = page.src("div#comic img", allow_multiple=True) crawler_images = [] for image in images: title = entry.title crawler_images.append(CrawlerImage(image, title)) return crawler_images
Use CSS selector instead of xpath for "The Perry Bible Fellowship"
Use CSS selector instead of xpath for "The Perry Bible Fellowship"
Python
agpl-3.0
jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics
cde63b076027345486e4e836a02811962ad5bcaa
tests/test_completion.py
tests/test_completion.py
import os import subprocess import sys from pathlib import Path import typer from typer.testing import CliRunner from first_steps import tutorial001 as mod runner = CliRunner() app = typer.Typer() app.command()(mod.main) def test_show_completion(): result = subprocess.run( [ "bash", "-c", f"{sys.executable} -m coverage run {mod.__file__} --show-completion", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", env={**os.environ, "SHELL": "/bin/bash"}, ) assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in result.stdout def test_install_completion(): bash_completion_path: Path = Path.home() / ".bash_completion" text = bash_completion_path.read_text() result = subprocess.run( [ "bash", "-c", f"{sys.executable} -m coverage run {mod.__file__} --install-completion", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", env={**os.environ, "SHELL": "/bin/bash"}, ) new_text = bash_completion_path.read_text() assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text bash_completion_path.write_text(text)
import os import subprocess import sys from pathlib import Path import typer from typer.testing import CliRunner from first_steps import tutorial001 as mod runner = CliRunner() app = typer.Typer() app.command()(mod.main) def test_show_completion(): result = subprocess.run( [ "bash", "-c", f"{sys.executable} -m coverage run {mod.__file__} --show-completion", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", env={**os.environ, "SHELL": "/bin/bash"}, ) assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in result.stdout def test_install_completion(): bash_completion_path: Path = Path.home() / ".bash_completion" text = "" if bash_completion_path.is_file(): text = bash_completion_path.read_text() result = subprocess.run( [ "bash", "-c", f"{sys.executable} -m coverage run {mod.__file__} --install-completion", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", env={**os.environ, "SHELL": "/bin/bash"}, ) new_text = bash_completion_path.read_text() assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text bash_completion_path.write_text(text)
Fix test completion, check for bash completion file before running
:bug: Fix test completion, check for bash completion file before running
Python
mit
tiangolo/typer,tiangolo/typer
3ab0e72d5e4031bb07c6ce2e8e9e71db07b55f24
tests/test_funcmakers.py
tests/test_funcmakers.py
from collections import defaultdict import pytest from funcy.funcmakers import * def test_callable(): assert make_func(lambda x: x + 42)(0) == 42 def test_int(): assert make_func(0)('abc') == 'a' assert make_func(2)([1,2,3]) == 3 assert make_func(1)({1: 'a'}) == 'a' with pytest.raises(IndexError): make_func(1)('a') with pytest.raises(TypeError): make_func(1)(42) def test_slice(): assert make_func(slice(1, None))('abc') == 'bc' def test_str(): assert make_func('\d+')('ab42c') == '42' assert make_func('\d+')('abc') is None assert make_pred('\d+')('ab42c') is True assert make_pred('\d+')('abc') is False def test_dict(): assert make_func({1: 'a'})(1) == 'a' with pytest.raises(KeyError): make_func({1: 'a'})(2) d = defaultdict(int, a=42) assert make_func(d)('a') == 42 assert make_func(d)('b') == 0 def test_set(): s = set([1,2,3]) assert make_func(s)(1) is True assert make_func(s)(4) is False
from collections import defaultdict import pytest from funcy.funcmakers import * def test_callable(): assert make_func(lambda x: x + 42)(0) == 42 def test_int(): assert make_func(0)('abc') == 'a' assert make_func(2)([1,2,3]) == 3 assert make_func(1)({1: 'a'}) == 'a' with pytest.raises(IndexError): make_func(1)('a') with pytest.raises(TypeError): make_func(1)(42) def test_slice(): assert make_func(slice(1, None))('abc') == 'bc' def test_str(): assert make_func(r'\d+')('ab42c') == '42' assert make_func(r'\d+')('abc') is None assert make_pred(r'\d+')('ab42c') is True assert make_pred(r'\d+')('abc') is False def test_dict(): assert make_func({1: 'a'})(1) == 'a' with pytest.raises(KeyError): make_func({1: 'a'})(2) d = defaultdict(int, a=42) assert make_func(d)('a') == 42 assert make_func(d)('b') == 0 def test_set(): s = set([1,2,3]) assert make_func(s)(1) is True assert make_func(s)(4) is False
Fix tests in Python 3.6
Fix tests in Python 3.6
Python
bsd-3-clause
Suor/funcy
c37500894b309a691009b87b1305935ee57648cb
tests/test_test.py
tests/test_test.py
import pytest from web_test_base import * """ A class to test new features without running all of the tests. Usage: py.test tests/test_test.py -rsx """ class TestTest(WebTestBase): urls_to_get = [ "http://aidtransparency.net/" ] text_to_find = [ ("information", '//*[@id="home-strapline"]/h1') ] def test_locate_text(self, loaded_request, text_to_find): """ Tests that each page contains lthe specified text at the required location. """ result = self._get_text_from_xpath(loaded_request, text_to_find[1]) assert self._substring_in_list(text_to_find[0], result)
import pytest from web_test_base import * """ A class to test new features without running all of the tests. Usage: py.test tests/test_test.py -rsx """ class TestTest(WebTestBase): urls_to_get = [ "http://iatistandard.org/" , "http://iatistandard.org/202/namespaces-extensions/" ] text_to_find = [ ("technical publishing framework", '//*[@id="home-strapline"]/h1') ] def test_locate_text(self, loaded_request, text_to_find): """ Tests that each page contains lthe specified text at the required location. """ result = self._get_text_from_xpath(loaded_request, text_to_find[1]) assert self._substring_in_list(text_to_find[0], result)
Add test of text finding that fails
Add test of text finding that fails. This indicates that a different method of specifying how and where to find text within a document is required.
Python
mit
IATI/IATI-Website-Tests
dd9dfa86fe0f7cb8d95b580ff9ae62753fb19026
gefion/checks/base.py
gefion/checks/base.py
# -*- coding: utf-8 -*- """Base classes.""" import time class Result(object): """Provides results of a Check. Attributes: availability (bool): Availability, usually reflects outcome of a check. runtime (float): Time consumed running the check, in seconds. message (string): Additional explainations for the result. timestamp (int): UTC timestamp of the check. """ def __init__(self, availability, runtime, message, timestamp=time.time()): """Initialise Result. Args: See class attributes. """ self.availability = availability self.runtime = runtime self.message = message self.timestamp = timestamp @property def api_serialised(self): """Return serialisable data for API monitor assignments.""" return {'availability': self.availability, 'runtime': self.runtime, 'message': self.message, 'timestamp': self.timestamp} class Check(object): """Performs checks for availability of resources. This should be inherited by checking implementations. """ def __init__(self, **kwargs): """Initialise Check.""" pass def check(self): """Check if specified resource is availability. Called without arguments. Returns: gefion.checkers.Result """ raise NotImplementedError
# -*- coding: utf-8 -*- """Base classes.""" import time class Result(object): """Provides results of a Check. Attributes: availability (bool): Availability, usually reflects outcome of a check. runtime (float): Time consumed running the check, in seconds. message (string): Additional explainations for the result. timestamp (int): UTC timestamp of the check. """ def __init__(self, availability, runtime, message, timestamp=time.time()): """Initialise Result. Args: See class attributes. """ self.availability = availability self.runtime = runtime self.message = message self.timestamp = timestamp @property def api_serialised(self): """Return serialisable data for API result submissions.""" return {'availability': self.availability, 'runtime': self.runtime, 'message': self.message, 'timestamp': self.timestamp} class Check(object): """Performs checks for availability of resources. This should be inherited by checking implementations. """ def __init__(self, **kwargs): """Initialise Check.""" pass def check(self): """Check if specified resource is availabile. Called without arguments. Returns: gefion.checkers.Result """ raise NotImplementedError
Fix typos in Result and Check docstrings
Fix typos in Result and Check docstrings
Python
bsd-3-clause
dargasea/gefion
0720397d5c47e2af33dbe9fdf8f25b95ce620106
octavia/common/base_taskflow.py
octavia/common/base_taskflow.py
# Copyright 2014-2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import concurrent.futures from oslo_config import cfg from taskflow import engines as tf_engines CONF = cfg.CONF class BaseTaskFlowEngine(object): """This is the task flow engine Use this engine to start/load flows in the code """ def __init__(self): self.executor = concurrent.futures.ThreadPoolExecutor( max_workers=CONF.task_flow.max_workers) def _taskflow_load(self, flow, **kwargs): eng = tf_engines.load( flow, engine=CONF.task_flow.engine, executor=self.executor, never_resolve=CONF.task_flow.disable_revert, **kwargs) eng.compile() eng.prepare() return eng
# Copyright 2014-2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import concurrent.futures import datetime from oslo_config import cfg from taskflow import engines as tf_engines CONF = cfg.CONF class BaseTaskFlowEngine(object): """This is the task flow engine Use this engine to start/load flows in the code """ def __init__(self): # work around for https://bugs.python.org/issue7980 datetime.datetime.strptime('2014-06-19 22:47:16', '%Y-%m-%d %H:%M:%S') self.executor = concurrent.futures.ThreadPoolExecutor( max_workers=CONF.task_flow.max_workers) def _taskflow_load(self, flow, **kwargs): eng = tf_engines.load( flow, engine=CONF.task_flow.engine, executor=self.executor, never_resolve=CONF.task_flow.disable_revert, **kwargs) eng.compile() eng.prepare() return eng
Work around strptime threading issue
Work around strptime threading issue There is an open bug[1] in python strptime when used in multi-threaded applications. We have seen this occur in the Octavia test jobs[2]. This patch works around the bug by loading strptime early. [1] https://bugs.python.org/issue7980 [2] https://logs.opendev.org/37/673337/12/check/ \ octavia-v2-act-stdby-iptables-dsvm-py2-scenario/440c965/controller/logs \ /screen-o-cw.txt.gz?level=ERROR#_Aug_09_23_54_29_426364 Change-Id: I932ad625595333e97b0ead074ce64a7341af338d
Python
apache-2.0
openstack/octavia,openstack/octavia,openstack/octavia
2b9d702b6efd922069ceb44540b1ea7118e3f84b
gensysinfo.py
gensysinfo.py
#!/usr/bin/env python3 import psutil import os import time import math blocks = ['▁', 'β–‚', 'β–ƒ', 'β–„', 'β–…', 'β–†', 'β–‡', 'β–ˆ'] def create_bar(filled): if filled > 1: low = str(int(filled)) high = str(int(filled + 1)) filled = filled - int(filled) filled = int(filled * 100) if filled < 50: color = "green" elif filled < 80: color = "yellow" else: color = "red" block = math.floor(filled / (100 / 7) + 0.5) bar = '#[fg=' + color + ']β–•' bar += blocks[block] bar += '▏' if filled >= 100: bar += str(filled) else: bar += "{0:2}%".format(filled) bar += '#[fg=default]' return bar while True: meminfo = psutil.virtual_memory() numcpus = psutil.cpu_count() with open(os.path.expanduser("~/.memblock"), "w") as memblock: memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total)) with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock: cpuutilblock.write(create_bar(psutil.cpu_percent() / 100)) time.sleep(20)
#!/usr/bin/env python3 import psutil import os import time import math blocks = ['▁', 'β–‚', 'β–ƒ', 'β–„', 'β–…', 'β–†', 'β–‡', 'β–ˆ'] def create_bar(filled): filled = int(filled * 100) if filled < 50: color = "green" elif filled < 80: color = "yellow" else: color = "red" bar = '#[fg=' + color + ']β–•' if filled < 100: block = math.floor(filled / (100 / 7) + 0.5) bar += blocks[block] else: bar += blocks[7] bar += '▏' if filled >= 100: bar += str(filled) else: bar += "{0:2}%".format(filled) bar += '#[fg=default]' return bar while True: meminfo = psutil.virtual_memory() numcpus = psutil.cpu_count() with open(os.path.expanduser("~/.memblock"), "w") as memblock: memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total)) with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock: cpuutilblock.write(create_bar(psutil.cpu_percent() / 100)) time.sleep(20)
Allow over 100 again for when load becomes available
Allow over 100 again for when load becomes available
Python
mit
wilfriedvanasten/miscvar,wilfriedvanasten/miscvar,wilfriedvanasten/miscvar
93380d1574438f4e70145e0bbcde4c3331ef5fd3
massa/domain.py
massa/domain.py
# -*- coding: utf-8 -*- from sqlalchemy import ( Column, Date, Integer, MetaData, Numeric, String, Table, ) def define_tables(metadata): Table('measurement', metadata, Column('id', Integer, primary_key=True), Column('weight', Numeric(4, 1), nullable=False), Column('code', String(25), nullable=False), Column('note', String(140), nullable=True), Column('date_measured', Date(), nullable=False), ) class Db(object): def __init__(self, engine): self._meta = MetaData(engine) define_tables(self._meta) def make_tables(self): self._meta.create_all() def drop_tables(self): self._meta.drop_all() @property def measurement(self): return self._meta.tables['measurement'] class MeasurementService(object): def __init__(self, table): self._table = table def create(self, **kwargs): i = self._table.insert() i.execute(**kwargs)
# -*- coding: utf-8 -*- from sqlalchemy import ( Column, Date, Integer, MetaData, Numeric, String, Table, ) def define_tables(metadata): Table('measurement', metadata, Column('id', Integer, primary_key=True), Column('weight', Numeric(4, 1), nullable=False), Column('code', String(25), nullable=False), Column('note', String(140), nullable=True), Column('date_measured', Date(), nullable=False), ) class Db(object): def __init__(self, engine): self._meta = MetaData(engine) define_tables(self._meta) def make_tables(self): self._meta.create_all() def drop_tables(self): self._meta.drop_all() @property def measurement(self): return self._meta.tables['measurement'] class MeasurementService(object): def __init__(self, table): self._table = table def find_all(self): s = self._table.select() return s.execute() def create(self, **kwargs): i = self._table.insert() i.execute(**kwargs)
Add a method to find all measurements.
Add a method to find all measurements.
Python
mit
jaapverloop/massa
71f062a6db2a87fba57353f5a11ec2e63620a7dd
ctf-app.py
ctf-app.py
from flask import Flask, render_template, request app = Flask(__name__) @app.route('/', methods=['GET']) def home(): return render_template('home.html') if __name__ == '__main__': app.run(debug=True)
from flask import Flask, render_template, request app = Flask(__name__) @app.route('/') def home(): return render_template('home.html') @app.route('/login') def join_team(): return render_template('join.html') @app.route('/submit') def submit_flag(): return render_template('submit.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/contact') def contact(): return render_template('contact.html') if __name__ == '__main__': app.run(debug=True)
Add a bunch of placeholder routes
Add a bunch of placeholder routes
Python
mit
WhiteHatCP/wrath-ctf-framework,DeltaHeavy/wrath-ctf-framework,DeltaHeavy/wrath-ctf-framework,WhiteHatCP/wrath-ctf-framework,DeltaHeavy/wrath-ctf-framework,WhiteHatCP/wrath-ctf-framework,WhiteHatCP/wrath-ctf-framework,DeltaHeavy/wrath-ctf-framework