commit (string, 40 chars) | old_file (string, 4–264 chars) | new_file (string, 4–264 chars) | old_contents (string, 0–3.26k chars) | new_contents (string, 1–4.43k chars) | subject (string, 15–624 chars) | message (string, 15–4.7k chars) | lang (3 classes) | license (13 classes) | repos (string, 5–91.5k chars)
---|---|---|---|---|---|---|---|---|---|
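Each row below follows the column schema above. As a minimal sketch (not part of the original dataset), one record could be represented in Python roughly like this; the `CommitRecord` class name and field comments are illustrative assumptions, and the example values are taken from the first row, with file contents truncated:

```python
# Illustrative only: a plausible representation of one record in this dump,
# based on the column schema above (not part of the original dataset).
from dataclasses import dataclass


@dataclass
class CommitRecord:
    commit: str        # 40-character commit hash
    old_file: str      # path of the file before the change
    new_file: str      # path of the file after the change
    old_contents: str  # file contents before the commit
    new_contents: str  # file contents after the commit
    subject: str       # first line of the commit message
    message: str       # full commit message
    lang: str          # language label (3 classes in this dump)
    license: str       # repository license (13 classes in this dump)
    repos: str         # comma-separated repositories containing the commit


# Example built from the first row below (contents truncated for brevity).
example = CommitRecord(
    commit="7175712bd646a126747ec8ca7c7ce3cfe41bd007",
    old_file="setup.py",
    new_file="setup.py",
    old_contents="#!/usr/bin/env python\nfrom setuptools import setup\n...",
    new_contents="#!/usr/bin/env python\nfrom setuptools import setup\n...",
    subject="Remove (no longer needed) dependency link for daploader",
    message="Remove (no longer needed) dependency link for daploader",
    lang="Python",
    license="agpl-3.0",
    repos="devassistant/dapi,devassistant/dapi,devassistant/dapi",
)
```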
7175712bd646a126747ec8ca7c7ce3cfe41bd007
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Dapi',
version='1.0',
description='DevAssistant Package Index',
author='Miro Hroncok',
author_email='[email protected]',
url='https://github.com/hroncok/dapi',
license='AGPLv3',
install_requires=[
'Django==1.6',
'psycopg2',
'South',
'daploader>=0.0.5',
'PyYAML',
'python-social-auth',
'django-taggit',
'django-simple-captcha',
'django-haystack',
'whoosh',
'djangorestframework',
'django-gravatar2',
'markdown2',
'Markdown',
],
dependency_links = [
'git+git://github.com/omab/python-social-auth.git@c5dd3339#egg=python-social-auth',
'git+git://github.com/tomchristie/django-rest-framework.git@03b4c60b#egg=djangorestframework',
'https://pypi.python.org/packages/source/d/daploader/daploader-0.0.5.tar.gz'
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Dapi',
version='1.0',
description='DevAssistant Package Index',
author='Miro Hroncok',
author_email='[email protected]',
url='https://github.com/hroncok/dapi',
license='AGPLv3',
install_requires=[
'Django==1.6',
'psycopg2',
'South',
'daploader>=0.0.5',
'PyYAML',
'python-social-auth',
'django-taggit',
'django-simple-captcha',
'django-haystack',
'whoosh',
'djangorestframework',
'django-gravatar2',
'markdown2',
'Markdown',
],
dependency_links = [
'git+git://github.com/omab/python-social-auth.git@c5dd3339#egg=python-social-auth',
'git+git://github.com/tomchristie/django-rest-framework.git@03b4c60b#egg=djangorestframework',
]
)
|
Remove (no longer needed) dependency link for daploader
|
Remove (no longer needed) dependency link for daploader
|
Python
|
agpl-3.0
|
devassistant/dapi,devassistant/dapi,devassistant/dapi
|
3e7624aeeff08eb03935b29286a754de59ea318d
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages, setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def requirements(fname):
return [line.strip()
for line in open(os.path.join(os.path.dirname(__file__), fname))]
setup(
name='slacklogger',
version='0.2.0',
author='Tatsuya NAKAMURA',
author_email='[email protected]',
description='A tool for logging messages on your Slack term.',
license='MIT',
url='https://github.com/nkmrtty/SlackLogger/',
packages=find_packages(),
keywords=['slack', 'logging', 'api'],
install_requires=['slackclient']
)
|
import os
from setuptools import find_packages, setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def requirements(fname):
return [line.strip()
for line in open(os.path.join(os.path.dirname(__file__), fname))]
setup(
name='slacklog',
version='0.2.0',
author='Tatsuya NAKAMURA',
author_email='[email protected]',
description='A tool for logging messages on your Slack term.',
license='MIT',
url='https://github.com/nkmrtty/slacklog/',
packages=find_packages(),
keywords=['slack', 'logging', 'api'],
install_requires=['slackclient']
)
|
Modify the name of package
|
Modify the name of package
|
Python
|
mit
|
nkmrtty/SlackLogger
|
f9f65019230f8c694a61053d571093cd87db4950
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'gdaxcli',
packages = ['gdaxcli'],
version = '0.1',
description = 'Commandline client for trading on GDAX',
author = 'Son Pham',
author_email = '[email protected]',
url = 'https://github.com/sonph/gdaxcli',
download_url = 'https://github.com/sonph/gdaxcli/archive/0.1.0.zip',
keywords = ['gdax', 'cli', 'tool', 'client', 'cryptocurrency', 'bitcoin',
'ethereum', 'trading'],
classifiers = [],
)
# To upload to PyPI:
# python setup.py register -r pypi
# python setup.py sdist upload -r pypi
|
from distutils.core import setup
setup(
name = 'gdaxcli',
packages = ['gdaxcli'],
version = '0.1',
description = 'Commandline client for trading on GDAX',
author = 'Son Pham',
author_email = '[email protected]',
url = 'https://github.com/sonph/gdaxcli',
download_url = 'https://github.com/sonph/gdaxcli/archive/0.1.0.zip',
keywords = ['gdax', 'cli', 'tool', 'client', 'cryptocurrency', 'bitcoin',
'ethereum', 'trading'],
classifiers = [],
)
# To upload to testpypi.python.org:
# python setup.py register -r pypitest
# python setup.py sdist upload -r pypitest
# To upload to pypi.python.org:
# python setup.py register -r pypi
# python setup.py sdist upload -r pypi
|
Add instructions for uploading to pypitest
|
Add instructions for uploading to pypitest
|
Python
|
mit
|
sonph/gdaxcli,sonph/gdaxcli
|
6a4fa01005d1023e79bc5f214a14285727bb096b
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
import setuptools
import unittest
def discover_tests():
test_loader = unittest.TestLoader()
return test_loader.discover('.', pattern='*_test.py')
if __name__ == '__main__':
setuptools.setup(
name='mesonwrap',
version='0.2.0',
author='The Meson development team',
license='Apache 2',
url='https://github.com/mesonbuild/mesonwrap',
packages=[
'mesonwrap',
'mesonwrap.tools',
'wrapweb',
],
include_package_data=True,
install_requires=[
'Flask',
'GitPython',
'PyGithub',
'cachetools',
'retrying',
'requests',
'requests-ftp',
],
entry_points={
'console_scripts': [
'mesonwrap=mesonwrap.cli:Command',
],
},
test_suite='setup.discover_tests',
)
|
#!/usr/bin/env python3
import setuptools
import unittest
def discover_tests():
test_loader = unittest.TestLoader()
return test_loader.discover('.', pattern='*_test.py')
if __name__ == '__main__':
setuptools.setup(
name='mesonwrap',
version='0.2.0',
author='The Meson development team',
license='Apache 2',
url='https://github.com/mesonbuild/mesonwrap',
packages=[
'mesonwrap',
'mesonwrap.tools',
'wrapweb',
],
package_data={
'wrapweb': ['templates/*.html'],
},
install_requires=[
'Flask',
'GitPython',
'PyGithub',
'cachetools',
'retrying',
'requests',
'requests-ftp',
],
entry_points={
'console_scripts': [
'mesonwrap=mesonwrap.cli:Command',
],
},
test_suite='setup.discover_tests',
)
|
Revert "Use include_package_data=True instead of manual package_data specification"
|
Revert "Use include_package_data=True instead of manual package_data specification"
This reverts commit e8458e83b7929340c73ae0308af28b5c8d18faac.
|
Python
|
apache-2.0
|
mesonbuild/wrapweb,mesonbuild/wrapweb,mesonbuild/wrapweb
|
be85f552edf4eeeae5f4f2e53db0c03aa037970d
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='utf9',
version='0.2',
description='Encode and decode text with UTF-9 (IEEE RFC4042)',
install_requires=['bitarray'],
url='http://github.com/enricobacis/utf9',
author='Enrico Bacis',
author_email='[email protected]',
license='MIT',
packages=['utf9'],
keywords='utf9 encode decode rfc4042')
|
from setuptools import setup
with open('README.rst') as README:
long_description = README.read()
long_description[long_description.index('Description'):]
setup(name='utf9',
version='0.3',
description='Encode and decode text using UTF-9.',
long_description=long_description,
install_requires=['bitarray'],
url='http://github.com/enricobacis/utf9',
author='Enrico Bacis',
author_email='[email protected]',
license='MIT',
packages=['utf9'],
keywords='utf9 encode decode rfc4042')
|
Add long_description and bump to version 0.3
|
Add long_description and bump to version 0.3
|
Python
|
mit
|
enricobacis/utf9
|
8ab6e70e8c04936ee7c6608a97014b06856e7a2f
|
setup.py
|
setup.py
|
from setuptools import Extension, find_packages, setup
arpreq = Extension('arpreq', sources=['arpreq/arpreq.c'],
extra_compile_args=['-std=c99'])
setup(name='arpreq',
version='0.1.0',
description="Query the Kernel ARP cache for the MAC address "
"corresponding to IP address",
packages=find_packages(exclude=["*.tests"]),
ext_modules=[arpreq],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: System Administrators',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Networking',
],
)
|
from setuptools import Extension, find_packages, setup
arpreq = Extension('arpreq', sources=['arpreq/arpreq.c'],
extra_compile_args=['-std=c99'])
setup(name='arpreq',
author='Sebastian Schrader',
author_email='[email protected]',
url='https://github.com/sebschrader/python-arpreq',
version='0.1.0',
description="Query the Kernel ARP cache for the MAC address "
"corresponding to IP address",
packages=find_packages(exclude=["*.tests"]),
ext_modules=[arpreq],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: System Administrators',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Networking',
],
)
|
Add author and home page information
|
Add author and home page information
|
Python
|
mit
|
sebschrader/python-arpreq,sebschrader/python-arpreq,sebschrader/python-arpreq,sebschrader/python-arpreq
|
ae65aa843a2da11f4a237ce52de3b034826c40e8
|
setup.py
|
setup.py
|
#!/usr/bin/env python
VERSION = "0.5.1"
from setuptools import setup, find_packages
classifiers = [
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Topic :: Text Processing :: Linguistic'
]
GITHUB_URL = 'http://github.com/LuminosoInsight/luminoso-api-client-python'
setup(
name="luminoso-api",
version=VERSION,
maintainer='Luminoso Technologies, Inc.',
maintainer_email='[email protected]',
url=GITHUB_URL,
download_url='%s/tarball/v%s' % (GITHUB_URL, VERSION),
platforms=["any"],
description="Python client library for communicating with the Luminoso REST API",
classifiers=classifiers,
packages=find_packages(),
install_requires=[
'requests >= 1.2.1, < 3.0',
'ftfy >= 3.3, < 5',
],
entry_points={
'console_scripts': [
'lumi-upload = luminoso_api.upload:main',
'lumi-json-stream = luminoso_api.json_stream:main',
]},
)
|
#!/usr/bin/env python
VERSION = "0.5.1"
from setuptools import setup, find_packages
classifiers = [
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Topic :: Text Processing :: Linguistic'
]
GITHUB_URL = 'http://github.com/LuminosoInsight/luminoso-api-client-python'
setup(
name="luminoso-api",
version=VERSION,
maintainer='Luminoso Technologies, Inc.',
maintainer_email='[email protected]',
url=GITHUB_URL,
download_url='%s/tarball/v%s' % (GITHUB_URL, VERSION),
platforms=["any"],
description="Python client library for communicating with the Luminoso REST API",
classifiers=classifiers,
packages=find_packages(exclude=('tests',)),
install_requires=[
'requests >= 1.2.1, < 3.0',
'ftfy >= 3.3, < 5',
],
entry_points={
'console_scripts': [
'lumi-upload = luminoso_api.upload:main',
'lumi-json-stream = luminoso_api.json_stream:main',
]},
)
|
Exclude the tests directory when finding packages (for consistency and future-proofing).
|
Exclude the tests directory when finding packages (for consistency and future-proofing).
|
Python
|
mit
|
LuminosoInsight/luminoso-api-client-python
|
64ecd6694bc1aeeece09a4b3485f53aee02e5962
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='django-pagelets',
version=__import__('pagelets').__version__,
author='Caktus Consulting Group',
author_email='[email protected]',
packages=find_packages(exclude=['sample_project']),
include_package_data=True,
url='http://github.com/caktus/django-pagelets/',
license='BSD',
description='Simple, flexible app for integrating static, unstructured '
'content in a Django site',
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
)
|
from setuptools import setup, find_packages
setup(
name='django-pagelets',
version=__import__('pagelets').__version__,
author='Caktus Consulting Group',
author_email='[email protected]',
packages=find_packages(exclude=['sample_project']),
include_package_data=True,
url='http://github.com/caktus/django-pagelets/',
license='BSD',
description='Simple, flexible app for integrating static, unstructured '
'content in a Django site',
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
)
|
Add some Python version classifiers
|
Add some Python version classifiers
|
Python
|
bsd-3-clause
|
caktus/django-pagelets,caktus/django-pagelets,caktus/django-pagelets,caktus/django-pagelets
|
1e6bd140ebc2fbc4227af7859b3df47a6f876f95
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
version = '6.5.1'
extras_require = {
'images': [
'Pillow>=2.8.1,<2.9',
],
}
setup(
name='incuna-test-utils',
packages=find_packages(),
include_package_data=True,
version=version,
description='Custom TestCases and other test helpers for Django apps',
long_description=open('README.md').read(),
author='Incuna',
author_email='[email protected]',
url='https://github.com/incuna/incuna-test-utils/',
install_requires=[],
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Testing',
],
)
|
from setuptools import find_packages, setup
version = '6.5.1'
extras_require = {
'images': [
'Pillow>=2.8.1,<2.9',
],
}
setup(
name='incuna-test-utils',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
description='Custom TestCases and other test helpers for Django apps',
long_description=open('README.md').read(),
author='Incuna',
author_email='[email protected]',
url='https://github.com/incuna/incuna-test-utils/',
install_requires=[],
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Testing',
],
)
|
Exclude tests folder from package
|
Exclude tests folder from package
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
ac000dab8aff1a80f20cc525bf799f6a9893dee3
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from mamba import __version__
setup(name='mamba',
version=__version__,
description="The definitive testing tool for Python. Born under the banner of Behavior Driven Development.",
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'
],
keywords='',
author=u'Néstor Salceda',
author_email='[email protected]',
url='http://nestorsalceda.github.io/mamba',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from mamba import __version__
setup(name='mamba',
version=__version__,
description="The definitive testing tool for Python. Born under the banner of Behavior Driven Development.",
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'
],
keywords='',
author=u'Néstor Salceda',
author_email='[email protected]',
url='http://nestorsalceda.github.io/mamba',
license='MIT/X11',
packages=find_packages(exclude=['ez_setup', 'examples', 'spec', 'spec.*']),
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'mamba = mamba.cli:main'
]
})
|
Exclude 'spec' package from installation.
|
Exclude 'spec' package from installation.
This prevents some weird behavior when running
specs for another package because mamba will load
its own spec package since is first in the PYTHONPATH.
|
Python
|
mit
|
eferro/mamba,angelsanz/mamba,alejandrodob/mamba,nestorsalceda/mamba,dex4er/mamba,jaimegildesagredo/mamba,markng/mamba
|
14a706fe52d3bc316fc5d3a130824b7353e9efed
|
setup.py
|
setup.py
|
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
]
)
|
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
}
)
|
Add minidump and xbe backends to extras_require
|
Add minidump and xbe backends to extras_require
|
Python
|
bsd-2-clause
|
angr/cle
|
3b21b6e9edf19edc6569e383e708965a17c2ce0b
|
setup.py
|
setup.py
|
import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.0.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="[email protected]",
maintainer="xgfone",
maintainer_email="[email protected]",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
import os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
README_FILE = os.path.join(ROOT_DIR, "README.rst")
with open(README_FILE) as f:
long_description = f.read()
setup(
name="xutils",
version="1.1.0",
description="A Fragmentary Python Library, no any third-part dependencies.",
long_description=long_description,
author="xgfone",
author_email="[email protected]",
maintainer="xgfone",
maintainer_email="[email protected]",
url="https://github.com/xgfone/xutils",
packages=["xutils"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
Set the version to 1.1.0
|
Set the version to 1.1.0
|
Python
|
mit
|
xgfone/pycom,xgfone/xutils
|
e23fe0e2e0d4cd3ddc01e4292a3d757963979472
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2015 edX
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
"""Set up for xblock-utils"""
import os
import os.path
from setuptools import setup
setup(
name='xblock-utils',
version='0.1a0',
description='Various utilities for XBlocks',
packages=[
'xblockutils',
],
install_requires=[
'XBlock',
]
)
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2015 edX
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
"""Set up for xblock-utils"""
import os
import os.path
from setuptools import setup
def package_data(pkg, root_list):
"""Generic function to find package_data for `pkg` under `root`."""
data = []
for root in root_list:
for dirname, _, files in os.walk(os.path.join(pkg, root)):
for fname in files:
data.append(os.path.relpath(os.path.join(dirname, fname), pkg))
return {pkg: data}
setup(
name='xblock-utils',
version='0.1a0',
description='Various utilities for XBlocks',
packages=[
'xblockutils',
],
install_requires=[
'XBlock',
],
package_data=package_data("xblockutils", ["public", "templates"]),
)
|
Fix static data missing if not installed in dev mode
|
Fix static data missing if not installed in dev mode
|
Python
|
agpl-3.0
|
openfun/xblock-utils2,open-craft/xblock-utils,open-craft/xblock-utils,openfun/xblock-utils2,edx/xblock-utils,edx/xblock-utils,edx/xblock-utils,openfun/xblock-utils2,open-craft/xblock-utils
|
06820a8be62577448dbd5e77cb28ce51f9960869
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
#!/usr/bin/env python
import os
from glob import glob
from distutils.core import setup
setup(
name='whisper',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Fixed size round-robin style database',
py_modules=['whisper'],
scripts=glob('bin/*'),
)
|
Make whisper build a usable RPM via bdist_RPM
|
Make whisper build a usable RPM via bdist_RPM
|
Python
|
apache-2.0
|
cbowman0/whisper,jjneely/whisper,graphite-server/whisper,graphite-project/whisper,piotr1212/whisper,obfuscurity/whisper,deniszh/whisper,acdha/whisper,akbooer/whisper,penpen/whisper,kerlandsson/whisper,alexandreboisvert/whisper
|
5651b1a790d1c37162d328ece8673616f8d4bc98
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='fes',
version='1.0.0.dev1',
description='Future Event Service',
long_description=long_description,
url='https://github.com/erprice/fes',
author='Evan Price',
author_email='[email protected]',
license='Apache',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7'
],
keywords='hbase redis rest json',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=['flask', 'hashlib', 'redis', 'ordereddict', 'requests', 'nose'],
package_data={
'hbase_schema': ['schema'],
'readme': ['README.md'],
'license': ['LICENSE.txt'],
'todo': ['TODO.txt'],
'gitignore': ['.gitignore']
},
)
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='fes',
version='1.0.0.dev1',
description='Future Event Service',
long_description=long_description,
url='https://github.com/erprice/fes',
author='Evan Price',
author_email='[email protected]',
license='Apache',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7'
],
keywords='hbase redis rest json',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=['redis', 'ordereddict', 'requests', 'nose', 'flask'],
package_data={
'hbase_schema': ['schema'],
'readme': ['README.md'],
'license': ['LICENSE.txt'],
'todo': ['TODO.txt'],
'gitignore': ['.gitignore']
},
)
|
Remove double install hashlib module. It's a prereq of flask.
|
Remove double install hashlib module. It's a prereq of flask.
|
Python
|
apache-2.0
|
erprice/fes,erprice/fes
|
052765f6e5e9b95ad18d15931d05716b720ca73d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os, sys
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(
name="statprof",
version="0.1.2",
author="Bryan O'Sullivan",
author_email="[email protected]",
description="Statistical profiling for Python",
license=read('LICENSE'),
keywords="profiling",
url="http://packages.python.org/statprof",
py_modules=['statprof'],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
],
**extra
)
|
#!/usr/bin/env python
import os, sys
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(
name="statprof",
version="0.1.2",
author="Bryan O'Sullivan",
author_email="[email protected]",
description="Statistical profiling for Python",
license=read('LICENSE'),
keywords="profiling",
url="http://packages.python.org/statprof",
py_modules=['statprof'],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
**extra
)
|
Add classifiers for supported Python versions.
|
Add classifiers for supported Python versions.
|
Python
|
lgpl-2.1
|
smarkets/statprof,bos/statprof.py
|
542ab347d5a0195c88be212d966253d4385b8af5
|
api.py
|
api.py
|
import webapp2
from google.appengine.api import channel
from google.appengine.api import users
open_channels = set()
class ChannelDidConnect(webapp2.RequestHandler):
def post(self):
print "Got connection"
open_channels.add(self.request.get("from"))
class ChannelDisconnect(webapp2.RequestHandler):
def post(self):
print "Got disconnection"
channelId = self.request.get("from")
if channelId in open_channels:
open_channels.remove(channelId)
class ChannelRequest(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if not user:
self.response.write({"token": ""})
return
token = channel.create_channel(user.user_id())
self.response.write(
"{\"token\": \"%s\"}" % token
)
class Message(webapp2.RequestHandler):
def post(self):
# Only accept messages from logged in users
user = users.get_current_user()
if not user:
return
print open_channels
for channelId in open_channels:
channel.send_message(channelId,
"message=%s&author=%s" % (self.request.POST["message"],
self.request.POST["author"]))
app = webapp2.WSGIApplication([
('/api/channel', ChannelRequest),
('/api/message', Message),
('/_ah/channel/connected/', ChannelDidConnect),
('/_ah/channel/disconnected/', ChannelDisconnect),
])
|
import webapp2
from google.appengine.api import channel
from google.appengine.api import users
open_channels = set()
class ChannelDidConnect(webapp2.RequestHandler):
def post(self):
print "Got connection"
open_channels.add(self.request.get("from"))
class ChannelDisconnect(webapp2.RequestHandler):
def post(self):
print "Got disconnection"
channelId = self.request.get("from")
if channelId in open_channels:
open_channels.remove(channelId)
class ChannelRequest(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if not user:
self.response.write({"token": ""})
return
token = channel.create_channel(user.user_id())
self.response.write(
"{\"token\": \"%s\"}" % token
)
class Message(webapp2.RequestHandler):
def post(self):
self.handleRequest()
def get(self):
self.handleRequest()
def handleRequest(self):
print open_channels
for channelId in open_channels:
channel.send_message(channelId,
"message=%s&author=%s" % (self.request.params["message"],
self.request.params["author"]))
app = webapp2.WSGIApplication([
('/api/channel', ChannelRequest),
('/api/message', Message),
('/_ah/channel/connected/', ChannelDidConnect),
('/_ah/channel/disconnected/', ChannelDisconnect),
])
|
Allow messages to come in through GET
|
Allow messages to come in through GET
|
Python
|
mit
|
misterwilliam/gae-channels-sample,misterwilliam/gae-channels-sample,misterwilliam/gae-channels-sample
|
5a8ba813dc3dc40044f2fe9365dfde477edc4f6f
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='django-pager-duty',
description='PagerDuty Filtering and On Call Display',
author='Paul Traylor',
url='http://github.com/kfdm/django-pager-duty/',
version='0.2.0',
packages=['pagerduty'],
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='django-pager-duty',
description='PagerDuty Filtering and On Call Display',
author='Paul Traylor',
url='http://github.com/kfdm/django-pager-duty/',
version='0.2.1',
packages=['pagerduty'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
Fix package data and bump to 0.2.1
|
Fix package data and bump to 0.2.1
|
Python
|
mit
|
kfdm-archive/django-pager-duty
|
29bb9a9298136475138962bb63f0305e9221628f
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tablo',
description='A PostGIS table to feature service app for Django',
keywords='feature service, map server, postgis, django',
version='1.3.0',
packages=['tablo', 'tablo.migrations', 'tablo.interfaces', 'tablo.interfaces.arcgis'],
install_requires=[
'Django>=1.11.28,<2.0', 'sqlparse>=0.1.18', 'pyproj', 'six', 'pandas==0.24.*',
'django-tastypie==0.14.*', 'psycopg2', 'Pillow>=2.9.0', 'django-storages>=1.5.2',
'boto3>=1.4.4', 'sqlalchemy==1.3.*', 'geoalchemy2==0.6.*'
],
test_suite='tablo.tests.runtests.runtests',
tests_require=['django-nose', 'rednose'],
url='http://github.com/consbio/tablo',
license='BSD',
)
|
from setuptools import setup
setup(
name='tablo',
description='A PostGIS table to feature service app for Django',
keywords='feature service, map server, postgis, django',
version='1.3.0',
packages=['tablo', 'tablo.migrations', 'tablo.interfaces', 'tablo.interfaces.arcgis'],
install_requires=[
'Django>=1.11.29,<2.0', 'sqlparse>=0.1.18', 'pyproj', 'six', 'pandas==0.24.*',
'django-tastypie==0.14.*', 'psycopg2', 'Pillow>=2.9.0', 'django-storages>=1.5.2',
'boto3>=1.4.4', 'sqlalchemy==1.3.*', 'geoalchemy2==0.6.*'
],
test_suite='tablo.tests.runtests.runtests',
tests_require=['django-nose', 'rednose'],
url='http://github.com/consbio/tablo',
license='BSD',
)
|
Upgrade minimum Django version past security vulnerability
|
Upgrade minimum Django version past security vulnerability
|
Python
|
bsd-3-clause
|
consbio/tablo
|
8bc57a3a09709f8c12bfa98869674680483b162c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name="datafork",
version="0.0.1",
description="Forkable global state",
packages=['datafork'],
author="Martin Atkins",
author_email="[email protected]",
classifiers=[
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
]
)
|
from distutils.core import setup
setup(
name="datafork",
version="dev",
description="Forkable global state",
packages=['datafork'],
author="Martin Atkins",
author_email="[email protected]",
classifiers=[
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
]
)
|
Set the mainline version to "dev".
|
Set the mainline version to "dev".
Rather than constantly upping the version number in the master branch,
we'll just set it to "dev" and set version numbers only in tags so that
we can easily spot the difference between a build from master and an
actual release.
|
Python
|
mit
|
apparentlymart/python-datafork
|
c12b0e1c3253f17b71cbb4095c83c73d0f2ea188
|
setup.py
|
setup.py
|
"""
RTRSS
-----
RSS feeds for popular bittorrent tracker
"""
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
|
"""
RTRSS
-----
RSS feeds for popular bittorrent tracker
"""
from setuptools import setup
with open('reqs/production.txt') as f:
_requirements = f.read().splitlines()
setup(
name='rtrss',
version='0.3',
author='notapresent',
author_email='[email protected]',
url='https://github.com/notapresent/rtrss',
description='RSS feeds for popular bittorrent tracker',
long_description=__doc__,
license='Apache 2.0',
download_url='https://github.com/notapresent/rtrss/archive/master.zip',
install_requires=_requirements,
entry_points={
'console_scripts': [
'rtrssmgr = rtrss.worker:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Flask',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
]
)
|
Add rtrssmgr command entry point
|
Add rtrssmgr command entry point
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
c69b7b80c75e170f1b83d1204fb531544a9fc831
|
rpifake/__init__.py
|
rpifake/__init__.py
|
# After this function, any futher calls to import RPi.GPIO
# will instead import .gpio.Gpio instead
def patch_fake_gpio():
print('Warning, not in RPi, using mock GPIO')
# Idea taken from RPLCD who commented it as being from:
# reddit.com/r/Python/comments/5eddp5/mock_testing_rpigpio
import mock
from .gpio import Gpio as FakeGpio
MockRPi = mock.MagicMock()
MockRPi.GPIO = FakeGpio()
modules = {
'RPi': MockRPi,
'RPi.GPIO': MockRPi.GPIO,
}
sys.modules.update(modules)
is_active = True
# Do the test if we have RPi.GPIO or not
ON_RPI = True
try:
import RPi.GPIO
except ImportError:
ON_RPI = False
if not ON_RPI:
patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
|
# After this function, any futher calls to import RPi.GPIO
# will instead import .gpio.Gpio instead
def patch_fake_gpio():
import sys
import mock
from .gpio import Gpio as FakeGpio
MockRPi = mock.MagicMock()
MockRPi.GPIO = FakeGpio()
modules = {
'RPi': MockRPi,
'RPi.GPIO': MockRPi.GPIO,
}
sys.modules.update(modules)
is_active = True
# Do the test if we have RPi.GPIO or not
ON_RPI = True
try:
import RPi.GPIO
except ImportError:
ON_RPI = False
if not ON_RPI:
patch_fake_gpio()
# now that the patching is done, we can import RPLCD anywhere
|
Make override more global, not just within patch scope
|
Make override more global, not just within patch scope
|
Python
|
mit
|
rfarley3/lcd-restful,rfarley3/lcd-restful
|
9a2e56c5721fad4df75339feac6676897f40ac01
|
src/tpn/data_io.py
|
src/tpn/data_io.py
|
#!/usr/bin/env python
import zipfile
import cPickle
import numpy as np
"""
track_obj: {
frames: 1 by n numpy array,
anchors: 1 by n numpy array,
features: m by n numpy array,
scores: c by n numpy array,
boxes: 4 by n numpy array,
rois: 4 by n numpy array
}
"""
def save_track_proto_to_zip(track_proto, save_file):
zf = zipfile.ZipFile(save_file, 'w')
print "Writing to zip file {}...".format(save_file)
for track_id, track in enumerate(track_proto['tracks']):
track_obj = {}
for key in track[0]:
track_obj[key] = np.asarray([box[key] for box in track])
zf.writestr('{:06d}.pkl'.format(track_id),
cPickle.dumps(track_obj, cPickle.HIGHEST_PROTOCOL))
if (track_id + 1) % 1000 == 0:
print "\t{} tracks written.".format(track_id + 1)
print "\tTotally {} tracks written.".format(track_id + 1)
zf.close()
|
#!/usr/bin/env python
import zipfile
import cPickle
import numpy as np
"""
track_obj: {
frames: 1 by n numpy array,
anchors: 1 by n numpy array,
features: m by n numpy array,
scores: c by n numpy array,
boxes: 4 by n numpy array,
rois: 4 by n numpy array
}
"""
def save_track_proto_to_zip(track_proto, save_file):
zf = zipfile.ZipFile(save_file, 'w', allowZip64=True)
print "Writing to zip file {}...".format(save_file)
for track_id, track in enumerate(track_proto['tracks']):
track_obj = {}
for key in track[0]:
track_obj[key] = np.asarray([box[key] for box in track])
zf.writestr('{:06d}.pkl'.format(track_id),
cPickle.dumps(track_obj, cPickle.HIGHEST_PROTOCOL))
if (track_id + 1) % 1000 == 0:
print "\t{} tracks written.".format(track_id + 1)
print "\tTotally {} tracks written.".format(track_id + 1)
zf.close()
|
Enable zip64 to store tracks larger than 2GB.
|
Enable zip64 to store tracks larger than 2GB.
|
Python
|
mit
|
myfavouritekk/TPN
|
59d44ba76a9b2f98375fa2f893dabc0376de6f82
|
localeurl/models.py
|
localeurl/models.py
|
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs') or {}
locale = utils.supported_language(reverse_kwargs.pop(
'locale', translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
|
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from django.contrib.auth import views as auth_views
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs') or {}
locale = utils.supported_language(reverse_kwargs.pop(
'locale', translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
def redirect_to_login(next, login_url, *args, **kwargs):
if not login_url:
login_url = settings.LOGIN_URL
login_url = utils.locale_url(login_url, translation.get_language())
return django_redirect_to_login(next, login_url, *args, **kwargs)
django_redirect_to_login = None
def patch_redirect_to_login():
"""
Monkey-patches the redirect_to_login function. Will not patch twice.
"""
global django_redirect_to_login
if auth_views.redirect_to_login is not redirect_to_login:
django_redirect_to_login = auth_views.redirect_to_login
auth_views.redirect_to_login = redirect_to_login
if settings.USE_I18N:
patch_reverse()
patch_redirect_to_login()
|
Patch redirect_to_login to maintain locale
|
Patch redirect_to_login to maintain locale
Signed-off-by: Simon Luijk <[email protected]>
|
Python
|
mit
|
simonluijk/django-localeurl
|
7cd3cc1a6a023a093c9c6d8df4dd1b9d6c871faa
|
auto/src/rabird/auto/mouse_xdotool.py
|
auto/src/rabird/auto/mouse_xdotool.py
|
'''
@date 2015-02-21
@author Hong-She Liang <[email protected]>
'''
import os
import re
import subprocess
from .mouse_constant import *
class Mouse(object):
## return current mouse absolute position
@classmethod
def position(cls):
p = subprocess.Popen(["xdotool", "getmouselocation"], stdout=subprocess.PIPE)
output = p.communicate()[0]
matched = re.match(".*x:(\d+)\s*y:(\d+)\s*.*", output)
return [int(matched.group(1)), int(matched.group(2))]
@classmethod
def move(cls, position):
os.system("xdotool mousemove --sync %s %s" % (int(position[0]), int(position[1])))
##
@classmethod
def button_up(cls, button_type = ButtonType.LEFT ):
if ButtonType.LEFT == button_type:
os.system("xdotool mouseup 1")
elif ButtonType.RIGHT == button_type:
os.system("xdotool mouseup 3")
elif ButtonType.MIDDLE == button_type:
os.system("xdotool mouseup 2")
@classmethod
def button_down(cls, button_type = ButtonType.LEFT ):
if ButtonType.LEFT == button_type:
os.system("xdotool mousedown 1")
elif ButtonType.RIGHT == button_type:
os.system("xdotool mousedown 3")
elif ButtonType.MIDDLE == button_type:
os.system("xdotool mousedown 2")
|
'''
@date 2015-02-21
@author Hong-She Liang <[email protected]>
'''
import os
import re
import subprocess
from .mouse_constant import *
class Mouse(object):
## return current mouse absolute position
@classmethod
def position(cls):
output = subprocess.check_output(["xdotool", "getmouselocation"])
matched = re.match(".*x:(\d+)\s*y:(\d+)\s*.*", output)
return [int(matched.group(1)), int(matched.group(2))]
@classmethod
def move(cls, position):
subprocess.call(["xdotool", "mousemove", "--sync", position[0], position[1]])
##
@classmethod
def button_up(cls, button_type = ButtonType.LEFT ):
if ButtonType.LEFT == button_type:
subprocess.call(["xdotool", "mouseup", "1"])
elif ButtonType.RIGHT == button_type:
subprocess.call(["xdotool", "mouseup", "3"])
elif ButtonType.MIDDLE == button_type:
subprocess.call(["xdotool", "mouseup", "2"])
@classmethod
def button_down(cls, button_type = ButtonType.LEFT ):
if ButtonType.LEFT == button_type:
subprocess.call(["xdotool", "mousedown", "1"])
elif ButtonType.RIGHT == button_type:
subprocess.call(["xdotool", "mousedown", "3"])
elif ButtonType.MIDDLE == button_type:
subprocess.call(["xdotool", "mousedown", "2"])
|
Convert all os.system() to subprocess.check_output().
|
Convert all os.system() to subprocess.check_output().
|
Python
|
apache-2.0
|
starofrainnight/rabird.core,starofrainnight/rabird.auto
|
ad069a50ec7a4b4e6b1dac679e071279e128c824
|
been/source/markdowndirectory.py
|
been/source/markdowndirectory.py
|
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
from been.core import DirectorySource, source_registry
from hashlib import sha1
import re
import unicodedata
import time
import markdown
# slugify from Django source (BSD license)
def slugify(value):
value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
return re.sub('[-\s]+', '-', value)
class MarkdownDirectory(DirectorySource):
kind = 'markdown'
def process_event(self, event):
md = markdown.Markdown(extensions=['meta'])
event['content'] = md.convert(event['raw'])
event['title'] = ' '.join(md.Meta.get('title', [event['filename']]))
event['author'] = ' '.join(md.Meta.get('author', ['']))
event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])]))
event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]]))
if md.Meta.get('published'):
# Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT)
event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S')))
event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest()
if time.gmtime() < event['timestamp']:
return None
else:
return event
source_registry.add(MarkdownDirectory)
|
Allow MarkdownDirectory events to specify author.
|
Allow MarkdownDirectory events to specify author.
|
Python
|
bsd-3-clause
|
chromakode/been
|
f286dc748670c67845df226ca7d400e745dd502f
|
awx/main/notifications/email_backend.py
|
awx/main/notifications/email_backend.py
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
return body_actual
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
return body_actual
|
Fix an issue with the email notifier
|
Fix an issue with the email notifier
Incorrect body format assumptions in the email notifier
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx
|
945e8a256e384ed441fc8f9ec4cf15743ecf5b22
|
snakeeyes/tests/conftest.py
|
snakeeyes/tests/conftest.py
|
import pytest
from snakeeyes.app import create_app
@pytest.yield_fixture(scope='session')
def app():
"""
Setup our flask test app, this only gets executed once.
:return: Flask app
"""
params = {
'DEBUG': False,
'TESTING': True,
'WTF_CSRF_ENABLED': False
}
_app = create_app(settings_override=params)
# Establish an application context before running the tests.
ctx = _app.app_context()
ctx.push()
yield _app
ctx.pop()
@pytest.yield_fixture(scope='function')
def client(app):
"""
Setup an app client, this gets executed for each test function.
:param app: Pytest fixture
:return: Flask app client
"""
yield app.test_client()
|
import pytest
from snakeeyes.app import create_app
@pytest.fixture(scope='session')
def app():
"""
Setup our flask test app, this only gets executed once.
:return: Flask app
"""
params = {
'DEBUG': False,
'TESTING': True,
'WTF_CSRF_ENABLED': False
}
_app = create_app(settings_override=params)
# Establish an application context before running the tests.
ctx = _app.app_context()
ctx.push()
yield _app
ctx.pop()
@pytest.fixture(scope='function')
def client(app):
"""
Setup an app client, this gets executed for each test function.
:param app: Pytest fixture
:return: Flask app client
"""
yield app.test_client()
|
Fix deprecration warnings for pytest
|
Fix deprecration warnings for pytest
|
Python
|
mit
|
nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask
|
ae1c6c020225423bcfaff5b3c830345467d87b2d
|
tedx/views.py
|
tedx/views.py
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from .forms import RegistrationForm
from .models import Registration
import utils
def handle_registration(request):
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.save(commit=False)
# If user is logged-in, let's try sending a tweet!
if request.user.is_authenticated():
registration.user = request.user
utils.create_tweet(request.user)
registration.save()
return HttpResponseRedirect(reverse('tedx:thanks'))
else:
form = RegistrationForm()
context = {'form': form}
return render(request, 'tedx/index.html', context)
def list_registration(request):
list_registration = Registration.objects.all()
context = {'list_registration' : list_registration}
return render(request, 'tedx/list_registration.html', context)
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect
from django.shortcuts import render
from clubs.models import Team
from .forms import RegistrationForm
from .models import Registration
import utils
def handle_registration(request):
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
registration = form.save(commit=False)
# If user is logged-in, let's try sending a tweet!
if request.user.is_authenticated():
registration.user = request.user
utils.create_tweet(request.user)
registration.save()
return HttpResponseRedirect(reverse('tedx:thanks'))
else:
form = RegistrationForm()
context = {'form': form}
return render(request, 'tedx/index.html', context)
@login_required
def list_registration(request):
tedx_team = Team.objects.get(code_name="tedx_2017_registration")
is_tedx_member = tedx_team.members.filter(pk=request.user.pk).exists() or\
tedx_team.coordinator == request.user
if not request.user.is_superuser and\
not is_tedx_member:
raise PermissionDenied
list_registration = Registration.objects.all()
context = {'list_registration' : list_registration}
return render(request, 'tedx/list_registration.html', context)
|
Add permission check for TEDx
|
Add permission check for TEDx
|
Python
|
agpl-3.0
|
osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz
|
7c9648107a4b3e9c06954b794b76675e32f458f4
|
calicoctl/calico_ctl/__init__.py
|
calicoctl/calico_ctl/__init__.py
|
__version__ = "0.15.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.10.0-dev"
__felix_version__ = "1.3.0a6-dev"
|
__version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
|
Update docs to version v0.16.0
|
Update docs to version v0.16.0
|
Python
|
apache-2.0
|
TrimBiggs/calico-containers,caseydavenport/calico-docker,TrimBiggs/calico-docker,caseydavenport/calico-docker,projectcalico/calico-docker,projectcalico/calico-containers,caseydavenport/calico-containers,quater/calico-containers,Metaswitch/calico-docker,caseydavenport/calico-containers,Metaswitch/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-docker,caseydavenport/calico-containers,insequent/calico-docker,quater/calico-containers,insequent/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-containers,projectcalico/calico-docker
|
52138f389d5b8ef3e2cd25179716b36170b94242
|
packages/dash-html-components/dash_html_components/__init__.py
|
packages/dash-html-components/dash_html_components/__init__.py
|
import os
import dash as _dash
import sys as _sys
current_path = os.path.dirname(os.path.abspath(__file__))
_dash.development.component_loader.load_components(
os.path.join(current_path, '../lib/metadata.json'),
['content', 'id', 'key', 'className', 'style', 'dependencies'],
globals(),
_sys._getframe(1).f_globals.get('__name__', '__main__')
)
|
import os
import dash as _dash
import sys as _sys
current_path = os.path.dirname(os.path.abspath(__file__))
_dash.development.component_loader.load_components(
os.path.join(current_path, 'metadata.json'),
['content', 'id', 'key', 'className', 'style', 'dependencies'],
globals(),
_sys._getframe(1).f_globals.get('__name__', '__main__')
)
|
Load metadata path from current directory
|
Load metadata path from current directory
|
Python
|
mit
|
plotly/dash,plotly/dash,plotly/dash,plotly/dash,plotly/dash
|
a47ab820870caebcc00daf32531cfad3abd2e532
|
src/engine/SCons/Platform/darwin.py
|
src/engine/SCons/Platform/darwin.py
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
|
Python
|
mit
|
andrewyoung1991/scons,timj/scons,timj/scons,andrewyoung1991/scons,andrewyoung1991/scons,timj/scons,andrewyoung1991/scons,timj/scons,andrewyoung1991/scons,timj/scons,andrewyoung1991/scons,timj/scons,andrewyoung1991/scons,andrewyoung1991/scons,timj/scons,timj/scons,andrewyoung1991/scons,timj/scons
|
a2430b67423ce036d2a96541e86d356ace04db69
|
Twitch/cogs/words.py
|
Twitch/cogs/words.py
|
from twitchio.ext import commands
@commands.cog()
class Words:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def audiodefine(self, ctx, word):
url = f"http://api.wordnik.com:80/v4/word.json/{word}/audio"
params = {"useCanonical": "false", "limit": 1, "api_key": self.bot.WORDNIK_API_KEY}
async with self.bot.aiohttp_session.get(url, params = params) as resp:
data = await resp.json()
if data:
await ctx.send(f"{data[0]['word'].capitalize()}: {data[0]['fileUrl']}")
else:
await ctx.send("Word or audio not found.")
@commands.command()
async def define(self, ctx, word):
url = f"http://api.wordnik.com:80/v4/word.json/{word}/definitions"
params = {"limit": 1, "includeRelated": "false", "useCanonical": "false", "includeTags": "false",
"api_key": self.bot.WORDNIK_API_KEY}
async with self.bot.aiohttp_session.get(url, params = params) as resp:
data = await resp.json()
if data:
await ctx.send(data[0]["word"].capitalize() + ": " + data[0]["text"])
else:
await ctx.send("Definition not found.")
|
from twitchio.ext import commands
@commands.cog()
class Words:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def audiodefine(self, ctx, word):
url = f"http://api.wordnik.com:80/v4/word.json/{word}/audio"
params = {"useCanonical": "false", "limit": 1, "api_key": self.bot.WORDNIK_API_KEY}
async with self.bot.aiohttp_session.get(url, params = params) as resp:
data = await resp.json()
if data:
await ctx.send(f"{data[0]['word'].capitalize()}: {data[0]['fileUrl']}")
else:
await ctx.send("Word or audio not found.")
@commands.command()
async def define(self, ctx, word):
url = f"http://api.wordnik.com:80/v4/word.json/{word}/definitions"
params = {"limit": 1, "includeRelated": "false", "useCanonical": "false", "includeTags": "false",
"api_key": self.bot.WORDNIK_API_KEY}
async with self.bot.aiohttp_session.get(url, params = params) as resp:
data = await resp.json()
if data:
await ctx.send(f"{data[0]['word'].capitalize()}: {data[0]['text']}")
else:
await ctx.send("Definition not found.")
|
Use f-string for define command
|
[TwitchIO] Use f-string for define command
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
7be606951b22d77a53274d014cd94aae30af93f5
|
samples/oauth2_for_devices.py
|
samples/oauth2_for_devices.py
|
# -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print "Enter the following code at %s: %s" % (flow_info.verification_url,
flow_info.user_code)
print "Then press Enter."
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print "Access token:", credentials.access_token
print "Refresh token:", credentials.refresh_token
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
|
# -*- coding: utf-8 -*-
# See: https://developers.google.com/accounts/docs/OAuth2ForDevices
import httplib2
from six.moves import input
from oauth2client.client import OAuth2WebServerFlow
from googleapiclient.discovery import build
CLIENT_ID = "some+client+id"
CLIENT_SECRET = "some+client+secret"
SCOPES = ("https://www.googleapis.com/auth/youtube",)
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
# Step 1: get user code and verification URL
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
flow_info = flow.step1_get_device_and_user_codes()
print("Enter the following code at {0}: {1}".format(flow_info.verification_url,
flow_info.user_code))
print("Then press Enter.")
input()
# Step 2: get credentials
# https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
credentials = flow.step2_exchange(device_flow_info=flow_info)
print("Access token: {0}".format(credentials.access_token))
print("Refresh token: {0}".format(credentials.refresh_token))
# Get YouTube service
# https://developers.google.com/accounts/docs/OAuth2ForDevices#callinganapi
youtube = build("youtube", "v3", http=credentials.authorize(httplib2.Http()))
|
Fix example to be Python3 compatible, use format()
|
Fix example to be Python3 compatible, use format()
Both print() and format() are compatible from 2.6. Also, format() is much nicer to use for internationalization since you can define the location of your substitutions. It works similarly to Java and .net's format() as well. Great stuff!
Should I tackle the other examples as well, or is piecemeal all right?
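A minimal sketch of the positional substitution this enables, mirroring the template in the change (the values below are made up for the example):

    template_en = "Enter the following code at {0}: {1}"
    reordered = "{1} is the code to enter at {0}"  # a translator can swap the slots
    print(template_en.format("https://example.com/device", "WXYZ-1234"))
    print(reordered.format("https://example.com/device", "WXYZ-1234"))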
|
Python
|
apache-2.0
|
googleapis/oauth2client,jonparrott/oauth2client,google/oauth2client,jonparrott/oauth2client,clancychilds/oauth2client,googleapis/oauth2client,google/oauth2client,clancychilds/oauth2client
|
033ee318e6694ca4b251adf759d8cb62a100fdca
|
tickertape.py
|
tickertape.py
|
#!/usr/bin/env python
"""Display stock quotes on LEDs."""
import RPi.GPIO as GPIO
from seven_segment import print_leds
from ystockquote import get_price, get_change
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
print_leds(' ')
symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK']
ticker_string = ''
print "Downloading", len(symbols), "symbols..."
for s in symbols:
ticker_string += (s + ' ' + get_price(s) + ' ' + get_change(s) + ' ')
print "Done!"
print_leds(ticker_string, pins, 1)
#print_leds('try', pins, 1)
GPIO.cleanup()
|
#!/usr/bin/env python
"""Display stock quotes on LEDs."""
import RPi.GPIO as GPIO
from seven_segment import print_leds
from ystockquote import get_price, get_change
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
print_leds(' ', pins)
symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK']
for s in symbols:
try:
ticker_string = s + ' ' + get_price(s) + ' ' + get_change(s) + ' '
except KeyboardInterrupt:
break
print_leds(ticker_string, pins, 1)
GPIO.cleanup()
|
Improve ctrl-c handling. Fix the clear-display line.
|
Improve ctrl-c handling. Fix the clear-display line.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
2f2695b9a63de7c6695036c34c2db129c22d244a
|
example_project/manage.py
|
example_project/manage.py
|
#!/usr/bin/env python
import django
import os
import sys
if django.VERSION < (1, 5):
sys.stderr.write("ERROR: guardian's example project must be run with "
"Django 1.5 or later!\n")
sys.exit(1)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import django
import os
import sys
if django.VERSION < (1, 5):
sys.stderr.write("ERROR: guardian's example project must be run with "
"Django 1.8 or later!\n")
sys.exit(1)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Update required version of guardian in example project
|
Update required version of guardian in example project
|
Python
|
bsd-2-clause
|
rmgorman/django-guardian,lukaszb/django-guardian,rmgorman/django-guardian,lukaszb/django-guardian,benkonrath/django-guardian,lukaszb/django-guardian,benkonrath/django-guardian,rmgorman/django-guardian,benkonrath/django-guardian
|
18b4d5196fe18479e4aedfaa9030e5ba443d6a85
|
test_input/test70.py
|
test_input/test70.py
|
'test checking constant conditions'
# __pychecker__ = ''
def func1(x):
'should not produce a warning'
if 1:
pass
while 1:
print x
break
assert x, 'test'
return 0
def func2(x):
'should produce a warning'
__pychecker__ = 'constant1'
if 1:
pass
while 1:
print x
break
return 0
def func3(x):
'should produce a warning'
if 21:
return 1
if 31:
return 2
assert(x, 'test')
assert(5, 'test')
assert 5, 'test'
if 'str':
return 3
return 4
def func4(x):
'should not produce a warning'
if x == 204 or x == 201 or 200 <= x < 300:
x = 0
if x == 1:
pass
while x == 'str':
print x
break
return 0
|
'test checking constant conditions'
# __pychecker__ = ''
def func1(x):
'should not produce a warning'
if 1:
pass
while 1:
print x
break
assert x, 'test'
return 0
def func2(x):
'should produce a warning'
__pychecker__ = 'constant1'
if 1:
pass
while 1:
print x
break
return 0
def func3(x):
'should produce a warning'
if 21:
return 1
if 31:
return 2
assert(x, 'test')
assert(5, 'test')
assert 5, 'test'
if 'str':
return 3
return 4
def func4(x):
'should not produce a warning'
if x == 204 or x == 201 or 200 <= x < 300:
x = 0
if x == 1:
pass
while x == 'str':
print x
break
return 0
def func5(need_quotes, text):
'should not produce a warning'
return (need_quotes) and ('"%s"' % text) or (text)
|
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
|
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
return (need_quotes) and ('"%s"' % text) or (text)
The following warning was generated:
Using a conditional statement with a constant value ("%s")
This was because the stack wasn't updated after a BINARY_MODULO
to record that the value on the stack was no longer const.
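For context, a small self-contained sketch of the and/or idiom that was being mis-flagged (it mirrors func5 in the test file; the '%s' % text result is computed at runtime, so it is not a constant condition):

    def quote_if_needed(need_quotes, text):
        # pre-ternary conditional idiom: wrap in quotes only when requested
        return (need_quotes) and ('"%s"' % text) or (text)

    assert quote_if_needed(True, "abc") == '"abc"'
    assert quote_if_needed(False, "abc") == "abc"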
|
Python
|
bsd-3-clause
|
mitar/pychecker,mitar/pychecker
|
1f7f058d97c6599401558a280d85affed7fb5394
|
testing/hdiv_proj.py
|
testing/hdiv_proj.py
|
from __future__ import absolute_import, print_function, division
from firedrake import *
mesh = UnitSquareMesh(2, 2)
RT = FiniteElement("RT", triangle, 1)
V = FunctionSpace(mesh, RT)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(V)
x = SpatialCoordinate(mesh)
f.project(as_vector([x[1], x[0]]))
r = Function(V)
a = inner(u, v)*dx
L = inner(f, v)*dx
solve(a == L, r)
V_d = FunctionSpace(mesh, BrokenElement(RT))
phi_d = TestFunction(V_d)
r_d = assemble(inner(r, phi_d)*dx)
ref = assemble(inner(f, phi_d)*dx)
print(errornorm(r_d, ref))
|
from __future__ import absolute_import, print_function, division
from firedrake import *
mesh = UnitSquareMesh(2, 2)
RT = FiniteElement("RT", triangle, 1)
V = FunctionSpace(mesh, RT)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(V)
x = SpatialCoordinate(mesh)
assemble(42*dot(v, FacetNormal(mesh))*ds, tensor=f)
r = Function(V)
a = inner(u, v)*dx
L = inner(f, v)*dx
solve(a == L, r)
V_d = FunctionSpace(mesh, BrokenElement(RT))
phi_d = TestFunction(V_d)
r_d = assemble(inner(r, phi_d)*dx)
ref = assemble(inner(f, phi_d)*dx)
projection_rd = project(f, V_d)
print(errornorm(r_d, ref))
|
Update projection experiment for HDiv functions
|
Update projection experiment for HDiv functions
|
Python
|
mit
|
thomasgibson/firedrake-hybridization
|
b172ee1c1ed76836e570e08f0624a456c07cc388
|
get_max_audio.py
|
get_max_audio.py
|
"""
Test obtaining peak audio values with PyAV:
time python pyav_audio_vol.py ~/Videos/sample.mp4
Reference:
https://ffmpeg.org/doxygen/trunk/group__lavu__sampfmts.html
"""
import sys
import av
import numpy as np
# import matplotlib.pyplot as plt
video_file = sys.argv[1]
container = av.open(video_file)
audioStream = container.streams.audio[0]
audio_max = np.fromiter((np.abs(frame.to_ndarray()).max() for frame in
container.decode(audioStream)), np.float)
print(audio_max.max())
# with open("out.bin", "bw") as out_f:
# np.save(out_f,audio_max)
# plt.plot(audio_max)
# plt.show()
|
"""
Test obtaining peak audio values with PyAV:
time python pyav_audio_vol.py ~/Videos/sample.mp4
Reference:
https://ffmpeg.org/doxygen/trunk/group__lavu__sampfmts.html
"""
import sys
import av
import numpy as np
import matplotlib.pyplot as plt
video_file = sys.argv[1]
container = av.open(video_file)
audioStream = container.streams.audio[0]
def rms(x): # https://stackoverflow.com/a/28398092 o_O noice!
return np.sqrt(np.vdot(x, x)/x.size)
audio_max = np.fromiter((rms(frame.to_ndarray()) for frame in
container.decode(audioStream)), np.float)
print(audio_max.max())
# with open("out.bin", "bw") as out_f:
# np.save(out_f,audio_max)
plt.plot(audio_max)
plt.show()
|
Use RMS for better filtering
|
Use RMS for better filtering
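A tiny worked comparison on synthetic data (assuming numpy; the numbers are illustrative only):

    import numpy as np

    def rms(x):
        return np.sqrt(np.vdot(x, x) / x.size)

    frame = np.array([0.0, 0.1, -0.1, 0.9])  # mostly quiet, one spike
    print(np.abs(frame).max())  # 0.9  -- the peak is dominated by the single spike
    print(rms(frame))           # ~0.46 -- RMS tracks the overall level better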
|
Python
|
bsd-2-clause
|
yan123/BitBox,yan123/QABox,yan123/QABox
|
0d90da8fca411033d66c3a82096a67e12fca830e
|
util/time_multi.py
|
util/time_multi.py
|
#!/usr/bin/python
#
# Small helper for perftest runs.
#
import os
import sys
import subprocess
def main():
count = int(sys.argv[1])
time_min = None
for i in xrange(count):
cmd = [
'time',
'-f', '%U',
'--quiet',
sys.argv[2], # cmd
sys.argv[3] # testcase
]
#print(repr(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
retval = p.wait()
#print(i, retval, stdout, stderr)
if retval != 0:
print 'n/a'
return
time = float(stderr)
#print(i, time)
if time_min is None:
time_min = time
else:
time_min = min(time_min, time)
# /usr/bin/time has only two digits of resolution
print('%.02f' % time_min)
if __name__ == '__main__':
main()
|
#!/usr/bin/python
#
# Small helper for perftest runs.
#
import os
import sys
import subprocess
def main():
count = int(sys.argv[1])
time_min = None
for i in xrange(count):
cmd = [
'time',
'-f', '%U',
'--quiet',
sys.argv[2], # cmd
sys.argv[3] # testcase
]
#print(repr(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
retval = p.wait()
#print(i, retval, stdout, stderr)
if retval == 139:
print 'segv'
sys.exit(1)
elif retval != 0:
print 'n/a'
sys.exit(1)
time = float(stderr)
#print(i, time)
if time_min is None:
time_min = time
else:
time_min = min(time_min, time)
# /usr/bin/time has only two digits of resolution
print('%.02f' % time_min)
sys.exit(0)
if __name__ == '__main__':
main()
|
Add exitcode and segv check for timing util
|
Add exitcode and segv check for timing util
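For reference, 139 follows the common shell convention of 128 + signal number (SIGSEGV is 11 on Linux); a minimal hedged sketch of the same check:

    import signal

    SEGV_EXIT = 128 + signal.SIGSEGV  # 139 on Linux
    retval = 139                      # hypothetical child exit status
    if retval == SEGV_EXIT:
        print('segv')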
|
Python
|
mit
|
reqshark/duktape,jmptrader/duktape,harold-b/duktape,chenyaqiuqiu/duktape,harold-b/duktape,nivertech/duktape,zeropool/duktape,harold-b/duktape,zeropool/duktape,harold-b/duktape,chenyaqiuqiu/duktape,jmptrader/duktape,harold-b/duktape,chenyaqiuqiu/duktape,kphillisjr/duktape,skomski/duktape,nivertech/duktape,skomski/duktape,zeropool/duktape,kphillisjr/duktape,tassmjau/duktape,eddieh/duktape,reqshark/duktape,markand/duktape,jmptrader/duktape,svaarala/duktape,skomski/duktape,svaarala/duktape,nivertech/duktape,tassmjau/duktape,eddieh/duktape,nivertech/duktape,zeropool/duktape,zeropool/duktape,sloth4413/duktape,nivertech/duktape,tassmjau/duktape,eddieh/duktape,sloth4413/duktape,chenyaqiuqiu/duktape,eddieh/duktape,reqshark/duktape,nivertech/duktape,tassmjau/duktape,markand/duktape,reqshark/duktape,reqshark/duktape,jmptrader/duktape,jmptrader/duktape,harold-b/duktape,eddieh/duktape,nivertech/duktape,sloth4413/duktape,harold-b/duktape,zeropool/duktape,markand/duktape,tassmjau/duktape,tassmjau/duktape,skomski/duktape,markand/duktape,reqshark/duktape,nivertech/duktape,markand/duktape,eddieh/duktape,kphillisjr/duktape,sloth4413/duktape,kphillisjr/duktape,svaarala/duktape,chenyaqiuqiu/duktape,eddieh/duktape,svaarala/duktape,kphillisjr/duktape,skomski/duktape,sloth4413/duktape,svaarala/duktape,zeropool/duktape,skomski/duktape,chenyaqiuqiu/duktape,jmptrader/duktape,jmptrader/duktape,sloth4413/duktape,eddieh/duktape,sloth4413/duktape,reqshark/duktape,jmptrader/duktape,kphillisjr/duktape,harold-b/duktape,nivertech/duktape,nivertech/duktape,chenyaqiuqiu/duktape,svaarala/duktape,kphillisjr/duktape,svaarala/duktape,markand/duktape,svaarala/duktape,svaarala/duktape,harold-b/duktape,zeropool/duktape,chenyaqiuqiu/duktape,kphillisjr/duktape,skomski/duktape,kphillisjr/duktape,reqshark/duktape,skomski/duktape,jmptrader/duktape,reqshark/duktape,sloth4413/duktape,markand/duktape,tassmjau/duktape,markand/duktape,skomski/duktape,tassmjau/duktape,zeropool/duktape,skomski/duktape,sloth4413/duktape,eddieh/duktape,chenyaqiuqiu/duktape,tassmjau/duktape,tassmjau/duktape,jmptrader/duktape,sloth4413/duktape,zeropool/duktape,chenyaqiuqiu/duktape,kphillisjr/duktape,eddieh/duktape,reqshark/duktape,markand/duktape,harold-b/duktape,markand/duktape
|
3d7459f7b1d40bb94a64d3f08d41942feabf4c12
|
manager/jobs/migrations/0003_auto_20200723_0219.py
|
manager/jobs/migrations/0003_auto_20200723_0219.py
|
# Generated by Django 3.0.8 on 2020-07-23 02:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0002_auto_20200708_2203'),
]
operations = [
migrations.AlterField(
model_name='job',
name='id',
field=models.BigAutoField(help_text='An autoincrementing integer to allow selecting jobs in the order they were created.', primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='job',
name='method',
field=models.CharField(choices=[('parallel', 'parallel'), ('series', 'series'), ('chain', 'chain'), ('clean', 'clean'), ('archive', 'archive'), ('pull', 'pull'), ('push', 'push'), ('decode', 'decode'), ('encode', 'encode'), ('convert', 'convert'), ('compile', 'compile'), ('build', 'build'), ('execute', 'execute'), ('session', 'session'), ('sleep', 'sleep')], help_text='The job method.', max_length=32),
),
]
|
# Generated by Django 3.0.8 on 2020-07-23 02:19.
# Manually modified by Nokome Bentley on 2020-11-09 because the original
# `AlterField` operation was causing the following error on Postgres:
# django.db.utils.ProgrammingError: operator class "varchar_pattern_ops" does not accept data type bigint
# In production, that error had to be dealt with manually, but it still
# meant that creating a new Postgres DB (e.g. in development) would fail.
# Therefore, it was changed to a `RemoveField` + `AddField`.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0002_auto_20200708_2203'),
]
operations = [
migrations.RemoveField(
model_name='job',
name='id',
),
migrations.AddField(
model_name='job',
name='id',
field=models.BigAutoField(help_text='An autoincrementing integer to allow selecting jobs in the order they were created.', primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='job',
name='method',
field=models.CharField(choices=[('parallel', 'parallel'), ('series', 'series'), ('chain', 'chain'), ('clean', 'clean'), ('archive', 'archive'), ('pull', 'pull'), ('push', 'push'), ('decode', 'decode'), ('encode', 'encode'), ('convert', 'convert'), ('compile', 'compile'), ('build', 'build'), ('execute', 'execute'), ('session', 'session'), ('sleep', 'sleep')], help_text='The job method.', max_length=32),
),
]
|
Allow migration to run on Postgres
|
chore(Jobs): Allow migration to run on Postgres
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
5b8edee2b6fa13fc1b05e15961d8b7920e6f9739
|
csunplugged/general/urls.py
|
csunplugged/general/urls.py
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(r"^$", views.GeneralIndexView.as_view(), name="home"),
url(r"^about/$", views.GeneralAboutView.as_view(), name="about"),
url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"),
url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"),
url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"),
url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"),
]
|
"""URL routing for the general application."""
from django.conf.urls import url
from . import views
urlpatterns = [
url(
r"^$",
views.GeneralIndexView.as_view(),
name="home"
),
url(
r"^about/$",
views.GeneralAboutView.as_view(),
name="about"
),
url(
r"^computational-thinking/$",
views.ComputationalThinkingView.as_view(),
name="computational_thinking"
),
url(
r"^contact/$",
views.GeneralContactView.as_view(),
name="contact"
),
url(
r"^people/$",
views.GeneralPeopleView.as_view(),
name="people"
),
url(
r"^principles/$",
views.GeneralPrinciplesView.as_view(),
name="principles"
),
]
|
Update URLs file to match layout of other URLs files
|
Update URLs file to match layout of other URLs files
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
48d18015b2cf55638cc7bda43ae3751789fa2323
|
games/management/commands/autocreate_steamdb_installers.py
|
games/management/commands/autocreate_steamdb_installers.py
|
import json
from django.core.management.base import BaseCommand
from games import models
from accounts.models import User
class Command(BaseCommand):
def handle(self, *args, **options):
with open("steamdb.json") as steamdb_file:
steamdb = json.loads(steamdb_file.read())
steam_runner = models.Runner.objects.get(slug='steam')
user = User.objects.get(username='strider')
for steamapp in steamdb:
if steamapp['linux_status'] == 'Game Works':
appid = steamapp['appid']
name = steamapp['name']
try:
game = models.Game.objects.get(steamid=int(appid))
except models.Game.DoesNotExist:
continue
current_installer = game.installer_set.all()
if current_installer:
continue
self.stdout.write("Creating installer for %s" % name)
installer = models.Installer()
installer.runner = steam_runner
installer.user = user
installer.game = game
installer.set_default_installer()
installer.published = True
installer.save()
|
import json
from django.core.management.base import BaseCommand
from games import models
from accounts.models import User
class Command(BaseCommand):
def handle(self, *args, **options):
with open("steamdb.json") as steamdb_file:
steamdb = json.loads(steamdb_file.read())
steam_runner = models.Runner.objects.get(slug='steam')
user = User.objects.get(username='strider')
for steamapp in steamdb:
if steamapp['linux_status'].startswith('Game Works'):
appid = steamapp['appid']
name = steamapp['name']
try:
game = models.Game.objects.get(steamid=int(appid))
except models.Game.DoesNotExist:
continue
current_installer = game.installer_set.all()
if current_installer:
continue
self.stdout.write("Creating installer for %s" % name)
installer = models.Installer()
installer.runner = steam_runner
installer.user = user
installer.game = game
installer.set_default_installer()
installer.published = True
installer.save()
|
Update installer autocreate for games with no icon
|
Update installer autocreate for games with no icon
|
Python
|
agpl-3.0
|
lutris/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,Turupawn/website,Turupawn/website,lutris/website
|
8b4a3741e8e88ae854cc7c075f752bc7238d0875
|
lookupy/__init__.py
|
lookupy/__init__.py
|
"""
lookupy
~~~~~~~
Lookupy is a Python library that provides a Django QuerySet like
interface to query (select and filter) data (list of dicts)
"""
from .lookupy import Collection, Q
__all__ = ["Collection", "Q"]
|
Allow 'Collection' & 'Q' to be imported from pkg
|
Allow 'Collection' & 'Q' to be imported from pkg
|
Python
|
mit
|
naiquevin/lookupy
|
|
0c89a78d3a0574ef491d3695366cd786b4c3f950
|
indico/migrations/versions/20200904_1543_f37d509e221c_add_user_profile_picture_column.py
|
indico/migrations/versions/20200904_1543_f37d509e221c_add_user_profile_picture_column.py
|
"""Add column for profile picture type to User
Revision ID: f37d509e221c
Revises: c997dc927fbc
Create Date: 2020-09-04 15:43:18.413156
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
from indico.modules.users.models.users import ProfilePictureSource
# revision identifiers, used by Alembic.
revision = 'f37d509e221c'
down_revision = 'c997dc927fbc'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('users',
sa.Column('picture_source', PyIntEnum(ProfilePictureSource), nullable=False, server_default='0'),
schema='users')
op.alter_column('users', 'picture_source', server_default=None, schema='users')
op.execute('UPDATE users.users SET picture_source = 3 WHERE picture IS NOT NULL')
def downgrade():
op.drop_column('users', 'picture_source', schema='users')
|
"""Add column for profile picture type to User
Revision ID: f37d509e221c
Revises: c997dc927fbc
Create Date: 2020-09-04 15:43:18.413156
"""
from enum import Enum
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
# revision identifiers, used by Alembic.
revision = 'f37d509e221c'
down_revision = 'c997dc927fbc'
branch_labels = None
depends_on = None
class _ProfilePictureSource(int, Enum):
standard = 0
identicon = 1
gravatar = 2
custom = 3
def upgrade():
op.add_column('users',
sa.Column('picture_source', PyIntEnum(_ProfilePictureSource), nullable=False, server_default='0'),
schema='users')
op.alter_column('users', 'picture_source', server_default=None, schema='users')
op.execute('UPDATE users.users SET picture_source = 3 WHERE picture IS NOT NULL')
def downgrade():
op.drop_column('users', 'picture_source', schema='users')
|
Use embedded enum in alembic revision
|
Use embedded enum in alembic revision
Unlikely to matter here, but this way the revision will keep working even in a
future where someone adds new sources to the original enum (this particular
revision should not pick up those newer values, which it would if it kept
using the imported enum).
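A hedged sketch of the general pattern (illustrative names, not part of the revision itself):

    from enum import Enum

    class _FrozenSource(int, Enum):
        # snapshot of the values as they existed when this revision was written
        standard = 0
        identicon = 1
        gravatar = 2
        custom = 3

    # If the application enum later gains e.g. `federated = 4`, this migration
    # still creates the column with exactly the values frozen above.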
|
Python
|
mit
|
DirkHoffmann/indico,indico/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,indico/indico,pferreir/indico,ThiefMaster/indico,pferreir/indico,pferreir/indico,pferreir/indico,indico/indico,ThiefMaster/indico,DirkHoffmann/indico,DirkHoffmann/indico,ThiefMaster/indico
|
75fc7888cf4db89ae55324b325afc44452799cb3
|
filesystems/interfaces.py
|
filesystems/interfaces.py
|
from zope.interface import Interface
from filesystems import _PY3
class Path(Interface):
def __str__():
"""
Render the path as a string.
"""
if _PY3:
def __truediv__(other):
"""
Traverse to a child of this path.
"""
def __fspath__(other):
"""
Render the path as a string.
"""
else:
def __div__(other):
"""
Render the path as a string.
"""
def basename():
"""
The tail component of this path.
"""
def dirname():
"""
The head components of this path.
"""
def heritage():
"""
The top-down set of this path's parents.
"""
def descendant(*segments):
"""
Traverse to a descendant of this path.
"""
def parent():
"""
Traverse to the parent of this path.
"""
def sibling(name):
"""
Traverse to a sibling of this path.
"""
def relative_to(path):
"""
Resolve a path relative to this one.
"""
|
from zope.interface import Interface
from filesystems import _PY3
class Path(Interface):
def __str__():
"""
Render the path as a string.
"""
if _PY3:
def __fspath__():
"""
Render the path as a string.
"""
def __truediv__(other):
"""
Traverse to a child of this path.
"""
else:
def __div__(other):
"""
Traverse to a child of this path.
"""
def basename():
"""
The tail component of this path.
"""
def dirname():
"""
The head components of this path.
"""
def heritage():
"""
The top-down set of this path's parents.
"""
def descendant(*segments):
"""
Traverse to a descendant of this path.
"""
def parent():
"""
Traverse to the parent of this path.
"""
def sibling(name):
"""
Traverse to a sibling of this path.
"""
def relative_to(path):
"""
Resolve a path relative to this one.
"""
|
Fix the fspath interface and div docstring.
|
Fix the fspath interface and div docstring.
|
Python
|
mit
|
Julian/Filesystems
|
26421bf58344f241452064ae0648d6eb9435d28a
|
vaje/vaje2.py
|
vaje/vaje2.py
|
# -*- coding: utf-8 -*-
# Algoritmi z drugih vaj
def euclid(m, n):
"""Evklidov algoritem"""
while n != 0:
m, n = n, m % n
return m
def exteuclid(m, n):
"""
Razširjeni Evklidov algoritem.
Vrača (g, a, b), kjer je g = gcd(m, n) in g = a*m + b*n.
Če je g = 1, velja a mod n = m^-1 mod n in b mod m = n^-1 mod m.
"""
p, q, r, s = 1, 0, 0, 1
while n != 0:
k = m//n
m, n = n, m - k*n
p, q = q, p - k*q
r, s = s, r - k*s
return (m, p, r)
def karatsuba(m, n, b):
"""Množenje z metodo deli in vladaj"""
if b == 0:
return m*n
m1, m2 = m >> b, m & ~(-1 << b)
n1, n2 = n >> b, n & ~(-1 << b)
mn1 = karatsuba(m1, n1, b//2)
mn2 = karatsuba(m2, n2, b//2)
mn0 = karatsuba(m1+m2, n1+n2, b//2) - mn1 - mn2
return (mn1 << 2*b) + (mn0 << b) + mn2
|
# -*- coding: utf-8 -*-
# Algoritmi z drugih vaj
def euclid(m, n):
"""Evklidov algoritem"""
while n != 0:
m, n = n, m % n
return m
def karatsuba(m, n, b):
"""Množenje z metodo deli in vladaj"""
if b == 0:
return m*n
m1, m2 = m >> b, m & ~(-1 << b)
n1, n2 = n >> b, n & ~(-1 << b)
mn1 = karatsuba(m1, n1, b//2)
mn2 = karatsuba(m2, n2, b//2)
mn0 = karatsuba(m1+m2, n1+n2, b//2) - mn1 - mn2
return (mn1 << 2*b) + (mn0 << b) + mn2
|
Revert "Dodan razširjeni Evklidov algoritem"
|
Revert "Dodan razširjeni Evklidov algoritem"
This reverts commit d64e1391b49aca2cdf04d5b136d902bb7331a3f6.
|
Python
|
mit
|
jaanos/PSA1,markun9/PSA1
|
944188858ed41ae43ef950649dc9e3aa9613b5d9
|
amqpy/concurrency.py
|
amqpy/concurrency.py
|
import logging
import time
from functools import wraps
from . import compat
compat.patch() # monkey-patch time.perf_counter
log = logging.getLogger('amqpy')
def synchronized(lock_name):
"""Decorator for automatically acquiring and releasing lock for method call
This decorator accesses the `lock_name` :class:`threading.Lock` attribute of the instance that
the wrapped method is bound to. The lock is acquired (blocks indefinitely) before the method is
called. After the method has executed, the lock is released.
Decorated methods should not be long-running operations, since the lock is held for the duration
of the method's execution.
:param lock_name: name of :class:`threading.Lock` object
"""
def decorator(f):
@wraps(f)
def wrapper(self, *args, **kwargs):
lock = getattr(self, lock_name)
acquired = lock.acquire(False)
if not acquired:
# log.debug('> Wait to acquire lock for [{}]'.format(f.__qualname__))
start_time = time.perf_counter()
lock.acquire()
tot_time = time.perf_counter() - start_time
if tot_time > 10:
# only log if waited for more than 10s to acquire lock
log.warn('Acquired lock for [{}] in: {:.3f}s'.format(f.__qualname__, tot_time))
try:
retval = f(self, *args, **kwargs)
finally:
lock.release()
return retval
return wrapper
return decorator
|
import logging
import time
from functools import wraps
from . import compat
compat.patch() # monkey-patch time.perf_counter
log = logging.getLogger('amqpy')
def synchronized(lock_name):
"""Decorator for automatically acquiring and releasing lock for method call
This decorator accesses the `lock_name` :class:`threading.Lock` attribute of the instance that
the wrapped method is bound to. The lock is acquired (blocks indefinitely) before the method is
called. After the method has executed, the lock is released.
Decorated methods should not be long-running operations, since the lock is held for the duration
of the method's execution.
:param lock_name: name of :class:`threading.Lock` object
"""
def decorator(f):
@wraps(f)
def wrapper(self, *args, **kwargs):
lock = getattr(self, lock_name)
acquired = lock.acquire(False)
if not acquired:
# log.debug('> Wait to acquire lock for [{}]'.format(f.__qualname__))
start_time = time.perf_counter()
lock.acquire()
tot_time = time.perf_counter() - start_time
if tot_time > 5:
# only log if waited for more than 5s to acquire lock
log.warn('Acquired lock for [{}] in: {:.3f}s'.format(f.__qualname__, tot_time))
try:
retval = f(self, *args, **kwargs)
finally:
lock.release()
return retval
return wrapper
return decorator
|
Reduce warning time to 5s
|
Reduce warning time to 5s
|
Python
|
mit
|
veegee/amqpy,gst/amqpy
|
faf047e7ac3b9a703ffd76bd3c5de2e3ef5d93b6
|
dear_astrid/test/test_rtm_importer.py
|
dear_astrid/test/test_rtm_importer.py
|
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring
from __future__ import absolute_import
from unittest import TestCase
from nose.tools import *
from mock import *
from dear_astrid.rtm.importer import Importer as rtmimp
class TestRTMImport(TestCase):
def setUp(self):
self.patches = dict(
time = patch('time.sleep'),
rtm = patch('rtm.createRTM'),
)
self.mocks = dict()
for (k, v) in self.patches.iteritems():
self.mocks[k] = v.start()
def test_sleep_before_rtm(self):
imp = rtmimp(['task'])
imp._rtm = Mock()
assert not self.mocks['time'].called
# assert that it is our mock object
assert_equal(imp.rtm, imp._rtm)
self.mocks['time'].assert_called_once_with(1)
# test calling other methods
imp.rtm.foo.bar
self.mocks['time'].assert_has_calls([ call(1), call(1) ])
# not used this time
assert not self.mocks['rtm'].called
|
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring
from __future__ import absolute_import
from unittest import TestCase
from nose.tools import *
from mock import *
from dear_astrid.rtm.importer import Importer as rtmimp
class TestRTMImport(TestCase):
def setUp(self):
self.patches = dict(
time = patch('time.sleep'),
rtm = patch('rtm.createRTM'),
)
self.mocks = dict()
for (k, v) in self.patches.items():
self.mocks[k] = v.start()
def test_sleep_before_rtm(self):
imp = rtmimp(['task'])
imp._rtm = Mock()
assert not self.mocks['time'].called
# assert that it is our mock object
assert_equal(imp.rtm, imp._rtm)
self.mocks['time'].assert_called_once_with(1)
# test calling other methods
imp.rtm.foo.bar
self.mocks['time'].assert_has_calls([ call(1), call(1) ])
# not used this time
assert not self.mocks['rtm'].called
|
Change iteritems() to items() for future compatibility
|
Change iteritems() to items() for future compatibility
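A tiny illustration of why (the dict below is made up, not from the test): dict.iteritems() is gone in Python 3, while items() works on both versions, returning a list on 2 and a view on 3:

    patches = {'time': 'time.sleep', 'rtm': 'rtm.createRTM'}
    for name, target in patches.items():  # valid on Python 2 and 3
        print(name)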
|
Python
|
mit
|
rwstauner/dear_astrid,rwstauner/dear_astrid
|
27c3972a57e09faf35f86b82b35eb815dadc4688
|
mediachain/reader/dynamo.py
|
mediachain/reader/dynamo.py
|
import boto3
def get_table(name):
dynamo = boto3.resource('dynamo')
return dynamo.Table(name)
def get_object(reference):
table = get_table('mediachain')
obj = table.get_item(Key={'multihash': reference})
byte_string = obj['Item']['data']
if byte_string is None:
raise KeyError('Could not find key <%s> in Dynamo'.format(reference))
return cbor.loads(byte_string)
|
import boto3
import cbor
def get_table(name):
dynamo = boto3.resource('dynamodb',
endpoint_url='http://localhost:8000',
region_name='us-east-1',
aws_access_key_id='',
aws_secret_access_key='')
return dynamo.Table(name)
def get_object(reference):
table = get_table('Mediachain')
obj = table.get_item(Key={'multihash': reference})
if obj is None:
raise KeyError('Could not find key <%s> in Dynamo'.format(reference))
byte_string = obj['Item']['data'].value
return cbor.loads(byte_string)
|
Make get_object pull appropriate fields
|
Make get_object pull appropriate fields
Temporarily set up dynamo to work internally
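A rough usage sketch against the local endpoint (the multihash below is hypothetical; assumes a local DynamoDB with a 'Mediachain' table):

    from mediachain.reader.dynamo import get_object

    record = get_object('QmExampleMultihash')  # raises KeyError if the key is absent
    print(record)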
|
Python
|
mit
|
mediachain/mediachain-client,mediachain/mediachain-client
|
86ee90e0816a8d533ede916a5751ab69942dd48a
|
laynger.py
|
laynger.py
|
#import sublime
import sublime_plugin
class laynger(sublime_plugin.TextCommand):
def run(self, edit, opt='center'):
window = self.view.window()
layout = window.get_layout()
if len(layout['cols']) > 3:
return
if opt == u'center':
layout['cols'][1] = 0.5
elif opt == u'right':
layout['cols'][1] += 0.01
else:
layout['cols'][1] -= 0.01
window.run_command('set_layout', layout)
|
#import sublime
import sublime_plugin
class laynger(sublime_plugin.TextCommand):
def run(self, edit, opt='center'):
window = self.view.window()
layout = window.get_layout()
if len(layout['cols']) != 2:
return
if opt == u'center':
layout['cols'][1] = 0.5
elif opt == u'right':
layout['cols'][1] += 0.01
else:
layout['cols'][1] -= 0.01
window.run_command('set_layout', layout)
|
Work only with 2 columns
|
Work only with 2 columns
|
Python
|
mit
|
amaslenn/Laynger
|
9a38f329ab0f40f20e2dcb0e0acf0f29a2b4c1fd
|
tests_py3/px_test.py
|
tests_py3/px_test.py
|
import sys
from px import px
from unittest.mock import patch
def test_main():
args = ['px', '--install']
with patch("px.px.install") as install_mock:
px._main(args)
install_mock.assert_called_once_with(args)
with patch("px.px_top.top") as top_mock:
px._main(['px', '--top'])
top_mock.assert_called_once()
with patch("px.px_top.top") as top_mock:
px._main(['ptop'])
top_mock.assert_called_once()
with patch("builtins.print") as print_mock:
px._main(['px', '--help'])
print_mock.assert_called_once_with(px.__doc__)
# FIXME: Test --version
# FIXME: Test 'px 1'
# FIXME: Test 'px root'
# FIXME: Test just 'px'
|
import sys
from px import px
from unittest.mock import patch
@patch('px.px.install')
def test_cmdline_install(mock):
args = ['px', '--install']
px._main(args)
mock.assert_called_once_with(args)
@patch("px.px_top.top")
def test_cmdline_top(mock):
px._main(['px', '--top'])
mock.assert_called_once()
@patch("px.px_top.top")
def test_cmdline_ptop(mock):
px._main(['ptop'])
mock.assert_called_once()
@patch("builtins.print")
def test_cmdline_help(mock):
px._main(['px', '--help'])
mock.assert_called_once_with(px.__doc__)
# FIXME: Test --version
# FIXME: Test 'px 1'
# FIXME: Test 'px root'
# FIXME: Test just 'px'
|
Split command line tests into individuals
|
Split command line tests into individuals
|
Python
|
mit
|
walles/px,walles/px
|
3f764874dbb805d661d38719bb4e78b6a52f9f79
|
parser/timestamp.py
|
parser/timestamp.py
|
from datetime import datetime
DT_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
def get_timestamp():
"""
Serialize actual datetime provided as simplified ISO 8601 (without timezone)
string
:type datetime: datetime
:param datetime: datetime object to convert to string
:return: serialized datetime
:rtype: str
"""
return datetime.now().strftime(DT_FORMAT)
|
from datetime import datetime
DT_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
def get_timestamp():
"""
Serialize actual datetime provided as simplified ISO 8601 (without timezone)
string
:return: serialized datetime
:rtype: str
"""
return datetime.now().strftime(DT_FORMAT)
|
Remove docs for non-existing function parameters
|
Remove docs for non-existing function parameters
|
Python
|
mit
|
m4tx/techswarm-receiver
|
9c36417af3364b77853b62d9a924d5693e44dce0
|
fabfile.py
|
fabfile.py
|
# -*- coding: UTF-8 -*-
from fabric.api import *
#def dev()
#def prod()
#def setup_host()
def hello():
print("Hello world!")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
clean_db()
test()
|
# -*- coding: UTF-8 -*-
from fabric.api import *
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
clean_db()
test()
|
Remove noob fab method :-)
|
Remove noob fab method :-)
|
Python
|
bsd-3-clause
|
rinfo/fst,kamidev/autobuild_fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst
|
1ba61fc10e2d77f16314da74b5346ddaa7fc9500
|
sendinel/backend/scheduler.py
|
sendinel/backend/scheduler.py
|
import time
from datetime import datetime
from django.core.management import setup_environ
from sendinel import settings
setup_environ(settings)
from sendinel.backend.models import ScheduledEvent
def run(run_only_one_time = False):
while True:
dueEvents = ScheduledEvent.objects \
.filter(state__exact = 'new') \
.filter(send_time__lte=datetime.now())
for event in dueEvents:
try:
data = event.sendable.get_data_for_sending()
print "Trying to send: %s" % str(event.sendable)
except Exception as e:
print "Failed to get data for " + event + " exception " + str(e)
event.state = "failed"
event.save()
continue
# TODO error handling
try:
for entry in data:
print " sending: %s" % str(entry)
entry.send()
except Exception as e:
print "Failed to send: " + str(entry)
event.state = "failed"
event.save()
event.state = 'sent'
event.save()
del data
del dueEvents
#TODO Exception Handling
if run_only_one_time: break
time.sleep(5)
if __name__ == "__main__":
run()
|
import time
from datetime import datetime
from django.core.management import setup_environ
from sendinel import settings
setup_environ(settings)
from sendinel.backend.models import ScheduledEvent
def run(run_only_one_time = False):
while True:
dueEvents = ScheduledEvent.objects \
.filter(state__exact = 'new') \
.filter(send_time__lte=datetime.now())
for event in dueEvents:
try:
data = event.sendable.get_data_for_sending()
print "Trying to send: %s" % str(event.sendable)
except Exception as e:
print "Failed to get data for " + event + " exception " + str(e)
event.state = "failed"
event.save()
continue
# TODO error handling
try:
for entry in data:
print " sending: %s" % str(entry)
entry.send()
except Exception as e:
print "Failed to send: " + str(entry) + " exception " + str(e)
event.state = "failed"
event.save()
event.state = 'sent'
event.save()
del data
del dueEvents
#TODO Exception Handling
if run_only_one_time: break
time.sleep(5)
if __name__ == "__main__":
run()
|
Debug prints should show something.
|
Debug prints should show something.
|
Python
|
mit
|
Sendinel/Sendinel,Sendinel/Sendinel,Sendinel/Sendinel,Sendinel/Sendinel
|
c94598b8ce59b98213367b54164b1051d56a28da
|
scene.py
|
scene.py
|
import bpy
class Scene:
"""Scene object"""
def __init__(self, filepath, render_engine='CYCLES'):
self.filepath = filepath
self.render_engine = 'CYCLES'
def setup(self):
self._cleanup()
bpy.context.scene.render.filepath = self.filepath
bpy.context.scene.render.engine = self.render_engine
bpy.context.scene.frame_start = 1
bpy.context.scene.frame_end = 1
def render(self):
bpy.ops.render.render(animation=True)
def _cleanup(self):
"""Delete everything"""
bpy.ops.object.delete(use_global=False)
|
import bpy
class Scene:
"""Scene object"""
def __init__(self, filepath, render_engine='CYCLES'):
self.filepath = filepath
self.render_engine = 'CYCLES'
def setup(self):
self._cleanup()
bpy.context.scene.render.filepath = self.filepath
bpy.context.scene.render.engine = self.render_engine
bpy.context.scene.frame_start = 1
bpy.context.scene.frame_end = 1
def render(self, samples=50):
bpy.context.scene.cycles.samples = samples
bpy.ops.render.render(animation=True)
def _cleanup(self):
"""Delete everything"""
bpy.ops.object.delete(use_global=False)
|
Allow setting render samples
|
Allow setting render samples
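A rough usage sketch (output path hypothetical; only runs inside Blender, where bpy is available):

    from scene import Scene  # assumes the module above is importable as scene.py

    scene = Scene('/tmp/render/frame_')
    scene.setup()
    scene.render(samples=128)  # more samples -> less noise, longer render time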
|
Python
|
mit
|
josuemontano/blender_wrapper
|
4aa4a52adc2b4cdf9e021a4191211c47763a9962
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
import os
from setuptools import setup, find_packages
here = os.path.dirname(__file__)
version_file = os.path.join(here, 'src/iptools/__init__.py')
d = {}
execfile(version_file, d)
version = d['__version__']
setup(
name = 'iptools',
version = version,
description = 'Python utilites for manipulating IP addresses',
long_description = "Utilities for manipulating IP addresses including a class that can be used to include CIDR network blocks in Django's INTERNAL_IPS setting.",
url = 'http://python-iptools.googlecode.com',
author = 'Bryan Davis',
author_email = '[email protected]',
license = 'BSD',
platforms = ['any',],
package_dir = {'': 'src'},
packages = find_packages('src'),
include_package_data = True,
test_suite='iptools.test_iptools',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
'Topic :: Internet',
],
zip_safe=False,
)
|
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
import os
from setuptools import setup, find_packages
here = os.path.dirname(__file__)
version_file = os.path.join(here, 'src/iptools/__init__.py')
d = {}
execfile(version_file, d)
version = d['__version__']
setup(
name = 'iptools',
version = version,
description = 'Python utilites for manipulating IP addresses',
long_description = "Utilities for manipulating IP addresses including a class that can be used to include CIDR network blocks in Django's INTERNAL_IPS setting.",
url = 'http://python-iptools.googlecode.com',
download_url = 'http://pypi.python.org/packages/source/i/iptools/',
author = 'Bryan Davis',
author_email = '[email protected]',
license = 'BSD',
platforms = ['any',],
package_dir = {'': 'src'},
packages = find_packages('src'),
include_package_data = True,
test_suite='iptools.test_iptools',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
'Topic :: Internet',
],
zip_safe=False,
)
|
Set download_url to pypi directory.
|
Set download_url to pypi directory.
|
Python
|
bsd-2-clause
|
ancat/python-iptools,malonlabe/python-iptools,timmerk/python-iptools
|
9bc09627813c446bc5e155f92a2f03a7bde56d88
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
version = '0.1.0'
setup(name='mytardisbf',
version=version,
description="Bioformats App for MyTardis",
long_description="""\
Bioformats App for extracting metadata and thumbnails from microscopy images\
in MyTardis""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mytardis bioformats',
author='Keith Schulze',
author_email='[email protected]',
url='',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
'numpy>=1.9',
'scipy>=0.15',
'javabridge>=1.0',
'python-bioformats>=1.0'
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.2.0'
setup(name='mytardisbf',
version=version,
description="Bioformats App for MyTardis",
long_description="""\
Bioformats App for extracting metadata and thumbnails from microscopy images\
in MyTardis""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mytardis bioformats',
author='Keith Schulze',
author_email='[email protected]',
url='',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
'numpy>=1.9',
'scipy>=0.15',
'javabridge>=1.0',
'python-bioformats>=1.0'
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
Prepare for the next development cycle
|
Prepare for the next development cycle
|
Python
|
mit
|
keithschulze/mytardisbf
|
c55a20928c3f98fee19d20f7fb0dc3bb9be1acb4
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Tuttle"""
import sys
from tuttle import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Tuttle needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='tuttle',
version=__version__,
author='Lexman',
author_email='[email protected]',
description='Make for data',
long_description='Reliably create data from source as a team in an industrial environment... A tool for '
'continuous data processing',
platforms=['Linux', 'Windows'],
url='http://tuttle.lexman.org/',
install_requires=['nose'],
license='MIT',
packages=find_packages(),
data_files=[],
scripts=[
'bin/tuttle',
],
package_data={
#'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
)
|
#!/usr/bin/env python
"""Tuttle"""
import sys
from tuttle import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Tuttle needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='tuttle',
version=__version__,
author='Lexman',
author_email='[email protected]',
description='Make for data',
long_description='Reliably create data from source as a team in an industrial environment... A tool for '
'continuous data processing',
platforms=['Linux', 'Windows'],
url='http://tuttle.lexman.org/',
license='MIT',
packages=['tuttle', 'tuttle.report'],
#data_files=[],
scripts=[
'bin/tuttle',
],
include_package_data = True,
package_data = {
'tuttle.report' : ['*.html', 'html_report_assets/*'],
},
)
|
Embed required non-Python files in packaging: html and js files for report
|
Embed required non-Python files in packaging: html and js files for report
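For reference, a short sketch of how the two settings work together (the patterns are taken from the diff; the MANIFEST.in line is only an assumption about a typical setup):

    # setup.py: include_package_data=True lets setuptools ship data files listed
    # in MANIFEST.in, while package_data pins the report assets explicitly.
    package_data = {
        'tuttle.report': ['*.html', 'html_report_assets/*'],
    }
    # MANIFEST.in (hypothetical):
    # recursive-include tuttle/report/html_report_assets *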
|
Python
|
mit
|
lexman/tuttle,lexman/tuttle,lexman/tuttle
|
995f06a33bf92dcff185a50f84743323170a8b7a
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="[email protected]",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
|
import io
from setuptools import setup, find_packages
long_description = '\n'.join((
io.open('README.rst', encoding='utf-8').read(),
io.open('CHANGES.txt', encoding='utf-8').read()
))
tests_require = [
'pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14',
'mock',
]
setup(
name='bowerstatic',
version='0.10.dev0',
description="A Bower-centric static file server for WSGI",
long_description=long_description,
author="Martijn Faassen",
author_email="[email protected]",
license="BSD",
url='http://bowerstatic.readthedocs.org',
keywords='wsgi bower',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'WebOb',
],
tests_require=tests_require,
extras_require=dict(
test=tests_require,
)
)
|
Use io.open with encoding='utf-8' and flake8 compliance
|
Use io.open with encoding='utf-8' and flake8 compliance
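A minimal sketch of the pattern (file name from the script): io.open accepts an encoding on both Python 2 and 3 and returns decoded text, which plain open() on Python 2 does not:

    import io

    with io.open('CHANGES.txt', encoding='utf-8') as f:
        text = f.read()  # unicode on Python 2, str on Python 3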
|
Python
|
bsd-3-clause
|
faassen/bowerstatic,faassen/bowerstatic
|
5eabda987c43c727ea03a8bd56c32e4c1490c918
|
setup.py
|
setup.py
|
"""setup.py file."""
import uuid
from setuptools import setup
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm-ansible",
version='0.7.1',
packages=["napalm_ansible"],
author="David Barroso, Kirk Byers, Mircea Ulinic",
author_email="[email protected], [email protected]",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm-base",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-ansible=napalm_ansible:main',
],
}
)
|
"""setup.py file."""
import uuid
from setuptools import setup
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm-ansible",
version='0.7.2',
packages=["napalm_ansible"],
author="David Barroso, Kirk Byers, Mircea Ulinic",
author_email="[email protected], [email protected]",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm-base",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'napalm-ansible=napalm_ansible:main',
],
}
)
|
Tag error, rolling to 0.7.2
|
Tag error, rolling to 0.7.2
|
Python
|
apache-2.0
|
napalm-automation/napalm-ansible,napalm-automation/napalm-ansible
|
a09ac33998debcfa502e650e72e59acb265ffeac
|
setup.py
|
setup.py
|
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/_axTime3.pyx"],
extra_compile_args=compile_args)]
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='[email protected]',
py_modules=['Chandra.axTime3', 'Chandra.Time'],
version='3.19.1',
zip_safe=False,
test_suite="Chandra.test_Time",
packages=['Chandra'],
package_dir={'Chandra': 'Chandra'},
ext_modules=cythonize(extensions),
)
|
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/_axTime3.pyx"],
extra_compile_args=compile_args)]
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='[email protected]',
py_modules=['Chandra.axTime3', 'Chandra.Time'],
version='3.20.0',
zip_safe=False,
test_suite="Chandra.test_Time",
packages=['Chandra'],
package_dir={'Chandra': 'Chandra'},
ext_modules=cythonize(extensions),
)
|
Update minor version for end of 2016 leapsec
|
Update minor version for end of 2016 leapsec
|
Python
|
bsd-3-clause
|
sot/Chandra.Time,sot/Chandra.Time,sot/Chandra.Time
|
7566ba4a701d8da431b6987a7a921b297152615e
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name="manhattan",
version='0.2',
description='Robust Server-Side Analytics',
long_description='',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
keywords='',
url='http://github.com/cartlogic/manhattan',
author='Scott Torborg',
author_email='[email protected]',
install_requires=[
'sqlalchemy>=0.7',
'webob',
'redis>=2.7.2',
'pytz',
'pyzmq',
'simplejson',
],
license='MIT',
packages=['manhattan'],
entry_points=dict(
console_scripts=[
'manhattan-server=manhattan.server:main',
'manhattan-client=manhattan.client:main',
'manhattan-log-server=manhattan.log.remote:server',
]
),
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
from setuptools import setup, find_packages
setup(name="manhattan",
version='0.2',
description='Robust Server-Side Analytics',
long_description='',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
keywords='',
url='http://github.com/cartlogic/manhattan',
author='Scott Torborg',
author_email='[email protected]',
install_requires=[
'sqlalchemy>=0.7',
'webob',
'redis>=2.7.2',
'pytz',
'pyzmq',
'simplejson',
],
license='MIT',
packages=find_packages(),
entry_points=dict(
console_scripts=[
'manhattan-server=manhattan.server:main',
'manhattan-client=manhattan.client:main',
'manhattan-log-server=manhattan.log.remote:server',
]
),
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
Use find_packages() instead of manually specifying package name, so that subpackages get included
|
Use find_packages() instead of manually specifying package name, so that subpackages get included
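As a rough illustration of that point (a sketch under an assumed project layout, not code from the repository's history): an explicit packages list silently drops nested packages, while find_packages() walks the tree and picks them up.
# Sketch only: assumes it is run from the project root, whose layout
# (per the entry points above) includes manhattan/ and manhattan/log/remote.py.
from setuptools import find_packages

# packages=['manhattan'] would ship only the top-level package and omit
# 'manhattan.log' and 'manhattan.log.remote'; find_packages() discovers them:
print(find_packages())  # e.g. ['manhattan', 'manhattan.log', ...]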
|
Python
|
mit
|
storborg/manhattan
|
e36744422c615581d366cbbbc3213896e5652f68
|
setup.py
|
setup.py
|
from os import path
from setuptools import setup
from subprocess import check_call
from distutils.command.build import build
from setuptools.command.develop import develop
def get_submodules():
if path.exists('.git'):
check_call(['rm', '-rf', 'pagedown/static/pagedown'])
check_call(['git', 'reset', '--hard'])
check_call(['git', 'submodule', 'init'])
check_call(['git', 'submodule', 'update'])
class build_with_submodules(build):
def run(self):
get_submodules()
build.run(self)
class develop_with_submodules(develop):
def run(self):
get_submodules()
develop.run(self)
setup(
name="django-pagedown",
version="0.1.0",
author="Timmy O'Mahony",
author_email="[email protected]",
url="https://github.com/timmyomahony/django-pagedown",
description=("A django app that allows the easy addition of Stack Overflow's 'PageDown' markdown editor to a django form field"),
long_description=open('README.md').read(),
packages=['pagedown'],
include_package_data=True,
install_requires=[
"Django >= 1.3",
],
license='LICENSE.txt',
cmdclass={"build": build_with_submodules, "develop": develop_with_submodules},
)
|
from os import path
from setuptools import setup
from subprocess import check_call
from distutils.command.build import build
from setuptools.command.develop import develop
def get_submodules():
if path.exists('.git'):
check_call(['rm', '-rf', 'pagedown/static/pagedown'])
check_call(['git', 'reset', '--hard'])
check_call(['git', 'submodule', 'init'])
check_call(['git', 'submodule', 'update'])
class build_with_submodules(build):
def run(self):
get_submodules()
build.run(self)
class develop_with_submodules(develop):
def run(self):
get_submodules()
develop.run(self)
setup(
name="django-pagedown",
version="0.1.0",
author="Timmy O'Mahony",
author_email="[email protected]",
url="https://github.com/timmyomahony/django-pagedown",
description=("A django app that allows the easy addition of Stack Overflow's 'PageDown' markdown editor to a django form field"),
long_description=open('README.md').read(),
packages=['pagedown'],
include_package_data=True,
install_requires=[
"Django >= 1.3",
],
license='LICENSE.txt',
cmdclass={"build": build_with_submodules, "develop": develop_with_submodules},
zip_safe=False,
)
|
Declare egg to not be zip-safe
|
Declare egg to not be zip-safe
If the egg is installed zipped, `python manage.py collectstatic` will fail to find the resources under `static`.
|
Python
|
bsd-3-clause
|
timmyomahony/django-pagedown,timmyomahony/django-pagedown,kaushalaman/django-pagedown,kaushalaman/django-pagedown,ministryofjustice/django-pagedown,ministryofjustice/django-pagedown,timmyomahony/django-pagedown,ministryofjustice/django-pagedown,kaushalaman/django-pagedown
|
0d40f65817666b5e82028ef9e0a07ddfc922affb
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='Py-Authorize',
version='1.0.1.2',
author='Vincent Catalano',
author_email='[email protected]',
url='https://github.com/vcatalano/py-authorize',
download_url='',
description='A full-featured Python API for Authorize.net.',
long_description=__doc__,
license='MIT',
install_requires=[
'colander',
],
packages=[
'authorize',
'authorize.apis',
],
classifiers=[
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business :: Financial',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import setup
setup(
name='Py-Authorize',
version='1.0.1.3',
author='Vincent Catalano',
author_email='[email protected]',
url='https://github.com/vcatalano/py-authorize',
download_url='',
description='A full-featured Python API for Authorize.net.',
long_description=__doc__,
license='MIT',
install_requires=[
'colander',
],
packages=[
'authorize',
'authorize.apis',
],
classifiers=[
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Office/Business :: Financial',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Fix exception handling to extend colander.Invalid
|
Fix exception handling to extend colander.Invalid
|
Python
|
mit
|
aryeh/py-authorize,ClearcodeHQ/py-authorize,vcatalano/py-authorize,uglycitrus/py-authorize
|
36b35f463f27028ae015bbaea3e2bafbfeb7ddb3
|
setup.py
|
setup.py
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='[email protected]',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan', 'arabic', "germanic"],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.116',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='[email protected]',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['gitpython',
'nltk',
'python-crfsuite',
'pyuca',
'pyyaml',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin', 'chinese', 'sanskrit', 'pali', 'tibetan', 'arabic', "germanic"],
license='MIT',
long_description='The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.', # pylint: disable=C0301,
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.117',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
Bump vers for square Aramaic Unicode
|
Bump vers for square Aramaic Unicode
#975 from @D-K-E
|
Python
|
mit
|
cltk/cltk,kylepjohnson/cltk,diyclassics/cltk,D-K-E/cltk
|
25d4fabce29433bc1381a03a44a67cc588688e7f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Jason Davies
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB>=0.5dev_r125'],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Jason Davies
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'CouchDB-FUSE',
version = '0.1',
description = 'CouchDB FUSE module',
long_description = \
"""This is a Python FUSE module for CouchDB. It allows CouchDB document
attachments to be mounted on a virtual filesystem and edited directly.""",
author = 'Jason Davies',
author_email = '[email protected]',
license = 'BSD',
url = 'http://code.google.com/p/couchdb-fuse/',
zip_safe = True,
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
],
packages = ['couchdbfuse'],
entry_points = {
'console_scripts': [
'couchmount = couchdbfuse:main',
],
},
install_requires = ['CouchDB >= 0.5'],
)
|
Update couchdb-python dependency version to 0.5 in anticipation of its release.
|
Update couchdb-python dependency version to 0.5 in anticipation of its release.
git-svn-id: fdb8975c015a424b33c0997a6b0d758f3a24819f@9 bfab2ddc-a81c-11dd-9a07-0f3041a8e97c
|
Python
|
bsd-3-clause
|
cozy-labs/cozy-fuse,jasondavies/couchdb-fuse
|
de3f474502e781dacc0b182ee8d50d729468c576
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'pycolors2',
py_modules = ['colors',],
version = '0.0.3',
author = 'Chris Gilmer',
author_email = '[email protected]',
maintainer = 'Chris Gilmer',
maintainer_email = '[email protected]',
url = 'http://github.com/chrisgilmerproj/pycolors2',
license = 'MIT license',
description = """ Tool to color code python output """,
long_description = open('README.markdown').read(),
requires = [],
classifiers = (
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System',
'Topic :: Terminals',
'Topic :: Utilities',
),
)
|
from distutils.core import setup
setup(
name = 'pycolors2',
py_modules = ['colors',],
version = '0.0.3',
author = 'Chris Gilmer',
author_email = '[email protected]',
maintainer = 'Chris Gilmer',
maintainer_email = '[email protected]',
url = 'http://github.com/chrisgilmerproj/pycolors2',
license = 'MIT license',
description = """ Tool to color code python output """,
long_description = open('README.markdown').read(),
requires = [],
classifiers = (
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System',
'Topic :: Terminals',
'Topic :: Utilities',
),
)
|
Add trove classifiers for language support
|
Add trove classifiers for language support
|
Python
|
mit
|
chrisgilmerproj/pycolors2
|
1f9c09b52269b2c0310317ff252dfdd702fe3462
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
__version__ = "1.0"
setup(
name="awsenv",
version=__version__,
description="Opinionated configuration",
author="Location Labs",
author_email="[email protected]",
url="http://locationlabs.com",
packages=find_packages(exclude=["*.tests"]),
setup_requires=[
"nose>=1.3.7"
],
install_requires=[
"botocore>=1.3.1",
],
tests_require=[
"mock>=1.3.0",
"coverage>=4.0.1",
],
test_suite="awsenv.tests",
entry_points={
"console_scripts": [
"awsenv = awsenv.main:main",
]
}
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
__version__ = "1.0"
setup(
name="awsenv",
version=__version__,
description="Opinionated configuration",
author="Location Labs",
author_email="[email protected]",
url="http://locationlabs.com",
packages=find_packages(exclude=["*.tests"]),
setup_requires=[
"nose>=1.3.7"
],
install_requires=[
"botocore>=1.3.1",
],
tests_require=[
"mock>=1.0.1",
"coverage>=4.0.1",
],
test_suite="awsenv.tests",
entry_points={
"console_scripts": [
"awsenv = awsenv.main:main",
]
}
)
|
Downgrade mock library for Travis compatibility.
|
Downgrade mock library for Travis compatibility.
|
Python
|
apache-2.0
|
locationlabs/awsenv
|
58619ac341bd552594772be9c03aa73c75b55d0b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='dcp',
version='0.3',
description='Basic DCP client',
author='Couchbase',
author_email='[email protected]',
packages=['dcp'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup
setup(
name='dcp-client',
version='0.3',
description='Basic DCP client',
author='Couchbase',
author_email='[email protected]',
packages=['dcp-client'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Rename package from dcp to dcp-client
|
Rename package from dcp to dcp-client
|
Python
|
apache-2.0
|
mattcarabine/python-dcp-client,jbmcgill/python-dcp-client,couchbaselabs/python-dcp-client
|
80980383c32b8e02dc33a6218b734e7073edd43f
|
setup.py
|
setup.py
|
#! /usr/bin/env python3
from setuptools import setup
from dictorm.dictorm import __version__, __doc__ as ddoc
config = {
'name':'dictorm',
'version':str(__version__),
'author':'rolobio',
'author_email':'[email protected]',
'description':ddoc,
'license':'Apache2',
'keywords':'psycopg2 dictionary python dict',
'url':'https://github.com/rolobio/DictORM',
'packages':[
'dictorm',
],
'long_description':ddoc,
'classifiers':[
"Development Status :: 5 - Production/Stable",
"Topic :: Utilities",
],
'setup_requires': ['green>=2.12.0'],
'tests_require': [
'coverage',
'coveralls',
'green>=2.12.0',
'psycopg2',
],
'extras_require':{
'Postgresql': ['psycopg2'],
'testing': ['psycopg2', 'green>=2.12.0', 'coveralls', 'coverage'],
}
}
setup(**config)
|
#! /usr/bin/env python3
from setuptools import setup
from dictorm.dictorm import __version__, __doc__ as ddoc
config = {
'name':'dictorm',
'version':str(__version__),
'author':'rolobio',
'author_email':'[email protected]',
'description':ddoc,
'license':'Apache2',
'keywords':'psycopg2 dictionary python dict',
'url':'https://github.com/rolobio/DictORM',
'packages':[
'dictorm',
],
'long_description':ddoc,
'classifiers':[
"Development Status :: 5 - Production/Stable",
"Topic :: Utilities",
],
'setup_requires': ['green>=2.12.0'],
'tests_require': [
'coverage',
'coveralls',
'green>=2.12.0',
'psycopg2-binary',
],
'extras_require':{
'Postgresql': ['psycopg2-binary'],
'testing': ['psycopg2-binary', 'green>=2.12.0', 'coveralls', 'coverage'],
}
}
setup(**config)
|
Package name for psycopg2 has been changed
|
Package name for psycopg2 has been changed
|
Python
|
apache-2.0
|
rolobio/DictORM,rolobio/pgpydict
|
c8a47fe44ca4ecbf98cccec166a5d22a2597b577
|
setup.py
|
setup.py
|
#!/usr/bin/env python
#
# Copyright 2014 Major Hayden
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Install rpc_differ."""
import sys
from setuptools import setup
required_packages = [
"GitPython",
"jinja2",
"osa_differ>=0.3.6",
"requests"
]
if sys.version_info < (2, 7):
required_packages.append("importlib")
setup(
name='rpc_differ',
version='0.3.6',
author='Major Hayden',
author_email='[email protected]',
description="Find changes between RPC-OpenStack revisions",
install_requires=required_packages,
packages=['rpc_differ'],
include_package_data=True,
url='https://github.com/rcbops/rpc_differ',
entry_points='''
[console_scripts]
rpc-differ = rpc_differ.rpc_differ:run_rpc_differ
'''
)
|
#!/usr/bin/env python
#
# Copyright 2014 Major Hayden
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Install rpc_differ."""
import sys
from setuptools import setup
required_packages = [
"GitPython",
"jinja2",
"osa_differ>=0.3.8",
"requests"
]
if sys.version_info < (2, 7):
required_packages.append("importlib")
setup(
name='rpc_differ',
version='0.3.7',
author='Major Hayden',
author_email='[email protected]',
description="Find changes between RPC-OpenStack revisions",
install_requires=required_packages,
packages=['rpc_differ'],
include_package_data=True,
url='https://github.com/rcbops/rpc_differ',
entry_points='''
[console_scripts]
rpc-differ = rpc_differ.rpc_differ:run_rpc_differ
'''
)
|
Update osa_differ minimum version to 0.3.8
|
Update osa_differ minimum version to 0.3.8
In order to support the use of PR refspecs, we
need to use 0.3.8 and above.
|
Python
|
apache-2.0
|
major/rpc_differ
|
21b3a79db94903d18671c58619601022b0277ccd
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Luhn Algorithm Validator -- long description
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='luhn_algorithm_validator',
version='1.0.0',
description='Luhn Account Number Validator',
long_description=long_description,
# Project homepage
url='https://github.com/garwoodpr/LuhnAlgorithmProof',
# Author details
author='Clint Garwood',
author_email='[email protected]',
# License
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Topic :: Text Processing :: General',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Office/Business :: Financial',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Customer Service',
]
    keywords=['Luhn Algorithm', 'account number validation', 'credit card validation', 'text analysis', 'information processing', 'data verification', 'cryptography', 'numerical decoding',
]
)
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Luhn Algorithm Validator -- long description
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='luhn_algorithm_validator',
version='1.0.1',
packages = ['luhn'],
description='Luhn Account Number Validator',
long_description=long_description,
# Project homepage
url='https://github.com/garwoodpr/LuhnAlgorithmProof',
# Author details
author='Clint Garwood',
author_email='[email protected]',
# License
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Topic :: Text Processing :: General',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Office/Business :: Financial',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Customer Service',
],
    keywords=['Luhn Algorithm', 'account number validation', 'credit card validation', 'text analysis', 'information processing', 'data verification', 'cryptography', 'numerical decoding',
],
)
|
Include luhn package; update version to 1.0.1
|
Include luhn package; update version to 1.0.1
|
Python
|
mit
|
garwoodpr/LuhnAlgorithmProof,garwoodpr/LuhnAlgorithmProof
|
ee2d37cd2d088f1bc1aface2322d7216a258754b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='django-treemenus',
version='0.8.8-pre',
description='Tree-structured menuing application for Django.',
author='Julien Phalip',
author_email='[email protected]',
url='http://github.com/jphalip/django-treemenus/',
packages=find_packages(),
package_data={
'treemenus': [
'templates/admin/treemenus/menu/*.html',
'templates/admin/treemenus/menuitem/*.html'
]
},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from setuptools import setup, find_packages
setup(
name='django-treemenus',
version='0.8.8-pre',
description='Tree-structured menuing application for Django.',
author='Julien Phalip',
author_email='[email protected]',
url='http://github.com/jphalip/django-treemenus/',
packages=find_packages(),
package_data={
'treemenus': [
'static/img/treemenus/*.gif',
'templates/admin/treemenus/menu/*.html',
'templates/admin/treemenus/menuitem/*.html',
]
},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
Include static assets in package_data
|
Include static assets in package_data
|
Python
|
bsd-3-clause
|
artscoop/django-treemenus-plus,artscoop/django-treemenus-plus,jphalip/django-treemenus
|
c2bd843cd383cb55c3a74f48612a4969c893a42e
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='widget-party',
version='2.2.3',
packages=['widget_party'],
install_requires=['django-dashing>=0.2.6', 'Django>=1.6', ],
include_package_data=True,
setup_requires=["setuptools_git >= 0.3"],
license='MIT License',
description='A collection of widgets to add functionality to django-dashing.',
long_description=README,
url='https://github.com/mverteuil/widget-party',
author='Matthew de Verteuil',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Utilities',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='widget-party',
version='2.3',
packages=['widget_party'],
install_requires=['django-dashing>=0.2.6', 'Django>=1.6', ],
include_package_data=True,
setup_requires=["setuptools_git >= 0.3"],
license='MIT License',
description='A collection of widgets to add functionality to django-dashing.',
long_description=README,
url='https://github.com/mverteuil/widget-party',
author='Matthew de Verteuil',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Utilities',
],
)
|
Create 2.3 release with recent PR merges
|
Create 2.3 release with recent PR merges
|
Python
|
mit
|
mverteuil/widget-party,mverteuil/widget-party,mverteuil/widget-party
|
d73ff1c66925613646495a22018e8c8a6ce139a7
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup, Command
from distutils.extension import Extension
import os
import numpy as np
from Cython.Distutils import build_ext
os.environ['TEST_DATA_ROOT'] = os.path.abspath("tests/data")
class CramTest(Command):
user_options = [ ]
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
import cram
import sys
test_root = os.path.abspath("tests/cram")
tests = [os.path.join(test_root, test) for test in os.listdir("tests/cram")]
sys.exit(cram.main(tests))
setup(
name="15puzz",
version="0.0.0",
description="15 Puzzle Game",
author="Eric Chlebek",
author_email="[email protected]",
packages=["fifteen"],
#scripts=["scripts/15puzz"],
test_suite="tests.unit",
cmdclass={"cram": CramTest, "build_ext": build_ext},
ext_modules=[
Extension("_c15", ["fifteen/_c15.pyx"], [np.get_include()])
]
)
|
#!/usr/bin/env python
from distutils.core import setup, Command
from distutils.extension import Extension
import os
import numpy as np
from Cython.Distutils import build_ext
from unittest import TextTestRunner, TestLoader
os.environ['TEST_DATA_ROOT'] = os.path.abspath("tests/data")
class UnitTest(Command):
def run(self):
import tests.unit.test_15p
loader = TestLoader()
t = TextTestRunner()
t.run(loader.loadTestsFromModule(tests.unit.test_15p))
user_options = []
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
class CramTest(Command):
user_options = [ ]
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
import cram
import sys
test_root = os.path.abspath("tests/cram")
tests = [os.path.join(test_root, test) for test in os.listdir("tests/cram")]
sys.exit(cram.main(tests))
setup(
name="15puzz",
version="0.0.0",
description="15 Puzzle Game",
author="Eric Chlebek",
author_email="[email protected]",
packages=["fifteen"],
#scripts=["scripts/15puzz"],
cmdclass={"cram": CramTest, "build_ext": build_ext, "test": UnitTest},
ext_modules=[
Extension("_c15", ["fifteen/_c15.pyx"], [np.get_include()])
]
)
|
Fix broken unit test command.
|
Fix broken unit test command.
|
Python
|
mit
|
echlebek/15puzz,echlebek/15puzz
|
df0efa079afbda84b1c09bc4895c84c0ec70861d
|
setup.py
|
setup.py
|
import importlib
from cx_Freeze import setup, Executable
backend_path = importlib.import_module("bcrypt").__path__[0]
backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend")
# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md",
(backend_path, "lib/.libs_cffi_backend")
],
"includes": [
"cffi",
"numpy",
"numpy.core._methods",
"numpy.lib",
"numpy.lib.format",
"raven.processors"
],
"packages": [
"_cffi_backend",
"appdirs",
"asyncio",
"bcrypt",
"cffi",
"idna",
"motor",
"packaging",
"uvloop"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.7"
]
importlib.import_module("virtool")
setup(name='virtool', executables=executables, options=options, classifiers=classifiers, python_requires=">=3.6")
|
import importlib
from cx_Freeze import setup, Executable
backend_path = importlib.import_module("bcrypt").__path__[0]
backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend")
# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md",
(backend_path, "lib/.libs_cffi_backend")
],
"includes": [
"cffi",
"numpy",
"numpy.core._methods",
"numpy.lib",
"numpy.lib.format",
"raven.processors"
],
"packages": [
"_cffi_backend",
"appdirs",
"asyncio",
"bcrypt",
"cffi",
"idna",
"motor",
"packaging",
"ssl",
"uvloop"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.7"
]
importlib.import_module("virtool")
setup(name='virtool', executables=executables, options=options, classifiers=classifiers, python_requires=">=3.6")
|
Add SSL to cx-freeze packages
|
Add SSL to cx-freeze packages
|
Python
|
mit
|
igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool
|
67e53d151971a56c1375c2fa9a8afcb1bfd609d5
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tldr',
version="0.1.3",
author='Felix Yan',
author_email='[email protected]',
url='https://github.com/felixonmars/tldr-python-client',
description='command line client for tldr',
license='MIT',
py_modules=['tldr'],
scripts=['tldr.py'],
install_requires=['six', 'termcolor', 'colorama'],
tests_require=[
'pytest-runner',
],
entry_points={
'console_scripts': ['tldr = tldr:main']
},
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: SunOS/Solaris",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Utilities",
"Topic :: System"
]
)
|
from setuptools import setup
setup(
name='tldr',
version="0.1.3",
author='Felix Yan',
author_email='[email protected]',
url='https://github.com/felixonmars/tldr-python-client',
description='command line client for tldr',
license='MIT',
py_modules=['tldr'],
scripts=['tldr.py'],
install_requires=['six', 'termcolor', 'colorama'],
tests_require=[
'pytest-runner',
],
entry_points={
'console_scripts': ['tldr = tldr:main']
},
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: SunOS/Solaris",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
"Topic :: System"
]
)
|
Add Python 3.4 and 3.5 to classifiers
|
Add Python 3.4 and 3.5 to classifiers
|
Python
|
mit
|
Anmol-Singh-Jaggi/tldr-python-client,tldr-pages/tldr-python-client
|
cf8dc3b9ff61b045233b252f20e2f68587911514
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="arxiv",
version="0.2.1",
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
download_url="https://github.com/lukasschwab/arxiv.py/tarball/0.2.1",
)
|
from setuptools import setup
setup(
name="arxiv",
version="0.2.2",
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="[email protected]",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
download_url="https://github.com/lukasschwab/arxiv.py/tarball/0.2.2",
)
|
Resolve python3 error, version++ for release
|
Resolve python3 error, version++ for release
|
Python
|
mit
|
lukasschwab/arxiv.py
|
83b40ad7c62becd316cf54aba90e31bd6ef29b21
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
import kamboo
requires = ['boto3']
if sys.version_info[:2] == (2, 6):
# For python2.6 we have to require argparse since it
# was not in stdlib until 2.7.
requires.append('argparse>=1.1')
setup_options = dict(
name='kamboo',
version=kamboo.__version__,
description='',
long_description=open('README.rst').read(),
author='Henry Huang',
author_email='[email protected]',
url='https://github.com/henrysher/kamboo',
scripts=[],
packages=find_packages('.', exclude=['tests*']),
package_dir={'kamboo': 'kamboo'},
package_data={'kamboo': []},
install_requires=requires,
license="Apache License 2.0",
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
),
)
setup(**setup_options)
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
import kamboo
requires = ['boto3']
if sys.version_info[:2] == (2, 6):
# For python2.6 we have to require argparse since it
# was not in stdlib until 2.7.
requires.append('argparse>=1.1')
setup_options = dict(
name='kamboo',
version=kamboo.__version__,
description='To build and distribute AMI images' +
' or EC2 snapshots across accounts and regions',
long_description=open('README.rst').read(),
author='Henry Huang',
author_email='[email protected]',
url='https://github.com/henrysher/kamboo',
scripts=[],
packages=find_packages('.', exclude=['tests*']),
package_dir={'kamboo': 'kamboo'},
package_data={'kamboo': []},
install_requires=requires,
license="Apache License 2.0",
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
),
)
setup(**setup_options)
|
Add the description to the PyPI package
|
Add the description to the PyPI package
|
Python
|
apache-2.0
|
henrysher/kamboo,henrysher/kamboo
|
4a28457070e2aaa08eced2651bc437ca052af739
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="spinoff",
version="0.3dev",
packages=find_packages(),
data_files=[('twisted/plugins', ['twisted/plugins/startnode.py'])],
install_requires=[
'twisted>=12.0',
'txcoroutine',
'txzmq',
],
author="Erik Allik",
author_email="[email protected]",
)
|
from setuptools import setup, find_packages
setup(
name="spinoff",
version="0.3dev",
packages=find_packages(),
data_files=[('twisted/plugins', ['twisted/plugins/startnode.py'])],
install_requires=[
'twisted>=12.0',
'txcoroutine',
'txzmq==0.5.1',
],
dependency_links=[
'https://github.com/eallik/txZMQ/tarball/master#egg=txzmq-0.5.1'
],
author="Erik Allik",
author_email="[email protected]",
)
|
Use the custom fork of github.com/eallik/txzmq as the dependency instead of the original one at github.com/smira/txzmq
|
Use the custom fork of github.com/eallik/txzmq as the dependency instead of the original one at github.com/smira/txzmq
|
Python
|
bsd-2-clause
|
eallik/spinoff
|
720bd5b2f3c422af6bd1c70850fb2f67a773def0
|
slack.py
|
slack.py
|
#!/usr/bin/env python
import requests
import calendar
from datetime import datetime, timedelta
from settings import _token, _domain, _user, _time, _pretty
if __name__ == '__main__':
while 1:
files_list_url = 'https://slack.com/api/files.list'
date = str(calendar.timegm((datetime.now() + timedelta(-_time)).utctimetuple()))
data = {"token": _token, "ts_to": date, "user": _user}
response = requests.post(files_list_url, data=data)
if len(response.json()["files"]) == 0:
break
for f in response.json()["files"]:
print("Deleting file " + f["name"] + "...")
timestamp = str(calendar.timegm(datetime.now().utctimetuple()))
delete_url = "https://" + _domain + ".slack.com/api/files.delete?t=" + timestamp
requests.post(delete_url, data={
"token": _token,
"file": f["id"],
"set_active": "true",
"_attempts": "1"})
print("DONE!")
|
#!/usr/bin/env python
import requests
import calendar
from datetime import datetime, timedelta
from settings import _token, _domain, _user, _time, _pretty
def delete_my_files():
while 1:
files_list_url = 'https://slack.com/api/files.list'
date = str(calendar.timegm((datetime.now() + timedelta(-_time)).utctimetuple()))
data = {"token": _token, "ts_to": date, "user": _user}
response = requests.post(files_list_url, data=data)
if len(response.json()["files"]) == 0:
break
for f in response.json()["files"]:
print("Deleting file " + f["name"] + "...")
timestamp = str(calendar.timegm(datetime.now().utctimetuple()))
delete_url = "https://" + _domain + ".slack.com/api/files.delete?t=" + timestamp
requests.post(delete_url, data={
"token": _token,
"file": f["id"],
"set_active": "true",
"_attempts": "1"})
print("DONE!")
if __name__ == '__main__':
delete_my_files()
|
Move the main items into a function
|
Move the main items into a function
|
Python
|
mit
|
marshallhumble/slack_app,marshallhumble/slack_app
|
16516b1ec44e3e44d2dc96a6f3d021268ce4e71d
|
osgtest/tests/test_84_xrootd.py
|
osgtest/tests/test_84_xrootd.py
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
Fix test if server started in xrootd cleanup code
|
Fix test if server started in xrootd cleanup code
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17920 4e558342-562e-0410-864c-e07659590f8c
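An aside on why the old check never triggered (illustration only, not part of the commit): the left-hand side was a fresh list literal, which can never compare equal to False, so the skip condition was constant.
# In the old code the condition was:  ['xrootd.started-server'] == False
# A non-empty list never equals False, so skip_ok_if never skipped:
print(['xrootd.started-server'] == False)  # prints False
# The fix looks up the recorded flag instead: core.state['xrootd.started-server'] == False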
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
75b02b3cafcc34dca143de9143f14c3c7c29c97c
|
transmutagen/tests/test_coefficients.py
|
transmutagen/tests/test_coefficients.py
|
import pytest
slow = pytest.mark.skipif(
not pytest.config.getoption("--runslow"),
reason="need --runslow option to run"
)
TOTAL_DEGREES = 30
from .crv_coeffs import coeffs as correct_coeffs
from ..cram import get_CRAM_from_cache, CRAM_coeffs
# @slow
@pytest.mark.parametrize('degree', range(1, TOTAL_DEGREES+1))
def test_coefficients(degree):
generated_coeffs = {}
expr = get_CRAM_from_cache(degree, 200)
generated_coeffs[degree] = CRAM_coeffs(expr, 20,
decimal_rounding=True)
# pytest won't show the full expr from the assert, so we print it too
print(expr)
assert generated_coeffs[degree] == correct_coeffs[degree], expr
|
import decimal
import pytest
from sympy import re
slow = pytest.mark.skipif(
not pytest.config.getoption("--runslow"),
reason="need --runslow option to run"
)
TOTAL_DEGREES = 30
from .crv_coeffs import coeffs as correct_coeffs
from .partfrac_coeffs import part_frac_coeffs
from ..cram import get_CRAM_from_cache, CRAM_coeffs
from ..partialfrac import thetas_alphas
# @slow
@pytest.mark.parametrize('degree', range(1, TOTAL_DEGREES+1))
def test_coefficients(degree):
generated_coeffs = {}
expr = get_CRAM_from_cache(degree, 200)
generated_coeffs[degree] = CRAM_coeffs(expr, 20,
decimal_rounding=True)
# pytest won't show the full expr from the assert, so we print it too
print(expr)
assert generated_coeffs[degree] == correct_coeffs[degree], expr
@pytest.mark.xfail
@pytest.mark.parametrize('degree', [14, 16])
def test_partial_fraction_coefficients(degree):
generated_coeffs = {}
expr = get_CRAM_from_cache(degree, 200)
thetas, alphas, alpha0 = thetas_alphas(expr, 200)
format_str = '{:.19e}'
correct_coeffs = part_frac_coeffs[degree]
# Thetas in the paper are negative what we have
thetas = [-i for i in thetas]
for theta, real_theta, imag_theta in zip(sorted(thetas, key=re),
correct_coeffs['thetas']['real'],
correct_coeffs['thetas']['imaginary']):
real, imag = theta.as_real_imag()
assert format_str.format(decimal.Decimal(repr(real))) == real_theta
assert format_str.format(decimal.Decimal(repr(imag))) == imag_theta
assert generated_coeffs[degree] == correct_coeffs[degree], expr
|
Add test against Pusa coefficients (skipped for now, as they don't pass)
|
Add test against Pusa coefficients (skipped for now, as they don't pass)
|
Python
|
bsd-3-clause
|
ergs/transmutagen,ergs/transmutagen
|
f468a26893c44411dc1f865b208788373f993918
|
asciibooth/camera.py
|
asciibooth/camera.py
|
import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview()
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
|
import io
# import time
import picamera
from . import config
class Camera:
def __init__(self):
self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION)
self.camera.hflip = True
self.preview_alpha = 200
def capture(self):
stream = io.BytesIO()
self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE)
stream.seek(0)
return stream
def toggle_preview(self):
if self.camera.preview is None:
self.camera.start_preview(hflip = True)
self.camera.preview.alpha = self.preview_alpha
else:
self.camera.stop_preview()
def start_preview(self, alpha=255):
self.camera.start_preview()
self.camera.preview.alpha = alpha
def stop_preview(self):
self.camera.stop_preview()
def stop(self):
self.camera.close()
|
Enable hflip for capture and preview
|
Enable hflip for capture and preview
|
Python
|
cc0-1.0
|
jnv/asciibooth,jnv/asciibooth
|
6f213f17fab236e1222f4e691015dfd867073ae2
|
dbaas/workflow/steps/build_database.py
|
dbaas/workflow/steps/build_database.py
|
# -*- coding: utf-8 -*-
import logging
from base import BaseStep
from logical.models import Database
LOG = logging.getLogger(__name__)
class BuildDatabase(BaseStep):
def __unicode__(self):
return "Creating logical database..."
def do(self, workflow_dict):
try:
if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']:
return False
LOG.info("Creating Database...")
database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra'])
database.team = workflow_dict['team']
database.project = workflow_dict['project']
database.description = workflow_dict['description']
database.save()
workflow_dict['database'] = database
return True
except Exception, e:
print e
return False
def undo(self, workflow_dict):
try:
LOG.info("Destroying the database....")
workflow_dict['database'].delete()
return True
except Exception, e:
print e
return False
|
# -*- coding: utf-8 -*-
import logging
from base import BaseStep
from logical.models import Database
LOG = logging.getLogger(__name__)
class BuildDatabase(BaseStep):
def __unicode__(self):
return "Creating logical database..."
def do(self, workflow_dict):
try:
if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']:
return False
LOG.info("Creating Database...")
database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra'])
workflow_dict['database'] = database
database.team = workflow_dict['team']
if 'project' in workflow_dict:
database.project = workflow_dict['project']
database.description = workflow_dict['description']
database.save()
return True
except Exception, e:
print e
return False
def undo(self, workflow_dict):
try:
LOG.info("Destroying the database....")
workflow_dict['database'].delete()
return True
except Exception, e:
print e
return False
|
Check if there is a project key in workflow_dict
|
Check if there is a project key in workflow_dict
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
84f9c189f62c6ab81de952cb9a7e9942237465ec
|
tasks.py
|
tasks.py
|
from invoke import task, Collection
from invocations.packaging import release
# TODO: once this stuff is stable and I start switching my other projects to be
# pytest-oriented, move this into invocations somehow.
@task
def test(c):
"""
Run verbose pytests.
"""
c.run("pytest --verbose --color=yes")
@task
def coverage(c, html=True):
"""
Run coverage with coverage.py.
"""
# NOTE: this MUST use coverage itself, and not pytest-cov, because the
# latter is apparently unable to prevent pytest plugins from being loaded
# before pytest-cov itself is able to start up coverage.py! The result is
# that coverage _always_ skips over all module level code, i.e. constants,
# 'def' lines, etc. Running coverage as the "outer" layer avoids this
# problem, thus no need for pytest-cov.
# NOTE: this does NOT hold true for NON-PYTEST code, so
# pytest-relaxed-USING modules can happily use pytest-cov.
c.run("coverage run --source=pytest_relaxed -m pytest")
if html:
c.run("coverage html")
c.run("open htmlcov/index.html")
ns = Collection(
coverage,
test,
packaging=release,
)
ns.configure({
})
|
from invoke import task, Collection
from invocations.packaging import release
from invocations import pytest as pytests
@task
def coverage(c, html=True):
"""
Run coverage with coverage.py.
"""
# NOTE: this MUST use coverage itself, and not pytest-cov, because the
# latter is apparently unable to prevent pytest plugins from being loaded
# before pytest-cov itself is able to start up coverage.py! The result is
# that coverage _always_ skips over all module level code, i.e. constants,
# 'def' lines, etc. Running coverage as the "outer" layer avoids this
# problem, thus no need for pytest-cov.
# NOTE: this does NOT hold true for NON-PYTEST code, so
# pytest-relaxed-USING modules can happily use pytest-cov.
c.run("coverage run --source=pytest_relaxed -m pytest")
if html:
c.run("coverage html")
c.run("open htmlcov/index.html")
ns = Collection(
coverage,
pytests.test,
packaging=release,
)
|
Use new invocations pytest helper
|
Use new invocations pytest helper
|
Python
|
bsd-2-clause
|
bitprophet/pytest-relaxed
|
87ea6fbc07c547a0c92f0b85811edc0645cb4303
|
pysyte/oss/linux.py
|
pysyte/oss/linux.py
|
"""Linux-specific code"""
import os
from pysyte.types import paths
def xdg_home():
"""path to $XDG_CONFIG_HOME
>>> assert xdg_home() == os.path.expanduser('~/.config')
"""
return paths.environ_path('XDG_CONFIG_HOME', '~/.config')
def xdg_home_config(filename):
"""path to that file in $XDG_CONFIG_HOME
>>> assert xdg_home_config('fred') == os.path.expanduser('~/.config/fred')
"""
return xdg_home() / filename
def xdg_dirs():
"""paths in $XDG_CONFIG_DIRS"""
return paths.environ_paths('XDG_CONFIG_DIRS')
def xdg_homes():
return [xdg_home()]
bash_paste = 'xclip -selection clipboard'
bash_copy = 'xclip -selection clipboard -o'
|
"""Linux-specific code"""
from pysyte.types import paths
def xdg_home():
"""path to $XDG_CONFIG_HOME
>>> assert xdg_home() == paths.path('~/.config').expand()
"""
return paths.environ_path('XDG_CONFIG_HOME', '~/.config')
def xdg_home_config(filename):
"""path to that file in $XDG_CONFIG_HOME
>>> assert xdg_home_config('fred') == paths.path('~/.config/fred').expand()
"""
return xdg_home() / filename
def xdg_dirs():
"""paths in $XDG_CONFIG_DIRS"""
return paths.environ_paths('XDG_CONFIG_DIRS')
def xdg_homes():
return [xdg_home()]
bash_paste = 'xclip -selection clipboard'
bash_copy = 'xclip -selection clipboard -o'
|
Remove unused import of "os"
|
Remove unused import of "os"
|
Python
|
mit
|
jalanb/dotsite
|
b12418acf3883024be965f42ec8d3a16e76d384f
|
special/special_relativity.py
|
special/special_relativity.py
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
class LorentzFactor(object):
SPEED_OF_LIGHT = 299792458
@staticmethod
def get_beta(velocity, is_percent):
if is_percent:
return velocity
        return velocity / LorentzFactor.SPEED_OF_LIGHT
@staticmethod
def lorentz_factor(time, velocity, is_percent):
beta = LorentzFactor.get_beta(velocity, is_percent)
return time / (math.sqrt(1 - beta ** 2))
class TimeDilation(LorentzFactor):
@staticmethod
def get_proper_time(time, velocity, is_percent=True):
return time * TimeDilation.lorentz_factor(time, velocity, is_percent)
@staticmethod
def get_time_relative_ex_observer(time, velocity, is_percent=True):
"""
Dilation relative to an external observer
"""
return time ** 2 / TimeDilation.lorentz_factor(time, velocity, is_percent)
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
class LorentzFactor(object):
SPEED_OF_LIGHT = 299792458
@staticmethod
def get_beta(velocity, is_percent):
if is_percent:
return velocity
        return velocity / LorentzFactor.SPEED_OF_LIGHT
@staticmethod
def lorentz_factor(velocity, is_percent):
beta = LorentzFactor.get_beta(velocity, is_percent)
return 1 / (math.sqrt(1 - beta ** 2))
class TimeDilation(LorentzFactor):
@staticmethod
def get_proper_time(time, velocity, is_percent=True):
return time * TimeDilation.lorentz_factor(velocity, is_percent)
@staticmethod
def get_time_relative_ex_observer(time, velocity, is_percent=True):
"""
Dilation relative to an external observer
"""
return time / TimeDilation.lorentz_factor(velocity, is_percent)
|
Fix error in calculation on lorentz factor
|
Fix error in calculation on lorentz factor
|
Python
|
mit
|
tdsymonds/relativity
|
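For readers checking the fix above: the commit replaces time / sqrt(1 - beta^2) with the dimensionless Lorentz factor and then applies that factor in the TimeDilation helpers. The standard textbook relations, restated here purely as a reference and not taken from the repository, are:

\gamma = \frac{1}{\sqrt{1 - \beta^{2}}}, \qquad \beta = \frac{v}{c}, \qquad \Delta t = \gamma \, \Delta\tau

Here \Delta\tau is the interval in the moving frame and \Delta t the interval seen by an external observer; in the new code, get_proper_time multiplies the given time by \gamma while get_time_relative_ex_observer divides by it.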
eb31775a7dbbb2064cf64d85c2bb0912a92f4028
|
train.py
|
train.py
|
import data
import argparse
from model import EDSR
parser = argparse.ArgumentParser()
parser.add_argument("--dataset",default="data/General-100")
parser.add_argument("--imgsize",default=100,type=int)
parser.add_argument("--scale",default=2,type=int)
parser.add_argument("--layers",default=32,type=int)
parser.add_argument("--featuresize",default=256,type=int)
parser.add_argument("--batchsize",default=10,type=int)
parser.add_argument("--savedir",default='saved_models')
parser.add_argument("--iterations",default=1000,type=int)
args = parser.parse_args()
data.load_dataset(args.dataset,args.imgsize)
down_size = args.imgsize//args.scale
network = EDSR(down_size,args.layers,args.featuresize,args.scale)
network.set_data_fn(data.get_batch,(args.batchsize,args.imgsize,down_size),data.get_test_set,(args.imgsize,down_size))
network.train(args.iterations,args.savedir)
|
import data
import argparse
from model import EDSR
parser = argparse.ArgumentParser()
parser.add_argument("--dataset",default="data/General-100")
parser.add_argument("--imgsize",default=100,type=int)
parser.add_argument("--scale",default=2,type=int)
parser.add_argument("--layers",default=32,type=int)
parser.add_argument("--featuresize",default=256,type=int)
parser.add_argument("--batchsize",default=10,type=int)
parser.add_argument("--savedir",default='saved_models')
parser.add_argument("--iterations",default=1000,type=int)
args = parser.parse_args()
data.load_dataset(args.dataset,args.imgsize)
if args.imgsize % args.scale != 0:
	print(f"Image size {args.imgsize} is not evenly divisible by scale {args.scale}")
	raise SystemExit(1)
down_size = args.imgsize//args.scale
network = EDSR(down_size,args.layers,args.featuresize,args.scale)
network.set_data_fn(data.get_batch,(args.batchsize,args.imgsize,down_size),data.get_test_set,(args.imgsize,down_size))
network.train(args.iterations,args.savedir)
|
Add warning for mismatched image size and scale
|
Add warning for mismatched image size and scale
|
Python
|
mit
|
jmiller656/EDSR-Tensorflow
|
9c9a2904ad79c2f05dacdabef9f3ad78ae78db3f
|
quarkc/_metadata.py
|
quarkc/_metadata.py
|
# Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"__title__", "__version__",
"__summary__", "__uri__",
"__author__", "__email__",
"__license__", "__copyright__",
]
__title__ = 'datawire-quarkdev-bozzo'
__version__ = '0.5.2'
__summary__ = "Quark: an IDL for high level (micro)service interfaces"
__uri__ = "http://datawire.github.io/quark/"
__author__ = "datawire.io"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__copyright__ = "2016 %s" % __author__
|
# Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"__title__", "__version__",
"__summary__", "__uri__",
"__author__", "__email__",
"__license__", "__copyright__",
]
__title__ = 'datawire-quarkdev'
__version__ = '0.5.2'
__summary__ = "Quark: an IDL for high level (micro)service interfaces"
__uri__ = "http://datawire.github.io/quark/"
__author__ = "datawire.io"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__copyright__ = "2016 %s" % __author__
|
Revert "[HACK TEST] Rename package for now and try with a registered account for testpypi"
|
Revert "[HACK TEST] Rename package for now and try with a registered account for testpypi"
This reverts commit 3cc3b07dcbe7faac91cf42b40c7ebd4375c4f93d.
|
Python
|
apache-2.0
|
datawire/quark,datawire/quark,datawire/quark,datawire/quark,datawire/quark,datawire/quark
|
31448c1e23d7dfcea65af20432511673533885b6
|
teknologr/registration/mailutils.py
|
teknologr/registration/mailutils.py
|
from django.core.mail import send_mail
# TODO: check whether this should be sent from Phuxivator
def mailApplicantSubmission(context, sender='[email protected]'):
name = context['name']
receiver = context['email']
subject = 'Tack för din medlemsansökan till Teknologföreningen!'
message = '''Hej {name},
Tack för din medlemsansökan till Teknologföreningen!
För att bli en fullständig medlem så är nästa steg att delta i ett Nationsmöte (Namö).
Detta informeras mera senare.
Vid frågor eller ifall du inte ansökt om medlemskap, kontakt {sender}
Detta är ett automatiskt meddelande, du behöver inte svara på det.
'''.format(name=name, sender=sender)
return send_mail(
subject,
message,
sender,
[receiver],
fail_silently=False)
|
from django.core.mail import send_mail
# TODO: check whether this should be sent from Phuxivator
def mailApplicantSubmission(context, sender='[email protected]'):
name = context['name']
receiver = context['email']
subject = 'Tack för din medlemsansökan till Teknologföreningen!'
message = '''Hej {name},
Tack för din medlemsansökan till Teknologföreningen!
För att bli en fullständig medlem så är nästa steg att delta i ett Nationsmöte (Namö).
Detta informeras mera senare.
Vid frågor eller ifall du inte ansökt om medlemskap, kontakt {sender}
Detta är ett automatiskt meddelande, du behöver inte svara på det.
'''.format(name=name, sender=sender)
return send_mail(
subject,
message,
sender,
[receiver],
fail_silently=False)
|
Change registration email sender to Phuxivator
|
Change registration email sender to Phuxivator
|
Python
|
mit
|
Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io
|
d5df109f95dd47d63188e4b1d0017275adae172c
|
pip_package/rlds_version.py
|
pip_package/rlds_version.py
|
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
# python3
"""Package metadata for RLDS.
This is kept in a separate module so that it can be imported from setup.py, at
a time when RLDS's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '1'
_PATCH_VERSION = '3'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
|
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
# python3
"""Package metadata for RLDS.
This is kept in a separate module so that it can be imported from setup.py, at
a time when RLDS's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '1'
_PATCH_VERSION = '4'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
|
Increase Pypi version. New package uploaded to Pypi already.
|
Increase Pypi version. New package uploaded to Pypi already.
PiperOrigin-RevId: 435120795
Change-Id: Idaed3713248afaeefad2eac345d6cd4d6b37bef9
|
Python
|
apache-2.0
|
google-research/rlds,google-research/rlds
|
d604d17e8286b1c95a0faafd6d4fd79af11441ab
|
nn/util.py
|
nn/util.py
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
import functools
import numpy
import tensorflow as tf
def static_shape(tensor):
return tf.convert_to_tensor(tensor).get_shape().as_list()
def static_rank(tensor):
return len(static_shape(tf.convert_to_tensor(tensor)))
def funcname_scope(func_or_name):
if isinstance(func_or_name, str):
def wrapper(func):
func.__name__ = func_or_name
return funcname_scope(func)
return wrapper
func = func_or_name
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.variable_scope(func.__name__):
return func(*args, **kwargs)
return wrapper
def on_device(device_name):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with tf.device(device_name):
return func(*args, **kwargs)
return wrapper
return decorator
def dimension_indices(tensor, start=0):
return list(range(static_rank(tensor)))[start:]
@funcname_scope
def dtype_min(dtype):
return tf.constant(_numpy_min(dtype.as_numpy_dtype))
def _numpy_min(dtype):
return numpy.finfo(dtype).min
@funcname_scope
def dtype_epsilon(dtype):
return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype))
def _numpy_epsilon(dtype):
return numpy.finfo(dtype).eps
def flatten(x):
return tf.reshape(x, [-1])
|
Extend funcname_scope so that it accepts funcnames
|
Extend funcname_scope so that it accepts funcnames
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
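A minimal usage sketch of the extended decorator above. The import path and the TF 1.x-style graph building are assumptions; only funcname_scope itself comes from the diff:

import tensorflow as tf  # TF 1.x, implied by the tf.variable_scope call in the module
from nn.util import funcname_scope  # hypothetical import path, guessed from the file name

# Plain form: ops are created under a variable scope named after the function.
@funcname_scope
def double(x):
    return tf.multiply(x, 2)

# New string form: the function is renamed, so its scope becomes "triple".
@funcname_scope("triple")
def _unnamed(x):
    return tf.multiply(x, 3)

y = double(tf.constant(2))    # op names are prefixed with "double/"
z = _unnamed(tf.constant(2))  # op names are prefixed with "triple/"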
832d59557dcf2b73797159b4acb96e727492d8fb
|
maps/api/urls.py
|
maps/api/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^question_set/', views.http_list_question_sets),
url(r'^answer_set/', views.http_create_answer_set),
url(r'^answer/', views.create_answer),
url(r'^question/', views.get_question),
url(r'^contour_tile/(?P<question_id>\d+)', views.contour_tile),
url(r'^map_area/', views.create_map_area),
url(r'^question_set/', views.create_question_set),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^question_set_list/', views.http_list_question_sets),
url(r'^answer_set/', views.http_create_answer_set),
url(r'^answer/', views.create_answer),
url(r'^question/', views.get_question),
url(r'^contour_tile/(?P<question_id>\d+)', views.contour_tile),
url(r'^map_area/', views.create_map_area),
url(r'^question_set/', views.create_question_set),
]
|
Rename API method into question_set_list
|
Rename API method into question_set_list
|
Python
|
mit
|
sevazhidkov/greenland,sevazhidkov/greenland
|
f3e3b43abebfad0fcaa20df8eac20e3cb8c099d6
|
imgproc.py
|
imgproc.py
|
from SimpleCV import *
import numpy
import cv2
def process_image(obj, img, config):
"""
:param obj: Object we're tracking
:param img: Input image
:param config: Controls
:return: Mask with candidates surrounded in a green rectangle
"""
hsv_image = img.toHSV()
segmented = Image(cv2.inRange(hsv_image.getNumpy(),
numpy.array([config.min_hue, config.min_sat, config.min_val]),
numpy.array([config.max_hue, config.max_sat, config.max_val])))
segmented = segmented.dilate(2)
blobs = segmented.findBlobs()
if blobs:
for b in blobs:
if b.radius() > 10:
rect_width = b.minRectWidth()
rect_height = b.minRectHeight()
aspect_ratio = rect_width / rect_height
square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio)
if square_error < 0.1:
# minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2
segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width,
rect_height, color=Color.GREEN, width=6)
# Give the result mask
return segmented.applyLayers()
|
from SimpleCV import *
import numpy
import cv2
def process_image(obj, img, config, each_blob=None):
"""
:param obj: Object we're tracking
:param img: Input image
:param config: Controls
:param each_blob: function, taking a SimpleCV.Blob as an argument, that is called for every candidate blob
:return: Mask with candidates
"""
hsv_image = img.toHSV()
segmented = Image(cv2.inRange(hsv_image.getNumpy(),
numpy.array([config.min_hue, config.min_sat, config.min_val]),
numpy.array([config.max_hue, config.max_sat, config.max_val])))
segmented = segmented.dilate(2)
blobs = segmented.findBlobs()
if blobs:
for b in blobs:
if b.radius() > 10:
rect_width = b.minRectWidth()
rect_height = b.minRectHeight()
aspect_ratio = rect_width / rect_height
square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio)
if square_error < 0.1:
if not each_blob: # default to just outlining
# minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2
segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width,
rect_height, color=Color.GREEN, width=6)
else:
each_blob(b)
# Give the result mask
return segmented.applyLayers()
|
Allow a function to be called whenever a candidate blob is found during image processing
|
Allow a function to be called whenever a candidate blob is found during
image
processing
|
Python
|
mit
|
mstojcevich/Flash-Vision
|
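A small sketch of the new each_blob hook above, wrapped in a helper so it is self-contained; the module import path is guessed from the file name, and Blob.centroid() is assumed to be SimpleCV's accessor for the blob centre:

from imgproc import process_image  # hypothetical import path for the module above

def collect_candidate_centers(tracked_object, frame, controls):
    """Return the centre of every accepted blob instead of outlining it."""
    centers = []
    # With each_blob supplied, the rectangle-drawing branch is skipped and the
    # callback runs once per blob that passes the radius and aspect-ratio checks.
    process_image(tracked_object, frame, controls,
                  each_blob=lambda blob: centers.append(blob.centroid()))
    return centers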
7e464f70157c2ce7382912d65014bf9d872bed75
|
portal/main.py
|
portal/main.py
|
import argparse
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog="BTScreenshotAutomation")
parser.add_argument(
"-a", "--account", help="The name of the account", type=str, required=True)
parser.add_argument(
"-c", "--additionalCharts", nargs="+", help="The name of the additional charts")
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
args = parser.parse_args()
additionalCharts = set()
if args.additionalCharts is not None:
additionalCharts = set(args.additionalCharts)
btSeedingAccount = BTSeeding.BTSeedingAccount(
args.account, additionalCharts)
btSeedingAccount.run(saveDir=args.directory)
|
import argparse
import logging
import Portal
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog="Dynatrace Synthetic Automation")
parser.add_argument(
"-t", "--type", help="The account type: [gpn|dynatrace]", required=True)
parser.add_argument(
"-u", "--username", help="The username for the account", type=str, required=True)
parser.add_argument(
"-p", "--password", help="The password for the account", type=str, required=True)
parser.add_argument(
"-d", "--directory", help="The directory to save the chart screenshots", type=str, default=".")
parser.add_argument(
"-v", "--verbose", help="Display debug message", action="store_true")
parser.add_argument(
"-c", "--chartNames", nargs="+", help="The name of the chart to capture")
args = parser.parse_args()
print(args)
# portal = Portal.DynatracePortal(args.username, args.password)
# portal.login()
# portal.saveChartToScreenshot()
|
Add command line arguments for program
|
Add command line arguments for program
|
Python
|
mit
|
josecolella/Dynatrace-Resources
|
0eb7e6b9a8e4e38793b1e045ab5f0f0a4d4e6777
|
synapse/metrics/resource.py
|
synapse/metrics/resource.py
|
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
import synapse.metrics
METRICS_PREFIX = "/_synapse/metrics"
class MetricsResource(Resource):
isLeaf = True
def __init__(self, hs):
Resource.__init__(self) # Resource is old-style, so no super()
self.hs = hs
def render_GET(self, request):
response = synapse.metrics.render_all()
request.setHeader("Content-Type", "text/plain")
request.setHeader("Content-Length", str(len(response)))
# Encode as UTF-8 (default)
return response.encode()
|
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import synapse.metrics
METRICS_PREFIX = "/_synapse/metrics"
class MetricsResource(Resource):
isLeaf = True
def __init__(self, hs):
Resource.__init__(self) # Resource is old-style, so no super()
self.hs = hs
def render_GET(self, request):
response = synapse.metrics.render_all()
request.setHeader("Content-Type", "text/plain")
request.setHeader("Content-Length", str(len(response)))
# Encode as UTF-8 (default)
return response.encode()
|
Delete unused import of NOT_READY_YET
|
Delete unused import of NOT_READY_YET
|
Python
|
apache-2.0
|
matrix-org/synapse,illicitonion/synapse,iot-factory/synapse,TribeMedia/synapse,iot-factory/synapse,howethomas/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,matrix-org/synapse,howethomas/synapse,TribeMedia/synapse,TribeMedia/synapse,rzr/synapse,iot-factory/synapse,iot-factory/synapse,iot-factory/synapse,illicitonion/synapse,rzr/synapse,TribeMedia/synapse,rzr/synapse,TribeMedia/synapse,matrix-org/synapse,howethomas/synapse,illicitonion/synapse
|
8ef5b15c62960fb9abc43c9b30550faa0c0d7227
|
cactusbot/handler.py
|
cactusbot/handler.py
|
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
try:
response = getattr(handler, "on_" + event)(packet)
except Exception:
self.logger.warning(
"Exception in handler %s:", type(handler).__name__,
exc_info=1)
else:
# TODO: support for multiple responses in an iterable
if response is not None:
yield response
elif response is StopIteration:
break
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
|
"""Handle handlers."""
import logging
from .packet import Packet
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
try:
response = getattr(handler, "on_" + event)(packet)
except Exception:
self.logger.warning(
"Exception in handler %s:", type(handler).__name__,
exc_info=1)
else:
if isinstance(response, Packet):
yield response
elif isinstance(response, (tuple, list)):
yield from response
elif response is StopIteration:
return
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
|
Add support for multiple return `Packet`s from `Handler`s
|
Add support for multiple return `Packet`s from `Handler`s
|
Python
|
mit
|
CactusDev/CactusBot
|
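A sketch of the fan-out behaviour added above; the handler class and event name are hypothetical, and only Handler, Handlers and the on_<event> convention come from the module:

from cactusbot.handler import Handler, Handlers  # import path assumed from the file name

class EchoTwiceHandler(Handler):
    """Hypothetical handler that answers one event with two packets."""
    def on_message(self, packet):
        # A tuple or list is now yielded element by element by Handlers.handle.
        return (packet, packet)

handlers = Handlers(EchoTwiceHandler())

def dispatch(incoming_packet):
    # Two responses come back for a single "message" event.
    return list(handlers.handle("message", incoming_packet))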
51cdd71cbcbcfd80105cc5ccb5b95f4d79dc593e
|
src/service_deployment_tools/paasta_cli/utils/cmd_utils.py
|
src/service_deployment_tools/paasta_cli/utils/cmd_utils.py
|
#!/usr/bin/env python
"""
Contains helper functions common to all paasta commands or the client
"""
import glob
import os
# List of commands the paasta client can execute
CMDS = None
def paasta_commands():
"""
Read the files names in the cmds directory to determine the various commands
the paasta client is able to execute
:return: a list of string such as ['list','check'] that correspond to a
file in cmds
"""
global CMDS
if CMDS is None:
CMDS = []
path = "%s/cmds/*.py" % os.path.abspath('.')
for file_name in glob.glob(path):
start = file_name.rfind('/') + 1
end = file_name.rfind('.')
CMDS.append(file_name[start:end])
# Remove __init__.py
CMDS.sort()
CMDS.pop(0)
return CMDS
|
#!/usr/bin/env python
"""
Contains helper functions common to all paasta commands or the client
"""
import glob
import os
def paasta_commands():
"""
Read the files names in the cmds directory to determine the various commands
the paasta client is able to execute
:return: a list of string such as ['list','check'] that correspond to a
file in cmds
"""
path = '%s/cmds/*.py' % os.path.abspath('.')
for file_name in glob.glob(path):
basename = os.path.basename(file_name)
root, _ = os.path.splitext(basename)
if root == '__init__':
continue
yield root
|
Clean up directory parsing code
|
Clean up directory parsing code
|
Python
|
apache-2.0
|
gstarnberger/paasta,Yelp/paasta,somic/paasta,Yelp/paasta,gstarnberger/paasta,somic/paasta
|