commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
54296c607b735ce06b3420efecb312f52876e012
|
Replace warning message with deprecation warning
|
Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags
|
django_react_templatetags/context_processors.py
|
django_react_templatetags/context_processors.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import warnings
def react_context_processor(request):
"""Expose a global list of react components to be processed"""
warnings.warn(
"react_context_processor is no longer required.", DeprecationWarning
)
return {
'REACT_COMPONENTS': [],
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def react_context_processor(request):
"""Expose a global list of react components to be processed"""
print("react_context_processor is no longer required.")
return {
'REACT_COMPONENTS': [],
}
|
mit
|
Python
|
2b7e0d52a8a8764b66d8698800bf18e8adc9dae7
|
fix crash when running fix_loop_duplicates.py
|
rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo
|
dojo/management/commands/fix_loop_duplicates.py
|
dojo/management/commands/fix_loop_duplicates.py
|
from django.core.management.base import BaseCommand
from dojo.utils import fix_loop_duplicates
"""
Author: Marian Gawron
This script will identify loop dependencies in findings
"""
class Command(BaseCommand):
help = 'No input commands for fixing Loop findings.'
def handle(self, *args, **options):
fix_loop_duplicates()
|
from django.core.management.base import BaseCommand
from pytz import timezone
from dojo.utils import fix_loop_duplicates
locale = timezone(get_system_setting('time_zone'))
"""
Author: Marian Gawron
This script will identify loop dependencies in findings
"""
class Command(BaseCommand):
help = 'No input commands for fixing Loop findings.'
def handle(self, *args, **options):
fix_loop_duplicates()
|
bsd-3-clause
|
Python
|
e776ac5b08fa2a7ce299ec68697d330fb8a02fd5
|
upgrade __version__ in __init__.py to 1.4.0
|
millerdev/django-nose,krinart/django-nose,dgladkov/django-nose,sociateru/django-nose,harukaeru/django-nose,Deepomatic/django-nose,millerdev/django-nose,Deepomatic/django-nose,dgladkov/django-nose,alexhayes/django-nose,sociateru/django-nose,brilliant-org/django-nose,franciscoruiz/django-nose,brilliant-org/django-nose,aristiden7o/django-nose,aristiden7o/django-nose,franciscoruiz/django-nose,daineX/django-nose,harukaeru/django-nose,daineX/django-nose,alexhayes/django-nose,krinart/django-nose
|
django_nose/__init__.py
|
django_nose/__init__.py
|
VERSION = (1, 4, 0)
__version__ = '.'.join(map(str, VERSION))
from django_nose.runner import *
from django_nose.testcases import *
# Django < 1.2 compatibility.
run_tests = run_gis_tests = NoseTestSuiteRunner
|
VERSION = (1, 3, 0)
__version__ = '.'.join(map(str, VERSION))
from django_nose.runner import *
from django_nose.testcases import *
# Django < 1.2 compatibility.
run_tests = run_gis_tests = NoseTestSuiteRunner
|
bsd-3-clause
|
Python
|
eec67d43d208b490c9d219b3c38e586597b1fa73
|
Refactor generate_module_objects
|
alexamici/pytest-nodev,nodev-io/pytest-nodev,alexamici/pytest-wish
|
pytest_wish.py
|
pytest_wish.py
|
# -*- coding: utf-8 -*-
import importlib
import inspect
import re
import sys
import pytest
def pytest_addoption(parser):
group = parser.getgroup('wish')
group.addoption('--wish-modules', default=(), nargs='+',
help="Space separated list of module names.")
group.addoption('--wish-includes', nargs='+',
help="Space separated list of regexs matching full object names to include.")
group.addoption('--wish-excludes', default=(), nargs='+',
help="Space separated list of regexs matching full object names to exclude.")
group.addoption('--wish-fail', action='store_true', help="Show wish failures.")
def generate_module_objects(module):
for object_name, object_ in inspect.getmembers(module):
if inspect.getmodule(object_) is module:
yield object_name, object_
def valid_name(name, include_res, exclude_res):
include_name = any(include_re.match(name) for include_re in include_res)
exclude_name = any(exclude_re.match(name) for exclude_re in exclude_res)
return include_name and not exclude_name
def index_modules(modules, include_patterns, exclude_patterns=()):
include_res = [re.compile(pattern) for pattern in include_patterns]
exclude_res = [re.compile(pattern) for pattern in exclude_patterns]
object_index = {}
for module_name, module in modules.items():
for object_name, object_ in generate_module_objects(module):
full_object_name = '{}:{}'.format(module_name, object_name)
if valid_name(full_object_name, include_res, exclude_res):
object_index[full_object_name] = object_
return object_index
def pytest_generate_tests(metafunc):
if 'wish' not in metafunc.fixturenames:
return
wish_modules = metafunc.config.getoption('wish_modules')
for module_name in wish_modules:
importlib.import_module(module_name)
wish_includes = metafunc.config.getoption('wish_includes') or wish_modules
wish_excludes = metafunc.config.getoption('wish_excludes')
# NOTE: 'copy' is needed here because index_modules may unexpectedly trigger a module load
object_index = index_modules(sys.modules.copy(), wish_includes, wish_excludes)
object_items = sorted(object_index.items())
ids, params = list(zip(*object_items)) or [[], []]
metafunc.parametrize('wish', params, ids=ids, scope='module')
wish_fail = metafunc.config.getoption('wish_fail')
if not wish_fail:
metafunc.function = pytest.mark.xfail(metafunc.function)
|
# -*- coding: utf-8 -*-
import importlib
import inspect
import re
import sys
import pytest
def pytest_addoption(parser):
group = parser.getgroup('wish')
group.addoption('--wish-modules', default=(), nargs='+',
help="Space separated list of module names.")
group.addoption('--wish-includes', nargs='+',
help="Space separated list of regexs matching full object names to include.")
group.addoption('--wish-excludes', default=(), nargs='+',
help="Space separated list of regexs matching full object names to exclude.")
group.addoption('--wish-fail', action='store_true', help="Show wish failures.")
def generate_module_objects(module):
for object_name, object_ in inspect.getmembers(module):
obj_module = inspect.getmodule(object_)
if obj_module is not module:
continue
yield object_name, object_
def valid_name(name, include_res, exclude_res):
include_name = any(include_re.match(name) for include_re in include_res)
exclude_name = any(exclude_re.match(name) for exclude_re in exclude_res)
return include_name and not exclude_name
def index_modules(modules, include_patterns, exclude_patterns=()):
include_res = [re.compile(pattern) for pattern in include_patterns]
exclude_res = [re.compile(pattern) for pattern in exclude_patterns]
object_index = {}
for module_name, module in modules.items():
for object_name, object_ in generate_module_objects(module):
full_object_name = '{}:{}'.format(module_name, object_name)
if valid_name(full_object_name, include_res, exclude_res):
object_index[full_object_name] = object_
return object_index
def pytest_generate_tests(metafunc):
if 'wish' not in metafunc.fixturenames:
return
wish_modules = metafunc.config.getoption('wish_modules')
for module_name in wish_modules:
importlib.import_module(module_name)
wish_includes = metafunc.config.getoption('wish_includes') or wish_modules
wish_excludes = metafunc.config.getoption('wish_excludes')
# NOTE: 'copy' is needed here because index_modules may unexpectedly trigger a module load
object_index = index_modules(sys.modules.copy(), wish_includes, wish_excludes)
object_items = sorted(object_index.items())
ids, params = list(zip(*object_items)) or [[], []]
metafunc.parametrize('wish', params, ids=ids, scope='module')
wish_fail = metafunc.config.getoption('wish_fail')
if not wish_fail:
metafunc.function = pytest.mark.xfail(metafunc.function)
|
mit
|
Python
|
93e12746d19161b30e2dade0d71f22242603b0bd
|
Address fix
|
hopkira/k9-chess-angular,hopkira/k9-chess-angular,hopkira/k9-chess-angular,hopkira/k9-chess-angular
|
python/test.py
|
python/test.py
|
import math
from roboclaw import Roboclaw
address = 0x80
rc = Roboclaw("/dev/roboclaw",115200)
rc.Open()
version = rc.ReadVersion(address)
if version[0]==False:
print "GETVERSION Failed"
else:
print repr(version[1])
rc.SetM1VelocityPID(address,3000,300,0,708)
rc.SetM2VelocityPID(address,3000,300,0,720)
rc.WriteNVM(address)
nvm=[0,0,0]
rc.ReadNVM(address)
print str(nvm)
clicks = 300
click_vel = 30
ACCELERATION = 10
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=click_vel,distance1=int(abs(clicks/2)),speed2=int(click_vel),distance2=int(abs(clicks/2)),buffer=1)
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=0,distance1=int(abs(clicks/2)),speed2=0,distance2=int(abs(clicks/2)),buffer=0)
buffers = (0,0,0)
while (buffers[1]!=0x80 and buffers[2]!=0x80):
buffers = rc.ReadBuffers(address);
print "Waiting"
print "Stopping"
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=0,distance1=0,speed2=0,distance2=0,buffer=1)
print "Stop done"
|
import math
from roboclaw import Roboclaw
address = 0x80
rc = Roboclaw("/dev/roboclaw",115200)
rc.Open()
version = rc.ReadVersion(address)
if version[0]==False:
print "GETVERSION Failed"
else:
print repr(version[1])
rc.SetM1VelocityPID(rc_address,3000,300,0,708)
rc.SetM2VelocityPID(rc_address,3000,300,0,720)
rc.WriteNVM(address)
nvm=[0,0,0]
rc.ReadNVM(address)
print str(nvm)
clicks = 300
click_vel = 30
ACCELERATION = 10
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=click_vel,distance1=int(abs(clicks/2)),speed2=int(click_vel),distance2=int(abs(clicks/2)),buffer=1)
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=0,distance1=int(abs(clicks/2)),speed2=0,distance2=int(abs(clicks/2)),buffer=0)
buffers = (0,0,0)
while (buffers[1]!=0x80 and buffers[2]!=0x80):
buffers = rc.ReadBuffers(address);
print "Waiting"
print "Stopping"
rc.SpeedAccelDistanceM1M2(address=address,accel=ACCELERATION,speed1=0,distance1=0,speed2=0,distance2=0,buffer=1)
print "Stop done"
|
unlicense
|
Python
|
b860606c2ce654044131228ddfb741c517ab282e
|
make QLibraryInfo.location works
|
spyder-ide/qtpy
|
qtpy/QtCore.py
|
qtpy/QtCore.py
|
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Provides QtCore classes and functions.
"""
from . import PYQT6, PYQT5, PYSIDE2, PYSIDE6, PythonQtError
if PYQT6:
from PyQt6 import QtCore
from PyQt6.QtCore import *
from PyQt6.QtCore import pyqtSignal as Signal
from PyQt6.QtCore import pyqtBoundSignal as SignalInstance
from PyQt6.QtCore import pyqtSlot as Slot
from PyQt6.QtCore import pyqtProperty as Property
from PyQt6.QtCore import QT_VERSION_STR as __version__
# For issue #153
from PyQt6.QtCore import QDateTime
QDateTime.toPython = QDateTime.toPyDateTime
# Map missing methods
QCoreApplication.exec_ = QCoreApplication.exec
QEventLoop.exec_ = QEventLoop.exec
QThread.exec_ = QThread.exec
QLibraryInfo.location = QLibraryInfo.path
# Those are imported from `import *`
del pyqtSignal, pyqtBoundSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
# Allow unscoped access for enums inside the QtCore module
from .enums_compat import promote_enums
promote_enums(QtCore)
del QtCore
elif PYQT5:
from PyQt5.QtCore import *
from PyQt5.QtCore import pyqtSignal as Signal
from PyQt5.QtCore import pyqtBoundSignal as SignalInstance
from PyQt5.QtCore import pyqtSlot as Slot
from PyQt5.QtCore import pyqtProperty as Property
from PyQt5.QtCore import QT_VERSION_STR as __version__
# For issue #153
from PyQt5.QtCore import QDateTime
QDateTime.toPython = QDateTime.toPyDateTime
# Those are imported from `import *`
del pyqtSignal, pyqtBoundSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
elif PYSIDE6:
from PySide6.QtCore import *
import PySide6.QtCore
__version__ = PySide6.QtCore.__version__
# obsolete in qt6
Qt.BackgroundColorRole = Qt.BackgroundRole
Qt.TextColorRole = Qt.ForegroundRole
Qt.MidButton = Qt.MiddleButton
# Map DeprecationWarning methods
QCoreApplication.exec_ = QCoreApplication.exec
QEventLoop.exec_ = QEventLoop.exec
QThread.exec_ = QThread.exec
QTextStreamManipulator.exec_ = QTextStreamManipulator.exec
elif PYSIDE2:
from PySide2.QtCore import *
try: # may be limited to PySide-5.11a1 only
from PySide2.QtGui import QStringListModel
except Exception:
pass
import PySide2.QtCore
__version__ = PySide2.QtCore.__version__
else:
raise PythonQtError('No Qt bindings could be found')
|
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Provides QtCore classes and functions.
"""
from . import PYQT6, PYQT5, PYSIDE2, PYSIDE6, PythonQtError
if PYQT6:
from PyQt6 import QtCore
from PyQt6.QtCore import *
from PyQt6.QtCore import pyqtSignal as Signal
from PyQt6.QtCore import pyqtBoundSignal as SignalInstance
from PyQt6.QtCore import pyqtSlot as Slot
from PyQt6.QtCore import pyqtProperty as Property
from PyQt6.QtCore import QT_VERSION_STR as __version__
# For issue #153
from PyQt6.QtCore import QDateTime
QDateTime.toPython = QDateTime.toPyDateTime
# Map missing methods
QCoreApplication.exec_ = QCoreApplication.exec
QEventLoop.exec_ = QEventLoop.exec
QThread.exec_ = QThread.exec
# Those are imported from `import *`
del pyqtSignal, pyqtBoundSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
# Allow unscoped access for enums inside the QtCore module
from .enums_compat import promote_enums
promote_enums(QtCore)
del QtCore
elif PYQT5:
from PyQt5.QtCore import *
from PyQt5.QtCore import pyqtSignal as Signal
from PyQt5.QtCore import pyqtBoundSignal as SignalInstance
from PyQt5.QtCore import pyqtSlot as Slot
from PyQt5.QtCore import pyqtProperty as Property
from PyQt5.QtCore import QT_VERSION_STR as __version__
# For issue #153
from PyQt5.QtCore import QDateTime
QDateTime.toPython = QDateTime.toPyDateTime
# Those are imported from `import *`
del pyqtSignal, pyqtBoundSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
elif PYSIDE6:
from PySide6.QtCore import *
import PySide6.QtCore
__version__ = PySide6.QtCore.__version__
# obsolete in qt6
Qt.BackgroundColorRole = Qt.BackgroundRole
Qt.TextColorRole = Qt.ForegroundRole
Qt.MidButton = Qt.MiddleButton
# Map DeprecationWarning methods
QCoreApplication.exec_ = QCoreApplication.exec
QEventLoop.exec_ = QEventLoop.exec
QThread.exec_ = QThread.exec
QTextStreamManipulator.exec_ = QTextStreamManipulator.exec
elif PYSIDE2:
from PySide2.QtCore import *
try: # may be limited to PySide-5.11a1 only
from PySide2.QtGui import QStringListModel
except Exception:
pass
import PySide2.QtCore
__version__ = PySide2.QtCore.__version__
else:
raise PythonQtError('No Qt bindings could be found')
|
mit
|
Python
|
34b6d5c04e51e95874141d746dbfc6e16fcca967
|
Use capital letters in all view name words
|
Nikola-K/django_reddit,Nikola-K/django_reddit,Nikola-K/django_reddit
|
reddit/urls.py
|
reddit/urls.py
|
"""django_reddit URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
import views
urlpatterns = [
url(r'^$', views.frontpage, name="Frontpage"),
url(r'^comments/(?P<thread_id>[0-9]+)$', views.comments, name="Thread"),
url(r'^login/$', views.user_login, name="Login"),
url(r'^logout/$', views.user_logout, name="Logout"),
url(r'^register/$', views.register, name="Register"),
url(r'^submit/$', views.submit, name="Submit"),
url(r'^user/(?P<username>\w+)$', views.user_profile, name="User Profile"),
url(r'^profile/edit/$', views.edit_profile, name="Edit Profile"),
url(r'^post/comment/$', views.post_comment, name="Post Comment"),
url(r'^vote/$', views.vote, name="Vote on item"),
url(r'^populate/$', views.test_data, name="Create test data"),
]
|
"""django_reddit URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
import views
urlpatterns = [
url(r'^$', views.frontpage, name="Frontpage"),
url(r'^comments/(?P<thread_id>[0-9]+)$', views.comments, name="Thread"),
url(r'^login/$', views.user_login, name="Login"),
url(r'^logout/$', views.user_logout, name="Logout"),
url(r'^register/$', views.register, name="Register"),
url(r'^submit/$', views.submit, name="Submit"),
url(r'^user/(?P<username>\w+)$', views.user_profile, name="User Profile"),
url(r'^profile/edit/$', views.edit_profile, name="Edit profile"),
url(r'^post/comment/$', views.post_comment, name="Post Comment"),
url(r'^vote/$', views.vote, name="Vote on item"),
url(r'^populate/$', views.test_data, name="Create test data"),
]
|
apache-2.0
|
Python
|
21102267231fbdc171b670d62f5ee8baf7d4d4c4
|
Add multiplication operation
|
MelSchlemming/calc
|
calc.py
|
calc.py
|
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
|
import sys
def add_all(nums):
return sum(nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
|
bsd-3-clause
|
Python
|
f6fdbdc1176cffa0a145170cab583387f26f8649
|
Add module docstring
|
anchavesb/pyCalc
|
calc.py
|
calc.py
|
"""calc.py: A simple python calculator."""
import sys
if __name__ == '__main__':
print(sum(map(int, sys.argv[1:])))
|
import sys
if __name__ == '__main__':
print(sum(map(int, sys.argv[1:])))
|
bsd-3-clause
|
Python
|
5fd51adbbc136adc28725688c7bf1ecf56e978c1
|
Develop (#105)
|
ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints
|
auth/auth_backend.py
|
auth/auth_backend.py
|
"""
auth_backend.py
Peter Zujko (@zujko)
Defines Django authentication backend for shibboleth.
04/05/17
"""
from django.contrib.auth.models import User
class Attributes():
EDU_AFFILIATION = 'urn:oid:1.3.6.1.4.1.4447.1.41'
FIRST_NAME = 'urn:oid:2.5.4.42'
LAST_NAME = 'urn:oid:2.5.4.4'
USERNAME = 'urn:oid:0.9.2342.19200300.100.1.1'
class SAMLSPBackend(object):
def authenticate(self, request, saml_authentication=None):
if not saml_authentication:
return None
if saml_authentication.is_authenticated():
attributes = saml_authentication.get_attributes()
username = attributes[Attributes.USERNAME][0]
first_name = attributes[Attributes.FIRST_NAME][0]
last_name = attributes[Attributes.LAST_NAME][0]
affiliation = attributes.get(Attributes.EDU_AFFILIATION, ['-1'])
try:
# Grab attributes from shib and auth user
user = User.objects.get(username=username)
except User.DoesNotExist:
# If user does not exist in DB, Create a user object and save to DB
user = User(username=username, email=username + "@rit.edu")
user.set_unusable_password()
user.first_name = first_name
user.last_name = last_name
user.save()
# Set user profile attributes
user.profile.full_name = "{} {}".format(first_name, last_name)
user.profile.display_name = "{}{}".format(
first_name[0], last_name[0])
# Set user Affiliation
user.profile.has_access = 0
if 'Student' in affiliation:
user.profile.has_access = 1
user.profile.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
"""
auth_backend.py
Peter Zujko (@zujko)
Defines Django authentication backend for shibboleth.
04/05/17
"""
from django.contrib.auth.models import User
class Attributes():
EDU_AFFILIATION = 'urn:oid:1.3.6.1.4.1.4447.1.41'
FIRST_NAME = 'urn:oid:2.5.4.42'
LAST_NAME = 'urn:oid:2.5.4.4'
USERNAME = 'urn:oid:0.9.2342.19200300.100.1.1'
class SAMLSPBackend(object):
def authenticate(self, request, saml_authentication=None):
if not saml_authentication:
return None
if saml_authentication.is_authenticated():
attributes = saml_authentication.get_attributes()
username = attributes[Attributes.USERNAME][0]
first_name = attributes[Attributes.FIRST_NAME][0]
last_name = attributes[Attributes.LAST_NAME][0]
affiliation = attributes.get(Attributes.EDU_AFFILIATION, ['-1'])
try:
# Grab attributes from shib and auth user
user = User.objects.get(username=username)
except User.DoesNotExist:
# If user does not exist in DB, Create a user object and save to DB
user = User(username=username, email=username + "@rit.edu")
user.set_unusable_password()
user.first_name = first_name
user.last_name = last_name
user.save()
# Set user profile attributes
user.profile.full_name = "{} {}".format(first_name, last_name)
user.profile.display_name = "{}{}".format(
first_name[0], last_name[0])
# Set user Affiliation
user.profile.has_access = 1
if 'Employee' in affiliation or 'Alumni' in affiliation or '-1' in affiliation:
user.profile.has_access = 0
user.profile.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
apache-2.0
|
Python
|
95ead630018870f293613febc599a50e8c69c792
|
Change in field length
|
hydroshare/hydroshare,hydroshare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,FescueFungiShare/hydroshare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
hs_core/migrations/0030_resourcefile_file_folder.py
|
hs_core/migrations/0030_resourcefile_file_folder.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0029_auto_20161123_1858'),
]
operations = [
migrations.AddField(
model_name='resourcefile',
name='file_folder',
field=models.CharField(max_length=4096, null=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0029_auto_20161123_1858'),
]
operations = [
migrations.AddField(
model_name='resourcefile',
name='file_folder',
field=models.CharField(max_length=255, null=True),
),
]
|
bsd-3-clause
|
Python
|
9b9d6db9d99bec69e61070a743d0b2194c35e375
|
Mark as dead
|
vuolter/pyload,vuolter/pyload,vuolter/pyload
|
module/plugins/hoster/FreevideoCz.py
|
module/plugins/hoster/FreevideoCz.py
|
# -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
class FreevideoCz(DeadHoster):
__name__ = "FreevideoCz"
__version__ = "0.3"
__type__ = "hoster"
__pattern__ = r'http://(?:www\.)?freevideo\.cz/vase-videa/.+'
__description__ = """Freevideo.cz hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "[email protected]"
getInfo = create_getInfo(FreevideoCz)
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
import re
from module.plugins.Hoster import Hoster
from module.network.RequestFactory import getURL
def getInfo(urls):
result = []
for url in urls:
html = getURL(url)
if re.search(FreevideoCz.OFFLINE_PATTERN, html):
# File offline
result.append((url, 0, 1, url))
else:
result.append((url, 0, 2, url))
yield result
class FreevideoCz(Hoster):
__name__ = "FreevideoCz"
__type__ = "hoster"
__pattern__ = r'http://(?:www\.)?freevideo.cz/vase-videa/(.*)\.html'
__version__ = "0.2"
__description__ = """Freevideo.cz hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "[email protected]"
OFFLINE_PATTERN = r'<h2 class="red-corner-full">Str.nka nebyla nalezena</h2>'
LINK_PATTERN = r'clip: {\s*url: "([^"]+)"'
def setup(self):
self.multiDL = self.resumeDownload = True
def process(self, pyfile):
self.html = self.load(pyfile.url, decode=True)
if re.search(self.OFFLINE_PATTERN, self.html):
self.offline()
found = re.search(self.LINK_PATTERN, self.html)
if found is None:
self.fail("Parse error (URL)")
download_url = found.group(1)
pyfile.name = re.match(self.__pattern__, pyfile.url).group(1) + ".mp4"
self.download(download_url)
|
agpl-3.0
|
Python
|
1fed9f26010f24af14abff9444862ed0861adb63
|
Add simplification between parsing and execution
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
thinglang/runner.py
|
thinglang/runner.py
|
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.parser import parse
from thinglang.parser.simplifier import simplify
def run(source):
if not source:
raise ValueError('Source cannot be empty')
source = source.strip().replace(' ' * 4, '\t')
lexical_groups = list(lexer(source))
tree = parse(lexical_groups)
root_node = simplify(tree)
with ExecutionEngine(root_node) as engine:
engine.execute()
return engine.results()
|
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.parser import parse
def run(source):
if not source:
raise ValueError('Got empty source')
source = source.strip().replace(' ' * 4, '\t')
lexical_groups = list(lexer(source))
root_node = parse(lexical_groups)
with ExecutionEngine(root_node) as engine:
engine.execute()
return engine.results()
|
mit
|
Python
|
e57e003b85f0a88ac6e3c19d5765144f95ac9959
|
Increase version to 0.3.2rc
|
TissueMAPS/TmServer
|
tmserver/version.py
|
tmserver/version.py
|
# TmServer - TissueMAPS server application.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.3.2rc'
|
# TmServer - TissueMAPS server application.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__version__ = '0.3.1'
|
agpl-3.0
|
Python
|
c09bfe422d6dd705e5e38402dd8754f461fabe59
|
Support filtering by list of jobNo
|
gis-rpd/pipelines,gis-rpd/pipelines,gis-rpd/pipelines
|
tools/accounting.py
|
tools/accounting.py
|
#!/usr/bin/env python3
"""
collection: gisds.accountinglogs
"""
#--- standard library imports
#
from argparse import ArgumentParser
from datetime import datetime
import os
from pprint import PrettyPrinter
import sys
from time import gmtime, strftime
#--- project specific imports
#
# add lib dir for this pipeline installation to PYTHONPATH
LIB_PATH = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from mongodb import mongodb_conn
__author__ = "LIEW Jun Xian"
__email__ = "[email protected]"
__copyright__ = "2016 Genome Institute of Singapore"
__license__ = "The MIT License (MIT)"
def main():
"""
Main function
"""
instance = ArgumentParser(description=__doc__)
instance.add_argument("-j", "--jobNo", nargs="*", help="filter records by jobNo of jobs")
args = instance.parse_args()
if args.jobNo:
projection = {}
projection["jobs"] = 1
for jobNo in args.jobNo:
selection = {}
selection["jobs.jobNo"] = jobNo
# print("SELECTION:\t" + str(selection))
# print("PROJECTION:\t" + str(projection))
for document in mongodb_conn(False).gisds.accountinglogs.find(selection, projection):
for job in document["jobs"]:
if job["jobNo"] == jobNo:
job["ruWallClock"] = strftime("%Hh%Mm%Ss", gmtime(job["ruWallClock"]))
job["submissionTime"] = str(datetime.fromtimestamp(
job["submissionTime"]).isoformat()).replace(":", "-")
PrettyPrinter(indent=2).pprint(job)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
"""
collection: gisds.accountinglogs
"""
#--- standard library imports
#
from argparse import ArgumentParser
from datetime import datetime
import os
from pprint import PrettyPrinter
import sys
from time import gmtime, strftime
#--- project specific imports
#
# add lib dir for this pipeline installation to PYTHONPATH
LIB_PATH = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from mongodb import mongodb_conn
__author__ = "LIEW Jun Xian"
__email__ = "[email protected]"
__copyright__ = "2016 Genome Institute of Singapore"
__license__ = "The MIT License (MIT)"
def main():
"""
Main function
"""
instance = ArgumentParser(description=__doc__)
instance.add_argument("-j", "--jobNo", help="filter records by jobNo of jobs")
instance.add_argument("-o", "--owner", help="filter records by owner of jobs")
args = instance.parse_args()
selection = {}
if args.jobNo:
selection["jobs.jobNo"] = args.jobNo
if args.owner:
selection["jobs.owner"] = args.owner
projection = {}
projection["jobs"] = 1
# print("SELECTION:\t" + str(selection))
# print("PROJECTION:\t" + str(projection))
for document in mongodb_conn(False).gisds.accountinglogs.find(selection, projection):
for job in document["jobs"]:
if job["jobNo"] == args.jobNo:
job["ruWallClock"] = strftime("%Hh%Mm%Ss", gmtime(job["ruWallClock"]))
job["submissionTime"] = str(datetime.fromtimestamp(
job["submissionTime"]).isoformat()).replace(":", "-")
PrettyPrinter(indent=2).pprint(job)
if __name__ == "__main__":
main()
|
mit
|
Python
|
5ac6c93073c98ea17a0786e6e1a1de3837e460d9
|
Handle RSS feeds for blogs that don't have dates
|
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,natestedman/Observatory,natestedman/Observatory,natestedman/Observatory,rcos/Observatory
|
observatory/dashboard/models/Blog.py
|
observatory/dashboard/models/Blog.py
|
# Copyright (c) 2010, Nate Stedman <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import datetime
from dashboard.util import find_author
from django.db import models
from lib import feedparser, dateutil
from EventSet import EventSet
# a blog for a project
class Blog(EventSet):
class Meta:
app_label = 'dashboard'
# link to the blog, if it isn't hosted on dashboard
url = models.URLField("Blog Web Address", max_length = 64)
rss = models.URLField("Blog Feed", max_length = 64)
# fetches the posts from the rss feed
def fetch(self):
import BlogPost
# don't fetch internally hosted blogs
if not self.from_feed: return
events = []
# parse and iterate the feed
entries = feedparser.parse(self.rss).entries
for post in entries:
try:
date = dateutil.parser.parse(post.date).replace(tzinfo=None)
except:
date = datetime.datetime.utcnow()
# don't re-add old posts
if self.most_recent_date >= date:
continue
try:
content = post.content[0].value
except:
content = post.description
try:
author_name = post.author_detail["name"]
except:
author_name = None
events.append(self.add_event(BlogPost.BlogPost,
title = post.title,
summary = post.description,
from_feed = True,
author_name = author_name,
date = date,
extra_args = {
"external_link": post.link,
"content": content,
"blog_id": self.id
}
))
# find the new most recent date
dates = [event.date for event in events if event is not None]
dates.append(self.most_recent_date)
self.most_recent_date = max(dates)
self.save()
|
# Copyright (c) 2010, Nate Stedman <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from dashboard.util import find_author
from django.db import models
from lib import feedparser, dateutil
from EventSet import EventSet
# a blog for a project
class Blog(EventSet):
class Meta:
app_label = 'dashboard'
# link to the blog, if it isn't hosted on dashboard
url = models.URLField("Blog Web Address", max_length = 64)
rss = models.URLField("Blog Feed", max_length = 64)
# fetches the posts from the rss feed
def fetch(self):
import BlogPost
# don't fetch internally hosted blogs
if not self.from_feed: return
events = []
# parse and iterate the feed
entries = feedparser.parse(self.rss).entries
for post in entries:
# time manipation is fun
date = dateutil.parser.parse(post.date)
try:
date = (date - date.utcoffset()).replace(tzinfo=None)
except:
pass
# don't re-add old posts
if self.most_recent_date >= date:
continue
try:
content = post.content[0].value
except:
content = post.description
try:
author_name = post.author_detail["name"]
except:
author_name = None
events.append(self.add_event(BlogPost.BlogPost,
title = post.title,
summary = post.description,
from_feed = True,
author_name = author_name,
date = date,
extra_args = {
"external_link": post.link,
"content": content,
"blog_id": self.id
}
))
# find the new most recent date
dates = [event.date for event in events if event is not None]
dates.append(self.most_recent_date)
self.most_recent_date = max(dates)
self.save()
|
isc
|
Python
|
ff30fbd3adef0de27c7b3f690fff1c47c6d42b6a
|
set tree self.depth to minDepth
|
FermiDirak/RandomForest
|
DecisionTree.py
|
DecisionTree.py
|
class Node:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
class Tree:
def __init__(self, dataset, minDepth):
self.root = None
self.left = None
self.right = None
self.data = dataset
self.depth = minDepth
def add_head(self, data):
if self.root == None:
self.root = data #unsafe lol
def add_left_child(self, node, data):
"""possible implementation?"""
if node.left == None: node.left = Node(data)
def add_right_child(self, data):
"""possible implementation?"""
if node.right == None: node.right = Node(data)
def get_data(self, node):
return node.data
def gen_tree(self, depth=self.depth):
""" builds full tree recursively """
if depth == 1:
return Node()
self.root = Node()
self.root.left = gen_tree(depth-1)
self.root.right = gen_tree(depth-1)
return self.root
#gets a random split point for the dataset
def getRandomSplit(dataset):
split = np.transpose(np.matrix(np.zeros(2)))
coordN = np.round(np.random.rand())
coordM = np.floor(dataset.size.m * np.random.rand())
split[coordN, 0] = dataset[coordN + 1, coordM]
return split
#get the best split point for dataset
def getBestGiniSplit(dataset, labelsCount):
return 0
#calculates gini value of a given dataset
def calcGini(histogram, labelsCount):
gini = 0
for i in range(0, histogram.size.m)
gini += histogram[0, i] * histogram[0, i]
gini = 1 - gini
return gini
#returns a 1 * labelCount matrix of histogram data
def getHistogram(dataset, labelsCount):
histogram = np.matrix(np.zeros(labelsCount))
for i in range(dataset.size.m)
j = dataset[0, i]
histogram[0, j] += 1
return histogram
if __name__ == '__main__':
# toy tree demo
root = Tree()
|
class Node:
def __init__(self, data=None):
self.data = data
self.left = None
self.right = None
class Tree: # passing (object ) into class is no longer needed in python3
def __init__(self, dataset, minDepth, depth = 3):
self.root = None
self.left = None
self.right = None
self.data = dataset
self.depth = depth
def add_head(self, data):
if self.root == None:
self.root = data #unsafe lol
def add_left_child(self, node, data):
"""possible implementation?"""
if node.left == None: node.left = Node(data)
def add_right_child(self, data):
"""possible implementation?"""
if node.right == None: node.right = Node(data)
def get_data(self, node):
return node.data
def gen_tree(self, depth=self.depth):
""" builds full tree recursively """
if depth == 1:
return Node()
self.root = Node()
self.root.left = gen_tree(depth-1)
self.root.right = gen_tree(depth-1)
return self.root
#gets a random split point for the dataset
def getRandomSplit(dataset):
split = np.transpose(np.matrix(np.zeros(2)))
coordN = np.round(np.random.rand())
coordM = np.floor(dataset.size.m * np.random.rand())
split[coordN, 0] = dataset[coordN + 1, coordM]
return split
#get the best split point for dataset
def getBestGiniSplit(dataset, labelsCount):
return 0
#calculates gini value of a given dataset
def calcGini(histogram, labelsCount):
gini = 0
for i in range(0, histogram.size.m)
gini += histogram[0, i] * histogram[0, i]
gini = 1 - gini
return gini
#returns a 1 * labelCount matrix of histogram data
def getHistogram(dataset, labelsCount):
histogram = np.matrix(np.zeros(labelsCount))
for i in range(dataset.size.m)
j = dataset[0, i]
histogram[0, j] += 1
return histogram
if __name__ == '__main__':
# toy tree demo
root = Tree()
|
mit
|
Python
|
a5bef7ac44a688b9d4493c28210a1a3fbcb64ffe
|
Fix channel comparison with # prefix
|
slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackhq/python-slackclient
|
slackclient/_channel.py
|
slackclient/_channel.py
|
class Channel(object):
def __init__(self, server, name, channel_id, members=None):
self.server = server
self.name = name
self.id = channel_id
self.members = [] if members is None else members
def __eq__(self, compare_str):
if self.name == compare_str or "#" + self.name == compare_str or self.id == compare_str:
return True
else:
return False
def __str__(self):
data = ""
for key in list(self.__dict__.keys()):
data += "{0} : {1}\n".format(key, str(self.__dict__[key])[:40])
return data
def __repr__(self):
return self.__str__()
def send_message(self, message):
message_json = {"type": "message", "channel": self.id, "text": message}
self.server.send_to_websocket(message_json)
|
class Channel(object):
def __init__(self, server, name, channel_id, members=None):
self.server = server
self.name = name
self.id = channel_id
self.members = [] if members is None else members
def __eq__(self, compare_str):
if self.name == compare_str or self.name == "#" + compare_str or self.id == compare_str:
return True
else:
return False
def __str__(self):
data = ""
for key in list(self.__dict__.keys()):
data += "{0} : {1}\n".format(key, str(self.__dict__[key])[:40])
return data
def __repr__(self):
return self.__str__()
def send_message(self, message):
message_json = {"type": "message", "channel": self.id, "text": message}
self.server.send_to_websocket(message_json)
|
mit
|
Python
|
a2a652620fa4d7504baa42f08fc80bd2a7db1341
|
Make frozendict peristently-hasheable
|
edgedb/edgedb,edgedb/edgedb,edgedb/edgedb
|
edgedb/lang/common/datastructures/immutables.py
|
edgedb/lang/common/datastructures/immutables.py
|
##
# Copyright (c) 2008-2010, 2014 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import abc
import collections
from metamagic.utils.algos.persistent_hash import persistent_hash
class ImmutableMeta(type):
def __new__(mcls, name, bases, dct):
if '_shadowed_methods_' in dct:
shadowed = dct['_shadowed_methods_']
del dct['_shadowed_methods_']
for method in shadowed:
def meth(self, *args, _allow_mutation_=False, **kwargs):
if not _allow_mutation_:
raise TypeError('%r is immutable' % self.__class__.__name__)
return super()[method](*args, **kwargs)
meth.__name__ = method
dct[method] = meth
return super().__new__(mcls, name, bases, dct)
class frozendict(dict, metaclass=ImmutableMeta):
"""Immutable dict (like ``frozenset`` for ``set``.)"""
_shadowed_methods_ = ('__setitem__', '__delitem__', 'update', 'clear',
'pop', 'popitem', 'setdefault')
def __reduce_ex__(self, protocol):
return type(self), (dict(self),)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, super().__repr__())
__eq__ = dict.__eq__
def __hash__(self):
return hash(frozenset(self.items()))
def persistent_hash(self):
return persistent_hash(frozenset(self.items()))
|
##
# Copyright (c) 2008-2010 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import abc
import collections
class ImmutableMeta(type):
def __new__(mcls, name, bases, dct):
if '_shadowed_methods_' in dct:
shadowed = dct['_shadowed_methods_']
del dct['_shadowed_methods_']
for method in shadowed:
def meth(self, *args, _allow_mutation_=False, **kwargs):
if not _allow_mutation_:
raise TypeError('%r is immutable' % self.__class__.__name__)
return super()[method](*args, **kwargs)
meth.__name__ = method
dct[method] = meth
return super().__new__(mcls, name, bases, dct)
class frozendict(dict, metaclass=ImmutableMeta):
"""Immutable dict (like ``frozenset`` for ``set``.)"""
_shadowed_methods_ = ('__setitem__', '__delitem__', 'update', 'clear',
'pop', 'popitem', 'setdefault')
def __reduce_ex__(self, protocol):
return type(self), (dict(self),)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, super().__repr__())
__eq__ = dict.__eq__
def __hash__(self):
return hash(frozenset(self.items()))
|
apache-2.0
|
Python
|
4cda993213fce2b4567ba31f2dc6a116445ce664
|
rollback on dummy database now has no effect (previously raised an error). This means that custom 500 error pages (and e-mailed exceptions) now work even if a database has not been configured. Fixes #4429.
|
apollo13/django,krishna-pandey-git/django,BMJHayward/django,pquentin/django,risicle/django,jylaxp/django,h4r5h1t/django-hauthy,baylee/django,frishberg/django,tcwicklund/django,programadorjc/django,andreif/django,MarcJoan/django,Proggie02/TestRepo,gdub/django,zsiciarz/django,Leila20/django,shaib/django,ptoraskar/django,supriyantomaftuh/django,BMJHayward/django,chyeh727/django,aleida/django,MoritzS/django,rynomster/django,AlexHill/django,aerophile/django,yograterol/django,jeezybrick/django,beni55/django,adelton/django,GhostThrone/django,erikr/django,GaussDing/django,rlugojr/django,eugena/django,duqiao/django,TimBuckley/effective_django,EmadMokhtar/Django,hackerbot/DjangoDev,BrotherPhil/django,MarkusH/django,Anonymous-X6/django,BrotherPhil/django,mrfuxi/django,solarissmoke/django,ghickman/django,sephii/django,solarissmoke/django,saydulk/django,TridevGuha/django,davidharrigan/django,ebar0n/django,katrid/django,jgoclawski/django,mojeto/django,sbellem/django,yewang15215/django,akintoey/django,doismellburning/django,eyohansa/django,vitaly4uk/django,auvipy/django,roselleebarle04/django,vitan/django,frdb194/django,alilotfi/django,andreif/django,waytai/django,Endika/django,avneesh91/django,Balachan27/django,yask123/django,blueyed/django,Argon-Zhou/django,knifenomad/django,mcella/django,dursk/django,doismellburning/django,yakky/django,xrmx/django,yask123/django,TridevGuha/django,chrishas35/django-travis-ci,hobarrera/django,gannetson/django,HousekeepLtd/django,ryanahall/django,jhg/django,georgemarshall/django,wetneb/django,aspidites/django,erikr/django,elijah513/django,krisys/django,henryfjordan/django,jhg/django,jvkops/django,mdj2/django,ryangallen/django,dhruvagarwal/django,tcwicklund/django,digimarc/django,mlavin/django,django-nonrel/django,reinout/django,rockneurotiko/django,litchfield/django,gdi2290/django,ASCrookes/django,marqueedev/django,georgemarshall/django,mattrobenolt/django,marckuz/django,filias/django,pasqualguerrero/django,django-nonrel/django-nonrel,sdcooke/django,weiawe/django,x111ong/django,z0by/django,sjlehtin/django,davidharrigan/django,nju520/django,double-y/django,blindroot/django,himleyb85/django,bikong2/django,ghedsouza/django,blindroot/django,mcardillo55/django,ericfc/django,Argon-Zhou/django,zerc/django,pjdelport/django,rwillmer/django,alexallah/django,HonzaKral/django,tragiclifestories/django,sjlehtin/django,MarcJoan/django,Beauhurst/django,aerophile/django,Vixionar/django,leekchan/django_test,marctc/django,timgraham/django,jdelight/django,Y3K/django,dhruvagarwal/django,manhhomienbienthuy/django,matiasb/django,gcd0318/django,oberlin/django,EliotBerriot/django,etos/django,digimarc/django,ytjiang/django,rapilabs/django,beckastar/django,kamyu104/django,ojake/django,camilonova/django,darjeeling/django,ccn-2m/django,timgraham/django,elijah513/django,rsalmaso/django,blighj/django,RossBrunton/django,bitcity/django,robhudson/django,bspink/django,Yong-Lee/django,nemesisdesign/django,aidanlister/django,b-me/django,apollo13/django,rmboggs/django,blueyed/django,chrishas35/django-travis-ci,akshatharaj/django,wsmith323/django,loic/django,mitya57/django,henryfjordan/django,z0by/django,areski/django,gunchleoc/django,druuu/django,andela-ooladayo/django,rlugojr/django,jejimenez/django,ptoraskar/django,kevintaw/django,pquentin/django,adamchainz/django,chrisfranzen/django,vsajip/django,carljm/django,DasIch/django,myang321/django,helenst/django,MikeAmy/django,HonzaKral/django,kamyu104/django,kangfend/django,hottwaj/django,mmardini/django,delhivery/django,tuhangdi/django,sbellem/django,sbellem/django,t0in4/django,kevintaw/django,zhoulingjun/django,lunafeng/django,roselleebarle04/django,ebar0n/django,b-me/django,lwiecek/django,litchfield/django,alimony/django,errx/django,abomyi/django,spisneha25/django,nealtodd/django,kisna72/django,dursk/django,RaoUmer/django,loic/django,bikong2/django,jsoref/django,jgeskens/django,auvipy/django,mojeto/django,knifenomad/django,dbaxa/django,donkirkby/django,rhertzog/django,risicle/django,jarshwah/django,hackerbot/DjangoDev,ABaldwinHunter/django-clone-classic,ABaldwinHunter/django-clone,beni55/django,koordinates/django,ABaldwinHunter/django-clone,sarthakmeh03/django,schinckel/django,myang321/django,anant-dev/django,dbaxa/django,Nepherhotep/django,errx/django,AltSchool/django,mrbox/django,rajsadho/django,sdcooke/django,RaoUmer/django,JorgeCoock/django,coldmind/django,BrotherPhil/django,WSDC-NITWarangal/django,filias/django,deployed/django,mattrobenolt/django,dwightgunning/django,Y3K/django,ArnossArnossi/django,aerophile/django,twz915/django,asser/django,ccn-2m/django,weiawe/django,Beauhurst/django,jrrembert/django,SebasSBM/django,tragiclifestories/django,olasitarska/django,wsmith323/django,felixjimenez/django,benspaulding/django,1013553207/django,vitaly4uk/django,jdelight/django,frePPLe/django,hynekcer/django,zhaodelong/django,atul-bhouraskar/django,helenst/django,1013553207/django,sdcooke/django,jpic/django,rhertzog/django,duqiao/django,tbeadle/django,liavkoren/djangoDev,akshatharaj/django,frankvdp/django,ticosax/django,hackerbot/DjangoDev,ryangallen/django,myang321/django,spisneha25/django,x111ong/django,wweiradio/django,kosz85/django,jgoclawski/django,mmardini/django,elijah513/django,Leila20/django,Korkki/django,kutenai/django,mbox/django,intgr/django,labcodes/django,ABaldwinHunter/django-clone-classic,barbuza/django,areski/django,davgibbs/django,yakky/django,riklaunim/django-custom-multisite,divio/django,pipermerriam/django,PetrDlouhy/django,maxsocl/django,xadahiya/django,synasius/django,zanderle/django,extremewaysback/django,DONIKAN/django,alrifqi/django,xwolf12/django,xwolf12/django,baylee/django,mewtaylor/django,jejimenez/django,ivandevp/django,ghickman/django,monetate/django,vmarkovtsev/django,Proggie02/TestRepo,mitar/django,etos/django,MarkusH/django,camilonova/django,jasonbot/django,zerc/django,apocquet/django,adelton/django,gannetson/django,mdj2/django,adrianholovaty/django,knifenomad/django,pelme/django,spisneha25/django,avanov/django,rapilabs/django,darkryder/django,jallohm/django,marqueedev/django,seanwestfall/django,davgibbs/django,aleida/django,irwinlove/django,kosz85/django,AltSchool/django,fpy171/django,harisibrahimkv/django,leeon/annotated-django,taaviteska/django,litchfield/django,makinacorpus/django,delhivery/django,sergei-maertens/django,simone/django-gb,simonw/django,ericholscher/django,felixjimenez/django,JavML/django,craynot/django,daniponi/django,vmarkovtsev/django,mitar/django,shtouff/django,dydek/django,kcpawan/django,mttr/django,dpetzold/django,ojake/django,KokareIITP/django,katrid/django,kamyu104/django,alimony/django,akaariai/django,moreati/django,hellhovnd/django,whs/django,ajaali/django,JavML/django,mammique/django,liu602348184/django,riklaunim/django-custom-multisite,redhat-openstack/django,marckuz/django,b-me/django,kamyu104/django,yamila-moreno/django,aidanlister/django,t0in4/django,liu602348184/django,gengue/django,rwillmer/django,jaywreddy/django,manhhomienbienthuy/django,yamila-moreno/django,zhoulingjun/django,shaistaansari/django,andresgz/django,jnovinger/django,marcelocure/django,taaviteska/django,GhostThrone/django,ASCrookes/django,yigitguler/django,jylaxp/django,szopu/django,chyeh727/django,matiasb/django,ar45/django,gdub/django,yceruto/django,elky/django,jscn/django,HousekeepLtd/django,andreif/django,karyon/django,github-account-because-they-want-it/django,Argon-Zhou/django,simonw/django,mbox/django,alrifqi/django,beck/django,NullSoldier/django,jscn/django,tragiclifestories/django,BrotherPhil/django,tayfun/django,Vixionar/django,tanmaythakur/django,jenalgit/django,EmadMokhtar/Django,crazy-canux/django,ghedsouza/django,leereilly/django-1,edmorley/django,hellhovnd/django,charettes/django,zerc/django,xadahiya/django,ecederstrand/django,makinacorpus/django,hunter007/django,gcd0318/django,sjlehtin/django,ulope/django,dracos/django,avanov/django,jasonwzhy/django,poiati/django,shaistaansari/django,mrbox/django,liuliwork/django,ASCrookes/django,rsvip/Django,blaze33/django,tuhangdi/django,syphar/django,mattrobenolt/django,katrid/django,dfdx2/django,ataylor32/django,andrewsmedina/django,leeon/annotated-django,jaywreddy/django,adrianholovaty/django,asser/django,BlindHunter/django,gitaarik/django,ArnossArnossi/django,Matt-Deacalion/django,jnovinger/django,takeshineshiro/django,marissazhou/django,lmorchard/django,adamchainz/django,ryangallen/django,shacker/django,postrational/django,Adnn/django,pasqualguerrero/django,apollo13/django,oscaro/django,alimony/django,Beeblio/django,jgeskens/django,risicle/django,akaariai/django,ziima/django,uranusjr/django,freakboy3742/django,hnakamur/django,wkschwartz/django,arun6582/django,rrrene/django,wweiradio/django,ivandevp/django,ryanahall/django,pasqualguerrero/django,zedr/django,Y3K/django,vmarkovtsev/django,megaumi/django,willharris/django,raphaelmerx/django,whs/django,sarthakmeh03/django,freakboy3742/django,shacker/django,dursk/django,kutenai/django,szopu/django,jyotsna1820/django,epandurski/django,henryfjordan/django,1013553207/django,joakim-hove/django,SoftwareMaven/django,yigitguler/django,carljm/django,lsqtongxin/django,atul-bhouraskar/django,gchp/django,Argon-Zhou/django,denisenkom/django,jasonbot/django,cainmatt/django,mathspace/django,cobalys/django,stevenewey/django,rlugojr/django,rapilabs/django,simonw/django,felixxm/django,quxiaolong1504/django,syphar/django,hkchenhongyi/django,megaumi/django,shtouff/django,manhhomienbienthuy/django,devops2014/djangosite,ArnossArnossi/django,takeshineshiro/django,indevgr/django,apocquet/django,PetrDlouhy/django,intgr/django,yceruto/django,frishberg/django,dursk/django,xrmx/django,dudepare/django,uranusjr/django,willhardy/django,kangfend/django,mewtaylor/django,labcodes/django,shaistaansari/django,imtapps/django-imt-fork,fenginx/django,hybrideagle/django,edmorley/django,druuu/django,blaze33/django,edmorley/django,devops2014/djangosite,AltSchool/django,riteshshrv/django,avneesh91/django,Mixser/django,frankvdp/django,akaihola/django,Beeblio/django,elkingtonmcb/django,sergei-maertens/django,craynot/django,wkschwartz/django,YYWen0o0/python-frame-django,mitchelljkotler/django,techdragon/django,denis-pitul/django,KokareIITP/django,eyohansa/django,SebasSBM/django,marckuz/django,salamer/django,jenalgit/django,Endika/django,sgzsh269/django,jmcarp/django,takis/django,dex4er/django,rizumu/django,webgeodatavore/django,dfunckt/django,WSDC-NITWarangal/django,jylaxp/django,baylee/django,ifduyue/django,craynot/django,petecummings/django,IRI-Research/django,asser/django,hassanabidpk/django,HonzaKral/django,wweiradio/django,moreati/django,kaedroho/django,nju520/django,hellhovnd/django,gohin/django,hybrideagle/django,matiasb/django,GitAngel/django,beckastar/django,roselleebarle04/django,codepantry/django,curtisstpierre/django,lsqtongxin/django,denis-pitul/django,cobalys/django,jhoos/django,andela-ooladayo/django,edevil/django,yewang15215/django,nhippenmeyer/django,aisipos/django,mjtamlyn/django,shownomercy/django,liavkoren/djangoDev,Proggie02/TestRepo,mmardini/django,jpic/django,karyon/django,jmcarp/django,follow99/django,alilotfi/django,djbaldey/django,sergei-maertens/django,ojengwa/django-1,lwiecek/django,mttr/django,sgzsh269/django,zedr/django,benspaulding/django,mshafiq9/django,hynekcer/django,koordinates/django,schinckel/django,PolicyStat/django,hellhovnd/django,nielsvanoch/django,kcpawan/django,takis/django,tbeadle/django,ajaali/django,bak1an/django,jasonbot/django,georgemarshall/django,oberlin/django,gitaarik/django,rlugojr/django,ArnossArnossi/django,mammique/django,aroche/django,jaywreddy/django,bikong2/django,sephii/django,ccn-2m/django,phalt/django,jrrembert/django,rajsadho/django,jvkops/django,ataylor32/django,jnovinger/django,dsanders11/django,davidharrigan/django,gunchleoc/django,blighj/django,Korkki/django,rogerhu/django,imtapps/django-imt-fork,imtapps/django-imt-fork,hnakamur/django,ericfc/django,alexmorozov/django,tayfun/django,django/django,hybrideagle/django,peterlauri/django,waytai/django,andrewsmedina/django,cainmatt/django,NullSoldier/django,kangfend/django,googleinterns/django,guettli/django,Mixser/django,denisenkom/django,rmboggs/django,djbaldey/django,ebar0n/django,liuliwork/django,petecummings/django,rizumu/django,gunchleoc/django,payeldillip/django,hnakamur/django,zulip/django,sadaf2605/django,ghedsouza/django,AndrewGrossman/django,dsanders11/django,dgladkov/django,tomchristie/django,krishna-pandey-git/django,marqueedev/django,anant-dev/django,hottwaj/django,gchp/django,areski/django,peterlauri/django,tuhangdi/django,lzw120/django,alx-eu/django,Mixser/django,scorphus/django,tomchristie/django,edevil/django,fpy171/django,coldmind/django,quamilek/django,tcwicklund/django,YangSongzhou/django,drjeep/django,delinhabit/django,jarshwah/django,rtindru/django,sadaf2605/django,vincepandolfo/django,anant-dev/django,haxoza/django,marcelocure/django,kaedroho/django,sarvex/django,dracos/django,ghickman/django,petecummings/django,edevil/django,mathspace/django,delhivery/django,vincepandolfo/django,vsajip/django,MounirMesselmeni/django,riteshshrv/django,ckirby/django,mitya57/django,kswiat/django,dsanders11/django,django/django,manhhomienbienthuy/django,solarissmoke/django,megaumi/django,leekchan/django_test,abomyi/django,quamilek/django,bitcity/django,rynomster/django,divio/django,Nepherhotep/django,sam-tsai/django,huang4fstudio/django,bitcity/django,ericfc/django,kisna72/django,alx-eu/django,redhat-openstack/django,jasonwzhy/django,davgibbs/django,megaumi/django,riteshshrv/django,hcsturix74/django,DrMeers/django,digimarc/django,guettli/django,deployed/django,frdb194/django,github-account-because-they-want-it/django,elena/django,gcd0318/django,fenginx/django,ccn-2m/django,elena/django,Yong-Lee/django,arun6582/django,aleida/django,SebasSBM/django,dgladkov/django,dydek/django,tayfun/django,akintoey/django,rrrene/django,tanmaythakur/django,ataylor32/django,rsalmaso/django,TridevGuha/django,eyohansa/django,techdragon/django,marcelocure/django,monetate/django,webgeodatavore/django,litchfield/django,aroche/django,ivandevp/django,stewartpark/django,lisael/pg-django,xrmx/django,payeldillip/django,beck/django,zulip/django,mitya57/django,xadahiya/django,blindroot/django,GhostThrone/django,Mixser/django,akintoey/django,supriyantomaftuh/django,denys-duchier/django,whs/django,lunafeng/django,oinopion/django,treyhunner/django,ecederstrand/django,wetneb/django,piquadrat/django,schinckel/django,shaib/django,treyhunner/django,peterlauri/django,ckirby/django,simone/django-gb,mattseymour/django,mrbox/django,sarvex/django,MarcJoan/django,kosz85/django,yceruto/django,taaviteska/django,uranusjr/django,ecederstrand/django,ar45/django,ironbox360/django,jdelight/django,GaussDing/django,abomyi/django,alrifqi/django,django-nonrel/django,bobcyw/django,rockneurotiko/django,vincepandolfo/django,hkchenhongyi/django,eyohansa/django,jaywreddy/django,duqiao/django,szopu/django,delinhabit/django,varunnaganathan/django,BlindHunter/django,shacker/django,MikeAmy/django,theo-l/django,savoirfairelinux/django,mcrowson/django,shownomercy/django,sdcooke/django,tysonclugg/django,rtindru/django,darkryder/django,darjeeling/django,MatthewWilkes/django,sgzsh269/django,blaze33/django,blighj/django,vitan/django,yamila-moreno/django,DONIKAN/django,TimYi/django,atul-bhouraskar/django,yamila-moreno/django,labcodes/django,dpetzold/django,stevenewey/django,ojake/django,codepantry/django,intgr/django,MatthewWilkes/django,mitchelljkotler/django,JorgeCoock/django,henryfjordan/django,gdub/django,curtisstpierre/django,andresgz/django,savoirfairelinux/django,dex4er/django,ojengwa/django-1,piquadrat/django,kevintaw/django,darjeeling/django,follow99/django,bikong2/django,shtouff/django,AndrewGrossman/django,avanov/django,jasonwzhy/django,ebar0n/django,deployed/django,robhudson/django,waytai/django,aspidites/django,hobarrera/django,weiawe/django,lzw120/django,jnovinger/django,kisna72/django,varunnaganathan/django,feroda/django,seocam/django,carljm/django,erikr/django,liuliwork/django,nielsvanoch/django,jsoref/django,PolicyStat/django,savoirfairelinux/django,shownomercy/django,alexallah/django,andyzsf/django,Leila20/django,syaiful6/django,pquentin/django,b-me/django,RossBrunton/django,mattseymour/django,feroda/django,Sonicbids/django,GitAngel/django,darjeeling/django,barbuza/django,nemesisdesign/django,maxsocl/django,mlavin/django,denis-pitul/django,hottwaj/django,andresgz/django,ticosax/django,mitar/django,petecummings/django,olasitarska/django,unaizalakain/django,hybrideagle/django,jmcarp/django,h4r5h1t/django-hauthy,monetate/django,fpy171/django,himleyb85/django,rsalmaso/django,marctc/django,benjaminjkraft/django,BMJHayward/django,dgladkov/django,SoftwareMaven/django,elena/django,marctc/django,dfdx2/django,jenalgit/django,chrishas35/django-travis-ci,pipermerriam/django,waytai/django,jarshwah/django,koordinates/django,mcrowson/django,Anonymous-X6/django,MounirMesselmeni/django,dracos/django,willhardy/django,koniiiik/django,mcella/django,elkingtonmcb/django,iambibhas/django,simone/django-gb,labcodes/django,areski/django,koniiiik/django,wetneb/django,DrMeers/django,cainmatt/django,RevelSystems/django,ulope/django,yograterol/django,caotianwei/django,mattseymour/django,mlavin/django,AndrewGrossman/django,pelme/django,mttr/django,DasIch/django,chyeh727/django,solarissmoke/django,mjtamlyn/django,djbaldey/django,rockneurotiko/django,crazy-canux/django,NullSoldier/django,tanmaythakur/django,hynekcer/django,daniponi/django,risicle/django,joequery/django,oscaro/django,Adnn/django,jhg/django,jscn/django,lmorchard/django,felixjimenez/django,rynomster/django,kswiat/django,fafaman/django,caotianwei/django,unaizalakain/django,x111ong/django,drjeep/django,HonzaKral/django,darkryder/django,WillGuan105/django,beck/django,dex4er/django,timgraham/django,rsvip/Django,lsqtongxin/django,baylee/django,lisael/pg-django,ABaldwinHunter/django-clone,jeezybrick/django,WSDC-NITWarangal/django,ABaldwinHunter/django-clone-classic,gitaarik/django,kennethlove/django,tysonclugg/django,frishberg/django,apocquet/django,auready/django,elkingtonmcb/django,schinckel/django,xadahiya/django,donkirkby/django,eugena/django,bspink/django,uranusjr/django,gcd0318/django,github-account-because-they-want-it/django,eugena/django,takis/django,takis/django,gengue/django,jpic/django,stewartpark/django,vmarkovtsev/django,pipermerriam/django,raphaelmerx/django,fenginx/django,chrisfranzen/django,shaib/django,quxiaolong1504/django,abomyi/django,neiudemo1/django,hunter007/django,HousekeepLtd/django,googleinterns/django,dudepare/django,alilotfi/django,TimBuckley/effective_django,WillGuan105/django,jn7163/django,elkingtonmcb/django,mcardillo55/django,YYWen0o0/python-frame-django,adamchainz/django,davidharrigan/django,ericfc/django,programadorjc/django,krishna-pandey-git/django,yakky/django,riteshshrv/django,adambrenecki/django,YangSongzhou/django,druuu/django,follow99/django,adamchainz/django,MarkusH/django,zanderle/django,dwightgunning/django,wetneb/django,joequery/django,jenalgit/django,quamilek/django,zhaodelong/django,unaizalakain/django,stevenewey/django,Nepherhotep/django,charettes/django,Endika/django,ojake/django,taaviteska/django,etos/django,hassanabidpk/django,jeezybrick/django,whs/django,caotianwei/django,marissazhou/django,sephii/django,mattrobenolt/django,ptoraskar/django,ericholscher/django,stewartpark/django,jallohm/django,frePPLe/django,mcardillo55/django,WSDC-NITWarangal/django,lunafeng/django,postrational/django,davgibbs/django,curtisstpierre/django,MikeAmy/django,poiati/django,ajoaoff/django,vincepandolfo/django,bliti/django-nonrel-1.5,mcrowson/django,xwolf12/django,Endika/django,peterlauri/django,IRI-Research/django,benspaulding/django,epandurski/django,frePPLe/django,dydek/django,andela-ooladayo/django,aroche/django,ziima/django,TimBuckley/effective_django,gengue/django,yask123/django,gohin/django,MoritzS/django,zulip/django,mewtaylor/django,rtindru/django,ticosax/django,vitaly4uk/django,tragiclifestories/django,yewang15215/django,MoritzS/django,claudep/django,mitchelljkotler/django,bobcyw/django,h4r5h1t/django-hauthy,zerc/django,aidanlister/django,sjlehtin/django,nemesisdesign/django,hynekcer/django,leereilly/django-1,Yong-Lee/django,techdragon/django,synasius/django,oberlin/django,lmorchard/django,aspidites/django,moreati/django,mdj2/django,sopier/django,haxoza/django,sarthakmeh03/django,helenst/django,Adnn/django,nhippenmeyer/django,saydulk/django,sam-tsai/django,akintoey/django,RossBrunton/django,sergei-maertens/django,kholidfu/django,Matt-Deacalion/django,mojeto/django,mathspace/django,MarcJoan/django,denys-duchier/django,supriyantomaftuh/django,ironbox360/django,tuhangdi/django,YangSongzhou/django,frdb194/django,PetrDlouhy/django,gohin/django,mitya57/django,gchp/django,claudep/django,marissazhou/django,hunter007/django,hottwaj/django,ar45/django,mrfuxi/django,KokareIITP/django,DONIKAN/django,delhivery/django,andela-ifageyinbo/django,ytjiang/django,treyhunner/django,extremewaysback/django,jallohm/django,beck/django,crazy-canux/django,pauloxnet/django,salamer/django,syaiful6/django,jhg/django,seocam/django,mbox/django,nhippenmeyer/django,redhat-openstack/django,Beeblio/django,Yong-Lee/django,bobcyw/django,alexmorozov/django,dfunckt/django,synasius/django,mshafiq9/django,ironbox360/django,rmboggs/django,craynot/django,doismellburning/django,scorphus/django,weiawe/django,JavML/django,loic/django,django-nonrel/django-nonrel,nemesisdesign/django,nielsvanoch/django,donkirkby/django,rockneurotiko/django,rmboggs/django,sopier/django,mrbox/django,hackerbot/DjangoDev,rajsadho/django,rogerhu/django,GaussDing/django,SebasSBM/django,andela-ifageyinbo/django,asser/django,dpetzold/django,leeon/annotated-django,delinhabit/django,dydek/django,joakim-hove/django,drjeep/django,nhippenmeyer/django,himleyb85/django,payeldillip/django,ytjiang/django,jejimenez/django,arun6582/django,myang321/django,zhaodelong/django,mmardini/django,akaihola/django,varunnaganathan/django,rwillmer/django,oinopion/django,aisipos/django,willharris/django,TridevGuha/django,iambibhas/django,bobcyw/django,zedr/django,beckastar/django,carljm/django,ryanahall/django,rsvip/Django,joequery/django,syphar/django,Leila20/django,wsmith323/django,TimYi/django,andrewsmedina/django,dracos/django,Matt-Deacalion/django,coldmind/django,irwinlove/django,devops2014/djangosite,chrisfranzen/django,mrfuxi/django,auvipy/django,zhoulingjun/django,Sonicbids/django,evansd/django,zhaodelong/django,reinout/django,AndrewGrossman/django,MarkusH/django,wkschwartz/django,frankvdp/django,tcwicklund/django,jn7163/django,ryanahall/django,archen/django,unaizalakain/django,denis-pitul/django,dfdx2/django,gohin/django,rrrene/django,akaihola/django,GitAngel/django,AlexHill/django,NullSoldier/django,takeshineshiro/django,kangfend/django,IRI-Research/django,shacker/django,SujaySKumar/django,jyotsna1820/django,spisneha25/django,tbeadle/django,theo-l/django,auready/django,DrMeers/django,hobarrera/django,JorgeCoock/django,reinout/django,zanderle/django,alexallah/django,jasonbot/django,maxsocl/django,twz915/django,sarthakmeh03/django,andyzsf/django,vitan/django,salamer/django,fafaman/django,jn7163/django,karyon/django,Vixionar/django,BlindHunter/django,memtoko/django,jscn/django,delinhabit/django,poiati/django,jhoos/django,vitan/django,seanwestfall/django,twz915/django,codepantry/django,oinopion/django,mshafiq9/django,benjaminjkraft/django,krisys/django,adambrenecki/django,pauloxnet/django,yigitguler/django,fpy171/django,mshafiq9/django,pjdelport/django,z0by/django,codepantry/django,neiudemo1/django,oscaro/django,akaariai/django,x111ong/django,mattseymour/django,barbuza/django,alexmorozov/django,felixxm/django,RaoUmer/django,ecederstrand/django,Anonymous-X6/django,fafaman/django,sarvex/django,koniiiik/django,andela-ifageyinbo/django,hasadna/django,hcsturix74/django,AlexHill/django,pjdelport/django,elky/django,haxoza/django,extremewaysback/django,wsmith323/django,bspink/django,timgraham/django,stewartpark/django,RevelSystems/django,elijah513/django,xrmx/django,pasqualguerrero/django,mjtamlyn/django,saydulk/django,jvkops/django,quxiaolong1504/django,hobarrera/django,dhruvagarwal/django,lisael/pg-django,marissazhou/django,kholidfu/django,apocquet/django,seanwestfall/django,mlavin/django,t0in4/django,bliti/django-nonrel-1.5,hassanabidpk/django,SoftwareMaven/django,ziima/django,scorphus/django,jejimenez/django,mcella/django,YYWen0o0/python-frame-django,ajoaoff/django,jgoclawski/django,divio/django,EliotBerriot/django,alexmorozov/django,digimarc/django,nealtodd/django,Balachan27/django,jhoos/django,dpetzold/django,Nepherhotep/django,charettes/django,arun6582/django,DONIKAN/django,frankvdp/django,ojengwa/django-1,jasonwzhy/django,kosz85/django,irwinlove/django,kcpawan/django,piquadrat/django,Anonymous-X6/django,ckirby/django,shaib/django,felixjimenez/django,ironbox360/django,hcsturix74/django,dbaxa/django,sgzsh269/django,claudep/django,sam-tsai/django,jvkops/django,rhertzog/django,irwinlove/django,poiati/django,denys-duchier/django,charettes/django,memtoko/django,roselleebarle04/django,salamer/django,cobalys/django,beni55/django,EliotBerriot/django,adelton/django,treyhunner/django,phalt/django,vitaly4uk/django,joakim-hove/django,jeezybrick/django,alrifqi/django,marctc/django,seocam/django,rsvip/Django,reinout/django,rtindru/django,coldmind/django,avneesh91/django,liavkoren/djangoDev,knifenomad/django,elky/django,himleyb85/django,alx-eu/django,googleinterns/django,bak1an/django,ifduyue/django,Proggie02/TestRepo,dwightgunning/django,DasIch/django,katrid/django,liuliwork/django,indevgr/django,aisipos/django,hasadna/django,willhardy/django,bak1an/django,hkchenhongyi/django,programadorjc/django,kutenai/django,MounirMesselmeni/django,rizumu/django,yewang15215/django,yakky/django,HousekeepLtd/django,Balachan27/django,MikeAmy/django,lsqtongxin/django,raphaelmerx/django,seocam/django,runekaagaard/django-contrib-locking,camilonova/django,hasadna/django,dfunckt/django,apollo13/django,django-nonrel/django-nonrel,1013553207/django,GitAngel/django,BlindHunter/django,koniiiik/django,yograterol/django,ticosax/django,caotianwei/django,harisibrahimkv/django,pelme/django,alimony/django,curtisstpierre/django,ghedsouza/django,jallohm/django,daniponi/django,tayfun/django,lwiecek/django,ghickman/django,jdelight/django,RaoUmer/django,robhudson/django,neiudemo1/django,filias/django,auready/django,joequery/django,ataylor32/django,eugena/django,frdb194/django,ajoaoff/django,quxiaolong1504/django,oberlin/django,willharris/django,MatthewWilkes/django,kswiat/django,zsiciarz/django,ckirby/django,vsajip/django,t0in4/django,chrisfranzen/django,intgr/django,dhruvagarwal/django,phalt/django,ytjiang/django,supriyantomaftuh/django,piquadrat/django,frePPLe/django,scorphus/django,huang4fstudio/django,krisys/django,camilonova/django,PolicyStat/django,fafaman/django,elena/django,dudepare/django,benjaminjkraft/django,gengue/django,darkryder/django,EliotBerriot/django,TimYi/django,mammique/django,JavML/django,divio/django,dudepare/django,monetate/django,evansd/django,loic/django,SujaySKumar/django,Beauhurst/django,jgeskens/django,epandurski/django,bspink/django,errx/django,marqueedev/django,rapilabs/django,feroda/django,RevelSystems/django,pauloxnet/django,double-y/django,DasIch/django,denisenkom/django,andela-ooladayo/django,GhostThrone/django,kennethlove/django,mttr/django,rajsadho/django,EmadMokhtar/Django,zulip/django,shownomercy/django,aspidites/django,auready/django,claudep/django,theo-l/django,liu602348184/django,indevgr/django,blueyed/django,Beauhurst/django,riklaunim/django-custom-multisite,payeldillip/django,kennethlove/django,gunchleoc/django,googleinterns/django,willharris/django,dfunckt/django,mitchelljkotler/django,Korkki/django,SujaySKumar/django,guettli/django,kaedroho/django,erikr/django,yask123/django,hunter007/django,gitaarik/django,savoirfairelinux/django,lunafeng/django,neiudemo1/django,ryangallen/django,adrianholovaty/django,georgemarshall/django,sopier/django,xwolf12/django,harisibrahimkv/django,mcrowson/django,SoftwareMaven/django,indevgr/django,Matt-Deacalion/django,zhoulingjun/django,lzw120/django,shtouff/django,nju520/django,aisipos/django,jrrembert/django,redhat-openstack/django,archen/django,jn7163/django,sadaf2605/django,aroche/django,hcsturix74/django,leereilly/django-1,jhoos/django,WillGuan105/django,sam-tsai/django,adambrenecki/django,rsalmaso/django,ar45/django,aerophile/django,rwillmer/django,tomchristie/django,oscaro/django,alilotfi/django,Balachan27/django,kholidfu/django,tanmaythakur/django,kutenai/django,gannetson/django,ajaali/django,pauloxnet/django,blighj/django,avanov/django,huang4fstudio/django,jsoref/django,ulope/django,gdi2290/django,ptoraskar/django,nju520/django,alexallah/django,seanwestfall/django,rrrene/django,karyon/django,runekaagaard/django-contrib-locking,ifduyue/django,mojeto/django,RossBrunton/django,filias/django,webgeodatavore/django,koordinates/django,sbellem/django,dwightgunning/django,Sonicbids/django,akaariai/django,cainmatt/django,robhudson/django,jsoref/django,ziima/django,sarvex/django,JorgeCoock/django,stevenewey/django,synasius/django,donkirkby/django,gdi2290/django,bitcity/django,liu602348184/django,github-account-because-they-want-it/django,kholidfu/django,techdragon/django,pipermerriam/django,django-nonrel/django,gdub/django,mjtamlyn/django,Vixionar/django,phalt/django,follow99/django,hassanabidpk/django,hnakamur/django,varunnaganathan/django,ASCrookes/django,sadaf2605/django,druuu/django,andela-ifageyinbo/django,evansd/django,double-y/django,GaussDing/django,BMJHayward/django,ifduyue/django,django/django,ericholscher/django,jarshwah/django,bliti/django-nonrel-1.5,makinacorpus/django,jylaxp/django,ivandevp/django,moreati/django,dbaxa/django,duqiao/django,krisys/django,rogerhu/django,jrrembert/django,django-nonrel/django,lwiecek/django,dgladkov/django,mcella/django,memtoko/django,oinopion/django,jgoclawski/django,akshatharaj/django,MounirMesselmeni/django,olasitarska/django,huang4fstudio/django,avneesh91/django,mewtaylor/django,SujaySKumar/django,harisibrahimkv/django,KokareIITP/django,guettli/django,ojengwa/django-1,matiasb/django,MoritzS/django,postrational/django,blindroot/django,dfdx2/django,leekchan/django_test,wweiradio/django,gchp/django,ABaldwinHunter/django-clone,takeshineshiro/django,programadorjc/django,ajaali/django,quamilek/django,gannetson/django,felixxm/django,sopier/django,benjaminjkraft/django,nealtodd/django,kcpawan/django,TimYi/django,felixxm/django,zsiciarz/django,rizumu/django,feroda/django,barbuza/django,jyotsna1820/django,iambibhas/django,freakboy3742/django,theo-l/django,Beeblio/django,drjeep/django,saydulk/django,yograterol/django,akshatharaj/django,zsiciarz/django,jyotsna1820/django,crazy-canux/django,beckastar/django,evansd/django,ajoaoff/django,PetrDlouhy/django,atul-bhouraskar/django,raphaelmerx/django,Adnn/django,tysonclugg/django,fenginx/django,extremewaysback/django,z0by/django,mcardillo55/django,Korkki/django,andreif/django,tbeadle/django,willhardy/django,WillGuan105/django,double-y/django,kevintaw/django,archen/django,Y3K/django,frishberg/django,epandurski/django,runekaagaard/django-contrib-locking,maxsocl/django,djbaldey/django,anant-dev/django,RevelSystems/django,etos/django,AltSchool/django,mrfuxi/django,jmcarp/django,joakim-hove/django,blueyed/django,rynomster/django,andresgz/django,denys-duchier/django,zanderle/django,chyeh727/django,bak1an/django,django/django,kisna72/django,auvipy/django,elky/django,alx-eu/django,krishna-pandey-git/django,h4r5h1t/django-hauthy,ABaldwinHunter/django-clone-classic,dsanders11/django,MatthewWilkes/django,adelton/django,twz915/django,andyzsf/django,jpic/django,syaiful6/django,tomchristie/django,haxoza/django,syaiful6/django,rhertzog/django,syphar/django,wkschwartz/django,daniponi/django,mathspace/django,tysonclugg/django,shaistaansari/django,nealtodd/django,simonw/django,aidanlister/django,YangSongzhou/django,lmorchard/django,edmorley/django,beni55/django,webgeodatavore/django,marckuz/django,marcelocure/django,hkchenhongyi/django
|
django/db/backends/dummy/base.py
|
django/db/backends/dummy/base.py
|
"""
Dummy database backend for Django.
Django uses this if the DATABASE_ENGINE setting is empty (None or empty string).
Each of these API functions, except connection.close(), raises
ImproperlyConfigured.
"""
from django.core.exceptions import ImproperlyConfigured
def complain(*args, **kwargs):
raise ImproperlyConfigured, "You haven't set the DATABASE_ENGINE setting yet."
def ignore(*args, **kwargs):
pass
class DatabaseError(Exception):
pass
class IntegrityError(DatabaseError):
pass
class DatabaseWrapper:
cursor = complain
_commit = complain
_rollback = ignore
def __init__(self, **kwargs):
pass
def close(self):
pass # close()
supports_constraints = False
quote_name = complain
dictfetchone = complain
dictfetchmany = complain
dictfetchall = complain
get_last_insert_id = complain
get_date_extract_sql = complain
get_date_trunc_sql = complain
get_limit_offset_sql = complain
get_random_function_sql = complain
get_deferrable_sql = complain
get_fulltext_search_sql = complain
get_drop_foreignkey_sql = complain
get_sql_flush = complain
get_sql_sequence_reset = complain
OPERATOR_MAPPING = {}
|
"""
Dummy database backend for Django.
Django uses this if the DATABASE_ENGINE setting is empty (None or empty string).
Each of these API functions, except connection.close(), raises
ImproperlyConfigured.
"""
from django.core.exceptions import ImproperlyConfigured
def complain(*args, **kwargs):
raise ImproperlyConfigured, "You haven't set the DATABASE_ENGINE setting yet."
class DatabaseError(Exception):
pass
class IntegrityError(DatabaseError):
pass
class DatabaseWrapper:
cursor = complain
_commit = complain
_rollback = complain
def __init__(self, **kwargs):
pass
def close(self):
pass # close()
supports_constraints = False
quote_name = complain
dictfetchone = complain
dictfetchmany = complain
dictfetchall = complain
get_last_insert_id = complain
get_date_extract_sql = complain
get_date_trunc_sql = complain
get_limit_offset_sql = complain
get_random_function_sql = complain
get_deferrable_sql = complain
get_fulltext_search_sql = complain
get_drop_foreignkey_sql = complain
get_sql_flush = complain
get_sql_sequence_reset = complain
OPERATOR_MAPPING = {}
|
bsd-3-clause
|
Python
|
1ec9d3b5d7a2fdfd6e7d0e763c95e1a3117cd96d
|
Update middleware to be django1.10-compatible
|
selwin/django-user_agents,selwin/django-user_agents
|
django_user_agents/middleware.py
|
django_user_agents/middleware.py
|
from django.utils.functional import SimpleLazyObject
from django.utils.deprecation import MiddlewareMixin
from .utils import get_user_agent
class UserAgentMiddleware(MiddlewareMixin):
# A middleware that adds a "user_agent" object to request
def process_request(self, request):
request.user_agent = SimpleLazyObject(lambda: get_user_agent(request))
|
from django.utils.functional import SimpleLazyObject
from .utils import get_user_agent
class UserAgentMiddleware(object):
# A middleware that adds a "user_agent" object to request
def process_request(self, request):
request.user_agent = SimpleLazyObject(lambda: get_user_agent(request))
|
mit
|
Python
|
9fe4b5fee790b7e21eb5810176a2cfa49abde7b2
|
Create author obj only for active users
|
AvadootNachankar/gstudio,AvadootNachankar/gstudio,gnowledge/gstudio,gnowledge/gstudio,gnowledge/gstudio,gnowledge/gstudio,AvadootNachankar/gstudio,gnowledge/gstudio,AvadootNachankar/gstudio
|
doc/deployer/create_auth_objs.py
|
doc/deployer/create_auth_objs.py
|
from gnowsys_ndf.ndf.models import *
from django.contrib.auth.models import User
all_users = User.objects.all()
auth_gst = node_collection.one({'_type': u'GSystemType', 'name': u'Author'})
new_auth_instances = 0
for each_user in all_users:
auth = node_collection.one({'_type': u"Author", 'created_by': int(each_user.id)})
# This will create user document in Author collection to behave user as a group.
if auth is None and each_user.is_active:
print "\n Creating new Author obj for ",each_user.username
auth = node_collection.collection.Author()
auth.name = unicode(each_user.username)
auth.email = unicode(each_user.email)
auth.password = u""
auth.member_of.append(auth_gst._id)
auth.group_type = u"PUBLIC"
auth.edit_policy = u"NON_EDITABLE"
auth.subscription_policy = u"OPEN"
auth.created_by = each_user.id
auth.modified_by = each_user.id
auth.contributors.append(each_user.id)
auth.group_admin.append(each_user.id)
auth.preferred_languages = {'primary': ('en', 'English')}
auth.agency_type = "Student"
auth_id = ObjectId()
auth._id = auth_id
auth.save(groupid=auth._id)
home_group_obj = node_collection.one({'_type': u"Group", 'name': unicode("home")})
if each_user.id not in home_group_obj.author_set:
node_collection.collection.update({'_id': home_group_obj._id}, {'$push': {'author_set': each_user.id }}, upsert=False, multi=False)
home_group_obj.reload()
desk_group_obj = node_collection.one({'_type': u"Group", 'name': unicode("desk")})
if desk_group_obj and each_user.id not in desk_group_obj.author_set:
node_collection.collection.update({'_id': desk_group_obj._id}, {'$push': {'author_set': each_user.id }}, upsert=False, multi=False)
desk_group_obj.reload()
new_auth_instances = new_auth_instances + 1
print "\n Total Author objects created: ", new_auth_instances
|
from gnowsys_ndf.ndf.models import *
from django.contrib.auth.models import User
all_users = User.objects.all()
auth_gst = node_collection.one({'_type': u'GSystemType', 'name': u'Author'})
new_auth_instances = 0
for each_user in all_users:
auth = node_collection.one({'_type': u"Author", 'created_by': int(each_user.id)})
# This will create user document in Author collection to behave user as a group.
if auth is None:
print "\n Creating new Author obj for ",each_user.username
auth = node_collection.collection.Author()
auth.name = unicode(each_user.username)
auth.email = unicode(each_user.email)
auth.password = u""
auth.member_of.append(auth_gst._id)
auth.group_type = u"PUBLIC"
auth.edit_policy = u"NON_EDITABLE"
auth.subscription_policy = u"OPEN"
auth.created_by = each_user.id
auth.modified_by = each_user.id
auth.contributors.append(each_user.id)
auth.group_admin.append(each_user.id)
auth.preferred_languages = {'primary': ('en', 'English')}
auth.agency_type = "Student"
auth_id = ObjectId()
auth._id = auth_id
auth.save(groupid=auth._id)
home_group_obj = node_collection.one({'_type': u"Group", 'name': unicode("home")})
if each_user.id not in home_group_obj.author_set:
node_collection.collection.update({'_id': home_group_obj._id}, {'$push': {'author_set': each_user.id }}, upsert=False, multi=False)
home_group_obj.reload()
desk_group_obj = node_collection.one({'_type': u"Group", 'name': unicode("desk")})
if desk_group_obj and each_user.id not in desk_group_obj.author_set:
node_collection.collection.update({'_id': desk_group_obj._id}, {'$push': {'author_set': each_user.id }}, upsert=False, multi=False)
desk_group_obj.reload()
new_auth_instances = new_auth_instances + 1
print "\n Total Author objects created: ", new_auth_instances
|
agpl-3.0
|
Python
|
69f28c471935d5e8136a4b32f51310f1f46046f0
|
set lower for envvar keys
|
dockU/build
|
docku/build/__init__.py
|
docku/build/__init__.py
|
import json
import os
class BuildConfig(dict):
def __init__(self, path):
cc = {}
with open(path) as fh:
cc = json.load(fh)
super().__init__(cc)
self.populate_envvars()
def populate_envvars(self):
keys = ['BINTRAY_TOKEN', 'BINTRAY_USER', 'BINTRAY_REPO']
for key in keys:
value = os.getenv(key)
if value:
self[key.lower()] = value
|
import json
import os
class BuildConfig(dict):
def __init__(self, path):
cc = {}
with open(path) as fh:
cc = json.load(fh)
super().__init__(cc)
self.populate_envvars()
def populate_envvars(self):
keys = ['BINTRAY_TOKEN', 'BINTRAY_USER', 'BINTRAY_REPO']
for key in keys:
value = os.getenv(key)
if value:
self[key] = value
|
mit
|
Python
|
b289569a228ff574f2c469d0d2a7fbb019c19c9e
|
Update version
|
snipsco/snipsskills,snipsco/snipsskills,snipsco/snipsskills,snipsco/snipsskills
|
snipsskills/__init__.py
|
snipsskills/__init__.py
|
# -*-: coding utf-8 -*-
""" snipsskills module """
__version__ = '0.1.4.935'
|
# -*-: coding utf-8 -*-
""" snipsskills module """
__version__ = '0.1.4.934'
|
mit
|
Python
|
0d6f7da3de63de55d8aa96532e072626f3198ed5
|
Sort by date
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
Instanssi/ext_blog/templatetags/blog_tags.py
|
Instanssi/ext_blog/templatetags/blog_tags.py
|
# -*- coding: utf-8 -*-
from django import template
from Instanssi.ext_blog.models import BlogEntry
register = template.Library()
@register.inclusion_tag('ext_blog/blog_messages.html')
def render_blog(event_id):
entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True).order_by('-date')[:10]
return {'entries': entries}
@register.inclusion_tag('ext_blog/blog_rss_tag.html')
def render_blog_rss_tag():
return {}
@register.simple_tag
def blog_rss_url():
return 'http://instanssi.org/blog/rss/'
|
# -*- coding: utf-8 -*-
from django import template
from Instanssi.ext_blog.models import BlogEntry
register = template.Library()
@register.inclusion_tag('ext_blog/blog_messages.html')
def render_blog(event_id):
entries = BlogEntry.objects.filter(event_id__lte=int(event_id), public=True)[:10]
return {'entries': entries}
@register.inclusion_tag('ext_blog/blog_rss_tag.html')
def render_blog_rss_tag():
return {}
@register.simple_tag
def blog_rss_url():
return 'http://instanssi.org/blog/rss/'
|
mit
|
Python
|
bc7c3322e027578f79119e6836111244ba1445cc
|
revert out
|
sk2/autonetkit
|
autonetkit/config.py
|
autonetkit/config.py
|
import pkg_resources
import ConfigParser
from configobj import ConfigObj, flatten_errors
import os
import validate
validator = validate.Validator()
import os.path
# from http://stackoverflow.com/questions/4028904
ank_user_dir = os.path.join(os.path.expanduser("~"), ".autonetkit")
def load_config():
settings = ConfigParser.RawConfigParser()
spec_file = pkg_resources.resource_filename(__name__,"/config/configspec.cfg")
settings = ConfigObj(configspec=spec_file, encoding='UTF8')
# User's ANK settings
user_config_file = os.path.join(ank_user_dir, "autonetkit.cfg")
settings.merge(ConfigObj(user_config_file))
# ANK settings in current directory
settings.merge(ConfigObj("autonetkit.cfg"))
# ANK settings specified by environment variable
try:
ankcfg = os.environ['AUTONETKIT_CFG']
settings.merge(ConfigObj(ankcfg))
except KeyError:
pass
results = settings.validate(validator)
if results != True:
for (section_list, key, _) in flatten_errors(settings, results):
if key is not None:
print "Error loading configuration file:"
print 'Invalid key "%s" in section "%s"' % (key, ', '.join(section_list))
raise SystemExit
else:
# ignore missing sections - use defaults
#print 'The following section was missing:%s ' % ', '.join(section_list)
pass
return settings
#NOTE: this only gets loaded once package-wide if imported as import autonetkit.config
settings = load_config()
|
import pkg_resources
import ConfigParser
from configobj import ConfigObj, flatten_errors
import os
import validate
validator = validate.Validator()
import os.path
#TODO: check this works on Windows
ank_user_dir = os.path.join(os.environ['HOME'], ".autonetkit")
def load_config():
settings = ConfigParser.RawConfigParser()
spec_file = pkg_resources.resource_filename(__name__,"/config/configspec.cfg")
settings = ConfigObj(configspec=spec_file, encoding='UTF8')
# User's ANK settings
user_config_file = os.path.join(ank_user_dir, "autonetkit.cfg")
settings.merge(ConfigObj(user_config_file))
# ANK settings in current directory
settings.merge(ConfigObj("autonetkit.cfg"))
# ANK settings specified by environment variable
try:
ankcfg = os.environ['AUTONETKIT_CFG']
settings.merge(ConfigObj(ankcfg))
except KeyError:
pass
results = settings.validate(validator)
if results != True:
for (section_list, key, _) in flatten_errors(settings, results):
if key is not None:
print "Error loading configuration file:"
print 'Invalid key "%s" in section "%s"' % (key, ', '.join(section_list))
raise SystemExit
else:
# ignore missing sections - use defaults
#print 'The following section was missing:%s ' % ', '.join(section_list)
pass
return settings
#NOTE: this only gets loaded once package-wide if imported as import autonetkit.config
settings = load_config()
|
bsd-3-clause
|
Python
|
080b967c0854d416532449dba96bbbd8f0318d8a
|
remove time_per_record since it does not make real sense
|
enthought/pikos,enthought/pikos,enthought/pikos
|
pikos/benchmark/monitors.py
|
pikos/benchmark/monitors.py
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# Package: Pikos toolkit
# File: benchmark/monitors.py
# License: LICENSE.TXT
#
# Copyright (c) 2012, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
""" Estimate the overhead cost of using a monitor.
The benchmark runs the pystones benchmark under each monitor and calculates
the overhead.
"""
from test import pystone
from pikos.benchmark.record_counter import RecordCounter
def pymonitors():
""" Pure python monitors """
from pikos.monitors.api import (
FunctionMonitor, LineMonitor,
FunctionMemoryMonitor, LineMemoryMonitor)
return {
'FunctionMonitor': FunctionMonitor,
'LineMonitor': LineMonitor,
'FunctionMemoryMonitor': FunctionMemoryMonitor,
'LineMemoryMonitor': LineMemoryMonitor}
def cymonitors():
""" Cython monitors """
from pikos.cymonitors.api import FunctionMonitor
from pikos.cymonitors.api import FunctionMemoryMonitor
return {
'CFunctionMonitor': FunctionMonitor,
'CFunctionMemoryMonitor': FunctionMemoryMonitor}
def run(monitors, loops, record_type=None):
""" Time the monitors overhead using pystones.
Parameter
---------
monitors : list
The list of monitors to time.
loops : int
The number of loops to run pystones.
record_type : object
The type of record to use.
"""
header = (
"Overhead time | Relative overhead | "
"{:^10} | {:^{length}}".format(
'Records', 'Name',
length=max(len(key) for key in monitors) - 4))
line = ('{time:>13} | {relative:>17} | {records:>10} | {name}')
print header
print len(header) * '-'
expected_time, _ = pystone.pystones(loops)
for name, monitor in monitors.iteritems():
recorder = RecordCounter()
with monitor(recorder=recorder, record_type=record_type):
time, _ = pystone.pystones(loops)
print line.format(
name=name,
time='{:2.2f}'.format(time - expected_time),
relative='{:.2%}'.format((time - expected_time) / expected_time),
records='{:10d}'.format(recorder.records))
def main(monitors, loops=1000):
print 'With default record types'
run(monitors, loops)
print
print 'Using tuples as records'
run(monitors, loops, record_type=tuple)
if __name__ == '__main__':
monitors = pymonitors()
monitors.update(cymonitors())
main(monitors)
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# Package: Pikos toolkit
# File: benchmark/monitors.py
# License: LICENSE.TXT
#
# Copyright (c) 2012, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
""" Estimate the overhead cost of using a monitor.
The benchmark runs the pystones benchmark under each monitor and calculates
the overhead.
"""
from test import pystone
from pikos.benchmark.record_counter import RecordCounter
def pymonitors():
""" Pure python monitors """
from pikos.monitors.api import (
FunctionMonitor, LineMonitor,
FunctionMemoryMonitor, LineMemoryMonitor)
return {
'FunctionMonitor': lambda recorder, record_type: FunctionMonitor(
recorder, None if record_type is None else tuple),
'LineMonitor': LineMonitor,
'FunctionMemoryMonitor': FunctionMemoryMonitor,
'LineMemoryMonitor': LineMemoryMonitor}
def cymonitors():
""" Cython monitors """
from pikos.cymonitors.api import FunctionMonitor
from pikos.cymonitors.api import FunctionMemoryMonitor
return {
'CFunctionMonitor': FunctionMonitor,
'CFunctionMemoryMonitor': FunctionMemoryMonitor}
def run(monitors, loops, record_type=None):
""" Time the monitors overhead using pystones.
Parameter
---------
monitors : list
The list of monitors to time.
loops : int
The number of loops to run pystones.
record_type : object
The type of record to use.
"""
header = (
"Overhead time | Relative overhead | "
"{:^10} | Per record | {:^{length}}".format(
'Records', 'Name',
length=max(len(key) for key in monitors) - 4))
line = ('{time:>13} | {relative:>17} | {records:>10} '
'| {time_per_record:.6e} | {name}')
print header
print len(header) * '-'
expected_time, _ = pystone.pystones(loops)
for name, monitor in monitors.iteritems():
recorder = RecordCounter()
with monitor(recorder=recorder, record_type=record_type):
time, _ = pystone.pystones(loops)
time_per_record = (time - expected_time) / recorder.records
print line.format(
name=name,
time='{:2.2f}'.format(time - expected_time),
relative='{:.2%}'.format((time - expected_time) / expected_time),
time_per_record=time_per_record,
records='{:10d}'.format(recorder.records))
def main(monitors, loops=1000):
print 'With default record types'
run(monitors, loops)
print
print 'Using tuples as records'
run(monitors, loops, record_type=tuple)
if __name__ == '__main__':
monitors = pymonitors()
monitors.update(cymonitors())
main(monitors)
|
bsd-3-clause
|
Python
|
a477de34625f9fc4076eaa093b606463063e33b3
|
add check if TwitterAPI was installed and installed it if not
|
J216/gimp_be,J216/gimp_be
|
gimp_be/network/twitter.py
|
gimp_be/network/twitter.py
|
from gimp_be.settings.settings import *
from gimp_be.utils.string_tools import *
from gimp_be.utils.pip import *
try:
import TwitterAPI
except:
pipInstall("TwitterAPI")
def tweetImage(message,image_file):
"""
Tweet image with message
:param message:
:param image_file:
:return:
"""
from TwitterAPI import TwitterAPI
global settings_data
CONSUMER_KEY = settings_data['twitter']['CONSUMER_KEY']
CONSUMER_SECRET = settings_data['twitter']['CONSUMER_SECRET']
ACCESS_TOKEN_KEY = settings_data['twitter']['ACCESS_TOKEN_KEY']
ACCESS_TOKEN_SECRET = settings_data['twitter']['ACCESS_TOKEN_SECRET']
api = TwitterAPI(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN_KEY, ACCESS_TOKEN_SECRET)
file = open(image_file, 'rb')
data = file.read()
r = api.request('statuses/update_with_media', {'status':message}, {'media[]':data})
return str(str(r.status_code))
def tweetText(opt=0):
"""
return string of twitter message
:param opt:
:return:
"""
global settings_data
import datetime
now = datetime.datetime.now()
updateLocationData()
title = imageTitle(2)
city = settings_data["location"]["city"]
state = settings_data["location"]["state"]
host_name = settings_data["network"]["host_name"]
tempf = settings_data["location"]["tempf"]
weather = settings_data["location"]["weather"]
hashtags = settings_data["twitter"]["hashtags"]
time_stamp = str(datetime.datetime.now())
tweet_text = ''
if opt == 0:
tweet_text = title + '\nby ' + settings_data['user']['author'] + '\n' + city + ' ' + state + ' | ' + host_name + '\n' + tempf + 'F ' + weather + '\n' + now.strftime("%A %B %d - %I:%M%p")
elif opt == 1:
tweet_text = title + '\nby ' + settings_data['user']['author'] + ' ' + time_stamp[:4] + '\n' + hashtags
else:
tweet_text = title + '\nby ' + settings_data['user']['author'] + ' ' + time_stamp[:4]
return tweet_text
def tweetHashtags(hashtags=('0', '1', '2')):
"""
hashtag string
:param hashtags:
:return:
"""
tag_string = ''
for tag in hashtags:
tag_string = tag_string + '#' + tag + ' '
return tag_string.strip()
|
from gimp_be.settings.settings import *
from gimp_be.utils.string_tools import *
def tweetImage(message,image_file):
"""
Tweet image with message
:param message:
:param image_file:
:return:
"""
from TwitterAPI import TwitterAPI
global settings_data
CONSUMER_KEY = settings_data['twitter']['CONSUMER_KEY']
CONSUMER_SECRET = settings_data['twitter']['CONSUMER_SECRET']
ACCESS_TOKEN_KEY = settings_data['twitter']['ACCESS_TOKEN_KEY']
ACCESS_TOKEN_SECRET = settings_data['twitter']['ACCESS_TOKEN_SECRET']
api = TwitterAPI(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN_KEY, ACCESS_TOKEN_SECRET)
file = open(image_file, 'rb')
data = file.read()
r = api.request('statuses/update_with_media', {'status':message}, {'media[]':data})
return str(str(r.status_code))
def tweetText(opt=0):
"""
return string of twitter message
:param opt:
:return:
"""
global settings_data
import datetime
now = datetime.datetime.now()
updateLocationData()
title = imageTitle(2)
city = settings_data["location"]["city"]
state = settings_data["location"]["state"]
host_name = settings_data["network"]["host_name"]
tempf = settings_data["location"]["tempf"]
weather = settings_data["location"]["weather"]
hashtags = settings_data["twitter"]["hashtags"]
time_stamp = str(datetime.datetime.now())
tweet_text = ''
if opt == 0:
tweet_text = title + '\nby ' + settings_data['user']['author'] + '\n' + city + ' ' + state + ' | ' + host_name + '\n' + tempf + 'F ' + weather + '\n' + now.strftime("%A %B %d - %I:%M%p")
elif opt == 1:
tweet_text = title + '\nby ' + settings_data['user']['author'] + ' ' + time_stamp[:4] + '\n' + hashtags
else:
tweet_text = title + '\nby ' + settings_data['user']['author'] + ' ' + time_stamp[:4]
return tweet_text
def tweetHashtags(hashtags=('0', '1', '2')):
"""
hashtag string
:param hashtags:
:return:
"""
tag_string = ''
for tag in hashtags:
tag_string = tag_string + '#' + tag + ' '
return tag_string.strip()
|
mit
|
Python
|
6138f02896bc865a98480be36300bf670a6defa8
|
Replace re by os.path utils
|
justmao945/vim-clang,justmao945/vim-clang
|
plugin/complete_database.py
|
plugin/complete_database.py
|
import vim
import re
import json
from os import path
curr_file = vim.eval("expand('%:p')")
curr_file_noext = path.splitext(curr_file)[0]
ccd = vim.eval("l:ccd")
opts = []
with open(ccd) as database:
# Search for the right entry in the database matching file names
for d in json.load(database):
# This is an entry without a file attribute
if 'file' not in d:
continue
# This entry is about a different file. We consider file names
# without extension to handle header files which do not have
# an entry in the database.
d_file_noext = path.splitext(d['file'])[0]
if d_file_noext != curr_file_noext:
continue
for result in re.finditer(r'-D\s*[^\s]+', d['command']):
opts.append(result.group(0))
for result in re.finditer(r'-isystem\s*[^\s]+', d['command']):
opts.append(result.group(0))
for result in re.finditer(r'-I\s*([^\s]+)', d['command']):
opts.append('-I' + path.join(d['directory'], result.group(1)))
break
vim.command("let l:clang_options = '" + ' '.join(opts) + "'")
|
import vim
import re
import json
from os import path
current = vim.eval("expand('%:p')")
ccd = vim.eval("l:ccd")
opts = []
with open(ccd) as database:
data = json.load(database)
for d in data:
# hax for headers
fmatch = re.search(r'(.*)\.(\w+)$', current)
dmatch = re.search(r'(.*)\.(\w+)$', d['file'])
if fmatch.group(1) == dmatch.group(1):
for result in re.finditer(r'-D\s*[^\s]+', d['command']):
opts.append(result.group(0))
for result in re.finditer(r'-isystem\s*[^\s]+', d['command']):
opts.append(result.group(0))
for result in re.finditer(r'-I\s*([^\s]+)', d['command']):
opts.append('-I' + path.join(d['directory'], result.group(1)))
break
vim.command("let l:clang_options = '" + ' '.join(opts) + "'")
|
isc
|
Python
|
7903c7604a54a8786a5d4b658c224b6d28ed43af
|
Add iterator for lists
|
Dalloriam/engel,Dalloriam/engel,Dalloriam/engel
|
popeui/widgets/structure.py
|
popeui/widgets/structure.py
|
from .base import BaseContainer
from .abstract import HeadLink
class Document(BaseContainer):
"""
A document. Analogous to the HTML ``<html>`` element.
"""
html_tag = "html"
def __init__(self, id, view, classname=None, parent=None, **kwargs):
"""
:param view: :class:`~.application.View` in which the document is declared.
"""
super(Document, self).__init__(id, classname, parent, **kwargs)
self.view = view
class Head(BaseContainer):
html_tag = "head"
def load_script(self, id, path):
"""
Proper way to dynamically inject a script in a page.
:param path: Path of the script to inject.
"""
self.view.dispatch({'name': 'script', 'path': path})
def load_stylesheet(self, id, path):
"""
Proper way to dynamically inject a stylesheet in a page.
:param path: Path of the stylesheet to inject.
"""
self.add_child(HeadLink(id=id, link_type="stylesheet", path=path))
class Body(BaseContainer):
"""
Simple container analogous to the html ``<body>`` element.
"""
html_tag = "body"
class Panel(BaseContainer):
"""
Simple container analogous to the html ``<div>`` element.
"""
html_tag = "div"
class List(BaseContainer):
"""
Bridges python and HTML lists. :class:`List` exposes an interface similar to
python lists and takes care of updating the corresponding HTML ``<ul>`` when the python object is updated.
"""
html_tag = "ul"
def __init__(self, id, classname=None, parent=None, **kwargs):
super(List, self).__init__(id, classname, parent, **kwargs)
self._count = 0
self._items = []
def append(self, widget):
"""
Append a widget to the list.
:param widget: Object inheriting :class:`~.widgets.base.BaseElement`
"""
li_itm = _li(id=self.id + str(self._count))
li_itm.add_child(widget)
self.add_child(li_itm)
self._items.append((widget, li_itm))
self._count += 1
def remove(self, widget):
"""
Remove a widget from the list.
:param widget: Object inheriting :class:`~.widgets.base.BaseElement`
"""
raw = list(filter(lambda x: x[0] == widget, self._items))
if raw:
itm, wrapped = raw[0]
self._items.remove(raw[0])
self.remove_child(wrapped)
else:
raise ValueError("Child not in list.")
def __iter__(self):
return iter(list([x[0] for x in self._items]))
def __len__(self):
return len(self._items)
def __getitem__(self, index):
return self._items[index][0]
def __setitem__(self, index, widget):
old_li = self._items[index]
li_itm = _li(id=old_li[1].id)
li_itm.add_child(widget)
old_wid = self.children[index]
self.replace_child(old_wid, li_itm)
self._items[index] = (widget, li_itm)
class _li(BaseContainer):
html_tag = "li"
|
from .base import BaseContainer
from .abstract import HeadLink
class Document(BaseContainer):
"""
A document. Analogous to the HTML ``<html>`` element.
"""
html_tag = "html"
def __init__(self, id, view, classname=None, parent=None, **kwargs):
"""
:param view: :class:`~.application.View` in which the document is declared.
"""
super(Document, self).__init__(id, classname, parent, **kwargs)
self.view = view
class Head(BaseContainer):
html_tag = "head"
def load_script(self, id, path):
"""
Proper way to dynamically inject a script in a page.
:param path: Path of the script to inject.
"""
self.view.dispatch({'name': 'script', 'path': path})
def load_stylesheet(self, id, path):
"""
Proper way to dynamically inject a stylesheet in a page.
:param path: Path of the stylesheet to inject.
"""
self.add_child(HeadLink(id=id, link_type="stylesheet", path=path))
class Body(BaseContainer):
"""
Simple container analogous to the html ``<body>`` element.
"""
html_tag = "body"
class Panel(BaseContainer):
"""
Simple container analogous to the html ``<div>`` element.
"""
html_tag = "div"
class List(BaseContainer):
"""
Bridges python and HTML lists. :class:`List` exposes an interface similar to
python lists and takes care of updating the corresponding HTML ``<ul>`` when the python object is updated.
"""
html_tag = "ul"
def __init__(self, id, classname=None, parent=None, **kwargs):
super(List, self).__init__(id, classname, parent, **kwargs)
self._count = 0
self._items = []
def append(self, widget):
"""
Append a widget to the list.
:param widget: Object inheriting :class:`~.widgets.base.BaseElement`
"""
li_itm = _li(id=self.id + str(self._count))
li_itm.add_child(widget)
self.add_child(li_itm)
self._items.append((widget, li_itm))
self._count += 1
def remove(self, widget):
"""
Remove a widget from the list.
:param widget: Object inheriting :class:`~.widgets.base.BaseElement`
"""
raw = list(filter(lambda x: x[0] == widget, self._items))
if raw:
itm, wrapped = raw[0]
self._items.remove(raw[0])
self.remove_child(wrapped)
else:
raise ValueError("Child not in list.")
def __len__(self):
return len(self._items)
def __getitem__(self, index):
return self._items[index][0]
def __setitem__(self, index, widget):
old_li = self._items[index]
li_itm = _li(id=old_li[1].id)
li_itm.add_child(widget)
old_wid = self.children[index]
self.replace_child(old_wid, li_itm)
self._items[index] = (widget, li_itm)
class _li(BaseContainer):
html_tag = "li"
|
mit
|
Python
|
c4b408bdf84333a5e41d10ee3d46f926069b5548
|
Delete deprecated with_coverage task
|
Turupawn/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,Turupawn/website,lutris/website,lutris/website
|
lutrisweb/settings/test.py
|
lutrisweb/settings/test.py
|
from base import * # noqa
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS += (
'django_jenkins',
)
JENKINS_TASKS = (
'django_jenkins.tasks.run_pylint',
'django_jenkins.tasks.run_pep8',
)
PROJECT_APPS = (
'games',
'accounts',
'common'
)
|
from base import * # noqa
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS += (
'django_jenkins',
)
JENKINS_TASKS = (
'django_jenkins.tasks.with_coverage',
'django_jenkins.tasks.run_pylint',
'django_jenkins.tasks.run_pep8',
)
PROJECT_APPS = (
'games',
'accounts',
'common'
)
|
agpl-3.0
|
Python
|
421ace15d779cc686aa83489c0e965bbeabe49b9
|
Update script to repeat test set experiment 10 times
|
NLeSC/cptm,NLeSC/cptm
|
cptm/experiment_testset_without_perspectives.py
|
cptm/experiment_testset_without_perspectives.py
|
"""Script to extract a document/topic matrix for a set of text documents.
The corpus is not divided in perspectives.
Used to calculate theta for the CAP vragenuurtje data.
"""
import logging
import argparse
import pandas as pd
import os
from CPTCorpus import CPTCorpus
from cptm.utils.experiment import get_sampler, thetaFileName, load_config, \
topicFileName
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
parser.add_argument('data_dir', help='dir containing the input data.')
parser.add_argument('out_dir', help='dir to write results to.')
args = parser.parse_args()
params = load_config(args.json)
input_dir = [args.data_dir]
topicDict = params.get('outDir').format('topicDict.dict')
opinionDict = params.get('outDir').format('opinionDict.dict')
phi_topic_file = topicFileName(params)
phi_topic = pd.read_csv(phi_topic_file, index_col=0, encoding='utf-8').values.T
#print phi_topic.shape
#print phi_topic
corpus = CPTCorpus(input=input_dir, topicDict=topicDict,
opinionDict=opinionDict, testSplit=100, file_dict=None,
topicLines=params.get('topicLines'),
opinionLines=params.get('opinionLines'))
print str(corpus)
params['outDir'] = args.out_dir
nTopics = params.get('nTopics')
for i in range(10):
sampler = get_sampler(params, corpus, nTopics=nTopics,
initialize=False)
sampler._initialize(phi_topic=phi_topic)
sampler.run()
sampler.estimate_parameters(start=params.get('sampleEstimateStart'),
end=params.get('sampleEstimateEnd'))
logger.info('saving files')
documents = []
for persp in corpus.perspectives:
print str(persp)
for f in persp.testFiles:
p, b = os.path.split(f)
documents.append(b)
theta = sampler.theta_to_df(sampler.theta, documents)
theta.to_csv(os.path.join(params['outDir'],
'theta_{}_{}.csv'.format(nTopics, i)),
encoding='utf8')
|
"""Script to extract a document/topic matrix for a set of text documents.
The corpus is not divided in perspectives.
Used to calculate theta for the CAP vragenuurtje data.
"""
import logging
import argparse
import pandas as pd
import os
from CPTCorpus import CPTCorpus
from cptm.utils.experiment import get_sampler, thetaFileName, load_config, \
topicFileName
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('json', help='json file containing experiment '
'configuration.')
parser.add_argument('data_dir', help='dir containing the input data.')
parser.add_argument('out_dir', help='dir to write results to.')
args = parser.parse_args()
params = load_config(args.json)
input_dir = [args.data_dir]
topicDict = params.get('outDir').format('topicDict.dict')
opinionDict = params.get('outDir').format('opinionDict.dict')
phi_topic_file = topicFileName(params)
phi_topic = pd.read_csv(phi_topic_file, index_col=0, encoding='utf-8').values.T
#print phi_topic.shape
#print phi_topic
corpus = CPTCorpus(input=input_dir, topicDict=topicDict,
opinionDict=opinionDict, testSplit=100, file_dict=None,
topicLines=params.get('topicLines'),
opinionLines=params.get('opinionLines'))
print str(corpus)
params['outDir'] = args.out_dir
sampler = get_sampler(params, corpus, nTopics=params.get('nTopics'),
initialize=False)
sampler._initialize(phi_topic=phi_topic)
sampler.run()
sampler.estimate_parameters(start=params.get('sampleEstimateStart'),
end=params.get('sampleEstimateEnd'))
logger.info('saving files')
documents = []
for persp in corpus.perspectives:
print str(persp)
for f in persp.testFiles:
p, b = os.path.split(f)
documents.append(b)
theta = sampler.theta_to_df(sampler.theta, documents)
theta.to_csv(thetaFileName(params), encoding='utf8')
|
apache-2.0
|
Python
|
0fe7cd8cf316dc6d4ef547d733b634de64fc768c
|
Add more options on filters
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("database_name", "engine_name",
"environment_name", "instance_name", "databaseinfra_name")
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm", "volume_alarm", "engine_name",
"environment_name", "databaseinfra_name")
list_display = ("analyzed_at", "databaseinfra_name", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm", "volume_alarm")
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("database_name", "engine_name",
"environment_name", "instance_name", "databaseinfra_name")
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm", "volume_alarm")
list_display = ("analyzed_at", "databaseinfra_name", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm", "volume_alarm")
|
bsd-3-clause
|
Python
|
4605dfb434d7a934e2fce39f96d73e66f17a682b
|
Handle missing settings file
|
e2gal/antisarubot,e2gal/i2vbot,e2gal/i2vbot,e2gal/antisarubot
|
i2vbot/settings.py
|
i2vbot/settings.py
|
#!/usr/bin/env python2
# Ampuni aku... :(
import pickle
SETTINGS_FILE = "data/settings.pickle"
def loadSettings(settingsFile = SETTINGS_FILE):
try:
with open(settingsFile, 'r') as f:
return pickle.load(f)
except:
return {}
def saveSettings(settings, settingsFile = SETTINGS_FILE):
with open(settingsFile, 'w') as f:
pickle.dump(settings, f, -1)
|
#!/usr/bin/env python2
# Ampuni aku... :(
import pickle
SETTINGS_FILE = "data/settings.pickle"
def loadSettings(settingsFile = SETTINGS_FILE):
with open(settingsFile, 'r') as f:
try:
return pickle.load(f)
except:
return {}
def saveSettings(settings, settingsFile = SETTINGS_FILE):
with open(settingsFile, 'w') as f:
pickle.dump(settings, f, -1)
|
mit
|
Python
|
8bc3e371690ef28609f1999a4a3dabc0dd453850
|
Correct serve() to call serve_one() not listen_one()
|
PinkInk/upylib,PinkInk/upylib
|
uhttpsrv/uhttpsrv.py
|
uhttpsrv/uhttpsrv.py
|
import socket
class uHTTPsrv:
PROTECTED = [b'__init__', b'listen_once', b'listen', b'response_header', b'__qualname__', b'__module__', b'address', b'port', b'backlog', b'in_buffer_len', b'debug']
def __init__(self, address='', port=80, backlog=1, in_buffer_len=1024, debug=False):
self.address = address
self.port = port
self.backlog = backlog
self.in_buffer_len = in_buffer_len
self.debug = debug
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.bind((self.address, self.port))
self._socket.listen(self.backlog)
def serve_one(self):
conn,addr = self._socket.accept()
request = conn.recv(self.in_buffer_len)
request = request.rsplit(b'\r\n')
if self.debug:
for line in request:
print(line)
method = request[0].rsplit(b' ')[0].decode('utf-8')
print(method)
if method.lower() not in self.PROTECTED:
if hasattr(self, method):
response = self.response_header(200) + \
eval('self.'+method+'(self,request)')
else:
response=self.response_header(501)
else:
response = self.response_header(501)
if self.debug:
for line in response:
print(line)
conn.send(response)
conn.close()
def serve(self):
while True:
self.serve_one(self)
def response_header(self, code):
return b'HTTP/1.1 ' + str(code) + b'\nConnection: close\n\n'
|
import socket
class uHTTPsrv:
PROTECTED = [b'__init__', b'listen_once', b'listen', b'response_header', b'__qualname__', b'__module__', b'address', b'port', b'backlog', b'in_buffer_len', b'debug']
def __init__(self, address='', port=80, backlog=1, in_buffer_len=1024, debug=False):
self.address = address
self.port = port
self.backlog = backlog
self.in_buffer_len = in_buffer_len
self.debug = debug
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.bind((self.address, self.port))
self._socket.listen(self.backlog)
def serve_one(self):
conn,addr = self._socket.accept()
request = conn.recv(self.in_buffer_len)
request = request.rsplit(b'\r\n')
if self.debug:
for line in request:
print(line)
method = request[0].rsplit(b' ')[0].decode('utf-8')
print(method)
if method.lower() not in self.PROTECTED:
if hasattr(self, method):
response = self.response_header(200) + \
eval('self.'+method+'(self,request)')
else:
response=self.response_header(501)
else:
response = self.response_header(501)
if self.debug:
for line in response:
print(line)
conn.send(response)
conn.close()
def serve(self):
while True:
self.listen_once(self)
def response_header(self, code):
return b'HTTP/1.1 ' + str(code) + b'\nConnection: close\n\n'
|
mit
|
Python
|
d69aa85c74482354ff8788fb7b9692f0aee6d311
|
Fix it.
|
mrshu/iepy,mrshu/iepy,machinalis/iepy,machinalis/iepy,mrshu/iepy,machinalis/iepy
|
iepy/preprocess.py
|
iepy/preprocess.py
|
import logging
logger = logging.getLogger(__name__)
class PreProcessPipeline(object):
"""Coordinates the pre-processing tasks on a set of documents"""
def __init__(self, step_runners, documents_manager):
"""Takes a list of callables and a documents-manager.
Step Runners may be any callable. It they have an attribute step,
then that runner will be treated as the responsible for
accomplishing such a PreProcessStep.
"""
self.step_runners = step_runners
self.documents = documents_manager
def walk_document(self, doc):
"""Computes all the missing pre-process steps for the given document"""
for step in self.step_runners:
step(doc)
return
def process_step_in_batch(self, runner):
"""Tries to apply the required step to all documents lacking it"""
logger.info('Starting preprocessing step %s', runner)
if hasattr(runner, 'step') and not runner.override:
docs = self.documents.get_documents_lacking_preprocess(runner.step)
else:
docs = self.documents # everything
for i, doc in enumerate(docs):
runner(doc)
logger.info('\tDone for %i documents', i + 1)
def process_everything(self):
"""Tries to apply all the steps to all documents"""
for runner in self.step_runners:
self.process_step_in_batch(runner)
class BasePreProcessStepRunner(object):
# If it's for a particular step, you can write
# step = PreProcessSteps.something
def __init__(self, override=False):
self.override = override
def __call__(self, doc):
# You'll have to:
# - Check if the document satisfies pre-conditions, and if not, do nothing
# - Explicitely store pre process results on the document
# - Based on the "override" paramenter, and on your checks to see if the step
# was already done or not, decide if you will
# - skip
# - re-do step.
raise NotImplementedError
|
import logging
logger = logging.getLogger(__name__)
class PreProcessPipeline(object):
"""Coordinates the pre-processing tasks on a set of documents"""
def __init__(self, step_runners, documents_manager):
"""Takes a list of callables and a documents-manager.
Step Runners may be any callable. It they have an attribute step,
then that runner will be treated as the responsible for
accomplishing such a PreProcessStep.
"""
self.step_runners = step_runners
self.documents = documents_manager
def walk_document(self, doc):
"""Computes all the missing pre-process steps for the given document"""
for step in self.step_runners:
step(doc)
return
def process_step_in_batch(self, runner):
"""Tries to apply the required step to all documents lacking it"""
logger.info('Starting preprocessing step %s', runner)
if hasattr(runner, 'step'):
docs = self.documents.get_documents_lacking_preprocess(runner.step)
else:
docs = self.documents # everything
for i, doc in enumerate(docs):
runner(doc)
logger.info('\tDone for %i documents', i + 1)
def process_everything(self):
"""Tries to apply all the steps to all documents"""
for runner in self.step_runners:
self.process_step_in_batch(runner)
class BasePreProcessStepRunner(object):
# If it's for a particular step, you can write
# step = PreProcessSteps.something
def __init__(self, override=False):
self.override = override
def __call__(self, doc):
# You'll have to:
# - Check if the document satisfies pre-conditions, and if not, do nothing
# - Explicitely store pre process results on the document
# - Based on the "override" paramenter, and on your checks to see if the step
# was already done or not, decide if you will
# - skip
# - re-do step.
raise NotImplementedError
|
bsd-3-clause
|
Python
|
42743ac90ede1d9d78c892f1cee033c5e5a66c9b
|
fix typo in docker update script
|
dune-community/dune-gdt-super
|
.travis/docker/update_image.py
|
.travis/docker/update_image.py
|
#!/usr/bin/env python3
import os
import subprocess
import sys
cc_mapping = {'gcc': 'g++', 'clang': 'clang++'}
thisdir = os.path.dirname(os.path.abspath(__file__))
def update(commit, cc):
gdt_super_dir = os.path.join(thisdir, '..', '..',)
dockerfile = os.path.join(thisdir, 'dune-gdt-testing', 'Dockerfile')
os.chdir(gdt_super_dir)
cxx = cc_mapping[cc]
commit = commit.replace('/', '_')
repo = 'dunecommunity/dune-gdt-testing_{}'.format(cc)
subprocess.check_call(['docker', 'build', '--no-cache=true', '-f', dockerfile,
'-t', '{}:{}'.format(repo, commit), '--build-arg', 'cc={}'.format(cc),
'--build-arg', 'cxx={}'.format(cxx), '--build-arg', 'commit={}'.format(commit),
'.'])
subprocess.check_call(['docker', '--log-level="debug"', 'push', repo])
if __name__ == '__main__':
if len(sys.argv) > 2:
ccs = [sys.argv[1]]
commits = [sys.argv[2]]
else:
ccs = list(cc_mapping.keys())
commits = ['master']
subprocess.check_call(['docker', 'pull', 'dunecommunity/testing-base:latest'])
for b in commits:
for c in ccs:
update(b, c)
subprocess.check_call(['docker', '--log-level="debug"', 'images'])
|
#!/usr/bin/env python3
import os
import subprocess
import sys
cc_mapping = {'gcc': 'g++', 'clang': 'clang++'}
thisdir = os.path.dirname(os.path.abspath(__file__))
def update(commit, cc):
gdt_super_dir = os.path.join(thisdir, '..', '..',)
dockerfile = os.path.join(thisdir, 'dune-gdt-testing', 'Dockerfile')
os.chdir(gdt_super_dir)
cxx = cc_mapping[cc]
commit = commit.replace('/', '_')
repo = 'dunecommunity/dune-gdt-testing_{}'.format(cc)
subprocess.check_call(['docker', 'build', '--no-cache=true', '-f', dockerfile,
'-t', '{}:{}'.format(repo, commit), '--build-arg', 'cc={}'.format(cc),
'--build-arg', 'cxx={}'.format(cxx), '--build-arg', 'commit={}'.format(commit),
'.'])
subprocess.check_call(['docker', '--log-level="debug"', 'push', repo])
if __name__ == '__main__':
if len(sys.argv) > 2:
ccs = [sys.argv[1]]
commmits = [sys.argv[2]]
else:
ccs = list(cc_mapping.keys())
commits = ['master']
subprocess.check_call(['docker', 'pull', 'dunecommunity/testing-base:latest'])
for b in commits:
for c in ccs:
update(b, c)
subprocess.check_call(['docker', '--log-level="debug"', 'images'])
|
bsd-2-clause
|
Python
|
45ee385204d4a38ea904228d2648d266309332ab
|
fix shutdown command
|
TurtleRover/Turtle-Rover-Mission-Control,TurtleRover/Turtle-Rover-Mission-Control,TurtleRover/Turtle-Rover-Mission-Control,TurtleRover/Turtle-Rover-Mission-Control,TurtleRover/Turtle-Rover-Mission-Control,TurtleRover/Turtle-Rover-Mission-Control
|
server/sockets.py
|
server/sockets.py
|
import asyncio
from aiohttp import web
import socketio
import hexdump
from log import logname
import frame
from hardware import Hardware
from version import version_info
import os
import subprocess
logger = logname("sockets")
class WSnamespace(socketio.AsyncNamespace):
def __init__(self, namespace='/sockets'):
super().__init__(namespace)
self.sio = None
self.hw = Hardware()
async def on_connect(self, sid, environ):
logger.info("connected %s", sid)
await self.sio.emit('connected', {
'tcs_ver' : version_info,
'firmware_ver' : self.hw.getFirmwareVersion(),
'wifi_dongle' : self.hw.getWirelessAdapterInfo(),
'video_devices': self.hw.getCameraInfo()
}, namespace="/sockets")
async def on_motors(self, sid, payload):
self.hw.setMotors(payload)
await self.sio.emit('response', "motors set", namespace="/sockets")
async def on_manipulator(self, sid, payload):
self.hw.setManipulator(payload)
await self.sio.emit('response', 'manipulator set', namespace="/sockets")
async def on_gripper(self, sid, payload):
self.hw.setGripper(payload)
await self.sio.emit('response', 'gripper set', namespace="/sockets")
async def on_battery(self, sid):
battery_status = self.hw.getBattery()
await self.sio.emit('battery', battery_status, namespace="/sockets")
async def on_signal(self, sid):
signal_strength = self.hw.getSignal()
await self.sio.emit('signal', signal_strength, namespace="/sockets")
async def on_temperature(self, sid):
temperature = self.hw.getTemperature()
await self.sio.emit('temperature', temperature, namespace="/sockets")
async def on_shutdown(self, sid):
subprocess.run(['poweroff'])
class WSserver():
def __init__(self, app):
super().__init__()
self.sio = None
self.app = app
self.namespace = WSnamespace('/sockets')
def start(self):
self.sio = socketio.AsyncServer(async_mode='aiohttp')
self.sio.register_namespace(self.namespace)
self.namespace.sio = self.sio
self.sio.attach(self.app)
|
import asyncio
from aiohttp import web
import socketio
import hexdump
from log import logname
import frame
from hardware import Hardware
from version import version_info
import os
logger = logname("sockets")
class WSnamespace(socketio.AsyncNamespace):
def __init__(self, namespace='/sockets'):
super().__init__(namespace)
self.sio = None
self.hw = Hardware()
async def on_connect(self, sid, environ):
logger.info("connected %s", sid)
await self.sio.emit('connected', {
'tcs_ver' : version_info,
'firmware_ver' : self.hw.getFirmwareVersion(),
'wifi_dongle' : self.hw.getWirelessAdapterInfo(),
'video_devices': self.hw.getCameraInfo()
}, namespace="/sockets")
async def on_motors(self, sid, payload):
self.hw.setMotors(payload)
await self.sio.emit('response', "motors set", namespace="/sockets")
async def on_manipulator(self, sid, payload):
self.hw.setManipulator(payload)
await self.sio.emit('response', 'manipulator set', namespace="/sockets")
async def on_gripper(self, sid, payload):
self.hw.setGripper(payload)
await self.sio.emit('response', 'gripper set', namespace="/sockets")
async def on_battery(self, sid):
battery_status = self.hw.getBattery()
await self.sio.emit('battery', battery_status, namespace="/sockets")
async def on_signal(self, sid):
signal_strength = self.hw.getSignal()
await self.sio.emit('signal', signal_strength, namespace="/sockets")
async def on_temperature(self, sid):
temperature = self.hw.getTemperature()
await self.sio.emit('temperature', temperature, namespace="/sockets")
async def on_system_shutdown(self, sid):
os.system('poweroff')
class WSserver():
def __init__(self, app):
super().__init__()
self.sio = None
self.app = app
self.namespace = WSnamespace('/sockets')
def start(self):
self.sio = socketio.AsyncServer(async_mode='aiohttp')
self.sio.register_namespace(self.namespace)
self.namespace.sio = self.sio
self.sio.attach(self.app)
|
mit
|
Python
|
d4a2632a0dcdd6731a5930f321135ec7f9864460
|
Use new API, which requires being explicit about tracking ODF model.
|
yeatmanlab/pyAFQ,yeatmanlab/pyAFQ,arokem/pyAFQ,arokem/pyAFQ
|
AFQ/tests/test_tractography.py
|
AFQ/tests/test_tractography.py
|
import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel.tmpdirs as nbtmp
from AFQ.csd import fit_csd
from AFQ.dti import fit_dti
from AFQ.tractography import track
from AFQ.utils.testing import make_tracking_data
seeds = np.array([[-80., -120., -60.],
[-81, -121, -61],
[-81, -120, -60]])
tmpdir = nbtmp.InTemporaryDirectory()
fbval = op.join(tmpdir.name, 'dti.bval')
fbvec = op.join(tmpdir.name, 'dti.bvec')
fdata = op.join(tmpdir.name, 'dti.nii.gz')
make_tracking_data(fbval, fbvec, fdata)
min_length = 20
step_size = 0.5
def test_csd_tracking():
for sh_order in [4, 8, 10]:
fname = fit_csd(fdata, fbval, fbvec,
response=((0.0015, 0.0003, 0.0003), 100),
sh_order=8, lambda_=1, tau=0.1, mask=None,
out_dir=tmpdir.name)
for directions in ["det", "prob"]:
sl = track(fname, directions,
odf_model="CSD",
max_angle=30.,
sphere=None,
seed_mask=None,
n_seeds=seeds,
stop_mask=None,
step_size=step_size,
min_length=min_length)
npt.assert_(len(sl[0]) >= step_size * min_length)
def test_dti_tracking():
fdict = fit_dti(fdata, fbval, fbvec)
for directions in ["det", "prob"]:
sl = track(fdict['params'],
directions,
max_angle=30.,
sphere=None,
seed_mask=None,
n_seeds=1,
step_size=step_size,
min_length=min_length)
npt.assert_(len(sl[0]) >= min_length * step_size)
|
import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel.tmpdirs as nbtmp
from AFQ.csd import fit_csd
from AFQ.dti import fit_dti
from AFQ.tractography import track
from AFQ.utils.testing import make_tracking_data
seeds = np.array([[-80., -120., -60.],
[-81, -121, -61],
[-81, -120, -60]])
tmpdir = nbtmp.InTemporaryDirectory()
fbval = op.join(tmpdir.name, 'dti.bval')
fbvec = op.join(tmpdir.name, 'dti.bvec')
fdata = op.join(tmpdir.name, 'dti.nii.gz')
make_tracking_data(fbval, fbvec, fdata)
min_length = 20
step_size = 0.5
def test_csd_tracking():
for sh_order in [4, 8, 10]:
fname = fit_csd(fdata, fbval, fbvec,
response=((0.0015, 0.0003, 0.0003), 100),
sh_order=8, lambda_=1, tau=0.1, mask=None,
out_dir=tmpdir.name)
for directions in ["det", "prob"]:
sl = track(fname, directions,
max_angle=30.,
sphere=None,
seed_mask=None,
n_seeds=seeds,
stop_mask=None,
step_size=step_size,
min_length=min_length)
npt.assert_(len(sl[0]) >= step_size * min_length)
def test_dti_tracking():
fdict = fit_dti(fdata, fbval, fbvec)
for directions in ["det", "prob"]:
sl = track(fdict['params'],
directions,
max_angle=30.,
sphere=None,
seed_mask=None,
n_seeds=1,
step_size=step_size,
min_length=min_length)
npt.assert_(len(sl[0]) >= min_length * step_size)
|
bsd-2-clause
|
Python
|
705cb5cf3eec171baf3a8b91b8cc77d9987a1414
|
Fix ImproperlyConfigured exception
|
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
|
precision/accounts/views.py
|
precision/accounts/views.py
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.views.generic.base import TemplateResponseMixin, View
from .forms import LoginForm
class SignInView(TemplateResponseMixin, View):
template_name = 'accounts/sign_in.html'
def get(self, request):
form = LoginForm()
context = {'section': 'sign_in', 'form': form}
return self.render_to_response(context)
def post(self, request):
form = LoginForm(request.POST)
if form.is_valid():
cd = form.cleaned_data
user = authenticate(email=cd['username'], password=cd['password'])
if user is not None:
if user.is_active:
login(request, user)
return HttpResponse('Authenticated successfully')
else:
return HttpResponse('Disabled account')
else:
return HttpResponse('Invalid login')
else:
return redirect('accounts:sign_in')
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.views.generic.base import TemplateResponseMixin, View
from .forms import LoginForm
class SignInView(TemplateResponseMixin, View):
def get(self, request):
template_name = 'accounts/sign_in.html'
form = LoginForm()
context = {'section': 'sign_in', 'form': form}
return self.render_to_response(context)
def post(self, request):
form = LoginForm(request.POST)
if form.is_valid():
cd = form.cleaned_data
user = authenticate(email=cd['username'], password=cd['password'])
if user is not None:
if user.is_active:
login(request, user)
return HttpResponse('Authenticated successfully')
else:
return HttpResponse('Disabled account')
else:
return HttpResponse('Invalid login')
else:
return redirect('accounts:sign_in')
|
mit
|
Python
|
c0684358b217318327d71470ee86074b3556148a
|
Use double quotes consistently
|
fdev/bc125csv
|
bc125csv/__main__.py
|
bc125csv/__main__.py
|
from bc125csv.handler import main
if __name__ == "__main__":
main()
|
from bc125csv.handler import main
if __name__ == '__main__':
main()
|
mit
|
Python
|
32537dafa3c13761b910ab8449ff80d60df6f02b
|
Bump version to 2.3.4-dev
|
indico/indico,ThiefMaster/indico,DirkHoffmann/indico,pferreir/indico,DirkHoffmann/indico,pferreir/indico,indico/indico,pferreir/indico,pferreir/indico,ThiefMaster/indico,DirkHoffmann/indico,indico/indico,DirkHoffmann/indico,ThiefMaster/indico,ThiefMaster/indico,indico/indico
|
indico/__init__.py
|
indico/__init__.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import warnings
from indico.util.mimetypes import register_custom_mimetypes
__version__ = '2.3.4-dev'
register_custom_mimetypes()
# TODO: remove in 3.0
warnings.filterwarnings('ignore', message='Python 2 is no longer supported by the Python core team.',
module='authlib')
warnings.filterwarnings('ignore', message='Python 2 is no longer supported by the Python core team.',
module='cryptography')
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import warnings
from indico.util.mimetypes import register_custom_mimetypes
__version__ = '2.3.3'
register_custom_mimetypes()
# TODO: remove in 3.0
warnings.filterwarnings('ignore', message='Python 2 is no longer supported by the Python core team.',
module='authlib')
warnings.filterwarnings('ignore', message='Python 2 is no longer supported by the Python core team.',
module='cryptography')
|
mit
|
Python
|
862f81d54624ea198d6351f5ea7c88b66bc02019
|
Make the Nick an argument to Client.py
|
JacobAMason/Boa
|
src/Client.py
|
src/Client.py
|
#!python
__author__ = 'JacobAMason'
import sys
from twisted.words.protocols import irc
from twisted.internet import protocol, reactor
import StringIO
class Bot(irc.IRCClient):
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def signedOn(self):
self.join(self.factory.channel)
print "Signed on as %s." % (self.nickname)
def joined(self, channel):
print "Joined %s." % (channel)
def privmsg(self, user, channel, message):
if not message.startswith(self.nickname):
return
else:
idx = message.find(' ')
message = message[idx+1:]
# create file-like string to capture output
codeOut = StringIO.StringIO()
codeErr = StringIO.StringIO()
# capture output and errors
sys.stdout = codeOut
sys.stderr = codeErr
errorText = ""
try:
exec message
except Exception, err:
errorText = str(err)
# restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
s = codeErr.getvalue()
if s:
self.msg(channel, "error: %s\n" % s)
if errorText:
self.msg(channel, "error: %s\n" % errorText)
s = codeOut.getvalue()
if s:
self.msg(channel, "%s" % s)
codeOut.close()
codeErr.close()
def dataReceived(self, bytes):
print str(bytes).rstrip()
# Make sure to up-call - otherwise all of the IRC logic is disabled!
return irc.IRCClient.dataReceived(self, bytes)
class BotFactory(protocol.ClientFactory):
protocol = Bot
def __init__(self, channel, nickname="Boa"):
self.channel = channel
self.nickname = nickname
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting..." % (reason)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason)
if __name__ == "__main__":
channel = sys.argv[1]
nickname = sys.argv[2]
reactor.connectTCP("coop.test.adtran.com", 6667,
BotFactory('#' + channel, nickname))
reactor.run()
|
#!python
__author__ = 'JacobAMason'
import sys
from twisted.words.protocols import irc
from twisted.internet import protocol, reactor
import StringIO
class Bot(irc.IRCClient):
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def signedOn(self):
self.join(self.factory.channel)
print "Signed on as %s." % (self.nickname)
def joined(self, channel):
print "Joined %s." % (channel)
def privmsg(self, user, channel, message):
if not message.startswith(self.nickname):
return
else:
idx = message.find(' ')
message = message[idx+1:]
# create file-like string to capture output
codeOut = StringIO.StringIO()
codeErr = StringIO.StringIO()
# capture output and errors
sys.stdout = codeOut
sys.stderr = codeErr
errorText = ""
try:
exec message
except Exception, err:
errorText = str(err)
# restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
s = codeErr.getvalue()
if s:
self.msg(channel, "error: %s\n" % s)
if errorText:
self.msg(channel, "error: %s\n" % errorText)
s = codeOut.getvalue()
if s:
self.msg(channel, "%s" % s)
codeOut.close()
codeErr.close()
def dataReceived(self, bytes):
print str(bytes).rstrip()
# Make sure to up-call - otherwise all of the IRC logic is disabled!
return irc.IRCClient.dataReceived(self, bytes)
class BotFactory(protocol.ClientFactory):
protocol = Bot
def __init__(self, channel, nickname="Boa"):
self.channel = channel
self.nickname = nickname
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting..." % (reason)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason)
if __name__ == "__main__":
channel = sys.argv[1]
reactor.connectTCP("coop.test.adtran.com", 6667, BotFactory('#' + channel))
reactor.run()
|
mit
|
Python
|
ecf71bd004d99b679936e07453f5a938e19f71dc
|
Add aiohttp as a execution requirement
|
google/megalista,google/megalista
|
megalist_dataflow/setup.py
|
megalist_dataflow/setup.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
name='megalist_dataflow',
version='0.1',
author='Alvaro Stivi',
author_email='[email protected]',
url='https://cse.googlesource.com/solutions/megalist',
install_requires=['googleads==20.0.0', 'google-api-python-client==1.7.9',
'bloom-filter==1.3', 'google-cloud-core==1.0.2',
'google-cloud-datastore==1.9.0, aiohttp==3.6.2'],
packages=setuptools.find_packages(),
)
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
name='megalist_dataflow',
version='0.1',
author='Alvaro Stivi',
author_email='[email protected]',
url='https://cse.googlesource.com/solutions/megalist',
install_requires=['googleads==20.0.0', 'google-api-python-client==1.7.9',
'bloom-filter==1.3', 'google-cloud-core==1.0.2',
'google-cloud-datastore==1.9.0'],
packages=setuptools.find_packages(),
)
|
apache-2.0
|
Python
|
8ffb2beea77897e3fa40691f35f2e089dbc5df9a
|
Add more documentation
|
Shade5/coala,impmihai/coala,Tanmay28/coala,svsn2117/coala,SambitAcharya/coala,d6e/coala,arjunsinghy96/coala,Balaji2198/coala,jayvdb/coala,SambitAcharya/coala,Nosferatul/coala,coala/coala,svsn2117/coala,scriptnull/coala,sophiavanvalkenburg/coala,yashtrivedi96/coala,rresol/coala,NalinG/coala,karansingh1559/coala,MariosPanag/coala,MariosPanag/coala,sophiavanvalkenburg/coala,abhiroyg/coala,mr-karan/coala,SanketDG/coala,stevemontana1980/coala,AdeshAtole/coala,nemaniarjun/coala,yashLadha/coala,scriptnull/coala,arush0311/coala,CruiseDevice/coala,djkonro/coala,shreyans800755/coala,d6e/coala,ayushin78/coala,vinc456/coala,arafsheikh/coala,refeed/coala,yashtrivedi96/coala,sudheesh001/coala,Tanmay28/coala,yland/coala,ManjiriBirajdar/coala,saurabhiiit/coala,JohnS-01/coala,SambitAcharya/coala,arjunsinghy96/coala,nemaniarjun/coala,Asalle/coala,tltuan/coala,arush0311/coala,stevemontana1980/coala,NalinG/coala,netman92/coala,tushar-rishav/coala,sils1297/coala,AdeshAtole/coala,AbdealiJK/coala,arafsheikh/coala,NalinG/coala,saurabhiiit/coala,aptrishu/coala,damngamerz/coala,jayvdb/coala,vinc456/coala,Tanmay28/coala,kartikeys98/coala,nemaniarjun/coala,sils1297/coala,MattAllmendinger/coala,JohnS-01/coala,Uran198/coala,Tanmay28/coala,ayushin78/coala,FeodorFitsner/coala,andreimacavei/coala,JohnS-01/coala,yland/coala,NalinG/coala,meetmangukiya/coala,arjunsinghy96/coala,scottbelden/coala,Tanmay28/coala,arush0311/coala,Uran198/coala,NiklasMM/coala,Uran198/coala,d6e/coala,lonewolf07/coala,tushar-rishav/coala,yashLadha/coala,SambitAcharya/coala,dagdaggo/coala,aptrishu/coala,refeed/coala,swatilodha/coala,Asnelchristian/coala,scriptnull/coala,tushar-rishav/coala,meetmangukiya/coala,coala-analyzer/coala,abhiroyg/coala,MattAllmendinger/coala,meetmangukiya/coala,Balaji2198/coala,RJ722/coala,NalinG/coala,scottbelden/coala,kartikeys98/coala,andreimacavei/coala,sagark123/coala,coala/coala,refeed/coala,djkonro/coala,SambitAcharya/coala,yland/coala,AbdealiJK/coala,damngamerz/coala,sudheesh001/coala,NalinG/coala,svsn2117/coala,dagdaggo/coala,Shade5/coala,MariosPanag/coala,rresol/coala,SambitAcharya/coala,coala-analyzer/coala,MattAllmendinger/coala,FeodorFitsner/coala,yashtrivedi96/coala,ManjiriBirajdar/coala,impmihai/coala,saurabhiiit/coala,scottbelden/coala,ayushin78/coala,rresol/coala,swatilodha/coala,tltuan/coala,shreyans800755/coala,kartikeys98/coala,NiklasMM/coala,abhiroyg/coala,sudheesh001/coala,NiklasMM/coala,mr-karan/coala,scriptnull/coala,sagark123/coala,SanketDG/coala,SanketDG/coala,swatilodha/coala,stevemontana1980/coala,Tanmay28/coala,karansingh1559/coala,karansingh1559/coala,Asalle/coala,rimacone/testing2,scriptnull/coala,tltuan/coala,Tanmay28/coala,Asnelchristian/coala,AbdealiJK/coala,aptrishu/coala,arafsheikh/coala,coala-analyzer/coala,Asalle/coala,Nosferatul/coala,andreimacavei/coala,coala/coala,Shade5/coala,Asnelchristian/coala,CruiseDevice/coala,incorrectusername/coala,incorrectusername/coala,sophiavanvalkenburg/coala,djkonro/coala,NalinG/coala,FeodorFitsner/coala,sils1297/coala,CruiseDevice/coala,Tanmay28/coala,RJ722/coala,ManjiriBirajdar/coala,RJ722/coala,scriptnull/coala,sagark123/coala,jayvdb/coala,netman92/coala,AdeshAtole/coala,SambitAcharya/coala,rimacone/testing2,mr-karan/coala,vinc456/coala,damngamerz/coala,shreyans800755/coala,lonewolf07/coala,lonewolf07/coala,netman92/coala,Balaji2198/coala,Nosferatul/coala,dagdaggo/coala,rimacone/testing2,yashLadha/coala,impmihai/coala,incorrectusername/coala,scriptnull/coala
|
coalib/bearlib/languages/LanguageDefinition.py
|
coalib/bearlib/languages/LanguageDefinition.py
|
import os
from coalib.bearlib.abstractions.SectionCreatable import SectionCreatable
from coalib.misc.StringConstants import StringConstants
from coalib.parsing.ConfParser import ConfParser
class LanguageDefinition(SectionCreatable):
def __init__(self, language_family: str, language: str):
"""
Creates a new LanguageDefinition object from file.
A Language Definition holds constants which may help parsing the
language. If you want to write a bear you'll probably want to use those
definitions to keep your bear independent of the semantics of each
language.
:param language_family: The language family. E.g. C for
C++ and C and C# and so on.
:param language: The actual language (e.g. C++).
:raises ConfParser.FileNotFoundError: If no definition is available
for the given family.
:raises KeyError: If no definition is available
for the given language.
"""
SectionCreatable.__init__(self)
self.language = language.lower()
filename = os.path.join(StringConstants.language_definitions,
language_family.lower() + ".coalang")
self.lang_dict = ConfParser().parse(filename)[language.lower()]
def __getitem__(self, item):
return self.lang_dict[item]
|
import os
from coalib.bearlib.abstractions.SectionCreatable import SectionCreatable
from coalib.misc.StringConstants import StringConstants
from coalib.parsing.ConfParser import ConfParser
class LanguageDefinition(SectionCreatable):
def __init__(self, language_family: str, language: str):
"""
Creates a new LanguageDefinition object from file.
:param language_family: The language family. E.g. C for
C++ and C and C# and so on.
:param language: The actual language (e.g. C++).
:raises ConfParser.FileNotFoundError: If no definition is available
for the given family.
:raises KeyError: If no definition is available
for the given language.
"""
SectionCreatable.__init__(self)
self.language = language.lower()
filename = os.path.join(StringConstants.language_definitions,
language_family.lower() + ".coalang")
self.lang_dict = ConfParser().parse(filename)[language.lower()]
def __getitem__(self, item):
return self.lang_dict[item]
|
agpl-3.0
|
Python
|
74272b9916c29bb9e97d4761801ee3730b053b87
|
comment fix
|
heurezjusz/Athenet,heurezjusz/Athena
|
athenet/sparsifying/utils/numlike.py
|
athenet/sparsifying/utils/numlike.py
|
"""Template class with arithmetic operations that can be passed through neural
network.
All classes that are being used for derest should inherit from this class."""
class Numlike(object):
"""Template class with arithmetic operations that can be passed through
neural network.
All classes that are being used for derest should inherit from this
class."""
def __init__(self):
"""Create numlike."""
pass
def __getitem__(self, at):
"""Returns specified slice of numlike.
:at: Coordinates / slice to be taken.
"""
raise NotImplementedError
def __setitem__(self, at, other):
"""Just like Theano set_subtensor function, but as a operator.
:at: Coordinates / slice to be set.
:other: Data to be put at 'at'.
"""
raise NotImplementedError
def shape(self):
"""Returns shape of numlike."""
raise NotImplementedError
def __add__(self, other):
"""Returns sum of two numlikes.
:other: numlike.
"""
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __sub__(self, other):
"""Returns difference between two numlikes.
:other: numlike to be subtracted.
"""
raise NotImplementedError
def __rsub__(self, other):
"""Returns diffeerence between number and numlike.
:other: A number that self will be subtracted from.
"""
raise NotImplementedError
def __mul__(self, other):
"""Returns product of two numlikes.
:other: numlike to be multiplied.
"""
raise NotImplementedError
def __rmul__(self, other):
raise NotImplementedError
def __div__(self, other):
"""Returns quotient of self and other."""
raise NotImplementedError
def reciprocal(self):
"""Returns reciprocal of the numlike."""
raise NotImplementedError
def neg(self):
"""Returns (-1) * numlike."""
raise NotImplementedError
def exp(self):
"""Returns numlike representing the exponential of the numlike."""
raise NotImplementedError
def square(self):
"""Returns square of the numlike."""
raise NotImplementedError
def power(self, exponent):
"""For numlike N, returns N^exponent.
:exponent: Number to be passed as exponent to N^exponent."""
raise NotImplementedError
def dot(self, other):
"""Dot product of numlike vector and a number array (other)."""
raise NotImplementedError
|
"""Template class with arithmetic operations that can be passed through neural
network.
All classes that are being used for derest should inherit from this class."""
class Numlike(object):
"""Template class with arithmetic operations that can be passed through
neural network.
All classes that are being used for derest should inherit from this
class."""
def __init__(self):
"""Create numlike."""
pass
def __getitem__(self, at):
"""Returns specified slice of numlike
at: Coordinates / slice to be taken."""
raise NotImplementedError
def __setitem__(self, at, other):
"""Just like Theano set_subtensor function, but as a operator.
at: Coordinates / slice to be set.
other: Data to be put at 'at'"""
raise NotImplementedError
def shape(self):
"""Returns shape of numlike."""
raise NotImplementedError
def __add__(self, other):
"""Returns sum of two numlikes.
other: numlike."""
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __sub__(self, other):
"""Returns difference between two numlikes.
other: numlike to be subtracted."""
raise NotImplementedError
def __rsub__(self, other):
"""Returns diffeerence between number and numlike.
other: A number that self will be subtracted from."""
raise NotImplementedError
def __mul__(self, other):
"""Returns product of two numlikes.
other: numlike to be multiplied."""
raise NotImplementedError
def __rmul__(self, other):
raise NotImplementedError
def __div__(self, other):
"""Returns quotient of self and other."""
raise NotImplementedError
def reciprocal(self):
"""Returns reciprocal of the numlike."""
raise NotImplementedError
def neg(self):
"""Returns (-1) * numlike"""
raise NotImplementedError
def exp(self):
"""Returns numlike representing the exponential of the numlike."""
raise NotImplementedError
def square(self):
"""Returns square of the numlike."""
raise NotImplementedError
def power(self, exponent):
"""For numlike N, returns N^exponent.
exponent: Number to be passed as exponent to N^exponent."""
raise NotImplementedError
def dot(self, other):
"""Dot product of numlike vector and a number array (other)"""
raise NotImplementedError
|
bsd-2-clause
|
Python
|
9ba255886ca5315be1b95ccac28d496e3941f155
|
Bump alpha version
|
prkumar/uplink
|
uplink/__about__.py
|
uplink/__about__.py
|
"""
This module is the single source of truth for any package metadata
that is used both in distribution (i.e., setup.py) and within the
codebase.
"""
__version__ = "0.8.0a1"
|
"""
This module is the single source of truth for any package metadata
that is used both in distribution (i.e., setup.py) and within the
codebase.
"""
__version__ = "0.8.0a0"
|
mit
|
Python
|
ec665be1811b458f849cbed09ef3d3c61f9e4533
|
Change order of environment setup
|
metabolite-atlas/metatlas,metabolite-atlas/metatlas,metabolite-atlas/metatlas
|
metatlas/tools/notebook.py
|
metatlas/tools/notebook.py
|
"""Jupyter notebook helper functions"""
import logging
import os
import shutil
import sys
from pathlib import Path
import pandas as pd
from IPython.core.display import display, HTML
from metatlas.tools.logging import activate_logging
logger = logging.getLogger(__name__)
def configure_environment(log_level):
"""
Sets environment variables and configures logging
inputs:
log_level: one of 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
"""
activate_logging(console_level=log_level)
os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
def validate_kernel():
"""
Raise error if problem with kernel
When on NERSC, this will install the correct kernel if needed
"""
allowed_exe = [
"/global/common/software/m2650/metatlas-targeted-20210521/bin/python",
]
error_msg = "Invalid kernel setting in Jupyter Notebook."
on_nersc = "METATLAS_LOCAL" not in os.environ
if on_nersc and sys.executable not in allowed_exe:
install_kernel()
logger.critical('Please check that the kernel is set to "Metatlas Targeted".')
raise ValueError(error_msg)
try:
# pylint: disable=import-outside-toplevel,unused-import
import dataset # noqa: F401
except ModuleNotFoundError as module_error:
logger.critical(
'Could not find dataset module. Please check that the kernel is set to "Metatlas Targeted".'
)
raise ModuleNotFoundError from module_error
def install_kernel():
"""
Copies kernel.json from repo to active location under home directory.
Only for use on NERC!
"""
logger.info('Installing kernel.json for "Metatlas Targeted".')
repo_path = Path(__file__).resolve().parent.parent.parent
source = repo_path / "notebooks" / "kernels" / "metatlas-targeted.kernel.json"
dest_dir = Path.home() / ".local" / "share" / "jupyter" / "kernels" / "metatlas-targeted"
os.makedirs(dest_dir, exist_ok=True)
shutil.copyfile(source, dest_dir / "kernel.json")
logger.info('Reload the page and change kernel to "Metatlas Targeted".')
def configure_pandas_display(max_rows=5000, max_columns=500, max_colwidth=100):
"""Set pandas display options"""
pd.set_option("display.max_rows", max_rows)
pd.set_option("display.max_columns", max_columns)
pd.set_option("display.max_colwidth", max_colwidth)
def configure_notebook_display():
"""Configure output from Jupyter"""
# set notebook to have minimal side margins
display(HTML("<style>.container { width:100% !important; }</style>"))
def setup(log_level):
"""High level function to prepare the metatlas notebook"""
configure_environment(log_level)
validate_kernel()
configure_notebook_display()
configure_pandas_display()
|
"""Jupyter notebook helper functions"""
import logging
import os
import shutil
import sys
from pathlib import Path
import pandas as pd
from IPython.core.display import display, HTML
from metatlas.tools.logging import activate_logging
logger = logging.getLogger(__name__)
def configure_environment(log_level):
"""
Sets environment variables and configures logging
inputs:
log_level: one of 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
"""
activate_logging(console_level=log_level)
os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
def validate_kernel():
"""
Raise error if problem with kernel
When on NERSC, this will install the correct kernel if needed
"""
allowed_exe = [
"/global/common/software/m2650/metatlas-targeted-20210521/bin/python",
]
error_msg = "Invalid kernel setting in Jupyter Notebook."
on_nersc = "METATLAS_LOCAL" not in os.environ
if on_nersc and sys.executable not in allowed_exe:
install_kernel()
logger.critical('Please check that the kernel is set to "Metatlas Targeted".')
raise ValueError(error_msg)
try:
# pylint: disable=import-outside-toplevel,unused-import
import dataset # noqa: F401
except ModuleNotFoundError as module_error:
logger.critical(
'Could not find dataset module. Please check that the kernel is set to "Metatlas Targeted".'
)
raise ModuleNotFoundError from module_error
def install_kernel():
"""
Copies kernel.json from repo to active location under home directory.
Only for use on NERC!
"""
logger.info('Installing kernel.json for "Metatlas Targeted".')
repo_path = Path(__file__).resolve().parent.parent.parent
source = repo_path / "notebooks" / "kernels" / "metatlas-targeted.kernel.json"
dest_dir = Path.home() / ".local" / "share" / "jupyter" / "kernels" / "metatlas-targeted"
os.makedirs(dest_dir, exist_ok=True)
shutil.copyfile(source, dest_dir / "kernel.json")
logger.info('Reload the page and change kernel to "Metatlas Targeted".')
def configure_pandas_display(max_rows=5000, max_columns=500, max_colwidth=100):
"""Set pandas display options"""
pd.set_option("display.max_rows", max_rows)
pd.set_option("display.max_columns", max_columns)
pd.set_option("display.max_colwidth", max_colwidth)
def configure_notebook_display():
"""Configure output from Jupyter"""
# set notebook to have minimal side margins
display(HTML("<style>.container { width:100% !important; }</style>"))
def setup(log_level):
"""High level function to prepare the metatlas notebook"""
validate_kernel()
configure_environment(log_level)
configure_notebook_display()
configure_pandas_display()
|
bsd-3-clause
|
Python
|
646416efa7378b645af56031c06e7544cb72627f
|
Delete comment
|
fushime2/recommenblr
|
user_recommender.py
|
user_recommender.py
|
from tumblr_manager import TumblrManager, TumblrScraper
class UserRecommender(object):
user_counter = {}
def __init__(self, consumer_key=None, consumer_secret=None, oauth_token=None, oauth_token_secret=None):
self.tm = TumblrManager(consumer_key, consumer_secret, oauth_token, oauth_token_secret)
self.ts = TumblrScraper(consumer_key, consumer_secret, oauth_token, oauth_token_secret)
def recommend(self, n=20):
"""
:param int n: number of users
:return: a list including n users
"""
self.set_counter()
cnt = sorted(self.user_counter.items(), key=lambda x: x[1], reverse=True)
users = [t[0] for t in cnt]
return users[:n]
def set_counter(self):
url_list = self.tm.fetch_urls()
all_users = self.ts.fetch_users_from_url(url_list)
for user in all_users:
self.add_user(user)
def add_user(self, user):
if user not in self.user_counter:
self.user_counter[user] = 1
else:
self.user_counter[user] += 1
|
from tumblr_manager import TumblrManager, TumblrScraper
class UserRecommender(object):
user_counter = {}
def __init__(self, consumer_key=None, consumer_secret=None, oauth_token=None, oauth_token_secret=None):
self.tm = TumblrManager(consumer_key, consumer_secret, oauth_token, oauth_token_secret)
self.ts = TumblrScraper(consumer_key, consumer_secret, oauth_token, oauth_token_secret)
def recommend(self, n=20):
"""
:param int n: number of users
:return: a list including n users
"""
self.set_counter()
cnt = sorted(self.user_counter.items(), key=lambda x: x[1], reverse=True)
users = [t[0] for t in cnt]
return users[:n]
def set_counter(self):
url_list = self.tm.fetch_urls()
all_users = self.ts.fetch_users_from_url(url_list)
# non_followed_users = list(filter(lambda x: not self.tm.is_following(x), all_users))
for user in all_users:
self.add_user(user)
def add_user(self, user):
if user not in self.user_counter:
self.user_counter[user] = 1
else:
self.user_counter[user] += 1
|
mit
|
Python
|
906d765e387367654a02a36e9b5ba7aca4480ed6
|
Check if zip only contains one file
|
manly-man/moodle-destroyer-tools,manly-man/moodle-destroyer-tools
|
util/zipwrangler.py
|
util/zipwrangler.py
|
from pathlib import Path
from zipfile import ZipFile
from tempfile import TemporaryDirectory
import shutil
ignore = ['__MACOSX', '.DS_Store']
def get_cleaned_contents(zipfile, ignore_list=ignore, verbose=False):
contents = []
for info in zipfile.infolist():
if not any(ignored in info.filename for ignored in ignore_list):
contents.append(info)
elif verbose:
print(f'ignored: {info.filename}')
return contents
def clean_unzip_with_temp_dir(zipfilename: Path, target=None, ignore_list=ignore, overwrite=False, remove_zip=False):
zipfile = ZipFile(str(zipfilename))
if target is None:
target = Path.cwd() / zipfilename.stem
else:
target = target / zipfilename.stem
try:
target.mkdir(exist_ok=overwrite)
except FileExistsError:
print(f'file exists, not extracting {zipfilename.name} to {target}')
return
contents = get_cleaned_contents(zipfile, ignore_list)
with TemporaryDirectory(dir=Path.cwd().absolute()) as tempdir:
temp = Path(tempdir)
for file in contents:
zipfile.extract(file, path=tempdir)
contents = list(temp.iterdir())
while len(contents) == 1:
content = contents.pop()
if content.is_dir():
contents = list(content.iterdir())
else:
contents = [content]
break
for i in contents:
shutil.move(str(i), str(target))
if remove_zip:
zipfilename.unlink()
def main():
for zipfilename in Path.cwd().glob('*.zip'):
clean_unzip_with_temp_dir(zipfilename)
if __name__ == '__main__':
main()
|
from pathlib import Path
from zipfile import ZipFile
from tempfile import TemporaryDirectory
import shutil
ignore = ['__MACOSX', '.DS_Store']
def get_cleaned_contents(zipfile, ignore_list=ignore, verbose=False):
contents = []
for info in zipfile.infolist():
if not any(ignored in info.filename for ignored in ignore_list):
contents.append(info)
elif verbose:
print(f'ignored: {info.filename}')
return contents
def clean_unzip_with_temp_dir(zipfilename: Path, target=None, ignore_list=ignore, overwrite=False, remove_zip=False):
zipfile = ZipFile(str(zipfilename))
if target is None:
target = Path.cwd() / zipfilename.stem
else:
target = target / zipfilename.stem
try:
target.mkdir(exist_ok=overwrite)
except FileExistsError:
print(f'file exists, not extracting {zipfilename.name} to {target}')
return
contents = get_cleaned_contents(zipfile, ignore_list)
with TemporaryDirectory(dir=Path.cwd().absolute()) as tempdir:
temp = Path(tempdir)
for file in contents:
zipfile.extract(file, path=tempdir)
contents = list(temp.iterdir())
while len(contents) == 1:
content = contents.pop()
contents = list(content.iterdir())
for i in contents:
shutil.move(str(i), str(target))
if remove_zip:
zipfilename.unlink()
def main():
for zipfilename in Path.cwd().glob('*.zip'):
clean_unzip_with_temp_dir(zipfilename)
if __name__ == '__main__':
main()
|
mit
|
Python
|
2d09314ab58bb766372dc6e263fb17428b1fd3cd
|
Fix check for existing pools.
|
danielballan/photomosaic
|
doc/pool_scripts/cats.py
|
doc/pool_scripts/cats.py
|
import os
import photomosaic.flickr
import photomosaic as pm
if not os.path.isfile(os.path.expanduser('~/pools/cats/pool.json')):
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
|
import os
import photomosaic.flickr
import photomosaic as pm
if not os.path.isfile('~/pools/cats/pool.json'):
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
|
bsd-3-clause
|
Python
|
e83be594507c994069d20d5f2cd86c52905a52a6
|
Fix personal brain damage.
|
mk23/snmpy,mk23/snmpy
|
lib/plugin/disk_utilization.py
|
lib/plugin/disk_utilization.py
|
import datetime
import logging
import os
import snmpy.plugin
import subprocess
class disk_utilization(snmpy.plugin.TablePlugin):
def __init__(self, conf):
conf['table'] = [
{'dev': 'string'},
{'wait': 'integer'},
{'util': 'integer'},
]
snmpy.plugin.TablePlugin.__init__(self, conf)
def update(self):
os.environ['LC_TIME'] = 'POSIX'
disk = {}
date = datetime.datetime.now() - datetime.timedelta(minutes=20)
comm = [self.conf.get('sar_command', '/usr/bin/sar'), '-d', '-f', self.conf.get('sysstat_log', '/var/log/sysstat/sa%02d') % date.day, '-s', date.strftime('%H:%M:00')]
logging.debug('running sar command: %s', ' '.join(comm))
for line in subprocess.check_output(comm, stderr=open(os.devnull, 'w')).split('\n'):
logging.debug('line: %s', line)
part = line.split()
if part and part[0] != 'Average:' and part[1].startswith('dev'):
disk[part[-9]] = [int(float(part[-3])), int(float(part[-1]))]
self.clear()
for line in open('/proc/diskstats'):
name = 'dev{}-{}'.format(*line.split()[0:2])
self.append([line.split()[2]] + disk.get(name, [0, 0]))
|
import datetime
import logging
import os
import snmpy.plugin
import subprocess
class disk_utilization(snmpy.plugin.TablePlugin):
def __init__(self, conf):
conf['table'] = [
{'dev': 'string'},
{'wait': 'integer'},
{'util': 'integer'},
]
snmpy.plugin.TablePlugin.__init__(self, conf);
def update(self):
os.environ['LC_TIME'] = 'POSIX'
disk = {}
date = datetime.datetime.now() - datetime.timedelta(minutes=20)
comm = [self.conf.get('sar_command', '/usr/bin/sar'), '-d', '-f', self.conf.get('sysstat_log', '/var/log/sysstat/sa%02d') % date.day, '-s', date.strftime('%H:%M:00')]
logging.debug('running sar command: %s', ' '.join(comm))
for line in subprocess.check_output(comm, stderr=open(os.devnull, 'w')).split('\n'):
logging.debug('line: %s', line)
part = line.split()
if part and part[0] != 'Average:' and part[1].startswith('dev'):
disk[part[-9]] = [int(float(part[-3])), int(float(part[-1]))]
self.clear()
for line in open('/proc/diskstats'):
name = 'dev{}-{}'.format(*line.split()[0:2])
self.append([line.split()[2]] + disk.get(name, [0, 0]))
|
mit
|
Python
|
f4ace89a0ee029a276f5dba95b54731a15883c4f
|
hide warning
|
nimbis/django-shop,khchine5/django-shop,jrief/django-shop,awesto/django-shop,awesto/django-shop,awesto/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop,nimbis/django-shop,nimbis/django-shop,divio/django-shop,jrief/django-shop,khchine5/django-shop,jrief/django-shop
|
example/myshop/admin/__init__.py
|
example/myshop/admin/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from shop.admin.customer import CustomerProxy, CustomerAdmin
from shop.models.order import OrderModel
__all__ = ['OrderModel', 'commodity']
# models defined by the myshop instance itself
if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity':
from shop.admin import commodity
elif settings.SHOP_TUTORIAL == 'smartcard':
from . import manufacturer
from .smartcard import smartcard, order
elif settings.SHOP_TUTORIAL == 'i18n_smartcard':
from . import manufacturer
from . import i18n_smartcard
from .smartcard import order
elif settings.SHOP_TUTORIAL == 'polymorphic':
from . import manufacturer
from .polymorphic import product, order
admin.site.register(CustomerProxy, CustomerAdmin)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from shop.admin.customer import CustomerProxy, CustomerAdmin
from shop.models.order import OrderModel
# models defined by the myshop instance itself
if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity':
from shop.admin import commodity
elif settings.SHOP_TUTORIAL == 'smartcard':
from . import manufacturer
from .smartcard import smartcard, order
elif settings.SHOP_TUTORIAL == 'i18n_smartcard':
from . import manufacturer
from . import i18n_smartcard
from .smartcard import order
elif settings.SHOP_TUTORIAL == 'polymorphic':
from . import manufacturer
from .polymorphic import product, order
admin.site.register(CustomerProxy, CustomerAdmin)
|
bsd-3-clause
|
Python
|
4ceeed0eceff9d75b0bc3047c9a8e2fcb6877e31
|
Fix tasks reading of different course_id
|
marcore/edx-platform,marcore/edx-platform,dcosentino/edx-platform,marcore/edx-platform,dcosentino/edx-platform,dcosentino/edx-platform,dcosentino/edx-platform,dcosentino/edx-platform,marcore/edx-platform
|
lms/djangoapps/ecoapi/tasks.py
|
lms/djangoapps/ecoapi/tasks.py
|
from celery.task import task
from instructor.offline_gradecalc import student_grades , offline_grade_calculation
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import SlashSeparatedCourseKey
#TODO: add a better task management to prevent concurrent task execution with some course_id
@task()
def offline_calc(course_id):
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
offline_grade_calculation(course_key)
|
from celery.task import task
from instructor.offline_gradecalc import student_grades , offline_grade_calculation
#TODO: add a better task management to prevent concurrent task execution with some course_id
@task()
def offline_calc(course_id):
offline_grade_calculation(course_id)
|
agpl-3.0
|
Python
|
6d84f7eb25352c50e40950d0585c33bd1193649e
|
fix bug in init
|
onelab-eu/sfa,yippeecw/sfa,yippeecw/sfa,yippeecw/sfa,onelab-eu/sfa,onelab-eu/sfa
|
sfa/util/osxrn.py
|
sfa/util/osxrn.py
|
import re
from sfa.util.xrn import Xrn
from sfa.util.config import Config
class OSXrn(Xrn):
def __init__(self, name=None, type=None, **kwds):
config = Config()
if name is not None:
self.type = type
self.hrn = config.SFA_INTERFACE_HRN + "." + name
self.hrn_to_urn()
else:
Xrn.__init__(self, **kwds)
self.name = self.get_name()
def get_name(self):
self._normalize()
leaf = self.leaf
sliver_id_parts = leaf.split(':')
name = sliver_id_parts[0]
name = re.sub('[^a-zA-Z0-9_]', '', name)
return name
|
import re
from sfa.util.xrn import Xrn
from sfa.util.config import Config
class OSXrn(Xrn):
def __init__(self, name=None, type=None, *args, **kwds):
config = Config()
if name is not None:
self.type = type
self.hrn = config.SFA_INTERFACE_HRN + "." + name
self.hrn_to_urn()
self.name = self.get_name()
def get_name(self):
self._normalize()
leaf = self.leaf
sliver_id_parts = leaf.split(':')
name = sliver_id_parts[0]
name = re.sub('[^a-zA-Z0-9_]', '', name)
return name
|
mit
|
Python
|
3aba768c7a3c11f2941db36d0292cd5810433596
|
fix python2.7.9
|
nkrode/RedisLive,merlian/RedisLive,udomsak/RedisLive,udomsak/RedisLive,YongMan/RedisLive,jiejieling/RdsMonitor,fengshao0907/RedisLive,udomsak/RedisLive,jiejieling/RdsMonitor,fengshao0907/RedisLive,fengshao0907/RedisLive,nkrode/RedisLive,merlian/RedisLive,nkrode/RedisLive,YongMan/RedisLive,YongMan/RedisLive,merlian/RedisLive,jiejieling/RdsMonitor
|
src/api/util/timeutils.py
|
src/api/util/timeutils.py
|
import datetime
def total_seconds(td):
# Keep backward compatibility with Python 2.6 which doesn't have
# this method
if hasattr(td, 'total_seconds'):
return td.total_seconds()
else:
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def convert_to_epoch(timestamp):
if (type(timestamp) is datetime.date):
timestamp = datetime.datetime.fromordinal(timestamp.toordinal())
timestamp = timestamp.replace(tzinfo=None)
diff = (timestamp - datetime.datetime(1970, 1, 1))
seconds = int(total_seconds(diff))
return seconds
# Original fix for Py2.6: https://github.com/mozilla/mozdownload/issues/73
def total_seconds(dt):
# Keep backward compatibility with Python 2.6 which doesn't have
# this method
if hasattr(datetime.datetime, 'total_seconds'):
return dt.total_seconds()
else:
return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10**6) / 10**6
|
from datetime import datetime
def total_seconds(td):
# Keep backward compatibility with Python 2.6 which doesn't have
# this method
if hasattr(td, 'total_seconds'):
return td.total_seconds()
else:
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def convert_to_epoch(timestamp):
diff = (timestamp - datetime(1970, 1, 1))
seconds = int(total_seconds(diff))
return seconds
# Original fix for Py2.6: https://github.com/mozilla/mozdownload/issues/73
def total_seconds(dt):
# Keep backward compatibility with Python 2.6 which doesn't have
# this method
if hasattr(datetime, 'total_seconds'):
return dt.total_seconds()
else:
return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10**6) / 10**6
|
mit
|
Python
|
f98b30583fb9fca4674ad93afd242ffae7ac9f36
|
Fix tests
|
recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,explosion/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,recognai/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu
|
spacy/tests/conftest.py
|
spacy/tests/conftest.py
|
# coding: utf-8
from __future__ import unicode_literals
from ..en import English
from ..de import German
from ..es import Spanish
from ..it import Italian
from ..fr import French
from ..pt import Portuguese
from ..nl import Dutch
from ..sv import Swedish
from ..hu import Hungarian
from ..fi import Finnish
from ..bn import Bengali
from ..tokens import Doc
from ..strings import StringStore
from ..lemmatizer import Lemmatizer
from ..attrs import ORTH, TAG, HEAD, DEP
from io import StringIO, BytesIO
from pathlib import Path
import os
import pytest
LANGUAGES = [English, German, Spanish, Italian, French, Portuguese, Dutch,
Swedish, Hungarian, Finnish, Bengali]
@pytest.fixture(params=LANGUAGES)
def tokenizer(request):
lang = request.param
return lang.Defaults.create_tokenizer()
@pytest.fixture
def en_tokenizer():
return English.Defaults.create_tokenizer()
@pytest.fixture
def en_vocab():
return English.Defaults.create_vocab()
@pytest.fixture
def en_parser():
return English.Defaults.create_parser()
@pytest.fixture
def de_tokenizer():
return German.Defaults.create_tokenizer()
@pytest.fixture(scope='module')
def fr_tokenizer():
return French.Defaults.create_tokenizer()
@pytest.fixture
def hu_tokenizer():
return Hungarian.Defaults.create_tokenizer()
@pytest.fixture
def fi_tokenizer():
return Finnish.Defaults.create_tokenizer()
@pytest.fixture
def sv_tokenizer():
return Swedish.Defaults.create_tokenizer()
@pytest.fixture
def bn_tokenizer():
return Bengali.Defaults.create_tokenizer()
@pytest.fixture
def stringstore():
return StringStore()
@pytest.fixture
def en_entityrecognizer():
return English.Defaults.create_entity()
@pytest.fixture
def lemmatizer():
return English.Defaults.ceate_lemmatizer()
@pytest.fixture
def text_file():
return StringIO()
@pytest.fixture
def text_file_b():
return BytesIO()
# only used for tests that require loading the models
# in all other cases, use specific instances
@pytest.fixture(scope="session")
def EN():
return English()
@pytest.fixture(scope="session")
def DE():
return German()
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
# coding: utf-8
from __future__ import unicode_literals
from ..en import English
from ..de import German
from ..es import Spanish
from ..it import Italian
from ..fr import French
from ..pt import Portuguese
from ..nl import Dutch
from ..sv import Swedish
from ..hu import Hungarian
from ..fi import Finnish
from ..bn import Bengali
from ..tokens import Doc
from ..strings import StringStore
from ..lemmatizer import Lemmatizer
from ..attrs import ORTH, TAG, HEAD, DEP
from io import StringIO, BytesIO
from pathlib import Path
import os
import pytest
LANGUAGES = [English, German, Spanish, Italian, French, Portuguese, Dutch,
Swedish, Hungarian, Finnish, Bengali]
@pytest.fixture(params=LANGUAGES)
def tokenizer(request):
lang = request.param
return lang.Defaults.create_tokenizer()
@pytest.fixture
def en_tokenizer():
return English.Defaults.create_tokenizer()
@pytest.fixture
def en_vocab():
return English.Defaults.create_vocab()
@pytest.fixture
def en_parser():
return English.Defaults.create_parser()
@pytest.fixture
def de_tokenizer():
return German.Defaults.create_tokenizer()
@pytest.fixture(scope='module')
def fr_tokenizer():
return French.Defaults.create_tokenizer()
@pytest.fixture
def hu_tokenizer():
return Hungarian.Defaults.create_tokenizer()
@pytest.fixture
def fi_tokenizer():
return Finnish.Defaults.create_tokenizer()
@pytest.fixture
def sv_tokenizer():
return Swedish.Defaults.create_tokenizer()
@pytest.fixture
def bn_tokenizer():
return Bengali.Defaults.create_tokenizer()
@pytest.fixture
def stringstore():
return StringStore()
@pytest.fixture
def en_entityrecognizer():
return English.Defaults.create_entity()
@pytest.fixture
def lemmatizer(path):
return English.Defaults.ceate_lemmatizer()
@pytest.fixture
def text_file():
return StringIO()
@pytest.fixture
def text_file_b():
return BytesIO()
# only used for tests that require loading the models
# in all other cases, use specific instances
@pytest.fixture(scope="session")
def EN():
return English()
@pytest.fixture(scope="session")
def DE():
return German()
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
mit
|
Python
|
d6b69f7d5868597426f7718165d4933af72e154d
|
Fix typo in command-line
|
nafraf/spreads,miloh/spreads,miloh/spreads,adongy/spreads,nafraf/spreads,gareth8118/spreads,DIYBookScanner/spreads,adongy/spreads,DIYBookScanner/spreads,gareth8118/spreads,DIYBookScanner/spreads,gareth8118/spreads,miloh/spreads,nafraf/spreads,adongy/spreads
|
spreadsplug/pdfbeads.py
|
spreadsplug/pdfbeads.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Johannes Baiter <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, unicode_literals
import logging
import os
import subprocess
import time
from spreads.plugin import HookPlugin, OutputHookMixin
from spreads.util import MissingDependencyException, find_in_path
if not find_in_path('pdfbeads'):
raise MissingDependencyException("Could not find executable `pdfbeads` in"
" $PATH. Please install the appropriate"
" package(s)!")
logger = logging.getLogger('spreadsplug.pdfbeads')
class PDFBeadsPlugin(HookPlugin, OutputHookMixin):
__name__ = 'pdfbeads'
def output(self, path):
logger.info("Assembling PDF.")
path = path.absolute()
img_dir = path / 'data' / 'done'
pdf_file = path / 'data' / 'out' / "{0}.pdf".format(path.name)
img_files = [unicode(x.name) for x in sorted(img_dir.glob('*.tif'))]
cmd = ["pdfbeads", "-d"] + img_files + ["-o", unicode(pdf_file)]
logger.debug("Running " + " ".join(cmd))
# NOTE: pdfbeads only finds *html files for the text layer in the
# working directory...
os.chdir(unicode(img_dir))
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
last_count = 0
while proc.poll() is None:
current_count = sum(1 for x in img_dir.glob('*.jbig2'))
if current_count > last_count:
last_count = current_count
self.on_progressed.send(
self, progress=float(current_count)/len(img_files))
time.sleep(.1)
logger.debug("Output:\n{0}".format(proc.stdout.read()))
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Johannes Baiter <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, unicode_literals
import logging
import os
import subprocess
import time
from spreads.plugin import HookPlugin, OutputHookMixin
from spreads.util import MissingDependencyException, find_in_path
if not find_in_path('pdfbeads'):
raise MissingDependencyException("Could not find executable `pdfbeads` in"
" $PATH. Please install the appropriate"
" package(s)!")
logger = logging.getLogger('spreadsplug.pdfbeads')
class PDFBeadsPlugin(HookPlugin, OutputHookMixin):
__name__ = 'pdfbeads'
def output(self, path):
logger.info("Assembling PDF.")
path = path.absolute()
img_dir = path / 'data' / 'done'
pdf_file = path / 'data' / ' out' / "{0}.pdf".format(path.name)
img_files = [unicode(x.name) for x in sorted(img_dir.glob('*.tif'))]
cmd = ["pdfbeads", "-d"] + img_files + ["-o", unicode(pdf_file)]
logger.debug("Running " + " ".join(cmd))
# NOTE: pdfbeads only finds *html files for the text layer in the
# working directory...
os.chdir(unicode(img_dir))
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
last_count = 0
while proc.poll() is None:
current_count = sum(1 for x in img_dir.glob('*.jbig2'))
if current_count > last_count:
last_count = current_count
self.on_progressed.send(
self, progress=float(current_count)/len(img_files))
time.sleep(.1)
logger.debug("Output:\n{0}".format(proc.stdout.read()))
|
agpl-3.0
|
Python
|
20cf0f2b6647b7a03fb5a9808f9d854975feb651
|
add shebang line to demo.py
|
jomag/prettytask
|
demo.py
|
demo.py
|
#!/usr/bin/env python3
from prettytask import Task, TaskGroup, Error, prompt
def main():
with Task("A quick task"):
pass
with Task("A task with a custom success message") as task:
task.ok("that went well!")
with Task("A task that fails") as task:
raise Error
with Task("A task that fails with a custom error"):
raise Error("crash and burn...")
try:
with Task("A task that fails with some other exception"):
x = 1 / 0
except ZeroDivisionError:
print(" ... the exception was reraised and caught as expected ...")
with TaskGroup("This marks the start of a set of tasks"):
with Task("Here's one"):
pass
with Task("Another one that fails"):
raise Error
with Task("Finally a third one") as task:
task.ok("all done!")
x = prompt("What is your name?", type=str, stripped=True, default="Foo")
print("Hello, {} ({})".format(x, type(x)))
y = prompt("What is your age?", type=int, default=42, retries=3)
print("Got it: {} years ({})".format(y, type(y)))
z = prompt("What is your favourite color?", choices=["red", "green", "blue"], default="green")
print("Color: {} ({})".format(z, type(z)))
w = prompt("Are we done?", type=bool, default=True)
print("Done? {} ({})".format(w, type(w)))
if __name__ == "__main__":
main()
|
from prettytask import Task, TaskGroup, Error, prompt
def main():
with Task("A quick task"):
pass
with Task("A task with a custom success message") as task:
task.ok("that went well!")
with Task("A task that fails") as task:
raise Error
with Task("A task that fails with a custom error"):
raise Error("crash and burn...")
try:
with Task("A task that fails with some other exception"):
x = 1 / 0
except ZeroDivisionError:
print(" ... the exception was reraised and caught as expected ...")
with TaskGroup("This marks the start of a set of tasks"):
with Task("Here's one"):
pass
with Task("Another one that fails"):
raise Error
with Task("Finally a third one") as task:
task.ok("all done!")
x = prompt("What is your name?", type=str, stripped=True, default="Foo")
print("Hello, {} ({})".format(x, type(x)))
y = prompt("What is your age?", type=int, default=42, retries=3)
print("Got it: {} years ({})".format(y, type(y)))
z = prompt("What is your favourite color?", choices=["red", "green", "blue"], default="green")
print("Color: {} ({})".format(z, type(z)))
w = prompt("Are we done?", type=bool, default=True)
print("Done? {} ({})".format(w, type(w)))
if __name__ == "__main__":
main()
|
mit
|
Python
|
a6aec17cff730914c0901db9e9ab9bb4da660306
|
Switch elm-formato to post save
|
deadfoxygrandpa/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage,sekjun9878/Elm.tmLanguage
|
elm_format.py
|
elm_format.py
|
from __future__ import print_function
import subprocess
import os, os.path
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
settings = sublime.load_settings('Elm Language Support.sublime-settings')
path = settings.get('elm_paths', '')
if path:
old_path = os.environ['PATH']
os.environ['PATH'] = os.path.expandvars(path + ';$PATH')
command = ['elm-format', self.view.file_name(), '--yes']
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
if path:
os.environ['PATH'] = old_path
output, errors = p.communicate()
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), '\nerrors: ' + str(errors.strip()))
if str(errors.strip()):
print('Your PATH is: ', os.environ['PATH'])
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_post_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
from __future__ import print_function
import subprocess
import os, os.path
import re
import sublime, sublime_plugin
class ElmFormatCommand(sublime_plugin.TextCommand):
def run(self, edit):
settings = sublime.load_settings('Elm Language Support.sublime-settings')
path = settings.get('elm_paths', '')
if path:
old_path = os.environ['PATH']
os.environ['PATH'] = os.path.expandvars(path + ';$PATH')
command = ['elm-format', self.view.file_name(), '--yes']
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
if path:
os.environ['PATH'] = old_path
output, errors = p.communicate()
if settings.get('debug', False):
string_settings = sublime.load_settings('Elm User Strings.sublime-settings')
print(string_settings.get('logging.prefix', '') + '(elm-format) ' + str(output.strip()), '\nerrors: ' + str(errors.strip()))
if str(errors.strip()):
print('Your PATH is: ', os.environ['PATH'])
class ElmFormatOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
sel = view.sel()[0]
region = view.word(sel)
scope = view.scope_name(region.b)
if scope.find('source.elm') != -1:
settings = sublime.load_settings('Elm Language Support.sublime-settings')
if settings.get('elm_format_on_save', True):
regex = settings.get('elm_format_filename_filter', '')
if not (len(regex) > 0 and re.search(regex, view.file_name()) is not None):
view.run_command('elm_format')
|
mit
|
Python
|
d679f7dbedd3decc7cd4abc782d4c0fae0b872ea
|
Enable the ability to import H264 SubMe, MotionEstimationMethod and Trellis
|
bitmovin/bitmovin-python
|
bitmovin/resources/enums/__init__.py
|
bitmovin/resources/enums/__init__.py
|
from .status import Status
from .aac_channel_layout import AACChannelLayout
from .ac3_channel_layout import AC3ChannelLayout
from .aws_cloud_region import AWSCloudRegion
from .badapt import BAdapt
from .cloud_region import CloudRegion
from .crop_filter_unit import CropFilterUnit
from .google_cloud_region import GoogleCloudRegion
from .h264_level import H264Level
from .h264_profile import H264Profile
from .h265_level import H265Level
from .h265_profile import H265Profile
from .vp9_aq_mode import VP9AQMode
from .vp9_arnr_type import VP9ARNRType
from .vp9_quality import VP9Quality
from .max_ctu_size import MaxCTUSize
from .motion_search import MotionSearch
from .mv_prediction_mode import MVPredictionMode
from .tu_inter_depth import TUInterDepth
from .tu_intra_depth import TUIntraDepth
from .encoder_version import EncoderVersion
from .selection_mode import SelectionMode
from .acl_permission import ACLPermission
from .fmp4_representation_type import FMP4RepresentationType
from .webm_representation_type import WebMRepresentationType
from .id3_tag_position_mode import ID3TagPositionMode
from .deinterlace_mode import DeinterlaceMode
from .picture_field_parity import PictureFieldParity
from .audio_mix_filter_channel_layout import AudioMixFilterChannelLayout
from .audio_mix_filter_channel_type import AudioMixFilterChannelType
from .ftp_transfer_version import FTPTransferVersion
from .thumbnail_unit import ThumbnailUnit
from .pixel_format import PixelFormat
from .font import Font
from .chroma_location import ChromaLocation
from .color_primaries import ColorPrimaries
from .color_range import ColorRange
from .color_space import ColorSpace
from .color_transfer import ColorTransfer
from .input_color_range import InputColorRange
from .input_color_space import InputColorSpace
from .set_rai_on_au import SetRaiOnAu
from .h264_b_pyramid import H264BPyramid
from .h264_nal_hrd import H264NalHrd
from .mp2_channel_layout import MP2ChannelLayout
from .h264_partition import H264Partition
from .h264_interlace_mode import H264InterlaceMode
from .s3_sig_version import S3SignatureVersion
from .watermark_unit import WatermarkUnit
from .he_aac_signaling import HeAacSignaling
from .interlace_mode import InterlaceMode
from .vertical_low_pass_filtering_mode import VerticalLowPassFilteringMode
from .scaling_algorithm import ScalingAlgorithm
from .encoding_mode import EncodingMode
from .audio_video_sync_mode import AudioVideoSyncMode
from .stream_mode import StreamMode
from .playready_method import PlayReadyMethod
from .h264_trellis import H264Trellis
from .h264_sub_me import H264SubMe
from .h264_motion_estimation_method import H264MotionEstimationMethod
|
from .status import Status
from .aac_channel_layout import AACChannelLayout
from .ac3_channel_layout import AC3ChannelLayout
from .aws_cloud_region import AWSCloudRegion
from .badapt import BAdapt
from .cloud_region import CloudRegion
from .crop_filter_unit import CropFilterUnit
from .google_cloud_region import GoogleCloudRegion
from .h264_level import H264Level
from .h264_profile import H264Profile
from .h265_level import H265Level
from .h265_profile import H265Profile
from .vp9_aq_mode import VP9AQMode
from .vp9_arnr_type import VP9ARNRType
from .vp9_quality import VP9Quality
from .max_ctu_size import MaxCTUSize
from .motion_search import MotionSearch
from .mv_prediction_mode import MVPredictionMode
from .tu_inter_depth import TUInterDepth
from .tu_intra_depth import TUIntraDepth
from .encoder_version import EncoderVersion
from .selection_mode import SelectionMode
from .acl_permission import ACLPermission
from .fmp4_representation_type import FMP4RepresentationType
from .webm_representation_type import WebMRepresentationType
from .id3_tag_position_mode import ID3TagPositionMode
from .deinterlace_mode import DeinterlaceMode
from .picture_field_parity import PictureFieldParity
from .audio_mix_filter_channel_layout import AudioMixFilterChannelLayout
from .audio_mix_filter_channel_type import AudioMixFilterChannelType
from .ftp_transfer_version import FTPTransferVersion
from .thumbnail_unit import ThumbnailUnit
from .pixel_format import PixelFormat
from .font import Font
from .chroma_location import ChromaLocation
from .color_primaries import ColorPrimaries
from .color_range import ColorRange
from .color_space import ColorSpace
from .color_transfer import ColorTransfer
from .input_color_range import InputColorRange
from .input_color_space import InputColorSpace
from .set_rai_on_au import SetRaiOnAu
from .h264_b_pyramid import H264BPyramid
from .h264_nal_hrd import H264NalHrd
from .mp2_channel_layout import MP2ChannelLayout
from .h264_partition import H264Partition
from .h264_interlace_mode import H264InterlaceMode
from .s3_sig_version import S3SignatureVersion
from .watermark_unit import WatermarkUnit
from .he_aac_signaling import HeAacSignaling
from .interlace_mode import InterlaceMode
from .vertical_low_pass_filtering_mode import VerticalLowPassFilteringMode
from .scaling_algorithm import ScalingAlgorithm
from .encoding_mode import EncodingMode
from .audio_video_sync_mode import AudioVideoSyncMode
from .stream_mode import StreamMode
from .playready_method import PlayReadyMethod
|
unlicense
|
Python
|
b4a2bf0ee660aab40a885cd8b84c18c8b4a8580b
|
make host, ip and type dynamic
|
missionpinball/mpf,missionpinball/mpf
|
mpf/core/bcp/bcp_server.py
|
mpf/core/bcp/bcp_server.py
|
"""Bcp server for clients which connect and disconnect randomly."""
import asyncio
import logging
from mpf.core.utility_functions import Util
class BcpServer():
"""Server socket which listens for incoming BCP clients."""
def __init__(self, machine, ip, port, type):
self.machine = machine
self.log = logging.getLogger('BCPServer')
self._server = None
self._ip = ip
self._port = port
self._type = type
@asyncio.coroutine
def start(self):
"""Start the server."""
self._server = yield from self.machine.clock.start_server(
self._accept_client, self._ip, self._port, loop=self.machine.clock.loop)
def stop(self):
"""Stop the BCP server, i.e. closes the listening socket(s)."""
if self._server:
self._server.close()
self._server = None
@asyncio.coroutine
def _accept_client(self, client_reader, client_writer):
"""Accept an connection and create client."""
self.log.info("New client connected.")
client = Util.string_to_class(self._type)(self.machine, None, self.machine.bcp)
client.accept_connection(client_reader, client_writer)
self.machine.bcp.transport.register_transport(client)
|
"""Bcp server for clients which connect and disconnect randomly."""
import asyncio
from mpf.core.bcp.bcp_socket_client import BCPClientSocket
class BcpServer():
"""Server socket which listens for incoming BCP clients."""
def __init__(self, machine):
self.machine = machine
self._server = None
@asyncio.coroutine
def start(self):
"""Start the server."""
self._server = yield from self.machine.clock.start_server(
self._accept_client, '127.0.0.1', 5051, loop=self.machine.clock.loop)
@asyncio.coroutine
def stop(self, loop):
"""Stop the BCP server, i.e. closes the listening socket(s)."""
if self.server:
self.server.close()
yield from self.server.wait_closed()
self.server = None
@asyncio.coroutine
def _accept_client(self, client_reader, client_writer):
"""Accept an connection and create client."""
client = BCPClientSocket(self.machine, None, self.machine.bcp.interface)
client.accept_connection(client_reader, client_writer)
self.machine.bcp.transport.register_transport(client)
|
mit
|
Python
|
61d8ced0d46bb0e351b8c488814b75b1de2ddab3
|
Update Ejemplos.py
|
AnhellO/DAS_Sistemas,AnhellO/DAS_Sistemas,AnhellO/DAS_Sistemas
|
Ago-Dic-2018/Ejemplos/Ejemplos.py
|
Ago-Dic-2018/Ejemplos/Ejemplos.py
|
import collections
potenciaPares = 2
potenciaImpares = 3
# print(2 / 3)
#for i in range(0, 10):
#if i % 2:
# Estilo de formateo 1:
# print("Impar: %d" % (i))
# Estilo de formateo 2:
# print("El impar #{} ^ {} es = {}".format(i, potenciaImpares, i ** potenciaImpares))
#else:
# Estilo de formateo 1:
# print("Par: %d" % (i))
# Estilo de formateo 2:
# print("El par #{} ^ {} es = {}".format(i, potenciaPares, i ** potenciaPares))
# Corchetes [] para las listas
miListilla = [1, 'uai', 3, 'lista', 'puede', 'tener', 'de', 'todo', 'por ejemplo', [7, 777, 77]]
#print(miListilla)
#Hágalo con slicing
#print("Hola"[:2])
#print(miListilla[-2][1])
#for i in miListilla:
# if isinstance(i, collections.Iterable):
# for j in i:
# if j == 777:
# print("El número de la suerte :D esssss -> {}!!!".format(j))
miListilla.append(777)
miListilla.insert(2, [])
miListilla.insert(2, [])
miListilla.insert(2, 3)
miListilla.remove([])
miListilla[1] = 'Hey!'
miNuevaListilla = miListilla[:]
miNuevaListilla.reverse()
print(miNuevaListilla)
print(miListilla)
miTupla = (1, 2, 3)
# Aquí falla ehhh -> miTupla[1] = []
print(miTupla)
# Sumatoria de Gauss y Números triangulares
# (N * (N + 1)) / 2
# print(int((3 * (3 + 1)) / 2))
# Listas comprimidas
print([2 ** i for i in range(0, 10)])
frase = "Hola Buenas noches A!!!"
print([i.upper() for i in frase if i in 'AEIOUaeiou'])
# Diccionarios
diccionario = {
'llave': 'valor'
}
elDiccionario = {
'A': [
'aguacate',
'armadura',
'avanzar'
],
'E': [
'enfermo',
'error',
'elote'
],
'I': [
'información',
'imagen',
'invisible'
],
'O': [
'oreja',
'oso',
'olor'
],
'U': [
'umbral',
'unicornio',
'uva'
]
}
# Recorre los elementos del diccionario
for llave, valor in elDiccionario.items():
print("Llave {} => Valor {}".format(llave, valor))
# Recorre las tuplas que devuelve la función items()
for llave in elDiccionario.items():
print(llave)
# Recorre las llaves ordenadas e imprime el elemento[llave]
for llave in sorted(elDiccionario.keys()):
print(elDiccionario[llave])
def formatea(item):
if isinstance(item, list):
listaCopia = item[:]
listaCopia.append('agregao')
return "Una copia diferente -> {}".format(listaCopia)
return "Acá lo regresamos formateado -> {}".format(item)
miLista = [1, 2]
print(formatea(miLista))
print(miLista)
miLista2 = [3, 4]
print(formatea(miLista2))
print(miLista2)
print(formatea('String'))
print(formatea(64361349713976972364691))
|
import collections
potenciaPares = 2
potenciaImpares = 3
# print(2 / 3)
#for i in range(0, 10):
#if i % 2:
# Estilo de formateo 1:
# print("Impar: %d" % (i))
# Estilo de formateo 2:
# print("El impar #{} ^ {} es = {}".format(i, potenciaImpares, i ** potenciaImpares))
#else:
# Estilo de formateo 1:
# print("Par: %d" % (i))
# Estilo de formateo 2:
# print("El par #{} ^ {} es = {}".format(i, potenciaPares, i ** potenciaPares))
# Corchetes [] para las listas
miListilla = [1, 'uai', 3, 'lista', 'puede', 'tener', 'de', 'todo', 'por ejemplo', [7, 777, 77]]
#print(miListilla)
#Hágalo con slicing
#print("Hola"[:2])
#print(miListilla[-2][1])
#for i in miListilla:
# if isinstance(i, collections.Iterable):
# for j in i:
# if j == 777:
# print("El número de la suerte :D esssss -> {}!!!".format(j))
miListilla.append(777)
miListilla.insert(2, [])
miListilla.insert(2, [])
miListilla.insert(2, 3)
miListilla.remove([])
miListilla[1] = 'Hey!'
miNuevaListilla = miListilla[:]
miNuevaListilla.reverse()
print(miNuevaListilla)
print(miListilla)
miTupla = (1, 2, 3)
miTupla[1] = []
print(miTupla)
|
mit
|
Python
|
4b545d2e72080537672bb4ebb990708cad678344
|
Debug Google Cloud Run support
|
diodesign/diosix
|
entrypoint.py
|
entrypoint.py
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <[email protected]>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
os.system('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <[email protected]>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
mit
|
Python
|
6c3929806a19fbaac0c17887e697bba7ddeaa92d
|
create cache dir if it does not exist
|
biocore/micronota,biocore/micronota,RNAer/micronota,RNAer/micronota
|
micronota/commands/database.py
|
micronota/commands/database.py
|
# ----------------------------------------------------------------------------
# Copyright (c) 2015--, micronota development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from os import makedirs
from os.path import join
from importlib import import_module
import click
from ..cli import cmd, AliasedGroup
from .. import db
@cmd.group(cls=AliasedGroup)
@click.pass_context
def cli(ctx):
'''Database operations.'''
pass
@cli.command('prepare')
@click.argument('databases', nargs=-1)
@click.option('-d', '--cache_dir', required=True,
type=click.Path(file_okay=False),
help=('The directory to cache the downloaded files so that file '
'do not need to be downloaded again if it exists there.'))
@click.option('-f', '--force', is_flag=True,
help='Force overwrite.')
@click.pass_context
def create_db(ctx, databases, cache_dir, force):
'''Prepare database.
Download the files for the specified DATABASES and manipulate
them as proper format for micronota.'''
# this cmd is 2-level nested, so double "parent"
grandparent_ctx = ctx.parent.parent
config = grandparent_ctx.config
func_name = 'prepare_db'
makedirs(cache_dir, exist_ok=True)
for d in databases:
submodule = import_module('.%s' % d, db.__name__)
f = getattr(submodule, func_name)
out_d = join(config.db_dir, d)
makedirs(out_d, exist_ok=True)
f(out_d, cache_dir, force=force)
|
# ----------------------------------------------------------------------------
# Copyright (c) 2015--, micronota development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from os import makedirs
from os.path import join
from importlib import import_module
import click
from ..cli import cmd, AliasedGroup
from .. import db
@cmd.group(cls=AliasedGroup)
@click.pass_context
def cli(ctx):
'''Database operations.'''
pass
@cli.command('prepare')
@click.argument('databases', nargs=-1)
@click.option('-d', '--cache_dir', required=True,
type=click.Path(file_okay=False),
help=('The directory to cache the downloaded files so that file '
'do not need to be downloaded again if it exists there.'))
@click.option('-f', '--force', is_flag=True,
help='Force overwrite.')
@click.pass_context
def create_db(ctx, databases, cache_dir, force):
'''Prepare database.
Download the files for the specified DATABASES and manipulate
them as proper format for micronota.'''
# this cmd is 2-level nested, so double "parent"
grandparent_ctx = ctx.parent.parent
config = grandparent_ctx.config
func_name = 'prepare_db'
for d in databases:
submodule = import_module('.%s' % d, db.__name__)
f = getattr(submodule, func_name)
out_d = join(config.db_dir, d)
makedirs(out_d, exist_ok=True)
f(out_d, cache_dir, force=force)
|
bsd-3-clause
|
Python
|
07874ee51375b7597d79288e85acc68294d4b007
|
customize the JSON dump for Event objects
|
OAButton/OAButton_old,OAButton/OAButton_old,OAButton/OAButton_old
|
oabutton/apps/web/views.py
|
oabutton/apps/web/views.py
|
from django.shortcuts import render_to_response
from django.conf import settings
from django.core.context_processors import csrf
from oabutton.common import SigninForm
import json
def homepage(req):
# Need to lazy import the Event model so that tests work with
# mocks
c = {}
c.update(csrf(req))
from oabutton.apps.bookmarklet.models import Event
evt_count = Event.objects.count()
data = []
for evt in Event.objects.all():
data.append({'doi': evt.doi,
'coords': dict(evt.coords),
'accessed': evt.accessed.strftime("%b %d, %Y"),
'user_name': evt.user_name,
'user_profession': evt.user_profession,
'description': evt.description,
'story': evt.story,
'url': evt.url,
})
c.update({'count': evt_count,
'events': json.dumps(data),
'hostname': settings.HOSTNAME,
'signin_form': SigninForm()})
return render_to_response('web/index.jade', c)
|
from django.shortcuts import render_to_response
from django.conf import settings
from django.core.context_processors import csrf
from oabutton.common import SigninForm
def homepage(req):
# Need to lazy import the Event model so that tests work with
# mocks
c = {}
c.update(csrf(req))
from oabutton.apps.bookmarklet.models import Event
evt_count = Event.objects.count()
json_data = Event.objects.all().to_json()
c.update({'count': evt_count,
'events': json_data,
'hostname': settings.HOSTNAME,
'signin_form': SigninForm()})
return render_to_response('web/index.jade', c)
|
mit
|
Python
|
371ddf2c4beb79b82b1154abfa1efdd6bc5e379a
|
Change version to 0.5.dev
|
einvalentin/elasticutils,mozilla/elasticutils,einvalentin/elasticutils,mozilla/elasticutils,mozilla/elasticutils,einvalentin/elasticutils
|
elasticutils/_version.py
|
elasticutils/_version.py
|
# follow pep-386
# Examples:
# * 0.3 - released version
# * 0.3a1 - alpha version
# * 0.3.dev - version in developmentv
__version__ = '0.5.dev'
__releasedate__ = ''
|
# follow pep-386
# Examples:
# * 0.3 - released version
# * 0.3a1 - alpha version
# * 0.3.dev - version in developmentv
__version__ = '0.4'
__releasedate__ = '20120731'
|
bsd-3-clause
|
Python
|
ba6b5c50e5ea1875e117d72675fb58092325b193
|
add moves: left, right and down
|
hard7/Tetris
|
game.py
|
game.py
|
#using python2
import Tkinter
from visual import Visual
from relief import Relief
from figure import Figure
from random import randint
class Game:
def __init__(self):
self.root= Tkinter.Tk()
self.vis= Visual(self.root)
self.relief= Relief()
self.figure= None
self.relief.extend([(0,0), (0,3)])
self.root.after_idle(self.tick)
self.root.bind('<KeyPress>', self.press_key)
self.root.mainloop()
def tick(self):
self.root.after(300, self.tick)
if not self.figure:
self.figure= Figure()
if self.relief.have_collision(self.figure.get_all()):
print 'generate collision with relief'
self.root.quit()
self.figure.down_move()
if self.try_stand_figure():
self.figure= None
if self.relief.overload():
print 'You Fail'
self.root.quit()
self.redraw()
def redraw(self):
self.vis.reset()
self.vis.draw(self.relief.get_all(), 'red')
if self.figure:
self.vis.draw(self.figure.get_all(), 'green')
def press_key(self, event):
inp= event.char.upper()
if inp == 'D':
self.figure.right_move()
if self.relief.have_collision(self.figure.get_all()):
self.figure.rollback()
else:
self.redraw()
elif inp == 'A':
self.figure.left_move()
if self.relief.have_collision(self.figure.get_all()):
self.figure.rollback()
else:
self.redraw()
elif inp == 'S':
self.figure.down_move()
if self.relief.have_collision(self.figure.get_all()):
self.figure.rollback()
else:
self.redraw()
def try_stand_figure(self):
if self.relief.have_collision(self.figure.get_all()):
self.figure.rollback()
self.relief.extend(self.figure.get_all())
self.relief.remove_filled_lines()
return True
return False
Game()
|
#using python2
import Tkinter
from visual import Visual
from relief import Relief
from figure import Figure
from random import randint
class Game:
def __init__(self):
self.root= Tkinter.Tk()
self.vis= Visual(self.root)
self.relief= Relief()
self.figure= None
self.root.after_idle(self.tick)
self.root.bind('<KeyPress>', self.press_key)
self.root.mainloop()
def tick(self):
self.root.after(200, self.tick)
if not self.figure:
self.figure= Figure()
if self.relief.have_collision(self.figure.get_all()):
print 'generate collision with relief'
self.root.quit()
self.figure.down_move()
if self.try_stand_figure():
self.figure= None
if self.relief.overload():
print 'You Fail'
self.root.quit()
self.vis.reset()
self.vis.draw(self.relief.get_all(), 'powder blue')
if self.figure:
self.vis.draw(self.figure.get_all(), 'gray')
def press_key(self, event):
print 'pressed key'
def try_stand_figure(self):
if self.relief.have_collision(self.figure.get_all()):
self.figure.rollback()
self.relief.extend(self.figure.get_all())
self.relief.remove_filled_lines()
return True
return False
Game()
|
apache-2.0
|
Python
|
95eeefa9b8cf7decd51265eaf624ff4551ac6a15
|
add feature to create a new app from command line, remove commands that are not implemented
|
aacanakin/glim
|
glim.py
|
glim.py
|
from termcolor import colored
from glim.app import start as appify
# glim with use of click
import click
import shutil, errno
import os
@click.group()
def glim():
pass
@click.command()
@click.option('--host', default = '127.0.0.1', help = 'enter ip')
@click.option('--port', default = '8080', help = 'enter port')
@click.option('--env', default = 'development', help = 'enter environment (development)')
def start(host, port, env):
print colored('glim %s server is running on %s:%s' % (env, host, port), 'green')
appify(host, port, env)
@click.command()
def new():
# resolve prototype path and its childs
proto_path = 'glim/proto/project'
cpath = os.path.dirname(os.path.realpath(__file__))
try:
copytree(proto_path, cpath)
print colored('Created new glim app', 'blue')
except:
print colored('App already exists', 'red')
def copytree(src, dst, symlinks=False, ignore=None):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
glim.add_command(start)
glim.add_command(new)
if __name__ == '__main__':
glim()
|
from termcolor import colored
from glim.app import start as appify
# glim with use of click
import click
@click.group()
def glim():
pass
@click.command()
@click.option('--host', default = '127.0.0.1', help = 'enter ip')
@click.option('--port', default = '8080', help = 'enter port')
@click.option('--env', default = 'development', help = 'enter environment (development)')
def start(host, port, env):
print colored('glim %s server is running on %s:%s' % (env, host, port), 'green')
appify(host, port, env)
@click.command()
@click.argument('name')
def new(name):
print colored('Created new app %s' % name, 'blue')
@click.command()
@click.argument('name')
def model(name):
print colored('Creating new model %s' % name, 'blue')
@click.command()
@click.argument('name')
def controller(name):
print colored('Creating new controller %s' % name, 'blue')
@click.command()
def routes():
print colored('Dumping all routes ..', 'blue')
glim.add_command(start)
glim.add_command(new)
glim.add_command(model)
glim.add_command(controller)
glim.add_command(routes)
if __name__ == '__main__':
glim()
|
mit
|
Python
|
fc7d83eda95aa20f0782644cd4076a51e60cc46d
|
Remove unused properties from models.isolate.Isolate.
|
catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult
|
dashboard/dashboard/pinpoint/models/isolate.py
|
dashboard/dashboard/pinpoint/models/isolate.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Model for storing information to look up isolates.
An isolate is a way to describe the dependencies of a specific build.
More about isolates:
https://github.com/luci/luci-py/blob/master/appengine/isolate/doc/client/Design.md
"""
import hashlib
from google.appengine.ext import ndb
def Get(builder_name, change, target):
"""Retrieve an isolate hash from the Datastore.
Args:
builder_name: The name of the builder that produced the isolate.
change: The Change the isolate was built at.
target: The compile target the isolate is for.
Returns:
The isolate hash as a string.
"""
key = ndb.Key(Isolate, _Key(builder_name, change, target))
entity = key.get()
if not entity:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
return entity.isolate_hash
def Put(isolate_infos):
"""Add isolate hashes to the Datastore.
This function takes multiple entries to do a batched Datstore put.
Args:
isolate_infos: An iterable of tuples. Each tuple is of the form
(builder_name, change, target, isolate_hash).
"""
entities = []
for isolate_info in isolate_infos:
builder_name, change, target, isolate_hash = isolate_info
entity = Isolate(
isolate_hash=isolate_hash,
id=_Key(builder_name, change, target))
entities.append(entity)
ndb.put_multi(entities)
class Isolate(ndb.Model):
isolate_hash = ndb.StringProperty(indexed=False, required=True)
def _Key(builder_name, change, target):
# The key must be stable across machines, platforms,
# Python versions, and Python invocations.
string = '\n'.join((builder_name, change.id_string, target))
return hashlib.sha256(string).hexdigest()
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Model for storing information to look up isolates.
An isolate is a way to describe the dependencies of a specific build.
More about isolates:
https://github.com/luci/luci-py/blob/master/appengine/isolate/doc/client/Design.md
"""
import hashlib
from google.appengine.ext import ndb
def Get(builder_name, change, target):
"""Retrieve an isolate hash from the Datastore.
Args:
builder_name: The name of the builder that produced the isolate.
change: The Change the isolate was built at.
target: The compile target the isolate is for.
Returns:
The isolate hash as a string.
"""
key = ndb.Key(Isolate, _Key(builder_name, change, target))
entity = key.get()
if not entity:
raise KeyError('No isolate with builder %s, change %s, and target %s.' %
(builder_name, change, target))
return entity.isolate_hash
def Put(isolate_infos):
"""Add isolate hashes to the Datastore.
This function takes multiple entries to do a batched Datstore put.
Args:
isolate_infos: An iterable of tuples. Each tuple is of the form
(builder_name, change, target, isolate_hash).
"""
entities = []
for isolate_info in isolate_infos:
builder_name, change, target, isolate_hash = isolate_info
entity = Isolate(
builder_name=builder_name,
change=change,
target=target,
isolate_hash=isolate_hash,
id=_Key(builder_name, change, target))
entities.append(entity)
ndb.put_multi(entities)
class Isolate(ndb.Model):
builder_name = ndb.StringProperty(required=True)
change = ndb.PickleProperty(required=True)
target = ndb.StringProperty(required=True)
isolate_hash = ndb.StringProperty(required=True)
def _Key(builder_name, change, target):
# The key must be stable across machines, platforms,
# Python versions, and Python invocations.
string = '\n'.join((builder_name, change.id_string, target))
return hashlib.sha256(string).hexdigest()
|
bsd-3-clause
|
Python
|
4a25286506cc8e50b5e1225b12015f4d0da3ccfc
|
Put api token auth endpoint under v1.
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
smbackend/urls.py
|
smbackend/urls.py
|
from django.conf.urls import patterns, include, url
from services.api import all_views as services_views
from services.api import AccessibilityRuleView
from observations.api import views as observations_views
from rest_framework import routers
from observations.views import obtain_auth_token
from munigeo.api import all_views as munigeo_views
# from django.contrib import admin
# admin.autodiscover()
router = routers.DefaultRouter()
registered_api_views = set()
for view in services_views + munigeo_views + observations_views:
kwargs = {}
if view['name'] in registered_api_views:
continue
else:
registered_api_views.add(view['name'])
if 'base_name' in view:
kwargs['base_name'] = view['base_name']
router.register(view['name'], view['class'], **kwargs)
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'smbackend.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
# url(r'^', include(v1_api.urls)),
# url(r'^admin/', include(admin.site.urls)),
url(r'^open311/', 'services.views.post_service_request', name='services'),
url(r'^v1/', include(router.urls)),
url(r'^v1/api-token-auth/', obtain_auth_token)
)
|
from django.conf.urls import patterns, include, url
from services.api import all_views as services_views
from services.api import AccessibilityRuleView
from observations.api import views as observations_views
from rest_framework import routers
from observations.views import obtain_auth_token
from munigeo.api import all_views as munigeo_views
# from django.contrib import admin
# admin.autodiscover()
router = routers.DefaultRouter()
registered_api_views = set()
for view in services_views + munigeo_views + observations_views:
kwargs = {}
if view['name'] in registered_api_views:
continue
else:
registered_api_views.add(view['name'])
if 'base_name' in view:
kwargs['base_name'] = view['base_name']
router.register(view['name'], view['class'], **kwargs)
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'smbackend.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
# url(r'^', include(v1_api.urls)),
# url(r'^admin/', include(admin.site.urls)),
url(r'^open311/', 'services.views.post_service_request', name='services'),
url(r'^v1/', include(router.urls)),
url(r'^api-token-auth/', obtain_auth_token)
)
|
agpl-3.0
|
Python
|
94764b8daed7ef6df8ac47462013b08d30de7e8f
|
refactor for less ugliness, resolves #7
|
IanDCarroll/xox
|
source/display.py
|
source/display.py
|
class Display():
def __init__(self):
self.start = "Welcome"
self.draw = "Draw"
self.computer = "Computer Wins"
self.human = "You Win"
self.next_move = "What is your next move?"
self.bad_move = "That is not a legal move."
def show(self, text):
print text
def render_board(self, raw_board):
stringified_board = self.stringify_board(raw_board)
rendered_board = self.construct_board(stringified_board)
return rendered_board
def stringify_board(self, board):
for i in range(0, len(board)):
if board[i] == 0:
board[i] = " "
elif board[i] == 1:
board[i] = " X "
elif board[i] == 10:
board[i] = " O "
return board
def construct_board(self, board):
rack = self.construct_rack(board)
rows = self.construct_rows(board)
constructed_board = '\n' + rack.join(rows) + '\n'
return constructed_board
def construct_rack(self, board):
board_size = self.get_board_size(board)
corner = '+'
shelves = []
for i in range(0, board_size):
shelves.append('---')
rack = '\n' + corner.join(shelves) + '\n'
return rack
def construct_rows(self, board):
board_size = self.get_board_size(board)
wall = '|'
rows = []
for i in range(0, board_size):
rows.append([])
for j in range(0, board_size):
rows[i].append(board.pop(0))
working_row = rows[i]
rows[i] = wall.join(working_row)
return rows
def get_board_size(self, board):
from math import sqrt
return int(sqrt(len(board)))
|
class Display():
def __init__(self):
self.start = "Welcome"
self.draw = "Draw"
self.computer = "Computer Wins"
self.human = "You Win"
self.next_move = "What is your next move?"
self.bad_move = "That is not a legal move."
def show(self, text):
print text
def render_board(self, raw_board):
stringified_board = self.stringify_board(raw_board)
rendered_board = self.construct_board(stringified_board)
return rendered_board
def stringify_board(self, board):
for i in range(0, len(board)):
if board[i] == 0:
board[i] = " "
elif board[i] == 1:
board[i] = " X "
elif board[i] == 10:
board[i] = " O "
return board
def construct_board(self, board):
board_size = self.get_board_size(board)
rack = self.construct_rack(board_size)
wall = '|'
rows = []
for i in range(0, board_size):
rows.append([])
for j in range(0, board_size):
rows[i].append(board.pop(0))
working_row = rows[i]
rows[i] = wall.join(working_row)
constructed_board = '\n' + rack.join(rows) + '\n'
return constructed_board
def get_board_size(self, board):
from math import sqrt
return int(sqrt(len(board)))
def construct_rack(self, board_size):
corner = '+'
shelves = []
for i in range(0, board_size):
shelves.append('---')
rack = '\n' + corner.join(shelves) + '\n'
return rack
|
mit
|
Python
|
b6b99dff989fb6662f795a95895e070424f59822
|
Add test for login button instead of edit buttons if not logged
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
candidates/tests/test_person_view.py
|
candidates/tests/test_person_view.py
|
from __future__ import unicode_literals
import re
from django.test.utils import override_settings
from django_webtest import WebTest
from .dates import processors_before, processors_after
from .factories import (
CandidacyExtraFactory, PersonExtraFactory
)
from .uk_examples import UK2015ExamplesMixin
class TestPersonView(UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestPersonView, self).setUp()
person_extra = PersonExtraFactory.create(
base__id='2009',
base__name='Tessa Jowell'
)
CandidacyExtraFactory.create(
election=self.election,
base__person=person_extra.base,
base__post=self.dulwich_post_extra.base,
base__on_behalf_of=self.labour_party_extra.base
)
def test_get_tessa_jowell(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertTrue(
re.search(
r'''(?msx)
<h1>Tessa\s+Jowell</h1>\s*
<p>Candidate\s+for\s+
<a\s+href="/election/2015/post/65808/dulwich-and-west-norwood">Dulwich\s+
and\s+West\s+Norwood</a>\s+in\ <a\ href="/election/2015/constituencies">2015
\s+General\s+Election</a>\s*</p>''',
response.text
)
)
@override_settings(TEMPLATE_CONTEXT_PROCESSORS=processors_before)
def test_get_tessa_jowell_before_election(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertContains(response, 'Contesting the 2015 General Election')
@override_settings(TEMPLATE_CONTEXT_PROCESSORS=processors_after)
def test_get_tessa_jowell_after_election(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertContains(response, 'Contested the 2015 General Election')
def test_get_non_existent(self):
response = self.app.get(
'/person/987654/imaginary-person',
expect_errors=True
)
self.assertEqual(response.status_code, 404)
def test_shows_no_edit_buttons_if_user_not_authenticated(self):
response = self.app.get('/person/2009/tessa-jowell')
edit_buttons = response.html.find_all('a', attrs={'class': 'button'})
self.assertEqual(len(edit_buttons), 1)
self.assertEqual(edit_buttons[0].string, 'Log in to edit')
|
# Smoke tests for viewing a candidate's page
from __future__ import unicode_literals
import re
from django.test.utils import override_settings
from django_webtest import WebTest
from .dates import processors_before, processors_after
from .factories import (
CandidacyExtraFactory, PersonExtraFactory
)
from .uk_examples import UK2015ExamplesMixin
class TestPersonView(UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestPersonView, self).setUp()
person_extra = PersonExtraFactory.create(
base__id='2009',
base__name='Tessa Jowell'
)
CandidacyExtraFactory.create(
election=self.election,
base__person=person_extra.base,
base__post=self.dulwich_post_extra.base,
base__on_behalf_of=self.labour_party_extra.base
)
def test_get_tessa_jowell(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertTrue(
re.search(
r'''(?msx)
<h1>Tessa\s+Jowell</h1>\s*
<p>Candidate\s+for\s+
<a\s+href="/election/2015/post/65808/dulwich-and-west-norwood">Dulwich\s+
and\s+West\s+Norwood</a>\s+in\ <a\ href="/election/2015/constituencies">2015
\s+General\s+Election</a>\s*</p>''',
response.text
)
)
@override_settings(TEMPLATE_CONTEXT_PROCESSORS=processors_before)
def test_get_tessa_jowell_before_election(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertContains(response, 'Contesting the 2015 General Election')
@override_settings(TEMPLATE_CONTEXT_PROCESSORS=processors_after)
def test_get_tessa_jowell_after_election(self):
response = self.app.get('/person/2009/tessa-jowell')
self.assertContains(response, 'Contested the 2015 General Election')
def test_get_non_existent(self):
response = self.app.get(
'/person/987654/imaginary-person',
expect_errors=True
)
self.assertEqual(response.status_code, 404)
|
agpl-3.0
|
Python
|
45c17681bfdfc374e94b086f9cdda4f314be5045
|
Add entries and preamble arguments to BibliographyData.__init__().
|
live-clones/pybtex
|
pybtex/database/__init__.py
|
pybtex/database/__init__.py
|
# Copyright (C) 2006, 2007, 2008, 2009 Andrey Golovizin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pybtex.exceptions import PybtexError
class BibliographyDataError(PybtexError):
pass
class BibliographyData(object):
def __init__(self, entries=None, preamble=None):
self.entries = {}
self._preamble = []
if entries:
self.entries.update(entries)
if preamble:
self.preamble.extend(preamble)
def __eq__(self, other):
if not isinstance(other, BibliographyData):
return super(BibliographyData, self) == other
return (
self.entries == other.entries
and self._preamble == other._preamble
)
def add_to_preamble(self, s):
self._preamble.append(s)
def preamble(self):
return ''.join(self._preamble)
def add_entry(self, key, entry):
if key in self.entries:
raise BibliographyDataError('repeated bibliograhpy entry: %s' % key)
entry.collection = self
self.entries[key] = entry
def add_entries(self, entries):
for key, entry in entries:
self.add_entry(key, entry)
|
# Copyright (C) 2006, 2007, 2008, 2009 Andrey Golovizin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pybtex.exceptions import PybtexError
class BibliographyDataError(PybtexError):
pass
class BibliographyData(object):
def __init__(self):
self.entries = {}
self._preamble = []
def __eq__(self, other):
if not isinstance(other, BibliographyData):
return super(BibliographyData, self) == other
return (
self.entries == other.entries
and self._preamble == other._preamble
)
def add_to_preamble(self, s):
self._preamble.append(s)
def preamble(self):
return ''.join(self._preamble)
def add_entry(self, key, entry):
if key in self.entries:
raise BibliographyDataError('repeated bibliograhpy entry: %s' % key)
entry.collection = self
self.entries[key] = entry
def add_entries(self, entries):
for key, entry in entries:
self.add_entry(key, entry)
|
mit
|
Python
|
581b49ad98616b7450c12be1d86960e8f38df9ac
|
Update lca_calculations.py
|
architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS
|
cea/optimization/lca_calculations.py
|
cea/optimization/lca_calculations.py
|
# -*- coding: utf-8 -*-
"""
This file imports the price details from the cost database as a class. This helps in preventing multiple importing
of the corresponding values in individual files.
"""
from __future__ import division
import warnings
import pandas as pd
warnings.filterwarnings("ignore")
__author__ = "Sreepathi Bhargava Krishna"
__copyright__ = "Copyright 2017, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Sreepathi Bhargava Krishna"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"
class LcaCalculations(object):
def __init__(self, locator):
resources_lca = pd.read_excel(locator.get_database_supply_systems(), sheet_name="FEEDSTOCKS")
resources_lca.set_index('code', inplace=True)
# Natural gas
self.NG_TO_CO2_EQ = resources_lca.loc['NATURALGAS']['CO2']
# Drybiomass
self.DRYBIOMASS_TO_CO2_EQ = resources_lca.loc['DRYBIOMASS']['CO2']
# WetBiomass
self.WETBIOMASS_TO_CO2_EQ = resources_lca.loc['WETBIOMASS']['CO2']
# Electricity MJ/MJoil and kg/MJ
self.EL_TO_CO2_EQ = resources_lca.loc['GRID']['CO2']
|
# -*- coding: utf-8 -*-
"""
This file imports the price details from the cost database as a class. This helps in preventing multiple importing
of the corresponding values in individual files.
"""
from __future__ import division
import warnings
import pandas as pd
warnings.filterwarnings("ignore")
__author__ = "Sreepathi Bhargava Krishna"
__copyright__ = "Copyright 2017, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Sreepathi Bhargava Krishna"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"
class LcaCalculations(object):
def __init__(self, locator):
resources_lca = pd.read_excel(locator.get_database_supply_systems(), sheet_name="FEEDSTOCKS")
resources_lca.set_index('code', inplace=True)
# Natural gas
self.NG_TO_CO2_EQ = resources_lca.loc['NATURALGAS']['CO2']
self.NG_TO_OIL_EQ = resources_lca.loc['NATURALGAS']['PEN']
# Drybiomass
self.DRYBIOMASS_TO_CO2_EQ = resources_lca.loc['DRYBIOMASS']['CO2']
self.DRYBIOMASS_TO_OIL_EQ = resources_lca.loc['DRYBIOMASS']['PEN']
# WetBiomass
self.WETBIOMASS_TO_CO2_EQ = resources_lca.loc['WETBIOMASS']['CO2']
self.WETBIOMASS_TO_OIL_EQ = resources_lca.loc['WETBIOMASS']['PEN']
# Electricity MJ/MJoil and kg/MJ
self.EL_TO_CO2_EQ = resources_lca.loc['GRID']['CO2']
self.EL_TO_OIL_EQ = resources_lca.loc['GRID']['CO2']
|
mit
|
Python
|
7d69bcc6474d954b311251bf077750e0418170cb
|
Fix typo and execute JS script found in local folder.
|
henne-/guest-password-printer,henne-/guest-password-printer
|
button.py
|
button.py
|
import RPi.GPIO as GPIO
import time
import os
from optparse import OptionParser
# Parse input arguments
parser = OptionParser()
parser.add_option("-t", "--testGPIO", action="store_true", help="Test GPIO connection, does not call the JS script.")
# The option --pin sets the Input Pin for your Button
# It default to GPIO24 or HardwarePin 19
parser.add_option("-p", "--pin", dest="pin", help="GPIO pin to use. If not provided it defaults to HardwarePin 19.", default=19)
(options, args) = parser.parse_args()
testingGPIO = options.testGPIO != None
buttonPin = options.pin
#sets GPIO Mode to use Hardware Pin Layout
GPIO.setmode(GPIO.BCM)
#sets GPIO Pin to INPUT mode with a Pull Down Resistor
GPIO.setup(buttonPin,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
if(testingGPIO):
print "Press the connected button. If you are pressing but you do not see any further output then....there is something wrong with the connection."
while True:
#waits for Pin Input and then exectures the script below
if (GPIO.input(buttonPin)):
if (testingGPIO):
print "PIN " + buttonPin + " works correctly."
continue
#the script that will be executed (as root)
os.system("node index.js")
|
import RPi.GPIO as GPIO
import time
import os
from optparse import OptionParser
# Parse input arguments
parser = OptionParser()
parser.add_option("-t", "--testGPIO", action="store_true", help="Test GPIO connection, does not call the JS script.")
# The option --pin sets the Input Pin for your Button
# It default to GPIO24 or HardwarePin 19
parser.add_option("-p", "--pin", dest="pin", help="GPIO pin to use. If not provided it defaults to HardwarePin 19.", default=19)
(options, args) = parser.parse_args()
testingGPIO = options.testGPIO != None
buttonPin = options.pin
#sets GPIO Mode to use Hardware Pin Layout
GPIO.setmode(GPIO.BCM)
#sets GPIO Pin to INPUT mode with a Pull Down Resistor
GPIO.setup(buttonPin,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
if(testingGPIO):
print "Press the connected button. If you are pressing but you do not see any further output then....there is something wrong with the connection."
while True:
#waits for Pin Input and then exectures the script below
if (GPIO.input(buttonPin)):
if (testingGPIO):
print "PIN " + buttonPing + " works correctly."
continue
#the script that will be executed (as root)
os.system("node /home/pi/guest-password-printer/index.js")
|
mit
|
Python
|
b4ea95dc2dc1591e96d22b5058cef440416477e0
|
Bump version to 0.10.0b (#740)
|
stellargraph/stellargraph,stellargraph/stellargraph
|
stellargraph/version.py
|
stellargraph/version.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2018-2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Global version information
__version__ = "0.10.0b"
|
# -*- coding: utf-8 -*-
#
# Copyright 2018-2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Global version information
__version__ = "0.9.0b"
|
apache-2.0
|
Python
|
e4be1ad0b1bf575743280d9ae07e74efcf2530bb
|
Change to class
|
studiawan/pygraphc
|
pygraphc/misc/LogCluster.py
|
pygraphc/misc/LogCluster.py
|
import datetime
import hashlib
import textwrap
class LogCluster(object):
def __init__(self, wsize, csize):
self.wsize = wsize
self.csize = csize
self.wsketch = []
# This function logs the message given with parameter2,++,parameterN to
# syslog, using the level parameter1+ The message is also written to stderr+
def log_msg(*parameters):
level = parameters[0]
msg = ' '.join(parameters[1:])
now = datetime.datetime.now()
print now, level, msg
# This function hashes the string given with parameter1 to an integer
# in the range (0+++wsize-1) and returns the integer+ The wsize integer
# can be set with the --wsize command line option+
# L is unsigned 32 bit integer
def hash_string(self, parameter):
wraps = textwrap.wrap(parameter, self.wsize)
hashes = []
for wrap in wraps:
hash_value = hashlib.md5(wrap).hexdigest()
hashes.append(int(hash_value, 32))
return hashes
# This function hashes the candidate ID given with parameter1 to an integer
# in the range (0+++csize-1) and returns the integer+ The csize integer
# can be set with the --csize command line option+
def hash_candidate(self, parameter):
wraps = textwrap.wrap(parameter, self.csize)
hashes = []
for wrap in wraps:
hash_value = hashlib.md5(wrap).hexdigest()
hashes.append(int(hash_value, 32))
return hashes
# This function matches the line given with parameter1 with a regular
# expression lineregexp (the expression can be set with the --lfilter
# command line option)+ If the template string is defined (can be set
# with the --template command line option), the line is converted
# according to template (match variables in template are substituted
# with values from regular expression match, and the resulting string
# replaces the line)+ If the regular expression lineregexp does not match
# the line, 0 is returned, otherwise the line (or converted line, if
# --template option has been given) is returned+
# If the --lfilter option has not been given but --lcfunc option is
# present, the Perl function given with --lcfunc is used for matching
# and converting the line+ If the function returns 'undef', line is
# regarded non-matching, otherwise the value returned by the function
# replaces the original line+
# If neither --lfilter nor --lcfunc option has been given, the line
# is returned without a trailing newline+
def process_line(self):
pass
# This function makes a pass over the data set and builds the sketch
# @wsketch which is used for finding frequent words+ The sketch contains
# wsize counters (wsize can be set with --wsize command line option)+
def build_word_sketch(self):
for index in range(self.wsize):
self.wsketch[index] = 0
|
import datetime
import hashlib
import textwrap
# This function logs the message given with parameter2,++,parameterN to
# syslog, using the level parameter1+ The message is also written to stderr+
def log_msg(*parameters):
level = parameters[0]
msg = ' '.join(parameters[1:])
now = datetime.datetime.now()
print now, level, msg
# This function hashes the string given with parameter1 to an integer
# in the range (0+++wsize-1) and returns the integer+ The wsize integer
# can be set with the --wsize command line option+
# L is unsigned 32 bit integer
def hash_string(parameter, wsize):
wraps = textwrap.wrap(parameter, wsize)
hashes = []
for wrap in wraps:
hash_value = hashlib.md5(wrap).hexdigest()
hashes.append(int(hash_value, 32))
return hashes
# This function hashes the candidate ID given with parameter1 to an integer
# in the range (0+++csize-1) and returns the integer+ The csize integer
# can be set with the --csize command line option+
def hash_candidate(parameter, csize):
wraps = textwrap.wrap(parameter, csize)
hashes = []
for wrap in wraps:
hash_value = hashlib.md5(wrap).hexdigest()
hashes.append(int(hash_value, 32))
return hashes
# This function matches the line given with parameter1 with a regular
# expression lineregexp (the expression can be set with the --lfilter
# command line option)+ If the template string is defined (can be set
# with the --template command line option), the line is converted
# according to template (match variables in template are substituted
# with values from regular expression match, and the resulting string
# replaces the line)+ If the regular expression lineregexp does not match
# the line, 0 is returned, otherwise the line (or converted line, if
# --template option has been given) is returned+
# If the --lfilter option has not been given but --lcfunc option is
# present, the Perl function given with --lcfunc is used for matching
# and converting the line+ If the function returns 'undef', line is
# regarded non-matching, otherwise the value returned by the function
# replaces the original line+
# If neither --lfilter nor --lcfunc option has been given, the line
# is returned without a trailing newline+
def process_line():
pass
# This function makes a pass over the data set and builds the sketch
# @wsketch which is used for finding frequent words+ The sketch contains
# wsize counters (wsize can be set with --wsize command line option)+
def build_word_sketch():
pass
|
mit
|
Python
|
221e45828b9cc33d9ae02d08d94dfaa89977d3e7
|
update import_reading for Courtney
|
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk
|
vehicles/management/commands/import_reading.py
|
vehicles/management/commands/import_reading.py
|
from ciso8601 import parse_datetime
from django.utils.timezone import make_aware
from django.contrib.gis.geos import Point
from busstops.models import Service
from ...models import VehicleLocation, VehicleJourney
from ..import_live_vehicles import ImportLiveVehiclesCommand
class Command(ImportLiveVehiclesCommand):
url = 'http://rtl2.ods-live.co.uk/api/vehiclePositions'
source_name = 'Reading'
services = Service.objects.filter(operator__in=('RBUS', 'GLRB', 'KENN', 'NADS', 'THVB', 'CTNY'), current=True)
@staticmethod
def get_datetime(item):
return make_aware(parse_datetime(item['observed']))
def get_vehicle(self, item):
vehicle = item['vehicle']
defaults = {
'source': self.source,
'operator_id': 'RBUS'
}
if vehicle.isdigit():
defaults['fleet_number'] = vehicle
return self.vehicles.get_or_create(
defaults,
operator_id__in=('RBUS', 'CTNY'),
code=vehicle
)
def get_journey(self, item, vehicle):
journey = VehicleJourney()
journey.route_name = item['service']
latest_journey = vehicle.latest_location and vehicle.latest_location.journey
if latest_journey and latest_journey.service and latest_journey.route_name == journey.route_name:
journey.service = latest_journey.service
elif journey.route_name:
try:
journey.service = self.get_service(
self.services.filter(line_name__iexact=journey.route_name),
Point(float(item['longitude']), float(item['latitude']))
)
except Service.DoesNotExist:
pass
if not journey.service:
print(item)
return journey
def create_vehicle_location(self, item):
return VehicleLocation(
latlong=Point(float(item['longitude']), float(item['latitude'])),
heading=item['bearing'] or None
)
|
from ciso8601 import parse_datetime
from django.utils.timezone import make_aware
from django.contrib.gis.geos import Point
from busstops.models import Service
from ...models import VehicleLocation, VehicleJourney
from ..import_live_vehicles import ImportLiveVehiclesCommand
class Command(ImportLiveVehiclesCommand):
url = 'http://rtl2.ods-live.co.uk/api/vehiclePositions'
source_name = 'Reading'
services = Service.objects.filter(operator__in=('RBUS', 'GLRB', 'KENN', 'NADS', 'THVB'), current=True)
@staticmethod
def get_datetime(item):
return make_aware(parse_datetime(item['observed']))
def get_vehicle(self, item):
vehicle = item['vehicle']
defaults = {
'source': self.source
}
if vehicle.isdigit():
defaults['fleet_number'] = vehicle
return self.vehicles.get_or_create(
defaults,
operator_id='RBUS',
code=vehicle
)
def get_journey(self, item, vehicle):
journey = VehicleJourney()
journey.route_name = item['service']
latest_journey = vehicle.latest_location and vehicle.latest_location.journey
if latest_journey and latest_journey.service and latest_journey.route_name == journey.route_name:
journey.service = latest_journey.service
elif journey.route_name:
try:
journey.service = self.get_service(
self.services.filter(line_name__iexact=journey.route_name),
Point(float(item['longitude']), float(item['latitude']))
)
except Service.DoesNotExist:
pass
if not journey.service:
print(item)
return journey
def create_vehicle_location(self, item):
return VehicleLocation(
latlong=Point(float(item['longitude']), float(item['latitude'])),
heading=item['bearing'] or None
)
|
mpl-2.0
|
Python
|
13489726a9b3f9ce9dcd2ff9c3086279db7704fe
|
increment build id
|
SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32
|
esp32/modules/version.py
|
esp32/modules/version.py
|
build = 8
name = "Maffe Maniak"
|
build = 7
name = "Maffe Maniak"
|
mit
|
Python
|
66056c97972011831fb36ce0ae37cc9bd490ddba
|
Swap In New Function
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
web/impact/impact/v1/helpers/program_helper.py
|
web/impact/impact/v1/helpers/program_helper.py
|
from impact.models import Program
from impact.v1.helpers.model_helper import (
FLOAT_FIELD,
INTEGER_FIELD,
ModelHelper,
PK_FIELD,
STRING_FIELD,
)
PROGRAM_FIELDS = {
"id": PK_FIELD,
"name": STRING_FIELD,
"program_family_id": INTEGER_FIELD,
"program_family_name": STRING_FIELD,
"cycle_id": INTEGER_FIELD,
"cycle_name": STRING_FIELD,
"description": STRING_FIELD,
"start_date": STRING_FIELD,
"end_date": STRING_FIELD,
"location": STRING_FIELD,
"program_status": STRING_FIELD,
"currency_code": STRING_FIELD,
"regular_application_fee": FLOAT_FIELD,
"url_slug": STRING_FIELD,
"overview_start_date": STRING_FIELD,
"overview_deadline_date": STRING_FIELD,
}
class ProgramHelper(ModelHelper):
model = Program
@classmethod
def fields(cls):
return PROGRAM_FIELDS
@property
def cycle_name(self):
return self.field_element("cycle", "name")
@property
def program_family_name(self):
return self.field_element("program_family", "name")
|
from impact.models import Program
from impact.v1.helpers.model_helper import (
FLOAT_FIELD,
INTEGER_FIELD,
ModelHelper,
PK_FIELD,
STRING_FIELD,
)
PROGRAM_FIELDS = {
"id": PK_FIELD,
"name": STRING_FIELD,
"program_family_id": INTEGER_FIELD,
"program_family_name": STRING_FIELD,
"cycle_id": INTEGER_FIELD,
"cycle_name": STRING_FIELD,
"description": STRING_FIELD,
"start_date": STRING_FIELD,
"end_date": STRING_FIELD,
"location": STRING_FIELD,
"program_status": STRING_FIELD,
"currency_code": STRING_FIELD,
"regular_application_fee": FLOAT_FIELD,
"url_slug": STRING_FIELD,
"overview_start_date": STRING_FIELD,
"overview_deadline_date": STRING_FIELD,
}
class ProgramHelper(ModelHelper):
model = Program
@classmethod
def fields(cls):
return PROGRAM_FIELDS
@property
def cycle_name(self):
return self.get_field_value("cycle").name
@property
def program_family_name(self):
return self.get_field_value("program_family").name
|
mit
|
Python
|
fe6d37efa59cbf222dd703a52456de2aa628fecf
|
Update random-pick-with-weight.py
|
tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015
|
Python/random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
# Time: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
import bisect
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
# Time: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
mit
|
Python
|
ea660e370b05cfe34dc819211b2f28992a924194
|
Update random-pick-with-weight.py
|
kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode
|
Python/random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
# Time: ctor: O(n)
# pickIndex: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
import bisect
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
# Time: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
import bisect
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
mit
|
Python
|
056a1b769db7f05402b41ffdcb565585db06bf97
|
Update top-k-frequent-elements.py
|
kamyu104/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,yiwen-luo/LeetCode,githubutilities/LeetCode,yiwen-luo/LeetCode,githubutilities/LeetCode,yiwen-luo/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,jaredkoontz/leetcode
|
Python/top-k-frequent-elements.py
|
Python/top-k-frequent-elements.py
|
# Time: O(n)
# Space: O(n)
# Given a non-empty array of integers,
# return the k most frequent elements.
#
# For example,
# Given [1,1,1,2,2,3] and k = 2, return [1,2].
#
# Note:
# You may assume k is always valid,
# 1 <= k <= number of unique elements.
# Your algorithm's time complexity must be better
# than O(n log n), where n is the array's size.
from random import randint
class Solution(object):
def topKFrequent(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
counts = collections.defaultdict(int)
for i in nums:
counts[i] += 1
p = []
for key, val in counts.iteritems():
p.append((val, key))
self.kthElement(p, k);
result = []
for i in xrange(k):
result.append(p[i][1])
return result
def kthElement(self, nums, k):
def PartitionAroundPivot(left, right, pivot_idx, nums):
pivot_value = nums[pivot_idx][0]
new_pivot_idx = left
nums[pivot_idx], nums[right] = nums[right], nums[pivot_idx]
for i in xrange(left, right):
if nums[i][0] > pivot_value:
nums[i], nums[new_pivot_idx] = nums[new_pivot_idx], nums[i]
new_pivot_idx += 1
nums[right], nums[new_pivot_idx] = nums[new_pivot_idx], nums[right]
return new_pivot_idx
left, right = 0, len(nums) - 1
while left <= right:
pivot_idx = randint(left, right)
new_pivot_idx = PartitionAroundPivot(left, right, pivot_idx, nums)
if new_pivot_idx == k - 1:
return
elif new_pivot_idx > k - 1:
right = new_pivot_idx - 1
else: # new_pivot_idx < k - 1.
left = new_pivot_idx + 1
|
# Time: O(n)
# Space: O(n)
# Given a non-empty array of integers,
# return the k most frequent elements.
#
# For example,
# Given [1,1,1,2,2,3] and k = 2, return [1,2].
#
# Note:
# You may assume k is always valid,
# 1 <= k <= number of unique elements.
# Your algorithm's time complexity must be better
# than O(n log n), where n is the array's size.
from random import randint
class Solution(object):
def topKFrequent(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
counts = collections.defaultdict(int)
for i in nums:
counts[i] += 1
p = []
for key, val in counts.iteritems():
p.append((val, key))
self.kthElement(p, k);
result = []
for i in xrange(k):
result.append(p[i][1])
return result
def kthElement(self, nums, k):
def PartitionAroundPivot(left, right, pivot_idx, nums):
pivot_value = nums[pivot_idx][0]
new_pivot_idx = left
nums[pivot_idx], nums[right] = nums[right], nums[pivot_idx]
for i in xrange(left, right):
if nums[i][0] > pivot_value:
nums[i], nums[new_pivot_idx] = nums[new_pivot_idx], nums[i]
new_pivot_idx += 1
nums[right], nums[new_pivot_idx] = nums[new_pivot_idx], nums[right]
return new_pivot_idx
left, right = 0, len(nums) - 1
while left <= right:
pivot_idx = randint(left, right)
new_pivot_idx = PartitionAroundPivot(left, right, pivot_idx, nums)
if new_pivot_idx == k - 1:
return
elif new_pivot_idx > k - 1:
right = new_pivot_idx - 1
else: # new_pivot_idx < k - 1.
left = new_pivot_idx + 1
|
mit
|
Python
|
3a156a11cd7b8a9bfc40b515a2f1d1351969ce3a
|
Simplify loading config for instagram middleware
|
lord63/me-api
|
me_api/middleware/instagram.py
|
me_api/middleware/instagram.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import requests
from flask import Blueprint, jsonify, request, redirect
from me_api.cache import cache
from me_api.middleware.utils import MiddlewareConfig
config = MiddlewareConfig('instagram')
instagram_api = Blueprint('instagram', __name__, url_prefix=config.path)
@instagram_api.route('/')
@cache.cached(timeout=3600)
def instagram():
if not config.access_token:
return 'Need access token, please authenticate your app first.'
response = requests.get(
("https://api.instagram.com/v1/users/"
"self/media/recent/?access_token={0}").format(config.access_token)
)
return jsonify(instagram=response.json())
@instagram_api.route('/login')
def authorization():
if config.access_token:
return "You've already had an access token in the config file."
authorization_url = 'https://api.instagram.com/oauth/authorize'
return redirect(
'{0}?client_id={1}&redirect_uri={2}&response_type=code'.format(
authorization_url, config.client_id,
os.path.join(request.url, 'redirect')
)
)
@instagram_api.route('/login/redirect')
def get_access_toekn():
authorization_code = request.args.get('code', '')
token_url = 'https://api.instagram.com/oauth/access_token'
post_data = {
'client_id': config.client_id,
'client_secret': config.client_secret,
'redirect_uri': request.base_url,
'grant_type': 'authorization_code',
'code': authorization_code
}
response = requests.post(token_url, data=post_data)
return response.text
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import requests
from flask import Blueprint, jsonify, request, redirect
from me_api.configs import Config
from me_api.cache import cache
config = Config.modules['modules']['instagram']
path = config['path']
client_secret, access_token, client_id = (
config['data']['client_secret'],
config['data']['access_token'],
config['data']['client_id']
)
instagram_api = Blueprint('instagram', __name__, url_prefix=path)
@instagram_api.route('/')
@cache.cached(timeout=3600)
def instagram():
if not access_token:
return 'Need access token, please authenticate your app first.'
response = requests.get(
("https://api.instagram.com/v1/users/"
"self/media/recent/?access_token={0}").format(access_token)
)
return jsonify(instagram=response.json())
@instagram_api.route('/login')
def authorization():
if access_token:
return "You've already had an access token in the config file."
authorization_url = 'https://api.instagram.com/oauth/authorize'
return redirect(
'{0}?client_id={1}&redirect_uri={2}&response_type=code'.format(
authorization_url, client_id,
os.path.join(request.url, 'redirect')
)
)
@instagram_api.route('/login/redirect')
def get_access_toekn():
authorization_code = request.args.get('code', '')
token_url = 'https://api.instagram.com/oauth/access_token'
post_data = {
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': request.base_url,
'grant_type': 'authorization_code',
'code': authorization_code
}
response = requests.post(token_url, data=post_data)
return response.text
|
mit
|
Python
|
aeeb62f47a7211d945aafd294edb3d39d5d5cf6e
|
Modify error message
|
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
|
pytablereader/_validator.py
|
pytablereader/_validator.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import abc
import os.path
import dataproperty
import pathvalidate as pv
import six
from six.moves.urllib.parse import urlparse
from ._constant import SourceType
from .error import EmptyDataError
from .error import InvalidFilePathError
from .error import InvalidUrlError
@six.add_metaclass(abc.ABCMeta)
class ValidatorInterface(object):
@abc.abstractproperty
def source_type(self):
pass
@abc.abstractmethod
def validate(self):
pass
class BaseValidator(ValidatorInterface):
@property
def source(self):
return self.__source
def __init__(self, source):
self.__source = source
class FileValidator(BaseValidator):
@property
def source_type(self):
return SourceType.FILE
def validate(self):
try:
pv.validate_file_path(self.source)
except pv.NullNameError:
raise IOError("file path is empty")
except (pv.InvalidCharError, pv.InvalidLengthError) as e:
raise InvalidFilePathError(e)
if not os.path.isfile(self.source):
raise IOError("file not found")
class TextValidator(BaseValidator):
@property
def source_type(self):
return SourceType.TEXT
def validate(self):
if dataproperty.is_empty_string(self.source):
raise EmptyDataError("data source is empty")
class UrlValidator(BaseValidator):
@property
def source_type(self):
return SourceType.URL
def validate(self):
if dataproperty.is_empty_string(self.source):
raise InvalidUrlError("url is empty")
scheme = urlparse(self.source).scheme
if scheme not in ["http", "https"]:
raise InvalidUrlError(
"invalid scheme: expected=http/https, actual={}".format(
scheme))
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import abc
import os.path
import dataproperty
import pathvalidate as pv
import six
from six.moves.urllib.parse import urlparse
from ._constant import SourceType
from .error import EmptyDataError
from .error import InvalidFilePathError
from .error import InvalidUrlError
@six.add_metaclass(abc.ABCMeta)
class ValidatorInterface(object):
@abc.abstractproperty
def source_type(self):
pass
@abc.abstractmethod
def validate(self):
pass
class BaseValidator(ValidatorInterface):
@property
def source(self):
return self.__source
def __init__(self, source):
self.__source = source
class FileValidator(BaseValidator):
@property
def source_type(self):
return SourceType.FILE
def validate(self):
try:
pv.validate_file_path(self.source)
except pv.NullNameError:
raise IOError("file path is empty")
except (pv.InvalidCharError, pv.InvalidLengthError) as e:
raise InvalidFilePathError(e)
if not os.path.isfile(self.source):
raise IOError("file not found")
class TextValidator(BaseValidator):
@property
def source_type(self):
return SourceType.TEXT
def validate(self):
if dataproperty.is_empty_string(self.source):
raise EmptyDataError("data source is empty")
class UrlValidator(BaseValidator):
@property
def source_type(self):
return SourceType.URL
def validate(self):
if dataproperty.is_empty_string(self.source):
raise InvalidUrlError("url is empty")
scheme = urlparse(self.source).scheme
if scheme not in ["http", "https"]:
raise InvalidUrlError("invalid schema: {:s}".format(scheme))
|
mit
|
Python
|
6c095c0e14c084666b9417b4bd269f396804bfab
|
Update interface with the latest changes in functionality.
|
bolsote/py-cd-talk,bolsote/py-cd-talk
|
src/ensign/_interfaces.py
|
src/ensign/_interfaces.py
|
# pylint: skip-file
from zope.interface import Attribute, Interface
class IFlag(Interface):
"""
Flag Interface.
Any kind of flag must implement this interface.
"""
TYPE = Attribute("""Flag type""")
store = Attribute("""Flag storage backend""")
name = Attribute("""Flag name""")
value = Attribute("""Flag value""")
active = Attribute("""Flag activity indicator""")
info = Attribute("""Flag descriptive information""")
def create(name, store, **kwargs):
"""
Create a new flag with the given name and, optionally, extra data,
persisted in the given store.
"""
def all(store):
"""
Retrieve all flags in the store.
"""
def _check():
"""
Check whether the flag current value means the feature is active.
"""
class IStorage(Interface):
"""
Storage Interface.
Any kind of backing storage for flags must implement this interface.
"""
def create(name, type, **kwargs):
"""Create a new flag."""
def exists(name):
"""Check if the flag exists in the store."""
def load(name, type):
"""Load a value."""
def store(name, value, type):
"""Store a value."""
def used(name):
"""Get last used date."""
def info(name):
"""Get flag descriptive information."""
def all():
"""Get all flags."""
|
# pylint: skip-file
from zope.interface import Attribute, Interface
class IFlag(Interface):
"""
Flag Interface.
Any kind of flag must implement this interface.
"""
TYPE = Attribute("""Flag type""")
store = Attribute("""Flag storage backend""")
name = Attribute("""Flag name""")
value = Attribute("""Flag value""")
active = Attribute("""Flag activity indicator""")
info = Attribute("""Flag descriptive information""")
def create(name, store, **kwargs):
"""
Create a new flag with the given name and, optionally, extra data,
persisted in the given store.
"""
def _check():
"""
Check whether the flag current value means the feature is active.
"""
class IStorage(Interface):
"""
Storage Interface.
Any kind of backing storage for flags must implement this interface.
"""
def create(name, type, **kwargs):
"""Create a new flag."""
def exists(name):
"""Check if the flag exists in the store."""
def load(name, type):
"""Load a value."""
def store(name, value, type):
"""Store a value."""
def used(name):
"""Get last used date."""
def info(name):
"""Get flag descriptive information."""
|
isc
|
Python
|
a9253d6382c8eeb4261d0fc533d943046b51d109
|
Remove unused variable
|
pedrobaeza/account-financial-tools,dvitme/account-financial-tools,open-synergy/account-financial-tools,syci/account-financial-tools,pedrobaeza/account-financial-tools,Pexego/account-financial-tools,yelizariev/account-financial-tools,andhit-r/account-financial-tools,factorlibre/account-financial-tools,credativUK/account-financial-tools,acsone/account-financial-tools,VitalPet/account-financial-tools,lepistone/account-financial-tools,Endika/account-financial-tools,akretion/account-financial-tools,bringsvor/account-financial-tools,cysnake4713/account-financial-tools,factorlibre/account-financial-tools,taktik/account-financial-tools,cysnake4713/account-financial-tools,DarkoNikolovski/account-financial-tools,acsone/account-financial-tools,Nowheresly/account-financial-tools,abstract-open-solutions/account-financial-tools,yelizariev/account-financial-tools,acsone/account-financial-tools,diagramsoftware/account-financial-tools,andhit-r/account-financial-tools,raycarnes/account-financial-tools,ClearCorp-dev/account-financial-tools,OpenPymeMx/account-financial-tools,credativUK/account-financial-tools,abstract-open-solutions/account-financial-tools,dvitme/account-financial-tools,Pexego/account-financial-tools,nagyv/account-financial-tools,luc-demeyer/account-financial-tools,Domatix/account-financial-tools,open-synergy/account-financial-tools,Endika/account-financial-tools,damdam-s/account-financial-tools,diagramsoftware/account-financial-tools,alhashash/account-financial-tools,damdam-s/account-financial-tools,Antiun/account-financial-tools,andrius-preimantas/account-financial-tools,Domatix/account-financial-tools,OpenPymeMx/account-financial-tools,VitalPet/account-financial-tools,iDTLabssl/account-financial-tools,lepistone/account-financial-tools,akretion/account-financial-tools,adhoc-dev/oca-account-financial-tools,amoya-dx/account-financial-tools,open-synergy/account-financial-tools,andrius-preimantas/account-financial-tools,VitalPet/account-financial-tools,xpansa/account-financial-tools,OpenPymeMx/account-financial-tools,amoya-dx/account-financial-tools,ClearCorp-dev/account-financial-tools,Antiun/account-financial-tools,luc-demeyer/account-financial-tools,alhashash/account-financial-tools,raycarnes/account-financial-tools,taktik/account-financial-tools,bringsvor/account-financial-tools,iDTLabssl/account-financial-tools,Nowheresly/account-financial-tools,Domatix/account-financial-tools,DarkoNikolovski/account-financial-tools,adhoc-dev/oca-account-financial-tools,nagyv/account-financial-tools,xpansa/account-financial-tools,syci/account-financial-tools
|
account_tax_analysis/account_tax_analysis.py
|
account_tax_analysis/account_tax_analysis.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville. Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
class AccountTaxDeclarationAnalysis(models.TransientModel):
_name = 'account.vat.declaration.analysis'
_description = 'Account Vat Declaration'
fiscalyear_id = fields.Many2one(
comodel_name='account.fiscalyear',
string='Fiscalyear',
help='Fiscalyear to look on',
required=True,
)
period_list = fields.Many2many(
comodel_name='account.period',
relation='account_tax_period_rel',
column1='tax_analysis',
column2='period_id',
string='Periods',
required=True,
)
@api.multi
def show_vat(self):
if not self.period_list:
raise exceptions.Warning(_("You must select periods"))
domain = [('period_id', 'in', self.period_list.ids)]
action = self.env.ref('account_tax_analysis.action_view_tax_analysis')
action_fields = action.read()[0]
action_fields['domain'] = domain
return action_fields
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville. Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
class AccountTaxDeclarationAnalysis(models.TransientModel):
_name = 'account.vat.declaration.analysis'
_description = 'Account Vat Declaration'
fiscalyear_id = fields.Many2one(
comodel_name='account.fiscalyear',
string='Fiscalyear',
help='Fiscalyear to look on',
required=True,
)
period_list = fields.Many2many(
comodel_name='account.period',
relation='account_tax_period_rel',
column1='tax_analysis',
column2='period_id',
string='Periods',
required=True,
)
@api.multi
def show_vat(self):
action_obj = self.env['ir.actions.act_window']
if not self.period_list:
raise exceptions.Warning(_("You must select periods"))
domain = [('period_id', 'in', self.period_list.ids)]
action = self.env.ref('account_tax_analysis.action_view_tax_analysis')
action_fields = action.read()[0]
action_fields['domain'] = domain
return action_fields
|
agpl-3.0
|
Python
|
c433c649a9a4b32095a170f75c7e4aae9382089b
|
use absolute imports
|
geoscixyz/em_examples
|
em_examples/__init__.py
|
em_examples/__init__.py
|
import .Attenuation
import .BiotSavart
import .CondUtils
import .DC_cylinder
import .DCLayers
import .DCsphere
import .DCWidget
import .DCWidgetPlate2_5D
import .DCWidgetPlate_2D
import .DCWidgetResLayer2_5D
import .DCWidgetResLayer2D
import .DipoleWidget1D
import .DipoleWidgetFD
import .DipoleWidgetTD
import .EMcircuit
import .FDEMDipolarfields
import .FDEMPlanewave
import .FreqtoTime
import .HarmonicVMDCylWidget
import .InductionLoop
import .InductionSphereFEM
import .Loop
import .MT
import .PlanewaveWidgetFD
import .Reflection
import .sphereElectrostatic_example
import .TransientVMDCylWidget
import .View
import .VolumeWidget
import .VolumeWidgetPlane
__version__ = '0.0.8'
__author__ = 'GeoScixyz developers'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 GeoScixyz developers'
|
import Attenuation
import BiotSavart
import CondUtils
import DC_cylinder
import DCLayers
import DCsphere
import DCWidget
import DCWidgetPlate2_5D
import DCWidgetPlate_2D
import DCWidgetResLayer2_5D
import DCWidgetResLayer2D
import DipoleWidget1D
import DipoleWidgetFD
import DipoleWidgetTD
import EMcircuit
import FDEMDipolarfields
import FDEMPlanewave
import FreqtoTime
import HarmonicVMDCylWidget
import InductionLoop
import InductionSphereFEM
import Loop
import MT
import PlanewaveWidgetFD
import Reflection
import sphereElectrostatic_example
import TransientVMDCylWidget
import View
import VolumeWidget
import VolumeWidgetPlane
__version__ = '0.0.8'
__author__ = 'GeoScixyz developers'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 GeoScixyz developers'
|
mit
|
Python
|
c55e9136ee9c86dcd4088ba416043dbff7e65eac
|
Fix Fast.com autoupdate (#57552)
|
nkgilley/home-assistant,home-assistant/home-assistant,toddeye/home-assistant,mezz64/home-assistant,nkgilley/home-assistant,w1ll1am23/home-assistant,GenericStudent/home-assistant,w1ll1am23/home-assistant,lukas-hetzenecker/home-assistant,aronsky/home-assistant,jawilson/home-assistant,jawilson/home-assistant,toddeye/home-assistant,rohitranjan1991/home-assistant,rohitranjan1991/home-assistant,rohitranjan1991/home-assistant,GenericStudent/home-assistant,lukas-hetzenecker/home-assistant,aronsky/home-assistant,home-assistant/home-assistant,mezz64/home-assistant
|
homeassistant/components/fastdotcom/__init__.py
|
homeassistant/components/fastdotcom/__init__.py
|
"""Support for testing internet speed via Fast.com."""
from __future__ import annotations
from datetime import datetime, timedelta
import logging
from typing import Any
from fastdotcom import fast_com
import voluptuous as vol
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, ServiceCall
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType
DOMAIN = "fastdotcom"
DATA_UPDATED = f"{DOMAIN}_data_updated"
_LOGGER = logging.getLogger(__name__)
CONF_MANUAL = "manual"
DEFAULT_INTERVAL = timedelta(hours=1)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_MANUAL, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Fast.com component."""
conf = config[DOMAIN]
data = hass.data[DOMAIN] = SpeedtestData(hass)
if not conf[CONF_MANUAL]:
async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL])
def update(service_call: ServiceCall | None = None) -> None:
"""Service call to manually update the data."""
data.update()
hass.services.async_register(DOMAIN, "speedtest", update)
hass.async_create_task(async_load_platform(hass, "sensor", DOMAIN, {}, config))
return True
class SpeedtestData:
"""Get the latest data from fast.com."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the data object."""
self.data: dict[str, Any] | None = None
self._hass = hass
def update(self, now: datetime | None = None) -> None:
"""Get the latest data from fast.com."""
_LOGGER.debug("Executing fast.com speedtest")
self.data = {"download": fast_com()}
dispatcher_send(self._hass, DATA_UPDATED)
|
"""Support for testing internet speed via Fast.com."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from fastdotcom import fast_com
import voluptuous as vol
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, ServiceCall
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType
DOMAIN = "fastdotcom"
DATA_UPDATED = f"{DOMAIN}_data_updated"
_LOGGER = logging.getLogger(__name__)
CONF_MANUAL = "manual"
DEFAULT_INTERVAL = timedelta(hours=1)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_MANUAL, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Fast.com component."""
conf = config[DOMAIN]
data = hass.data[DOMAIN] = SpeedtestData(hass)
if not conf[CONF_MANUAL]:
async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL])
def update(service_call: ServiceCall | None = None) -> None:
"""Service call to manually update the data."""
data.update()
hass.services.async_register(DOMAIN, "speedtest", update)
hass.async_create_task(async_load_platform(hass, "sensor", DOMAIN, {}, config))
return True
class SpeedtestData:
"""Get the latest data from fast.com."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the data object."""
self.data: dict[str, Any] | None = None
self._hass = hass
def update(self) -> None:
"""Get the latest data from fast.com."""
_LOGGER.debug("Executing fast.com speedtest")
self.data = {"download": fast_com()}
dispatcher_send(self._hass, DATA_UPDATED)
|
apache-2.0
|
Python
|
717db7509b586e59c06d06ad60be3ca5671e1c35
|
add support for circleci
|
sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper,sdpython/pyquickhelper
|
src/pyquickhelper/pycode/ci_helper.py
|
src/pyquickhelper/pycode/ci_helper.py
|
"""
@file
@brief Helpers for CI
.. versionadded:: 1.3
"""
def is_travis_or_appveyor():
"""
tells if is a travis environment or appveyor
@return ``'travis'``, ``'appveyor'`` or ``None``
The function should rely more on environement variables
``CI``, ``TRAVIS``, ``APPVEYOR``.
.. versionadded:: 1.3
.. versionchanged:: 1.5
Takes into account *circleci*.
"""
import sys
if "travis" in sys.executable:
return "travis"
import os
if os.environ.get("USERNAME", os.environ.get("USER", None)) == "appveyor" or \
os.environ.get("APPVEYOR", "").lower() in ("true", "1"):
return "appveyor"
if os.environ.get('CIRCLECI', "undefined") != "undefined":
return "circleci"
return None
|
"""
@file
@brief Helpers for CI
.. versionadded:: 1.3
"""
def is_travis_or_appveyor():
"""
tells if is a travis environment or appveyor
@return ``'travis'``, ``'appveyor'`` or ``None``
The function should rely more on environement variables
``CI``, ``TRAVIS``, ``APPVEYOR``.
.. versionadded:: 1.3
"""
import sys
if "travis" in sys.executable:
return "travis"
import os
if os.environ.get("USERNAME", os.environ.get("USER", None)) == "appveyor" or \
os.environ.get("APPVEYOR", "").lower() in ("true", "1"):
return "appveyor"
return None
|
mit
|
Python
|
e6e68143e39dcc14833065b388f65879f2aa81f2
|
Update import export TestCase
|
jmakov/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core
|
src/tests/ggrc/converters/__init__.py
|
src/tests/ggrc/converters/__init__.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from flask import json
from os.path import abspath
from os.path import dirname
from os.path import join
from tests import ggrc
THIS_ABS_PATH = abspath(dirname(__file__))
class TestCase(ggrc.TestCase):
CSV_DIR = join(THIS_ABS_PATH, "test_csvs/")
def import_file(self, filename, dry_run=False):
data = {"file": (open(join(self.CSV_DIR, filename)), filename)}
headers = {
"X-test-only": "true" if dry_run else "false",
"X-requested-by": "gGRC",
}
response = self.client.post("/_service/import_csv",
data=data, headers=headers)
self.assert200(response)
return json.loads(response.data)
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from flask import json
from os.path import abspath
from os.path import dirname
from os.path import join
from tests import ggrc
THIS_ABS_PATH = abspath(dirname(__file__))
CSV_DIR = join(THIS_ABS_PATH, "test_csvs/")
class TestCase(ggrc.TestCase):
def import_file(self, filename, dry_run=False):
data = {"file": (open(join(CSV_DIR, filename)), filename)}
headers = {
"X-test-only": "true" if dry_run else "false",
"X-requested-by": "gGRC",
}
response = self.client.post("/_service/import_csv",
data=data, headers=headers)
self.assert200(response)
return json.loads(response.data)
|
apache-2.0
|
Python
|
671e877bc14eb2034bc4ff735c56c2d3aeb2e43d
|
Update a test
|
mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
examples/raw_parameter_script.py
|
examples/raw_parameter_script.py
|
""" The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
with Python or from a Python interactive interpreter. Based on
whether relative imports work or don't, the script can autodetect
how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
# Running with Pytest / (Finds test methods to run using autodiscovery)
# Example run command: "pytest raw_parameter_script.py"
from .my_first_test import MyTestClass # (relative imports work: ".~")
except (ImportError, ValueError):
# Running with pure Python OR from a Python interactive interpreter
# Example run command: "python raw_parameter_script.py"
from my_first_test import MyTestClass # (relative imports DON'T work)
b = MyTestClass("test_basic")
b.browser = "chrome"
b.headless = False
b.servername = "localhost"
b.port = 4444
b.data = None
b.environment = "test"
b.user_agent = None
b.extension_zip = None
b.extension_dir = None
b.database_env = "test"
b.log_path = "latest_logs/"
b.archive_logs = False
b.disable_csp = False
b.enable_sync = False
b.visual_baseline = False
b.save_screenshot_after_test = False
b.timeout_multiplier = None
b.pytest_html_report = None
b.report_on = False
b.with_db_reporting = False
b.with_s3_logging = False
b.js_checking_on = False
b.is_pytest = False
b.demo_mode = False
b.demo_sleep = 1
b.message_duration = 2
b.user_data_dir = None
b.proxy_string = None
b.ad_block_on = False
b.highlights = None
b.check_js = False
b.cap_file = None
b.setUp()
try:
b.test_basic()
finally:
b.tearDown()
del b
|
""" The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
with Python or from a Python interactive interpreter. Based on
whether relative imports work or don't, the script can autodetect
how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
# Running with Pytest / (Finds test methods to run using autodiscovery)
# Example run command: "pytest raw_parameter_script.py"
from .my_first_test import MyTestClass # (relative imports work: ".~")
except (ImportError, ValueError):
# Running with pure Python OR from a Python interactive interpreter
# Example run command: "python raw_parameter_script.py"
from my_first_test import MyTestClass # (relative imports DON'T work)
b = MyTestClass("test_basic")
b.browser = "chrome"
b.headless = False
b.servername = "localhost"
b.port = 4444
b.data = None
b.environment = "test"
b.user_agent = None
b.database_env = "test"
b.log_path = "latest_logs/"
b.archive_logs = False
b.disable_csp = False
b.visual_baseline = False
b.save_screenshot_after_test = False
b.timeout_multiplier = None
b.pytest_html_report = None
b.report_on = False
b.with_db_reporting = False
b.with_s3_logging = False
b.js_checking_on = False
b.is_pytest = False
b.demo_mode = False
b.demo_sleep = 1
b.message_duration = 2
b.proxy_string = None
b.ad_block_on = False
b.highlights = None
b.check_js = False
b.cap_file = None
b.setUp()
try:
b.test_basic()
finally:
b.tearDown()
del b
|
mit
|
Python
|
02a3fb6e1d7bde7b9f9d20089e8dd11040388e80
|
remove testing code
|
zacchiro/debsources,sophiejjj/debsources,sophiejjj/debsources,zacchiro/debsources,matthieucan/debsources,oorestisime/debsources,zacchiro/debsources,oorestisime/debsources,oorestisime/debsources,sophiejjj/debsources,devoxel/debsources,Debian/debsources,sophiejjj/debsources,vivekanand1101/debsources,oorestisime/debsources,vivekanand1101/debsources,matthieucan/debsources,devoxel/debsources,oorestisime/debsources,nonas/debian-qa,devoxel/debsources,clemux/debsources,clemux/debsources,vivekanand1101/debsources,vivekanand1101/debsources,clemux/debsources,devoxel/debsources,matthieucan/debsources,zacchiro/debsources,Debian/debsources,zacchiro/debsources,nonas/debian-qa,devoxel/debsources,nonas/debian-qa,Debian/debsources,matthieucan/debsources,clemux/debsources,matthieucan/debsources,vivekanand1101/debsources,sophiejjj/debsources,Debian/debsources,clemux/debsources,Debian/debsources,nonas/debian-qa
|
python/app/extract_stats.py
|
python/app/extract_stats.py
|
# Copyright (C) 2014 Matthieu Caneill <[email protected]>
#
# This file is part of Debsources.
#
# Debsources is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def extract_stats(filter_suites=None, filename="cache/stats.data"):
"""
Extracts information from the collected stats.
If filter_suites is None, all the information are extracted.
Otherwise suites must be an array of suites names (can contain "total").
e.g. extract_stats(filter_suites=["total", "debian_wheezy"])
"""
languages = set()
suites = set()
res = dict()
with open(filename) as f:
for line in f:
try:
(key, value) = line.split()
except:
continue
try:
value = int(value)
except:
pass
# we extract some information (suites, languages)
splits = key.split(".")
if splits[0][:7] == "debian_":
# we extract suites names
suites.add(splits[0])
if len(splits) == 3 and splits[1] == "sloccount":
# we extract language names
languages.add(splits[2])
# if this key/value is in the required suites, we add it
if filter_suites is None or splits[0] in filter_suites:
res[key] = value
# we use lists instead of sets, because they are JSON-serializable
return dict(results=res, suites=list(suites), languages=list(languages))
|
# Copyright (C) 2014 Matthieu Caneill <[email protected]>
#
# This file is part of Debsources.
#
# Debsources is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def extract_stats(filter_suites=None, filename="cache/stats.data"):
"""
Extracts information from the collected stats.
If filter_suites is None, all the information are extracted.
Otherwise suites must be an array of suites names (can contain "total").
e.g. extract_stats(filter_suites=["total", "debian_wheezy"])
"""
languages = set()
suites = set()
res = dict()
with open(filename) as f:
for line in f:
try:
(key, value) = line.split()
except:
continue
try:
value = int(value)
except:
pass
# we extract some information (suites, languages)
splits = key.split(".")
if splits[0][:7] == "debian_":
# we extract suites names
suites.add(splits[0])
if len(splits) == 3 and splits[1] == "sloccount":
# we extract language names
languages.add(splits[2])
# if this key/value is in the required suites, we add it
if filter_suites is None or splits[0] in filter_suites:
res[key] = value
# we use lists instead of sets, because they are JSON-serializable
return dict(results=res, suites=list(suites), languages=list(languages))
if __name__ == "__main__":
from pprint import pprint
pprint(extract_stats(filename="stats.data",
filter_suites=["debian_wheezy", "total"]))
|
agpl-3.0
|
Python
|
adfb7518b47c36396c14a513f547fd5055a29883
|
add bootstrap3
|
CooloiStudio/Django_MobileFoodOrderServer,CooloiStudio/Django_MobileFoodOrderServer,CooloiStudio/Django_MobileFoodOrderServer
|
MobileFoodOrderServer/settings.py
|
MobileFoodOrderServer/settings.py
|
"""
Django settings for MobileFoodOrderServer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$6gqjkg1!57(@o6bs#tki8jt2@0p4z_ed@spnfrb@bh8lxqw$n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap3',
'order',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'MobileFoodOrderServer.urls'
WSGI_APPLICATION = 'MobileFoodOrderServer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
DEFAULT_CHARSET = 'utf-8'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR), 'templates']
|
"""
Django settings for MobileFoodOrderServer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$6gqjkg1!57(@o6bs#tki8jt2@0p4z_ed@spnfrb@bh8lxqw$n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'order',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'MobileFoodOrderServer.urls'
WSGI_APPLICATION = 'MobileFoodOrderServer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
DEFAULT_CHARSET = 'utf-8'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR), 'templates']
|
mit
|
Python
|
d06fa1c8bfa5c782a5c28403caf44736620a3706
|
add get_instruction method modified: qaamus/test_angka_parser.py
|
ihfazhillah/qaamus-python
|
qaamus/test_angka_parser.py
|
qaamus/test_angka_parser.py
|
import unittest
from bs4 import BeautifulSoup
from ind_ara_parser import BaseParser
class AngkaParser(BaseParser):
"""Handle terjemah angka page."""
def get_instruction(self):
"""Return the instruction text.
text is returning 'Terjemah angka adalah menterjemahkan angka
kedalam bahasa arab, caranya cukup mudah ketik angka
(tanpa titik dan koma) yang akan di terjemahkan'."""
text = self.soup.select(".page-header > h1")[0].next_sibling.strip()
return text.split(",")[1].strip().capitalize()
class AngkaParserTestCase(unittest.TestCase):
with open("../html/angka123", "rb") as f:
f = f.read()
soup = BeautifulSoup(f)
def setUp(self):
self.angka_parser = AngkaParser(self.soup)
def test_get_angka(self):
result = self.angka_parser._get_query()
expected = '123'
self.assertEqual(result, expected)
def test_get_ara(self):
result = self.angka_parser._get_ara()
expected = 'المئة و الثالث و العشرون'
self.assertEqual(result, expected)
def test_get_footer(self):
result = self.angka_parser._get_footer()
expected = ''
self.assertEqual(result, expected)
def test_get_arti_master(self):
result = self.angka_parser.get_arti_master()
expected = {"ind": '123',
"ara": 'المئة و الثالث و العشرون',
"footer": ""}
self.assertEqual(result, expected)
def test_get_page_header(self):
result = self.angka_parser.get_instruction()
expected = ("Caranya cukup mudah ketik "
"angka (tanpa titik dan koma) yang akan di terjemahkan")
self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main()
|
import unittest
from bs4 import BeautifulSoup
from ind_ara_parser import BaseParser
class AngkaParser(BaseParser):
pass
class AngkaParserTestCase(unittest.TestCase):
with open("../html/angka123", "rb") as f:
f = f.read()
soup = BeautifulSoup(f)
def setUp(self):
self.angka_parser = AngkaParser(self.soup)
def test_get_angka(self):
result = self.angka_parser._get_query()
expected = '123'
self.assertEqual(result, expected)
def test_get_ara(self):
result = self.angka_parser._get_ara()
expected = 'المئة و الثالث و العشرون'
self.assertEqual(result, expected)
def test_get_footer(self):
result = self.angka_parser._get_footer()
expected = ''
self.assertEqual(result, expected)
def test_get_arti_master(self):
result = self.angka_parser.get_arti_master()
expected = {"ind": '123',
"ara": 'المئة و الثالث و العشرون',
"footer": ""}
self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main()
|
mit
|
Python
|
6509a1c1e9ee92841378d0b6f546ebf64991bbea
|
add xyz to exportable formats
|
rcpedersen/stl-to-voxel
|
stltovoxel.py
|
stltovoxel.py
|
import argparse
from PIL import Image
import numpy as np
import os.path
import slice
import stl_reader
import perimeter
from util import arrayToPixel
def doExport(inputFilePath, outputFilePath, resolution):
mesh = list(stl_reader.read_stl_verticies(inputFilePath))
(scale, shift, bounding_box) = slice.calculateScaleAndShift(mesh, resolution)
mesh = list(slice.scaleAndShiftMesh(mesh, scale, shift))
#Note: vol should be addressed with vol[z][x][y]
vol = np.zeros((bounding_box[2],bounding_box[0],bounding_box[1]), dtype=bool)
for height in range(bounding_box[2]):
print('Processing layer %d/%d'%(height+1,bounding_box[2]))
lines = slice.toIntersectingLines(mesh, height)
prepixel = np.zeros((bounding_box[0], bounding_box[1]), dtype=bool)
perimeter.linesToVoxels(lines, prepixel)
vol[height] = prepixel
outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)
if outputFileExtension == '.png':
exportPngs(vol, bounding_box, outputFilePath)
elif outputFileExtension == '.xyz':
exportXyz(vol, bounding_box, outputFilePath)
elif outputFileExtension == '.svx':
exportSvx(vol, bounding_box, outputFilePath)
def exportPngs(voxels, bounding_box, outputFilePath):
outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)
for height in range(bounding_box[2]):
img = Image.new('RGB', (bounding_box[0], bounding_box[1]), 'white') # create a new black image
pixels = img.load()
arrayToPixel(voxels[height], pixels)
path = outputFilePattern + '-' + str(height) + outputFileExtension
img.save(path)
def exportXyz(voxels, bounding_box, outputFilePath):
output = open(outputFilePath, 'w')
for z in bounding_box[2]:
for x in bounding_box[0]:
for y in bounding_box[1]:
if vol[z][x][y]:
output.write('%s %s %s\n'%(x,y,z))
output.close()
def exportSvx(voxels, bounding_box, outputFilePath):
pass
def file_choices(choices,fname):
filename, ext = os.path.splitext(fname)
if ext == '' or ext not in choices:
if len(choices) == 1:
parser.error('%s doesn\'t end with %s'%(fname,choices))
else:
parser.error('%s doesn\'t end with one of %s'%(fname,choices))
return fname
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert STL files to images/gifs')
parser.add_argument('input', nargs='?', type=lambda s:file_choices(('.stl'),s))
parser.add_argument('output', nargs='?', type=lambda s:file_choices(('.png', '.xyz', '.svx'),s))
args = parser.parse_args()
doExport(args.input, args.output, 256)
|
import argparse
from PIL import Image
import numpy as np
import os.path
import slice
import stl_reader
import perimeter
from util import arrayToPixel
def doExport(inputFilePath, outputFilePath, resolution):
mesh = list(stl_reader.read_stl_verticies(inputFilePath))
(scale, shift, bounding_box) = slice.calculateScaleAndShift(mesh, resolution)
mesh = list(slice.scaleAndShiftMesh(mesh, scale, shift))
outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)
for height in range(bounding_box[2]):
img = Image.new('RGB', (bounding_box[0], bounding_box[1]), "white") # create a new black image
pixels = img.load()
lines = slice.toIntersectingLines(mesh, height)
prepixel = np.zeros((bounding_box[0], bounding_box[1]), dtype=bool)
perimeter.linesToVoxels(lines, prepixel)
arrayToPixel(prepixel, pixels)
path = outputFilePattern + "-" + str(height) + outputFileExtension
print("%d/%d: Saving %s"%(height,bounding_box[2],path))
img.save(path)
def file_choices(choices,fname):
filename, ext = os.path.splitext(fname)
if ext == "" or ext not in choices:
if len(choices) == 1:
parser.error("file doesn't end with {}".format(choices))
else:
parser.error("file doesn't end with one of {}".format(choices))
return fname
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert STL files to images/gifs')
parser.add_argument('input', nargs='?', type=lambda s:file_choices((".stl"),s))
parser.add_argument('output', nargs='?', type=lambda s:file_choices((".png"),s))
args = parser.parse_args()
doExport(args.input, args.output, 256)
|
mit
|
Python
|
21f53bee1bfba8ef82b82898693c2cc09a7873c7
|
add get_weight() to Keras interface
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
syft/interfaces/keras/models/sequential.py
|
syft/interfaces/keras/models/sequential.py
|
import syft
import syft.nn as nn
import sys
from syft.interfaces.keras.layers import Log
class Sequential(object):
def __init__(self):
self.syft = nn.Sequential()
self.layers = list()
self.compiled = False
def add(self, layer):
if(len(self.layers) > 0):
# look to the previous layer to get the input shape for this layer
layer.input_shape = self.layers[-1].output_shape
# if layer doesn't know its output shape - it's probably dynamic
if not hasattr(layer, 'output_shape'):
layer.output_shape = layer.input_shape
layer.create_model()
self.layers.append(layer)
# sometimes keras has single layers that actually correspond
# to multiple syft layers - so they end up getting stored in
# an ordered list called "ordered_syft"
for l in layer.ordered_syft:
self.syft.add(l)
def summary(self):
self.syft.summary()
def compile(self,loss,optimizer,metrics,alpha=0.01):
if(not self.compiled):
self.compiled = True
if(loss == 'categorical_crossentropy'):
self.add(Log())
self.loss = nn.NLLLoss()
self.optimizer = optimizer
self.metrics = metrics
self.optimizer.init(syft_params=self.syft.parameters(),alpha=alpha)
else:
sys.stderr.write("Warning: Model already compiled... please rebuild from scratch if you need to change things")
def fit(self,x_train,y_train,batch_size,epochs,verbose,validation_data):
final_loss = self.syft.fit(input=x_train,
target=y_train,
batch_size=batch_size,
criterion=self.loss,
optim=self.optimizer.syft,
iters=epochs,
log_interval=1)
return final_loss
def predict(self,x):
return self.syft.forward(input=x)
def get_weights(self):
return self.syft.parameters()
|
import syft
import syft.nn as nn
import sys
from syft.interfaces.keras.layers import Log
class Sequential(object):
def __init__(self):
self.syft = nn.Sequential()
self.layers = list()
self.compiled = False
def add(self, layer):
if(len(self.layers) > 0):
# look to the previous layer to get the input shape for this layer
layer.input_shape = self.layers[-1].output_shape
# if layer doesn't know its output shape - it's probably dynamic
if not hasattr(layer, 'output_shape'):
layer.output_shape = layer.input_shape
layer.create_model()
self.layers.append(layer)
# sometimes keras has single layers that actually correspond
# to multiple syft layers - so they end up getting stored in
# an ordered list called "ordered_syft"
for l in layer.ordered_syft:
self.syft.add(l)
def summary(self):
self.syft.summary()
def compile(self,loss,optimizer,metrics,alpha=0.01):
if(not self.compiled):
self.compiled = True
if(loss == 'categorical_crossentropy'):
self.add(Log())
self.loss = nn.NLLLoss()
self.optimizer = optimizer
self.metrics = metrics
self.optimizer.init(syft_params=self.syft.parameters(),alpha=alpha)
else:
sys.stderr.write("Warning: Model already compiled... please rebuild from scratch if you need to change things")
def fit(self,x_train,y_train,batch_size,epochs,verbose,validation_data):
final_loss = self.syft.fit(input=x_train,
target=y_train,
batch_size=batch_size,
criterion=self.loss,
optim=self.optimizer.syft,
iters=epochs,
log_interval=1)
return final_loss
def predict(self,x):
return self.syft.forward(input=x)
|
apache-2.0
|
Python
|
cb0ba85a56c163436d6a4180413f0228407458d8
|
Correct path specification error
|
SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library
|
tests/workflows/test_component_wrappers.py
|
tests/workflows/test_component_wrappers.py
|
""" Unit tests for json helpers
"""
import os
import unittest
from data_models.parameters import arl_path
from workflows.wrappers.component_wrapper import component_wrapper
class TestComponentWrappers(unittest.TestCase):
def test_run_components(self):
files = ["test_results/test_pipeline.log",
"test_results/test_skymodel.hdf",
"test_results/test_empty_vislist.hdf",
"test_results/test_perfect_vislist.hdf",
"test_results/test_perfect_restored.fits",
"test_results/test_perfect_deconvolved.fits",
"test_results/test_perfect_residual.fits"
]
try:
for f in files:
os.remove(arl_path(f))
except FileNotFoundError:
pass
config_files = ["tests/workflows/test_create_vislist.json",
"tests/workflows/test_create_skymodel.json",
"tests/workflows/test_predict_vislist.json",
"tests/workflows/test_continuum_imaging.json"]
for config_file in config_files:
component_wrapper(arl_path(config_file))
for f in files:
assert os.path.isfile(arl_path(f)), "File %s does not exist" % arl_path(f)
if __name__ == '__main__':
unittest.main()
|
""" Unit tests for json helpers
"""
import os
import unittest
from data_models.parameters import arl_path
from workflows.wrappers.component_wrapper import component_wrapper
class TestComponentWrappers(unittest.TestCase):
def test_run_components(self):
files = ["test_results/test_pipeline.log",
"test_results/test_skymodel.hdf",
"test_results/test_empty_vislist.hdf",
"test_results/test_perfect_vislist.hdf",
"test_results/test_perfect_restored.fits",
"test_results/test_perfect_deconvolved.fits",
"test_results/test_perfect_residual.fits"
]
try:
for f in files:
os.remove(arl_path(f))
except FileNotFoundError:
pass
config_files = ["test_create_vislist.json",
"test_create_skymodel.json",
"test_predict_vislist.json",
"test_continuum_imaging.json"]
for config_file in config_files:
component_wrapper(config_file)
for f in files:
assert os.path.isfile(arl_path(f)), "File %s does not exist" % arl_path(f)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
Python
|
f535228e38f33263289f28d46e910ccb0a98a381
|
Use list comprehension to evaluate PYTZ_TIME_ZONE_CHOICES
|
goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic
|
tournamentcontrol/competition/constants.py
|
tournamentcontrol/competition/constants.py
|
import pytz
from dateutil.rrule import DAILY, WEEKLY
from django.utils.translation import ugettext_lazy as _
GENDER_CHOICES = (
('M', _('Male')),
('F', _('Female')),
('X', _('Unspecified')),
)
SEASON_MODE_CHOICES = (
(WEEKLY, _("Season")),
(DAILY, _("Tournament")),
)
WIN_LOSE = {
'W': _("Winner"),
'L': _("Loser"),
}
###################
# TIME ZONE NAMES #
###################
"""
Ideally this would be a better list for the specific uses of the site in
question. For example, it is perhaps much easier to list just the Australian
time zones for sites deployed for Australian customers.
This is also implemented in touchtechnology.common.forms and should probably
be moved and better leveraged in future release.
See https://bitbucket.org/touchtechnology/common/issue/16/
"""
PYTZ_TIME_ZONE_CHOICES = [('\x20Standard', (('UTC', 'UTC'), ('GMT', 'GMT')))]
for iso, name in pytz.country_names.items():
values = sorted(pytz.country_timezones.get(iso, []))
names = [s.rsplit("/", 1)[1].replace("_", " ") for s in values]
PYTZ_TIME_ZONE_CHOICES.append((name, [each for each in zip(values, names)]))
PYTZ_TIME_ZONE_CHOICES.sort()
|
import pytz
from dateutil.rrule import DAILY, WEEKLY
from django.utils.translation import ugettext_lazy as _
GENDER_CHOICES = (
('M', _('Male')),
('F', _('Female')),
('X', _('Unspecified')),
)
SEASON_MODE_CHOICES = (
(WEEKLY, _("Season")),
(DAILY, _("Tournament")),
)
WIN_LOSE = {
'W': _("Winner"),
'L': _("Loser"),
}
###################
# TIME ZONE NAMES #
###################
"""
Ideally this would be a better list for the specific uses of the site in
question. For example, it is perhaps much easier to list just the Australian
time zones for sites deployed for Australian customers.
This is also implemented in touchtechnology.common.forms and should probably
be moved and better leveraged in future release.
See https://bitbucket.org/touchtechnology/common/issue/16/
"""
PYTZ_TIME_ZONE_CHOICES = [('\x20Standard', (('UTC', 'UTC'), ('GMT', 'GMT')))]
for iso, name in pytz.country_names.items():
values = sorted(pytz.country_timezones.get(iso, []))
names = [s.rsplit("/", 1)[1].replace("_", " ") for s in values]
PYTZ_TIME_ZONE_CHOICES.append((name, zip(values, names)))
PYTZ_TIME_ZONE_CHOICES.sort()
|
bsd-3-clause
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.