Dataset schema:

    column         type            min    max
    commit         stringlengths   40     40
    subject        stringlengths   4      1.73k
    repos          stringlengths   5      127k
    old_file       stringlengths   2      751
    new_file       stringlengths   2      751
    new_contents   stringlengths   1      8.98k
    old_contents   stringlengths   0      6.59k
    license        stringclasses   13 values
    lang           stringclasses   23 values
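Each record below follows this schema. As a minimal sketch of how such a dump could be iterated (this assumes the dump is published as a Hugging Face dataset; the dataset ID used here is a placeholder, not the real one):

# Minimal sketch, assuming a Hugging Face dataset with the schema above.
# "user/commit-diffs" is a hypothetical placeholder ID.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")
for row in ds.select(range(3)):
    # old_contents / new_contents hold the file before and after the commit
    print(row["commit"][:8], row["subject"], row["lang"], row["license"])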
02f59b60062004fc23dbfbfc6201b326b08513a8
Add 404 exception
jiocloudservices/jcsclient
src/client/exceptions.py
src/client/exceptions.py
new_contents:

class HTTP4xx(Exception): pass
class HTTP400(HTTP4xx): pass
class HTTP404(HTTP4xx): pass
class HTTP409(HTTP4xx): pass
old_contents:

class HTTP4xx(Exception): pass
class HTTP400(HTTP4xx): pass
class HTTP409(HTTP4xx): pass
apache-2.0
Python
8d06ccd7aeefe5945bab44b01764bd62685a2e17
Add missing member to API.
MoonShineVFX/core,mindbender-studio/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core,getavalon/core
mindbender/api.py
mindbender/api.py
"""Public API Anything that is not defined here is **internal** and unreliable for external use. Motivation for api.py: Storing the API in a module, as opposed to in __init__.py, enables use of it internally. For example, from `pipeline.py`: >> from . import api >> api.do_this() The important bit is avoiding circular dependencies, where api.py is calling upon a module which in turn calls upon api.py. """ import logging from . import schema from .pipeline import ( install, uninstall, ls, search, Loader, discover_loaders, register_root, register_data, register_host, register_format, register_silo, register_family, register_loaders_path, register_plugins, registered_host, registered_families, registered_loaders_paths, registered_formats, registered_data, registered_root, registered_silos, deregister_plugins, deregister_format, deregister_family, deregister_data, deregister_loaders_path, any_representation, fixture, ) from .lib import ( format_staging_dir, format_shared_dir, format_version, time, find_latest_version, parse_version, ) logging.basicConfig() __all__ = [ "install", "uninstall", "schema", "ls", "search", "Loader", "discover_loaders", "register_host", "register_data", "register_format", "register_silo", "register_family", "register_loaders_path", "register_plugins", "register_root", "registered_root", "registered_silos", "registered_loaders_paths", "registered_host", "registered_families", "registered_formats", "registered_data", "deregister_plugins", "deregister_format", "deregister_family", "deregister_data", "deregister_loaders_path", "format_staging_dir", "format_shared_dir", "format_version", "find_latest_version", "parse_version", "time", "any_representation", "fixture", ]
"""Public API Anything that is not defined here is **internal** and unreliable for external use. Motivation for api.py: Storing the API in a module, as opposed to in __init__.py, enables use of it internally. For example, from `pipeline.py`: >> from . import api >> api.do_this() The important bit is avoiding circular dependencies, where api.py is calling upon a module which in turn calls upon api.py. """ import logging from . import schema from .pipeline import ( install, uninstall, ls, search, Loader, discover_loaders, register_root, register_data, register_host, register_format, register_silo, register_family, register_loaders_path, register_plugins, registered_host, registered_families, registered_loaders_paths, registered_formats, registered_data, registered_root, registered_silos, deregister_plugins, deregister_format, deregister_family, deregister_data, deregister_loaders_path, any_representation, fixture, ) from .lib import ( format_staging_dir, format_shared_dir, format_version, time, find_latest_version, parse_version, ) logging.basicConfig() __all__ = [ "install", "uninstall", "schema", "ls", "search", "Loader", "discover_loaders", "register_host", "register_data", "register_format", "register_silo", "register_family", "register_loaders_path", "register_plugins", "register_root", "registered_root", "registered_silos", "registered_loaders_paths", "registered_host", "registered_families", "registered_formats", "registered_data", "deregister_plugins", "deregister_family", "deregister_data", "deregister_loaders_path", "format_staging_dir", "format_shared_dir", "format_version", "find_latest_version", "parse_version", "time", "any_representation", "fixture", ]
mit
Python
f68e8612f1e8198a4b300b67536d654e13809eb4
Allow SHA256 hashes in URLs
kkampardi/Plinth,harry-7/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,kkampardi/Plinth,vignanl/Plinth,vignanl/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,vignanl/Plinth,vignanl/Plinth,harry-7/Plinth
plinth/modules/monkeysphere/urls.py
plinth/modules/monkeysphere/urls.py
new_contents:

#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

"""
URLs for the monkeysphere module.
"""

from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^sys/monkeysphere/$', views.index, name='index'),
    url(r'^sys/monkeysphere/(?P<ssh_fingerprint>[0-9A-Za-z:+/]+)/import/$',
        views.import_key, name='import'),
    url(r'^sys/monkeysphere/(?P<fingerprint>[0-9A-Fa-f]+)/details/$',
        views.details, name='details'),
    url(r'^sys/monkeysphere/(?P<fingerprint>[0-9A-Fa-f]+)/publish/$',
        views.publish, name='publish'),
    url(r'^sys/monkeysphere/cancel/$', views.cancel, name='cancel'),
]
old_contents:

#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

"""
URLs for the monkeysphere module.
"""

from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^sys/monkeysphere/$', views.index, name='index'),
    url(r'^sys/monkeysphere/(?P<ssh_fingerprint>[0-9A-Fa-f:]+)/import/$',
        views.import_key, name='import'),
    url(r'^sys/monkeysphere/(?P<fingerprint>[0-9A-Fa-f]+)/details/$',
        views.details, name='details'),
    url(r'^sys/monkeysphere/(?P<fingerprint>[0-9A-Fa-f]+)/publish/$',
        views.publish, name='publish'),
    url(r'^sys/monkeysphere/cancel/$', views.cancel, name='cancel'),
]
agpl-3.0
Python
547c9e36255870bcee8a800a3fa95c3806a95c2c
Update links when it starts getting redirected
adityabansal/newsAroundMe,adityabansal/newsAroundMe,adityabansal/newsAroundMe
newsApp/linkManager.py
newsApp/linkManager.py
new_contents:

import os
import time

from constants import *
from dbhelper import *
from dbItemManagerV2 import DbItemManagerV2
from link import Link

LINK_EXPIRY_TIME_IN_DAYS = 80

class LinkManager(DbItemManagerV2):
    """
    Manage links stored on AWS dynamo db database.
    Contains functions for CRUD operations on the links stored

    Following environment variables need to be set -
    'LINKTAGSTABLE_CONNECTIONSTRING' : connection string of link tags table.
    """

    def __init__(self):
        """
        Instantiates the linkManager.
        """

        DbItemManagerV2.__init__(self, os.environ['LINKTAGSTABLE_CONNECTIONSTRING'])

    def get(self, linkId):
        """
        Put a new link.
        """
        dbItem = DbItemManagerV2.get(self, linkId);
        link = Link(linkId, dbItem.tags)

        #handle the case when link starts gettting redirected to new url
        if link.id != linkId:
            self.delete(linkId)
            self.put(link)

        return link

    def getStaleLinks(self):
        """
        Returns a list of linkIds of stale links.
        """
        linkExpiryCutoff = int(time.time()) - LINK_EXPIRY_TIME_IN_DAYS*24*60*60;
        scanResults = DbItemManagerV2.scan(self, pubtime__lte = linkExpiryCutoff)
        return (result.id for result in scanResults)

    def getUnprocessedLinks(self):
        return DbItemManagerV2.query_2(
            self,
            isProcessed__eq = 'false',
            index = 'isProcessed-itemId-index')
old_contents:

import os
import time

from constants import *
from dbhelper import *
from dbItemManagerV2 import DbItemManagerV2
from link import Link

LINK_EXPIRY_TIME_IN_DAYS = 80

class LinkManager(DbItemManagerV2):
    """
    Manage links stored on AWS dynamo db database.
    Contains functions for CRUD operations on the links stored

    Following environment variables need to be set -
    'LINKTAGSTABLE_CONNECTIONSTRING' : connection string of link tags table.
    """

    def __init__(self):
        """
        Instantiates the linkManager.
        """

        DbItemManagerV2.__init__(self, os.environ['LINKTAGSTABLE_CONNECTIONSTRING'])

    def get(self, linkId):
        """
        Put a new link.
        """
        dbItem = DbItemManagerV2.get(self, linkId);
        return Link(linkId, dbItem.tags)

    def getStaleLinks(self):
        """
        Returns a list of linkIds of stale links.
        """
        linkExpiryCutoff = int(time.time()) - LINK_EXPIRY_TIME_IN_DAYS*24*60*60;
        scanResults = DbItemManagerV2.scan(self, pubtime__lte = linkExpiryCutoff)
        return (result.id for result in scanResults)

    def getUnprocessedLinks(self):
        return DbItemManagerV2.query_2(
            self,
            isProcessed__eq = 'false',
            index = 'isProcessed-itemId-index')
mit
Python
366ecdd77520004c307cbbf127bb374ab546ce7e
Use windows API to change the AppID and use our icon.
BBN-Q/Quince
run-quince.py
run-quince.py
new_contents:

#!/usr/bin/env python3
# coding: utf-8

# Raytheon BBN Technologies 2016
# Contributiors: Graham Rowlands
#
# This file runs the main loop

# Use PyQt5 by default
import os
os.environ["QT_API"] = 'pyqt5'

from qtpy.QtWidgets import QApplication

import sys
import argparse
import ctypes

from quince.view import *

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('filename', type=str, help='Measurement library filename')
    args = parser.parse_args()

    app = QApplication([])

    # Setup icon
    png_path = os.path.join(os.path.dirname(__file__), "assets/quince_icon.png")
    app.setWindowIcon(QIcon(png_path))

    # Convince windows that this is a separate application to get the task bar icon working
    # https://stackoverflow.com/questions/1551605/how-to-set-applications-taskbar-icon-in-windows-7/1552105#1552105
    if (os.name == 'nt'):
        myappid = u'BBN.quince.gui.0001'  # arbitrary string
        ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)

    window = NodeWindow()
    window.load_yaml(args.filename)
    app.aboutToQuit.connect(window.cleanup)
    window.show()

    sys.exit(app.exec_())
old_contents:

#!/usr/bin/env python3
# coding: utf-8

# Raytheon BBN Technologies 2016
# Contributiors: Graham Rowlands
#
# This file runs the main loop

# Use PyQt5 by default
import os
os.environ["QT_API"] = 'pyqt5'

from qtpy.QtWidgets import QApplication

import sys
import argparse

from quince.view import *

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('filename', type=str, help='Measurement library filename')
    args = parser.parse_args()

    app = QApplication([])

    # Setup icon
    png_path = os.path.join(os.path.dirname(__file__), "assets/quince_icon.png")
    app.setWindowIcon(QIcon(png_path))

    window = NodeWindow()
    window.load_yaml(args.filename)
    app.aboutToQuit.connect(window.cleanup)
    window.show()

    sys.exit(app.exec_())
apache-2.0
Python
9e6b596aa856e1d50a9c2c2882289cf1a5d8c0c0
Fix up plotting script
petebachant/waveFlapper-OpenFOAM,petebachant/waveFlapper-OpenFOAM,petebachant/waveFlapper-OpenFOAM
plot.py
plot.py
#!/usr/bin/env python """Processing routines for the waveFlapper case.""" import foampy import numpy as np import matplotlib.pyplot as plt width_2d = 0.1 width_3d = 3.66 m_paddle = 1270.0 # Paddle mass in kg, from OMB manual h_piston = 3.3147 I_paddle = 1/3*m_paddle*h_piston**2 def plot_force(): """Plots the streamwise force on the paddle over time.""" pass def plot_moment(): data = foampy.load_forces() i = 10 t = data["time"][i:] m = data.mz m = m[i:] * width_3d / width_2d period = 2.2 omega = 2 * np.pi / period theta = 0.048 * np.sin(omega * t) theta_doubledot = -0.048 * omega**2 * np.sin(omega * t) m_inertial = I_paddle * theta_doubledot m = m + m_inertial plt.figure() plt.plot(t, m) plt.xlabel("Time (s)") plt.ylabel("Flapper moment (Nm)") print( "Max moment from CFD (including inertia) = {:0.1f} Nm".format(m.max()) ) print("Theoretical max moment (including inertia) =", 5500*3.3, "Nm") plt.show() if __name__ == "__main__": plot_moment()
#!/usr/bin/env python """Processing routines for the waveFlapper case.""" import foampy import numpy as np import matplotlib.pyplot as plt width_2d = 0.1 width_3d = 3.66 m_paddle = 1270.0 # Paddle mass in kg, from OMB manual h_piston = 3.3147 I_paddle = 1/3*m_paddle*h_piston**2 def plot_force(): """Plots the streamwise force on the paddle over time.""" def plot_moment(): data = foampy.load_forces_moments() i = 10 t = data["time"][i:] m = data["moment"]["pressure"]["z"] + data["moment"]["viscous"]["z"] m = m[i:]*width_3d/width_2d period = 2.2 omega = 2*np.pi/period theta = 0.048*np.sin(omega*t) theta_doubledot = -0.048*omega**2*np.sin(omega*t) m_inertial = I_paddle*theta_doubledot m = m + m_inertial plt.figure() plt.plot(t, m) plt.xlabel("t (s)") plt.ylabel("Flapper moment (Nm)") print("Max moment from CFD (including inertia) = {:0.1f}".format(m.max()), "Nm") print("Theoretical max moment (including inertia) =", 5500*3.3, "Nm") plt.show() if __name__ == "__main__": plot_moment()
cc0-1.0
Python
5a6cdb9dc08924dc90a24271dc45f4412250b06a
bump version
hsharrison/experimentator
src/experimentator/__version__.py
src/experimentator/__version__.py
new_contents:

__version__ = '0.2.1'
old_contents:

__version__ = '0.2.0'
mit
Python
52dd018d08e00356218cb2789cee10976eff4359
Disable automatic geocoding for addresses in Django admin
FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares
firecares/firecares_core/admin.py
firecares/firecares_core/admin.py
new_contents:

import autocomplete_light

from .models import Address, ContactRequest, AccountRequest, RegistrationWhitelist
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.gis import admin
from import_export.admin import ExportMixin
from firecares.firecares_core.models import UserProfile, PredeterminedUser, DepartmentAssociationRequest

User = get_user_model()


class LocalOpenLayersAdmin(admin.OSMGeoAdmin):
    openlayers_url = settings.STATIC_URL + 'openlayers/OpenLayers.js'


class AddressAdmin(LocalOpenLayersAdmin):
    list_display = ['__unicode__']
    list_filter = ['state_province']
    search_fields = ['address_line1', 'state_province', 'city']


class ContactRequestAdmin(ExportMixin, admin.ModelAdmin):
    list_display = ['name', 'email', 'created_at']
    search_fields = ['name', 'email']


class AccountRequestAdmin(ExportMixin, admin.ModelAdmin):
    list_display = ['email', 'created_at']
    search_fields = ['email']
    form = autocomplete_light.modelform_factory(AccountRequest, fields='__all__')


class ProfileInline(admin.StackedInline):
    model = UserProfile
    can_delete = False
    form = autocomplete_light.modelform_factory(UserProfile, fields='__all__')


class UserAdmin(ExportMixin, BaseUserAdmin):
    list_display = ['username', 'email', 'first_name', 'last_name', 'is_staff', 'date_joined']
    inlines = [ProfileInline]


class DepartmentAssociationRequestAdmin(ExportMixin, admin.ModelAdmin):
    model = DepartmentAssociationRequest
    form = autocomplete_light.modelform_factory(DepartmentAssociationRequest, fields='__all__')
    search_fields = ['user__username', 'user__email', 'approved_by__username', 'denied_by__username']
    list_filter = ['approved_by', 'denied_by', 'approved_at', 'denied_at']


class RegistrationWhitelistAdmin(ExportMixin, admin.ModelAdmin):
    model = RegistrationWhitelist
    form = autocomplete_light.modelform_factory(RegistrationWhitelist, fields='__all__')
    search_fields = ['email_or_domain', 'department__name', 'created_by__username']
    list_filter = ['created_by', 'created_at', 'department__state']


class PredeterminedUserAdmin(ExportMixin, admin.ModelAdmin):
    model = PredeterminedUser
    form = autocomplete_light.modelform_factory(PredeterminedUser, fields='__all__')
    search_fields = ['email', 'department__name']


admin.site.register(Address, AddressAdmin)
admin.site.register(ContactRequest, ContactRequestAdmin)
admin.site.register(AccountRequest, AccountRequestAdmin)
admin.site.register(RegistrationWhitelist, RegistrationWhitelistAdmin)
admin.site.register(PredeterminedUser, PredeterminedUserAdmin)
admin.site.register(DepartmentAssociationRequest, DepartmentAssociationRequestAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
old_contents:

import autocomplete_light

from .models import Address, ContactRequest, AccountRequest, RegistrationWhitelist
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.gis import admin
from import_export.admin import ExportMixin
from firecares.firecares_core.models import UserProfile, PredeterminedUser, DepartmentAssociationRequest

User = get_user_model()


class LocalOpenLayersAdmin(admin.OSMGeoAdmin):
    openlayers_url = settings.STATIC_URL + 'openlayers/OpenLayers.js'


class AddressAdmin(LocalOpenLayersAdmin):
    list_display = ['__unicode__']
    list_filter = ['state_province']
    search_fields = ['address_line1', 'state_province', 'city']

    def save_model(self, request, obj, form, change):
        if change:
            obj.geocode()
        super(AddressAdmin, self).save_model(request, obj, form, change)


class ContactRequestAdmin(ExportMixin, admin.ModelAdmin):
    list_display = ['name', 'email', 'created_at']
    search_fields = ['name', 'email']


class AccountRequestAdmin(ExportMixin, admin.ModelAdmin):
    list_display = ['email', 'created_at']
    search_fields = ['email']
    form = autocomplete_light.modelform_factory(AccountRequest, fields='__all__')


class ProfileInline(admin.StackedInline):
    model = UserProfile
    can_delete = False
    form = autocomplete_light.modelform_factory(UserProfile, fields='__all__')


class UserAdmin(ExportMixin, BaseUserAdmin):
    list_display = ['username', 'email', 'first_name', 'last_name', 'is_staff', 'date_joined']
    inlines = [ProfileInline]


class DepartmentAssociationRequestAdmin(ExportMixin, admin.ModelAdmin):
    model = DepartmentAssociationRequest
    form = autocomplete_light.modelform_factory(DepartmentAssociationRequest, fields='__all__')
    search_fields = ['user__username', 'user__email', 'approved_by__username', 'denied_by__username']
    list_filter = ['approved_by', 'denied_by', 'approved_at', 'denied_at']


class RegistrationWhitelistAdmin(ExportMixin, admin.ModelAdmin):
    model = RegistrationWhitelist
    form = autocomplete_light.modelform_factory(RegistrationWhitelist, fields='__all__')
    search_fields = ['email_or_domain', 'department__name', 'created_by__username']
    list_filter = ['created_by', 'created_at', 'department__state']


class PredeterminedUserAdmin(ExportMixin, admin.ModelAdmin):
    model = PredeterminedUser
    form = autocomplete_light.modelform_factory(PredeterminedUser, fields='__all__')
    search_fields = ['email', 'department__name']


admin.site.register(Address, AddressAdmin)
admin.site.register(ContactRequest, ContactRequestAdmin)
admin.site.register(AccountRequest, AccountRequestAdmin)
admin.site.register(RegistrationWhitelist, RegistrationWhitelistAdmin)
admin.site.register(PredeterminedUser, PredeterminedUserAdmin)
admin.site.register(DepartmentAssociationRequest, DepartmentAssociationRequestAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
mit
Python
7b3f239964c6663a9b655553202567fccead85c8
Add 'me' to profile IdentifierError
mollie/mollie-api-python
mollie/api/resources/profiles.py
mollie/api/resources/profiles.py
new_contents:

from ..error import IdentifierError
from ..objects.profile import Profile
from .base import Base


class Profiles(Base):
    RESOURCE_ID_PREFIX = 'pfl_'

    def get_resource_object(self, result):
        return Profile(result, self.client)

    def get(self, profile_id, **params):
        if not profile_id or \
                (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'):
            raise IdentifierError(
                "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}' "
                "or it should be 'me'.".format(
                    id=profile_id,
                    prefix=self.RESOURCE_ID_PREFIX)
            )
        return super(Profiles, self).get(profile_id, **params)
old_contents:

from ..error import IdentifierError
from ..objects.profile import Profile
from .base import Base


class Profiles(Base):
    RESOURCE_ID_PREFIX = 'pfl_'

    def get_resource_object(self, result):
        return Profile(result, self.client)

    def get(self, profile_id, **params):
        if not profile_id or \
                (not profile_id.startswith(self.RESOURCE_ID_PREFIX) and not profile_id == 'me'):
            raise IdentifierError(
                "Invalid profile ID: '{id}'. A profile ID should start with '{prefix}'.".format(
                    id=profile_id,
                    prefix=self.RESOURCE_ID_PREFIX)
            )
        return super(Profiles, self).get(profile_id, **params)
bsd-2-clause
Python
5efdd29804249b40c9b9e589cb00cf10c56decb0
Add the standard imports
crateio/carrier
conveyor/tasks/bulk.py
conveyor/tasks/bulk.py
new_contents:

from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import datetime
import logging
import time

from requests.exceptions import ConnectionError, HTTPError

from ..core import Conveyor

logger = logging.getLogger(__name__)


# We ignore the last component as we cannot properly handle it
def get_jobs(last=0):
    current = time.mktime(datetime.datetime.utcnow().timetuple())
    logger.info("Current time is '%s'", current)

    app = Conveyor()
    for package in set(app.processor.pypi.list_packages()):
        yield package


def handle_job(name):
    try:
        tried = 0
        delay = 1
        while True:
            try:
                tried += 1
                app = Conveyor()
                app.processor.update(name)
                break
            except (ConnectionError, HTTPError):
                # Attempt to process again if we have a connection error
                if tried >= 10:
                    # Try a max of 10 times
                    raise
                else:
                    # Wait a moment
                    time.sleep(delay)
                    delay = delay * 2
    except Exception as e:
        logger.exception(str(e))
        raise
old_contents:

import datetime
import logging
import time

from requests.exceptions import ConnectionError, HTTPError

from ..core import Conveyor

logger = logging.getLogger(__name__)


# We ignore the last component as we cannot properly handle it
def get_jobs(last=0):
    current = time.mktime(datetime.datetime.utcnow().timetuple())
    logger.info("Current time is '%s'", current)

    app = Conveyor()
    for package in set(app.processor.pypi.list_packages()):
        yield package


def handle_job(name):
    try:
        tried = 0
        delay = 1
        while True:
            try:
                tried += 1
                app = Conveyor()
                app.processor.update(name)
                break
            except (ConnectionError, HTTPError):
                # Attempt to process again if we have a connection error
                if tried >= 10:
                    # Try a max of 10 times
                    raise
                else:
                    # Wait a moment
                    time.sleep(delay)
                    delay = delay * 2
    except Exception as e:
        logger.exception(str(e))
        raise
bsd-2-clause
Python
00203b7fbf8ed8f8728ce18838acb21eb6224723
Disable unused code
flumotion-mirror/flumotion,flumotion-mirror/flumotion,Flumotion/flumotion,Flumotion/flumotion,timvideos/flumotion,timvideos/flumotion,Flumotion/flumotion,timvideos/flumotion,Flumotion/flumotion
flumotion/test/test_common_vfs.py
flumotion/test/test_common_vfs.py
new_contents:

# -*- Mode: Python; test-case-name: flumotion.test.test_common_planet -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.

# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.

# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.

# Headers in this file shall remain intact.

import errno
import os

from flumotion.common.interfaces import IDirectory
from flumotion.common.testsuite import TestCase
from flumotion.common.vfs import listDirectory


class VFSTest(TestCase):
    def setUp(self):
        self.path = os.path.dirname(__file__)

    def testListDirectory(self):
        try:
            d = listDirectory(self.path)
        except AssertionError:
            # missing backends
            return

        def done(directory):
            self.failUnless(IDirectory.providedBy(directory))
            self.assertEqual(directory.filename, os.path.basename(self.path))
            self.assertEqual(directory.getPath(), self.path)
            self.failUnless(directory.iconNames)
        d.addCallback(done)
        return d
old_contents:

# -*- Mode: Python; test-case-name: flumotion.test.test_common_planet -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.

# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.

# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.

# Headers in this file shall remain intact.

import errno
import os

from flumotion.common.interfaces import IDirectory
from flumotion.common.testsuite import TestCase
from flumotion.common.vfs import listDirectory


class VFSTest(TestCase):
    def setUp(self):
        self.path = os.path.dirname(__file__)
        try:
            os.mkdir(os.path.join(self.path, 'access-denied'), 000)
        except OSError, e:
            if e.errno != errno.EEXIST:
                raise

    def tearDown(self):
        os.rmdir(os.path.join(self.path, 'access-denied'))

    def testListDirectory(self):
        try:
            d = listDirectory(self.path)
        except AssertionError:
            # missing backends
            return

        def done(directory):
            self.failUnless(IDirectory.providedBy(directory))
            self.assertEqual(directory.filename, os.path.basename(self.path))
            self.assertEqual(directory.getPath(), self.path)
            self.failUnless(directory.iconNames)
        d.addCallback(done)
        return d
lgpl-2.1
Python
8a010b6601ecf2eed216b3aa0b604a0985d06544
Update chainer/training/extensions/__init__.py
wkentaro/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer,hvy/chainer,hvy/chainer,chainer/chainer,hvy/chainer,tkerola/chainer,niboshi/chainer,chainer/chainer,chainer/chainer,wkentaro/chainer,okuta/chainer,niboshi/chainer,wkentaro/chainer,keisuke-umezawa/chainer,pfnet/chainer,okuta/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer
chainer/training/extensions/__init__.py
chainer/training/extensions/__init__.py
new_contents:

# import classes and functions
from chainer.training.extensions._snapshot import snapshot  # NOQA
from chainer.training.extensions._snapshot import snapshot_object  # NOQA
from chainer.training.extensions.computational_graph import DumpGraph  # NOQA
from chainer.training.extensions.evaluator import Evaluator  # NOQA
from chainer.training.extensions.exponential_shift import ExponentialShift  # NOQA
from chainer.training.extensions.fail_on_nonnumber import FailOnNonNumber  # NOQA
from chainer.training.extensions.inverse_shift import InverseShift  # NOQA
from chainer.training.extensions.linear_shift import LinearShift  # NOQA
from chainer.training.extensions.log_report import LogReport  # NOQA
from chainer.training.extensions.micro_average import MicroAverage  # NOQA
from chainer.training.extensions.multistep_shift import MultistepShift  # NOQA
from chainer.training.extensions.parameter_statistics import ParameterStatistics  # NOQA
from chainer.training.extensions.plot_report import PlotReport  # NOQA
from chainer.training.extensions.polynomial_shift import PolynomialShift  # NOQA
from chainer.training.extensions.print_report import PrintReport  # NOQA
from chainer.training.extensions.progress_bar import ProgressBar  # NOQA
from chainer.training.extensions.step_shift import StepShift  # NOQA
from chainer.training.extensions.value_observation import observe_lr  # NOQA
from chainer.training.extensions.value_observation import observe_value  # NOQA
from chainer.training.extensions.variable_statistics_plot import VariableStatisticsPlot  # NOQA
from chainer.training.extensions.warmup_shift import WarmupShift  # NOQA

# Alias
from chainer.training.extensions.computational_graph import DumpGraph as dump_graph  # NOQA
old_contents:

# import classes and functions
from chainer.training.extensions._snapshot import snapshot  # NOQA
from chainer.training.extensions._snapshot import snapshot_object  # NOQA
from chainer.training.extensions.computational_graph import DumpGraph  # NOQA
from chainer.training.extensions.evaluator import Evaluator  # NOQA
from chainer.training.extensions.exponential_shift import ExponentialShift  # NOQA
from chainer.training.extensions.fail_on_nonnumber import FailOnNonNumber  # NOQA
from chainer.training.extensions.inverse_shift import InverseShift  # NOQA
from chainer.training.extensions.linear_shift import LinearShift  # NOQA
from chainer.training.extensions.log_report import LogReport  # NOQA
from chainer.training.extensions.micro_average import MicroAverage  # NOQA
from chainer.training.extensions.multistep_shift import MultistepShift  # NOQA
from chainer.training.extensions.parameter_statistics import ParameterStatistics  # NOQA
from chainer.training.extensions.plot_report import PlotReport  # NOQA
from chainer.training.extensions.polynomial_shift import PolynomialShift  # NOQA
from chainer.training.extensions.print_report import PrintReport  # NOQA
from chainer.training.extensions.progress_bar import ProgressBar  # NOQA
from chainer.training.extensions.step_shift import StepShift  # NOQA
from chainer.training.extensions.value_observation import observe_lr  # NOQA
from chainer.training.extensions.value_observation import observe_value  # NOQA
from chainer.training.extensions.variable_statistics_plot import VariableStatisticsPlot  # NOQA
from chainer.training.extensions.warmup_shift import WarmupShift  # NOQA

# Aliase
from chainer.training.extensions.computational_graph import DumpGraph as dump_graph  # NOQA
mit
Python
105dc001e5e0f2e1e02409cf77e5b31f0df30ffe
put on two lines
analyst-collective/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,analyst-collective/dbt
core/dbt/task/clean.py
core/dbt/task/clean.py
new_contents:

import os.path
import os
import shutil

from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger


class CleanTask(ProjectOnlyTask):

    def __is_project_path(self, path):
        proj_path = os.path.abspath('.')
        return not os.path.commonprefix(
            [proj_path, os.path.abspath(path)]
        ) == proj_path

    def __is_protected_path(self, path):
        """
        This function identifies protected paths, so as not to clean them.
        """
        abs_path = os.path.abspath(path)
        protected_paths = self.config.source_paths + \
            self.config.test_paths + ['.']
        protected_abs_paths = [os.path.abspath for p in protected_paths]
        return abs_path in set(protected_abs_paths) or \
            self.__is_project_path(abs_path)

    def run(self):
        """
        This function takes all the paths in the target file
        and cleans the project paths that are not protected.
        """
        for path in self.config.clean_targets:
            logger.info("Checking {}/*".format(path))
            if not self.__is_protected_path(path):
                shutil.rmtree(path, True)
                logger.info(" Cleaned {}/*".format(path))
            else:
                logger.info("ERROR: not cleaning {}/* because it is "
                            "protected".format(path))
        logger.info("Finished cleaning all paths.")
old_contents:

import os.path
import os
import shutil

from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger


class CleanTask(ProjectOnlyTask):

    def __is_project_path(self, path):
        proj_path = os.path.abspath('.')
        return not os.path.commonprefix(
            [proj_path, os.path.abspath(path)]
        ) == proj_path

    def __is_protected_path(self, path):
        """
        This function identifies protected paths, so as not to clean them.
        """
        abs_path = os.path.abspath(path)
        protected_paths = self.config.source_paths + \
            self.config.test_paths + ['.']
        protected_abs_paths = [os.path.abspath for p in protected_paths]
        return abs_path in set(protected_abs_paths) or \
            self.__is_project_path(abs_path)

    def run(self):
        """
        This function takes all the paths in the target file
        and cleans the project paths that are not protected.
        """
        for path in self.config.clean_targets:
            logger.info("Checking {}/*".format(path))
            if not self.__is_protected_path(path):
                shutil.rmtree(path, True)
                logger.info(" Cleaned {}/*".format(path))
            else:
                logger.info("ERROR: not cleaning {}/* because it is protected".format(path))
        logger.info("Finished cleaning all paths.")
apache-2.0
Python
5860d28e0f8f08f1bf4ca2426c08a83b687f33f8
Fix Python3 issue (#173)
code-disaster/fips,floooh/fips,code-disaster/fips,floooh/fips,floooh/fips
mod/tools/node.py
mod/tools/node.py
"""wrapper for node.js, only check_exists""" import subprocess name = 'node' platforms = ['linux'] optional = True not_found = 'node.js required for emscripten cross-compiling' #------------------------------------------------------------------------------ def check_exists(fips_dir) : try : out = subprocess.check_output(['node', '--version']) if not out.startswith(b'v') : log.warn("this doesn't look like a proper node.js 'node'") return False return True except (OSError, subprocess.CalledProcessError) : return False
"""wrapper for node.js, only check_exists""" import subprocess name = 'node' platforms = ['linux'] optional = True not_found = 'node.js required for emscripten cross-compiling' #------------------------------------------------------------------------------ def check_exists(fips_dir) : try : out = subprocess.check_output(['node', '--version']) if not out.startswith('v') : log.warn("this doesn't look like a proper node.js 'node'") return False return True except (OSError, subprocess.CalledProcessError) : return False
mit
Python
1794fb8865241e22a5af30020111471ea00a6250
check if you the plugins really need to be reloaded
inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree
InvenTree/plugin/admin.py
InvenTree/plugin/admin.py
new_contents:

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib import admin
from django.apps import apps

import plugin.models as models


def plugin_update(queryset, new_status: bool):
    """general function for bulk changing plugins"""
    apps_changed = False

    # run through all plugins in the queryset as the save method needs to be overridden
    for model in queryset:
        if model.active is not new_status:
            model.active = new_status
            apps_changed = True
            model.save(no_reload=True)

    # reload plugins if they changed
    if apps_changed:
        app = apps.get_app_config('plugin')
        app.reload_plugins()


@admin.action(description='Activate plugin(s)')
def plugin_activate(modeladmin, request, queryset):
    """activate a set of plugins"""
    plugin_update(queryset, True)


@admin.action(description='Deactivate plugin(s)')
def plugin_deactivate(modeladmin, request, queryset):
    """deactivate a set of plugins"""
    plugin_update(queryset, False)


class PluginConfigAdmin(admin.ModelAdmin):
    """Custom admin with restricted id fields"""
    readonly_fields = ["key", "name", ]
    list_display = ['key', 'name', 'active', ]
    actions = [plugin_activate, plugin_deactivate, ]


admin.site.register(models.PluginConfig, PluginConfigAdmin)
old_contents:

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib import admin
from django.apps import apps

import plugin.models as models


def plugin_update(queryset, new_status: bool):
    """general function for bulk changing plugins"""
    for model in queryset:
        model.active = new_status
        model.save(no_reload=True)
    app = apps.get_app_config('plugin')
    app.reload_plugins()


@admin.action(description='Activate plugin(s)')
def plugin_activate(modeladmin, request, queryset):
    """activate a set of plugins"""
    plugin_update(queryset, True)


@admin.action(description='Deactivate plugin(s)')
def plugin_deactivate(modeladmin, request, queryset):
    """deactivate a set of plugins"""
    plugin_update(queryset, False)


class PluginConfigAdmin(admin.ModelAdmin):
    """Custom admin with restricted id fields"""
    readonly_fields = ["key", "name", ]
    list_display = ['key', 'name', 'active', ]
    actions = [plugin_activate, plugin_deactivate, ]


admin.site.register(models.PluginConfig, PluginConfigAdmin)
mit
Python
3747f72e81a3c143145dcbbdcfbfc13b292f19e1
add filter plot test
srcole/neurodsp,voytekresearch/neurodsp,srcole/neurodsp
neurodsp/tests/test_plts_filt.py
neurodsp/tests/test_plts_filt.py
""" test_plts_filt.py Test filtering plots """ import numpy as np from neurodsp.filt import filter_signal from neurodsp.plts.filt import plot_frequency_response def test_plot_frequency_response(): """ Confirm frequency response plotting function works """ # Test plotting through the filter function sig = np.random.randn(2000) fs = 1000 sig_filt, kernel = filter_signal(sig, fs, 'bandpass', (8, 12), plot_freq_response=True, return_kernel=True, verbose=False) # Test calling frequency response plot directly plot_frequency_response(fs, kernel) assert True
""" test_burst.py Test burst detection functions """ import os import numpy as np import neurodsp from .util import _load_example_data def test_detect_bursts_dual_threshold(): """ Confirm consistency in burst detection results on a generated neural signal """ # Load data and ground-truth filtered signal sig = _load_example_data(data_idx=1) fs = 1000 f_range = (13, 30) # Load past burst findings bursting_true = np.load(os.path.dirname(neurodsp.__file__) + '/tests/data/sample_data_1_burst_deviation.npy') # Detect bursts with different algorithms bursting = neurodsp.detect_bursts_dual_threshold(sig, fs, f_range, (0.9, 2)) assert np.isclose(np.sum(bursting - bursting_true), 0)
apache-2.0
Python
d57c3ad63b737fda4632f5896c8049329bcd4fe2
Make this test work under Windows as well.
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
Lib/test/test_fpformat.py
Lib/test/test_fpformat.py
new_contents:

'''
   Tests for fpformat module
   Nick Mathewson
'''
from test_support import run_unittest
import unittest
from fpformat import fix, sci, NotANumber

StringType = type('')

# Test the old and obsolescent fpformat module.
#
# (It's obsolescent because fix(n,d) == "%.*f"%(d,n) and
#                           sci(n,d) == "%.*e"%(d,n)
# for all reasonable numeric n and d, except that sci gives 3 exponent
# digits instead of 2.
#
# Differences only occur for unreasonable n and d. <.2 wink>)

class FpformatTest(unittest.TestCase):

    def checkFix(self, n, digits):
        result = fix(n, digits)
        if isinstance(n, StringType):
            n = repr(n)
        expected = "%.*f" % (digits, float(n))
        self.assertEquals(result, expected)

    def checkSci(self, n, digits):
        result = sci(n, digits)
        if isinstance(n, StringType):
            n = repr(n)
        expected = "%.*e" % (digits, float(n))
        # add the extra 0 if needed
        num, exp = expected.split("e")
        if len(exp) < 4:
            exp = exp[0] + "0" + exp[1:]
        expected = "%se%s" % (num, exp)
        self.assertEquals(result, expected)

    def test_basic_cases(self):
        self.assertEquals(fix(100.0/3, 3), '33.333')
        self.assertEquals(sci(100.0/3, 3), '3.333e+001')

    def test_reasonable_values(self):
        for d in range(7):
            for val in (1000.0/3, 1000, 1000.0, .002, 1.0/3, 1e10):
                for realVal in (val, 1.0/val, -val, -1.0/val):
                    self.checkFix(realVal, d)
                    self.checkSci(realVal, d)

    def test_failing_values(self):
        # Now for 'unreasonable n and d'
        self.assertEquals(fix(1.0, 1000), '1.'+('0'*1000))
        self.assertEquals(sci("1"+('0'*1000), 0), '1e+1000')

        # This behavior is inconsistent. sci raises an exception; fix doesn't.
        yacht = "Throatwobbler Mangrove"
        self.assertEquals(fix(yacht, 10), yacht)
        try:
            sci(yacht, 10)
        except NotANumber:
            pass
        else:
            self.fail("No exception on non-numeric sci")


run_unittest(FpformatTest)
old_contents:

'''
   Tests for fpformat module
   Nick Mathewson
'''
from test_support import run_unittest
import unittest
from fpformat import fix, sci, NotANumber

StringType = type('')

# Test the old and obsolescent fpformat module.
#
# (It's obsolescent because fix(n,d) == "%.*f"%(d,n) and
#                           sci(n,d) == "%.*e"%(d,n)
# for all reasonable numeric n and d, except that sci gives 3 exponent
# digits instead of 2.
#
# Differences only occur for unreasonable n and d. <.2 wink>)

class FpformatTest(unittest.TestCase):

    def checkFix(self, n, digits):
        result = fix(n, digits)
        if isinstance(n, StringType):
            n = repr(n)
        expected = "%.*f" % (digits, float(n))
        self.assertEquals(result, expected)

    def checkSci(self, n, digits):
        result = sci(n, digits)
        if isinstance(n, StringType):
            n = repr(n)
        expected = "%.*e" % (digits, float(n))
        # add the extra 0
        expected = expected[:-2]+'0'+expected[-2:]
        self.assertEquals(result, expected)

    def test_basic_cases(self):
        self.assertEquals(fix(100.0/3, 3), '33.333')
        self.assertEquals(sci(100.0/3, 3), '3.333e+001')

    def test_reasonable_values(self):
        for d in range(7):
            for val in (1000.0/3, 1000, 1000.0, .002, 1.0/3, 1e10):
                for realVal in (val, 1.0/val, -val, -1.0/val):
                    self.checkFix(realVal, d)
                    self.checkSci(realVal, d)

    def test_failing_values(self):
        # Now for 'unreasonable n and d'
        self.assertEquals(fix(1.0, 1000), '1.'+('0'*1000))
        self.assertEquals(sci("1"+('0'*1000), 0), '1e+1000')

        # This behavior is inconsistent. sci raises an exception; fix doesn't.
        yacht = "Throatwobbler Mangrove"
        self.assertEquals(fix(yacht, 10), yacht)
        try:
            sci(yacht, 10)
        except NotANumber:
            pass
        else:
            self.fail("No exception on non-numeric sci")


run_unittest(FpformatTest)
mit
Python
d3c7f5de6a4c1d15ab3ffe19da18faaecd466fb6
replace mysteriously missing haystack settings from staging
izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend
tndata_backend/tndata_backend/settings/staging.py
tndata_backend/tndata_backend/settings/staging.py
new_contents:

from .base import *

DEBUG = False
#DEBUG = True
STAGING = True

# Site's FQDN and URL. For building links in email.
SITE_DOMAIN = "staging.tndata.org"
SITE_URL = "https://{0}".format(SITE_DOMAIN)

INSTALLED_APPS = INSTALLED_APPS + (
    'debug_toolbar',
    'querycount',
)

# Just like production, but without the cached template loader
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
TEMPLATES[0]['OPTIONS']['loaders'] = [
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
]

# django-cors-headers: https://github.com/ottoyiu/django-cors-headers/
CORS_ORIGIN_ALLOW_ALL = True

# EMAIL via Mailgun. Production server details, below (staging.tndata.org)
EMAIL_SUBJECT_PREFIX = "[Staging TNData] "
EMAIL_HOST = 'smtp.mailgun.org'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'ac2a70a9988127ff7fa217f559c2d59a'
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
EMAIL_USE_SSL = False

# Caching
# Redis notes: redis_max_clients: 10000, edis_max_memory: 512mb
REDIS_PASSWORD = 'VPoDYBZgeyktxArddu4EHrNMdFsUzf7TtFKTP'
REDIS_HOST = 'worker.tndata.org'
REDIS_CACHE_DB = 2
REDIS_CACHE_URL = 'redis://:{password}@{host}:{port}/{db}'.format(
    password=REDIS_PASSWORD,
    host=REDIS_HOST,
    port=REDIS_PORT,
    db=REDIS_CACHE_DB
)
CACHES['default']['LOCATION'] = REDIS_CACHE_URL
CACHES['default']['OPTIONS']['IGNORE_EXCEPTIONS'] = True

# django-haystack settings for staging
HAYSTACK_CONNECTIONS['default']['URL'] = 'http://worker.tndata.org:9200/'
HAYSTACK_CONNECTIONS['default']['INDEX_NAME'] = 'haystack_staging'

# django-cacheops
CACHEOPS_REDIS = {
    'host': REDIS_HOST,
    'port': REDIS_PORT,
    'db': REDIS_CACHE_DB,
    'socket_timeout': 5,
    'password': REDIS_PASSWORD,
}

# Explicit setting for debug_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS = False
MIDDLEWARE_CLASSES = (
    'querycount.middleware.QueryCountMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
INTERNAL_IPS = (
    '159.203.68.206',
    '127.0.0.1',
    '::1',
)
old_contents:

from .base import *

DEBUG = False
#DEBUG = True
STAGING = True

# Site's FQDN and URL. For building links in email.
SITE_DOMAIN = "staging.tndata.org"
SITE_URL = "https://{0}".format(SITE_DOMAIN)

INSTALLED_APPS = INSTALLED_APPS + (
    'debug_toolbar',
    'querycount',
)

# Just like production, but without the cached template loader
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
TEMPLATES[0]['OPTIONS']['loaders'] = [
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
]

# django-cors-headers: https://github.com/ottoyiu/django-cors-headers/
CORS_ORIGIN_ALLOW_ALL = True

# EMAIL via Mailgun. Production server details, below (staging.tndata.org)
EMAIL_SUBJECT_PREFIX = "[Staging TNData] "
EMAIL_HOST = 'smtp.mailgun.org'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'ac2a70a9988127ff7fa217f559c2d59a'
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
EMAIL_USE_SSL = False

# Caching
# Redis notes: redis_max_clients: 10000, edis_max_memory: 512mb
REDIS_PASSWORD = 'VPoDYBZgeyktxArddu4EHrNMdFsUzf7TtFKTP'
REDIS_HOST = 'worker.tndata.org'
REDIS_CACHE_DB = 2
REDIS_CACHE_URL = 'redis://:{password}@{host}:{port}/{db}'.format(
    password=REDIS_PASSWORD,
    host=REDIS_HOST,
    port=REDIS_PORT,
    db=REDIS_CACHE_DB
)
CACHES['default']['LOCATION'] = REDIS_CACHE_URL
CACHES['default']['OPTIONS']['IGNORE_EXCEPTIONS'] = True

# django-cacheops
CACHEOPS_REDIS = {
    'host': REDIS_HOST,
    'port': REDIS_PORT,
    'db': REDIS_CACHE_DB,
    'socket_timeout': 5,
    'password': REDIS_PASSWORD,
}

# Explicit setting for debug_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS = False
MIDDLEWARE_CLASSES = (
    'querycount.middleware.QueryCountMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
INTERNAL_IPS = (
    '159.203.68.206',
    '127.0.0.1',
    '::1',
)
mit
Python
ec4c9a07dc5ca2fab6b341932f65d0cfbd6a332b
Bump version to 1.1
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
molly/__init__.py
molly/__init__.py
""" Molly Project http://mollyproject.org A framework for creating Mobile Web applications for HE/FE institutions. """ __version__ = '1.1'
""" Molly Project http://mollyproject.org A framework for creating Mobile Web applications for HE/FE institutions. """ __version__ = '1.0'
apache-2.0
Python
75e61ecf5efebe78676512d714fc7551f3dfac4c
Fix test
Igalia/snabbswitch,eugeneia/snabb,alexandergall/snabbswitch,SnabbCo/snabbswitch,Igalia/snabbswitch,alexandergall/snabbswitch,snabbco/snabb,eugeneia/snabb,snabbco/snabb,snabbco/snabb,eugeneia/snabb,eugeneia/snabb,Igalia/snabb,Igalia/snabb,Igalia/snabbswitch,eugeneia/snabb,alexandergall/snabbswitch,eugeneia/snabb,snabbco/snabb,Igalia/snabb,snabbco/snabb,Igalia/snabb,alexandergall/snabbswitch,Igalia/snabb,SnabbCo/snabbswitch,eugeneia/snabb,snabbco/snabb,alexandergall/snabbswitch,alexandergall/snabbswitch,alexandergall/snabbswitch,SnabbCo/snabbswitch,Igalia/snabbswitch,Igalia/snabbswitch,Igalia/snabb,alexandergall/snabbswitch,snabbco/snabb,Igalia/snabb,SnabbCo/snabbswitch,Igalia/snabb,snabbco/snabb,eugeneia/snabb
src/program/lwaftr/tests/subcommands/generate_binding_table_test.py
src/program/lwaftr/tests/subcommands/generate_binding_table_test.py
""" Test uses "snabb lwaftr generate-configuration" subcommand. Does not need NICs as it doesn't use any network functionality. The command is just to produce a binding table config result. """ from test_env import ENC, SNABB_CMD, BaseTestCase NUM_SOFTWIRES = 10 class TestGenerateBindingTable(BaseTestCase): generation_args = ( str(SNABB_CMD), 'lwaftr', 'generate-configuration', '193.5.1.100', str(NUM_SOFTWIRES), 'fc00::100', 'fc00:1:2:3:4:5:0:7e', '1') def test_binding_table_generation(self): """ This runs the generate-configuration subcommand and verifies that the output contains a valid binding-table. Usage can be found in the README; however, it's: <ipv4> <num_ipv4s> <br_address> <b4> <psid_len> <shift> """ # Get generate-configuration command output. output = self.run_cmd(self.generation_args) # Split it into lines. config = str(output, ENC).split('\n')[:-1] # Check out that output is softwire-config plus a binding-table. self.assertIn('softwire-config {', config[0].strip()) self.assertIn('binding-table {', config[1].strip()) lineno = 2 while lineno < len(config): line = config[lineno].strip() if not line.startswith('softwire {'): break self.assertTrue(line.startswith('softwire {')) self.assertTrue(line.endswith('}')) lineno = lineno + 1 self.assertTrue(lineno < len(config)) self.assertTrue(config[lineno].strip() == '}')
""" Test uses "snabb lwaftr generate-binding-table" subcommand. Does not need NICs as it doesn't use any network functionality. The command is just to produce a binding table config result. """ from test_env import ENC, SNABB_CMD, BaseTestCase NUM_SOFTWIRES = 10 class TestGenerateBindingTable(BaseTestCase): generation_args = ( str(SNABB_CMD), 'lwaftr', 'generate-binding-table', '193.5.1.100', str(NUM_SOFTWIRES), 'fc00::100', 'fc00:1:2:3:4:5:0:7e', '1') def test_binding_table_generation(self): """ This runs the generate-binding-table subcommand and verifies that it gets back the number of softwires it expects. Usage can be found in the README; however, it's: <ipv4> <num_ipv4s> <br_address> <b4> <psid_len> <shift> """ # Get generate-binding-table command output. output = self.run_cmd(self.generation_args) # Split it into lines. config = str(output, ENC).split('\n')[:-1] # The output should be "binding-table {" followed by NUM_SOFTWIRES # softwires, then "}". self.assertIn('binding-table {', config[0], 'Start line: %s' % config[0]) for idx, softwire in enumerate(config[1:-1]): line_msg = 'Line #%d: %s' % (idx + 2, softwire) self.assertTrue(softwire.startswith(' softwire {'), line_msg) self.assertTrue(softwire.endswith('}'), line_msg) self.assertIn(config[-1], '}', 'End line: %s' % config[0]) # Check that the number of lines is the number of softwires # plus the start and end lines. self.assertEqual(len(config), NUM_SOFTWIRES + 2, len(config))
apache-2.0
Python
224522e88347d4eafd68202222bb83c2d596524b
Modify SCons tools
StatisKit/StatisKit,StatisKit/StatisKit
conda/python-dev/boost_python.py
conda/python-dev/boost_python.py
new_contents:

from types import MethodType
import itertools

def generate(env):
    """Add Builders and construction variables to the Environment."""

    if not 'boost_python' in env['TOOLS'][:-1]:
        env.Tool('system')
        env.AppendUnique(LIBS = ['boost_python'])
        env.AppendUnique(CPPDEFINES = ['BOOST_PYTHON_DYNAMIC_LIB', 'BOOST_ALL_NO_LIB'])

    def BuildBoostPython(env, target, sources):
        # Code to build "target" from "source"
        target = env.File(target).srcnode()
        targets = list(itertools.chain(*[env.SharedObject(None, source) for source in sources if source.suffix in ['.cpp', '.cxx', '.c++']]))
        sources = [source for source in sources if source.suffix == '.h']
        SYSTEM = env['SYSTEM']
        if SYSTEM == 'linux' and len(sources) == 1:
            cmd = env.Command(sources[0].target_from_source('', '.h.gch'), sources[0],
                              '$CXX -o $TARGET -x c++-header -c -fPIC $SHCXXFLAGS $_CCCOMCOM $SOURCE')
            env.Depends(targets, cmd)
        env.Depends(target, targets)
        source = env.File('response_file.rsp')
        with open(source.abspath, 'w') as filehandler:
            filehandler.write(' '.join(target.abspath.replace('\\','/') + ' ' for target in targets))
        env.Append(LINKFLAGS = '@' + source.abspath)
        kwargs = dict(SHLIBSUFFIX = '.so',
                      SHLIBPREFIX = '')
        if SYSTEM == 'osx':
            return env.LoadableModule(target, [], LDMODULESUFFIX='.so',
                                      FRAMEWORKSFLAGS = '-flat_namespace -undefined suppress', **kwargs)
        else:
            return env.LoadableModule(target, [], **kwargs)

    env.BuildBoostPython = MethodType(BuildBoostPython, env)
    env.Tool('python')

def exists(env):
    return 1
old_contents:

from types import MethodType
import itertools

def generate(env):
    """Add Builders and construction variables to the Environment."""

    if not 'boost_python' in env['TOOLS'][:-1]:
        env.Tool('system')
        env.AppendUnique(LIBS = ['boost_python'])
        env.AppendUnique(CPPDEFINES = ['BOOST_PYTHON_DYNAMIC_LIB', 'BOOST_ALL_NO_LIB'])

    def BuildBoostPython(env, target, sources):
        # Code to build "target" from "source"
        target = env.File(target).srcnode()
        targets = list(itertools.chain(*[env.SharedObject(None, source) for source in sources if source.suffix in ['.cpp', '.cxx', '.c++']]))
        print sources
        sources = [source for source in sources if source.suffix == '.h']
        print sources
        SYSTEM = env['SYSTEM']
        print SYSTEM
        if SYSTEM == 'linux' and len(sources) == 1:
            cmd = env.Command(sources[0].target_from_source('', '.h.gch'), sources[0],
                              '$CXX -o $TARGET -x c++-header -c -fPIC $SHCXXFLAGS $_CCCOMCOM $SOURCE')
            env.Depends(targets, cmd)
        env.Depends(target, targets)
        source = env.File('response_file.rsp')
        with open(source.abspath, 'w') as filehandler:
            filehandler.write(' '.join(target.abspath.replace('\\','/') + ' ' for target in targets))
        env.Append(LINKFLAGS = '@' + source.abspath)
        kwargs = dict(SHLIBSUFFIX = '.so',
                      SHLIBPREFIX = '')
        if SYSTEM == 'osx':
            return env.LoadableModule(target, [], LDMODULESUFFIX='.so',
                                      FRAMEWORKSFLAGS = '-flat_namespace -undefined suppress', **kwargs)
        else:
            return env.LoadableModule(target, [], **kwargs)

    env.BuildBoostPython = MethodType(BuildBoostPython, env)
    env.Tool('python')

def exists(env):
    return 1
apache-2.0
Python
fc22465decac6a33543e5232097af7ea847c4029
Bump version to 1.0.1-machtfit-41
machtfit/django-oscar,machtfit/django-oscar,machtfit/django-oscar
src/oscar/__init__.py
src/oscar/__init__.py
new_contents:

import os


# Use 'dev', 'beta', or 'final' as the 4th element to indicate release type.
VERSION = (1, 0, 1, 'machtfit', 41)


def get_short_version():
    return '%s.%s' % (VERSION[0], VERSION[1])


def get_version():
    return '{}.{}.{}-{}-{}'.format(*VERSION)


# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both
# 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be
# extended by templates with the same filename
OSCAR_MAIN_TEMPLATE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'templates/oscar')

OSCAR_CORE_APPS = [
    'oscar',
    'oscar.apps.analytics',
    'oscar.apps.checkout',
    'oscar.apps.address',
    'oscar.apps.shipping',
    'oscar.apps.catalogue',
    'oscar.apps.partner',
    'oscar.apps.basket',
    'oscar.apps.payment',
    'oscar.apps.offer',
    'oscar.apps.order',
    'oscar.apps.customer',
    'oscar.apps.voucher',
    'oscar.apps.dashboard',
    'oscar.apps.dashboard.users',
    'oscar.apps.dashboard.orders',
    'oscar.apps.dashboard.catalogue',
    'oscar.apps.dashboard.offers',
    'oscar.apps.dashboard.partners',
    'oscar.apps.dashboard.ranges',
    # 3rd-party apps that oscar depends on
    'treebeard',
    'sorl.thumbnail',
    'django_tables2',
]


def get_core_apps(overrides=None):
    """
    Return a list of oscar's apps amended with any passed overrides
    """
    if not overrides:
        return OSCAR_CORE_APPS

    def get_app_label(app_label, overrides):
        pattern = app_label.replace('oscar.apps.', '')
        for override in overrides:
            if override.endswith(pattern):
                if 'dashboard' in override and 'dashboard' not in pattern:
                    continue
                return override
        return app_label

    apps = []
    for app_label in OSCAR_CORE_APPS:
        apps.append(get_app_label(app_label, overrides))
    return apps
old_contents:

import os


# Use 'dev', 'beta', or 'final' as the 4th element to indicate release type.
VERSION = (1, 0, 1, 'machtfit', 40)


def get_short_version():
    return '%s.%s' % (VERSION[0], VERSION[1])


def get_version():
    return '{}.{}.{}-{}-{}'.format(*VERSION)


# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both
# 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be
# extended by templates with the same filename
OSCAR_MAIN_TEMPLATE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'templates/oscar')

OSCAR_CORE_APPS = [
    'oscar',
    'oscar.apps.analytics',
    'oscar.apps.checkout',
    'oscar.apps.address',
    'oscar.apps.shipping',
    'oscar.apps.catalogue',
    'oscar.apps.partner',
    'oscar.apps.basket',
    'oscar.apps.payment',
    'oscar.apps.offer',
    'oscar.apps.order',
    'oscar.apps.customer',
    'oscar.apps.voucher',
    'oscar.apps.dashboard',
    'oscar.apps.dashboard.users',
    'oscar.apps.dashboard.orders',
    'oscar.apps.dashboard.catalogue',
    'oscar.apps.dashboard.offers',
    'oscar.apps.dashboard.partners',
    'oscar.apps.dashboard.ranges',
    # 3rd-party apps that oscar depends on
    'treebeard',
    'sorl.thumbnail',
    'django_tables2',
]


def get_core_apps(overrides=None):
    """
    Return a list of oscar's apps amended with any passed overrides
    """
    if not overrides:
        return OSCAR_CORE_APPS

    def get_app_label(app_label, overrides):
        pattern = app_label.replace('oscar.apps.', '')
        for override in overrides:
            if override.endswith(pattern):
                if 'dashboard' in override and 'dashboard' not in pattern:
                    continue
                return override
        return app_label

    apps = []
    for app_label in OSCAR_CORE_APPS:
        apps.append(get_app_label(app_label, overrides))
    return apps
bsd-3-clause
Python
14ee6e2e9986c58fdeb8e482f3426b756ab1d2cb
Bump dev version
rueckstiess/mtools,rueckstiess/mtools
mtools/version.py
mtools/version.py
#!/usr/bin/env python3
"""Mtools version."""

__version__ = '1.7.0-dev'
#!/usr/bin/env python3
"""Mtools version."""

__version__ = '1.6.4'
apache-2.0
Python
f83ce11dccd7209e4c124e9dadbcbbd86568e320
Comment reason why the example is commented out
stefanseefeld/numba,cpcloud/numba,sklam/numba,pitrou/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stonebig/numba,sklam/numba,cpcloud/numba,stuartarchibald/numba,jriehl/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,IntelLabs/numba,seibert/numba,sklam/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,pitrou/numba,jriehl/numba,stonebig/numba,pitrou/numba,gmarkall/numba,gmarkall/numba,stonebig/numba,stuartarchibald/numba,jriehl/numba,seibert/numba,jriehl/numba,gmarkall/numba,seibert/numba,pitrou/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,pombredanne/numba,stefanseefeld/numba,stefanseefeld/numba,stefanseefeld/numba,seibert/numba,pitrou/numba,numba/numba,IntelLabs/numba,pombredanne/numba,pombredanne/numba,numba/numba,stefanseefeld/numba,cpcloud/numba,stuartarchibald/numba,sklam/numba,gmarkall/numba,numba/numba,seibert/numba,pombredanne/numba,IntelLabs/numba,numba/numba,pombredanne/numba
numba/tests/compile_with_pycc.py
numba/tests/compile_with_pycc.py
import cmath

import numpy as np

from numba import exportmany, export
from numba.pycc import CC


#
# New API
#

cc = CC('pycc_test_simple')


@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
    return a * b

_two = 2

# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
    return u ** _two

# These ones need helperlib
cc_helperlib = CC('pycc_test_helperlib')

@cc_helperlib.export('power', 'i8(i8, i8)')
def power(u, v):
    return u ** v

@cc_helperlib.export('sqrt', 'c16(c16)')
def sqrt(u):
    return cmath.sqrt(u)

@cc_helperlib.export('size', 'i8(f8[:])')
def sqrt(arr):
    return arr.size

# This one clashes with libc random() unless pycc takes measures
# to disambiguate implementation names.
@cc_helperlib.export('random', 'f8(i4)')
def random_impl(seed):
    np.random.seed(seed)
    return np.random.random()

# These ones need NRT
cc_nrt = CC('pycc_test_nrt')
cc_nrt.use_nrt = True

@cc_nrt.export('zero_scalar', 'f8(i4)')
def zero_scalar(n):
    arr = np.zeros(n)
    return arr[-1]

# Fails because it needs an environment
#@cc_nrt.export('zeros', 'f8[:](i4)')
#def zeros(n):
    #return np.zeros(n)


#
# Legacy API
#

exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to due with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
import cmath

import numpy as np

from numba import exportmany, export
from numba.pycc import CC


#
# New API
#

cc = CC('pycc_test_simple')


@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
    return a * b

_two = 2

# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
    return u ** _two

# These ones need helperlib
cc_helperlib = CC('pycc_test_helperlib')

@cc_helperlib.export('power', 'i8(i8, i8)')
def power(u, v):
    return u ** v

@cc_helperlib.export('sqrt', 'c16(c16)')
def sqrt(u):
    return cmath.sqrt(u)

@cc_helperlib.export('size', 'i8(f8[:])')
def sqrt(arr):
    return arr.size

# This one clashes with libc random() unless pycc takes measures
# to disambiguate implementation names.
@cc_helperlib.export('random', 'f8(i4)')
def random_impl(seed):
    np.random.seed(seed)
    return np.random.random()

# These ones need NRT
cc_nrt = CC('pycc_test_nrt')
cc_nrt.use_nrt = True

@cc_nrt.export('zero_scalar', 'f8(i4)')
def zero_scalar(n):
    arr = np.zeros(n)
    return arr[-1]

#@cc_nrt.export('zeros', 'f8(i4)')
#def empty_scalar(n):
    #arr = np.empty(n)
    #return arr[-1]


#
# Legacy API
#

exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to due with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
bsd-2-clause
Python
6eedd6e5b96d9ee051e7708c4c127fdfb6c2a92b
modify file : add class Report and Score
KIKUYA-Takumi/NippoKun,KIKUYA-Takumi/NippoKun,KIKUYA-Takumi/NippoKun
NippoKun/report/models.py
NippoKun/report/models.py
from django.contrib.auth.models import User
from django.db import models

# Create your models here.
class Report(models.Model):
    report_author = models.ForeignKey(User, related_name='report_author')
    report_title = models.CharField(max_length=50)
    report_content = models.TextField(max_length=999)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

class Score(models.Model):
    report = models.ForeignKey(Report, related_name='score')
    score_author = models.ForeignKey(User, related_name='score_author')
    score = models.IntegerField()
    evaluate_point = models.TextField(max_length=30)
    comment = models.TextField(max_length=999, blank=True)
    average_score = models.FloatField()
    scored_at = models.DateTimeField(auto_now=True)
from django.db import models

# Create your models here.
mit
Python
a2eae87fc76ba1e9fbfa8102c3e19c239445a62a
Fix form retrieval in ModelForm
exekias/droplet,exekias/droplet,exekias/droplet
nazs/web/forms.py
nazs/web/forms.py
from achilles.forms import *  # noqa

from nazs.models import SingletonModel


# Override forms template
Form.template_name = 'web/form.html'


class ModelForm(ModelForm):

    def get_form(self, form_data=None, *args, **kwargs):
        # manage SingletonModels
        if issubclass(self.form_class.Meta.model, SingletonModel):
            instance = self.form_class.Meta.model.get()
            return self.form_class(form_data, instance=instance)
        else:
            return super(ModelForm, self).get_form(form_data, *args, **kwargs)
from achilles.forms import *  # noqa

from nazs.models import SingletonModel


# Override forms template
Form.template_name = 'web/form.html'


class ModelForm(ModelForm):

    def get_form(self, form_data=None, *args, **kwargs):
        # manage SingletonModels
        if issubclass(self.form_class.Meta.model, SingletonModel):
            instance = self.form_class.Meta.model.get()
            return self.form_class(form_data, instance=instance)
        else:
            return super(ModelForm, self).get_form(*args, **kwargs)
agpl-3.0
Python
a4ee20e078175c5d75380afca7b02305440ab32f
Add a couple numeric columns to better portray overall performance.
python-postgres/fe,python-postgres/fe
postgresql/test/perf_query_io.py
postgresql/test/perf_query_io.py
#!/usr/bin/env python
##
# copyright 2009, James William Pye
# http://python.projects.postgresql.org
##
# Statement I/O: Mass insert and select performance
##
import os
import time
import sys
import decimal

def insertSamples(count, insert_records):
    recs = [
        (-3, 123, 0xfffffea023,
         decimal.Decimal("90900023123.40031"), decimal.Decimal("432.40031"),
         'some_óäæ_thing', 'varying', 'æ')
        for x in range(count)
    ]
    gen = time.time()
    insert_records.load(recs)
    fin = time.time()
    xacttime = fin - gen
    ats = count / xacttime
    sys.stderr.write(
        "INSERT Summary,\n " \
        "inserted tuples: %d\n " \
        "total time: %f\n " \
        "average tuples per second: %f\n\n" %(
            count, xacttime, ats,
        )
    )

def timeTupleRead(portal):
    loops = 0
    tuples = 0
    genesis = time.time()
    for x in portal.chunks:
        loops += 1
        tuples += len(x)
    finalis = time.time()
    looptime = finalis - genesis
    ats = tuples / looptime
    sys.stderr.write(
        "SELECT Summary,\n " \
        "looped: {looped}\n " \
        "looptime: {looptime}\n " \
        "tuples: {ntuples}\n " \
        "average tuples per second: {tps}\n ".format(
            looped = loops,
            looptime = looptime,
            ntuples = tuples,
            tps = ats
        )
    )

def main(count):
    execute('CREATE TEMP TABLE samples '
        '(i2 int2, i4 int4, i8 int8, n numeric, n2 numeric, t text, v varchar, c char)')
    insert_records = prepare(
        "INSERT INTO samples VALUES ($1, $2, $3, $4, $5, $6, $7, $8)"
    )
    select_records = prepare("SELECT * FROM samples")
    try:
        insertSamples(count, insert_records)
        timeTupleRead(select_records())
    finally:
        execute("DROP TABLE samples")

def command(args):
    main(int((args + [25000])[1]))

if __name__ == '__main__':
    command(sys.argv)
#!/usr/bin/env python
##
# copyright 2009, James William Pye
# http://python.projects.postgresql.org
##
# Statement I/O: Mass insert and select performance
##
import os
import time
import sys

def insertSamples(count, insert_records):
    recs = [
        (-3, 123, 0xfffffea023, 'some_óäæ_thing', 'varying', 'æ')
        for x in range(count)
    ]
    gen = time.time()
    insert_records.load(recs)
    fin = time.time()
    xacttime = fin - gen
    ats = count / xacttime
    sys.stderr.write(
        "INSERT Summary,\n " \
        "inserted tuples: %d\n " \
        "total time: %f\n " \
        "average tuples per second: %f\n\n" %(
            count, xacttime, ats,
        )
    )

def timeTupleRead(portal):
    loops = 0
    tuples = 0
    genesis = time.time()
    for x in portal.chunks:
        loops += 1
        tuples += len(x)
    finalis = time.time()
    looptime = finalis - genesis
    ats = tuples / looptime
    sys.stderr.write(
        "SELECT Summary,\n " \
        "looped: {looped}\n " \
        "looptime: {looptime}\n " \
        "tuples: {ntuples}\n " \
        "average tuples per second: {tps}\n ".format(
            looped = loops,
            looptime = looptime,
            ntuples = tuples,
            tps = ats
        )
    )

def main(count):
    execute('CREATE TEMP TABLE samples '
        '(i2 int2, i4 int4, i8 int8, t text, v varchar, c char)')
    insert_records = prepare(
        "INSERT INTO samples VALUES ($1, $2, $3, $4, $5, $6)"
    )
    select_records = prepare("SELECT * FROM samples")
    try:
        insertSamples(count, insert_records)
        timeTupleRead(select_records())
    finally:
        execute("DROP TABLE samples")

def command(args):
    main(int((args + [25000])[1]))

if __name__ == '__main__':
    command(sys.argv)
bsd-3-clause
Python
b6dff8fcd7dec56703006f2a7bcf1c8c72d0c21b
FIX price sec. related field as readonly
ingadhoc/product,ingadhoc/product
price_security/models/invoice.py
price_security/models/invoice.py
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api


class account_invoice_line(models.Model):
    _inherit = 'account.invoice.line'

    # we add this fields instead of making original readonly because we need
    # on change to change values, we make readonly in view because sometimes
    # we want them to be writeable
    invoice_line_tax_id_readonly = fields.Many2many(
        related='invoice_line_tax_id',
    )
    price_unit_readonly = fields.Float(
        related='price_unit',
    )
    product_can_modify_prices = fields.Boolean(
        related='product_id.can_modify_prices',
        readonly=True,
        string='Product Can modify prices')

    @api.one
    @api.constrains(
        'discount',
        'product_can_modify_prices')
    def check_discount(self):
        if (
                self.user_has_groups(
                    'price_security.group_restrict_prices') and
                not self.product_can_modify_prices and self.invoice_id
        ):
            self.env.user.check_discount(
                self.discount,
                self.invoice_id.partner_id.property_product_pricelist.id)
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api


class account_invoice_line(models.Model):
    _inherit = 'account.invoice.line'

    # we add this fields instead of making original readonly because we need
    # on change to change values, we make readonly in view because sometimes
    # we want them to be writeable
    invoice_line_tax_id_readonly = fields.Many2many(
        related='invoice_line_tax_id',
    )
    price_unit_readonly = fields.Float(
        related='price_unit',
    )
    product_can_modify_prices = fields.Boolean(
        related='product_id.can_modify_prices',
        string='Product Can modify prices')

    @api.one
    @api.constrains(
        'discount',
        'product_can_modify_prices')
    def check_discount(self):
        if (
                self.user_has_groups(
                    'price_security.group_restrict_prices') and
                not self.product_can_modify_prices and self.invoice_id
        ):
            self.env.user.check_discount(
                self.discount,
                self.invoice_id.partner_id.property_product_pricelist.id)
agpl-3.0
Python
fb142d3324ca974c9308cb8ab18dd9db2c2aae0b
Use monospace font
Aldenis2112/qutepart,Aldenis2112/qutepart,Aldenis2112/qutepart,hlamer/qutepart,Aldenis2112/qutepart,hlamer/qutepart,andreikop/qutepart,hlamer/qutepart,Aldenis2112/qutepart,andreikop/qutepart,Aldenis2112/qutepart,hlamer/qutepart,andreikop/qutepart,andreikop/qutepart,hlamer/qutepart,Aldenis2112/qutepart,andreikop/qutepart,hlamer/qutepart,hlamer/qutepart,andreikop/qutepart,Aldenis2112/qutepart,andreikop/qutepart,hlamer/qutepart,andreikop/qutepart,hlamer/qutepart,andreikop/qutepart,Aldenis2112/qutepart,hlamer/qutepart,Aldenis2112/qutepart,Aldenis2112/qutepart,andreikop/qutepart,hlamer/qutepart,andreikop/qutepart
editor.py
editor.py
#!/usr/bin/env python

import sys

import sip
sip.setapi('QString', 2)

from PyQt4.QtGui import QApplication, QFont, QPlainTextEdit, QSyntaxHighlighter, \
    QTextCharFormat, QTextBlockUserData

from qutepart.SyntaxHighlighter import SyntaxHighlighter
from qutepart.syntax_manager import SyntaxManager


def main():
    if len(sys.argv) != 2:
        print 'Usage:\n\t%s FILE' % sys.argv[0]

    filePath = sys.argv[1]

    try:
        syntax = SyntaxManager().getSyntaxBySourceFileName(filePath)
    except KeyError:
        print 'No syntax for', filePath
        return

    print 'Using syntax', syntax.name

    with open(filePath) as file:
        text = file.read()

    app = QApplication(sys.argv)

    pte = QPlainTextEdit()
    pte.setPlainText(text)
    pte.setWindowTitle(filePath)
    pte.setFont(QFont("Monospace"))

    hl = SyntaxHighlighter(syntax, pte.document())

    pte.show()

    return app.exec_()


if __name__ == '__main__':
    main()
#!/usr/bin/env python

import sys

import sip
sip.setapi('QString', 2)

from PyQt4.QtGui import QApplication, QPlainTextEdit, QSyntaxHighlighter, \
    QTextCharFormat, QTextBlockUserData

from qutepart.SyntaxHighlighter import SyntaxHighlighter
from qutepart.syntax_manager import SyntaxManager


def main():
    if len(sys.argv) != 2:
        print 'Usage:\n\t%s FILE' % sys.argv[0]

    filePath = sys.argv[1]

    try:
        syntax = SyntaxManager().getSyntaxBySourceFileName(filePath)
    except KeyError:
        print 'No syntax for', filePath
        return

    print 'Using syntax', syntax.name

    with open(filePath) as file:
        text = file.read()

    app = QApplication(sys.argv)

    pte = QPlainTextEdit()
    pte.setPlainText(text)
    pte.setWindowTitle(filePath)

    hl = SyntaxHighlighter(syntax, pte.document())

    pte.show()

    return app.exec_()


if __name__ == '__main__':
    main()
lgpl-2.1
Python
a098efa1b69d2de3b1e2437a056b0c6937cbf998
add documentation
armijnhemel/binaryanalysis
src/bat/images.py
src/bat/images.py
#!/usr/bin/python

## Binary Analysis Tool
## Copyright 2012 Armijn Hemel for Tjaldur Software Governance Solutions
## Licensed under Apache 2.0, see LICENSE file for details

'''
This is a plugin for the Binary Analysis Tool. It generates images of files,
both full files and thumbnails. The files can be used for informational
purposes, such as detecting roughly where offsets can be found, if data is
compressed or encrypted, etc.

It also generates histograms, which show how different byte values are
distributed. This can provide another visual clue about how files are
constructed. Binaries from the same type (like ELF binaries) are actually
quite similar, so binaries that significantly deviate from this could mean
something interesting.

This should be run as a postrun scan
'''

import os, os.path, sys, subprocess, array
from PIL import Image

def generateImages(filename, unpackreport, leafscans, envvars={}):
    if not unpackreport.has_key('sha256'):
        return
    scanenv = os.environ
    if envvars != None:
        for en in envvars.split(':'):
            try:
                (envname, envvalue) = en.split('=')
                scanenv[envname] = envvalue
            except Exception, e:
                pass

    ## TODO: check if BAT_IMAGEDIR exists
    imagedir = scanenv.get('BAT_IMAGEDIR', '.')

    fwfile = open(filename)

    ## this is very inefficient for large files, but we *really* need all the data :-(
    fwdata = fwfile.read()
    fwfile.close()

    fwlen = len(fwdata)

    if fwlen > 1024:
        height = 1024
    else:
        height = fwlen
    width = fwlen/height

    ## we might need to add some bytes so we can create a valid picture
    if fwlen%height > 0:
        width = width + 1
        for i in range(0, height - (fwlen%height)):
            fwdata = fwdata + chr(0)

    imgbuffer = buffer(bytearray(fwdata))

    im = Image.frombuffer("L", (height, width), imgbuffer, "raw", "L", 0, 1)
    im.save("%s/%s.png" % (imagedir, unpackreport['sha256']))

    if width > 100:
        imthumb = im.thumbnail((height/4, width/4))
        im.save("%s/%s-thumbnail.png" % (imagedir, unpackreport['sha256']))

    '''
    ## generate histogram
    p = subprocess.Popen(['python', '/home/armijn/gpltool/trunk/bat-extratools/bat-visualisation/bat-generate-histogram.py', '-i', filename, '-o', '%s/%s-histogram.png' % (imagedir, unpackreport['sha256'])], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
    (stanout, stanerr) = p.communicate()
    if p.returncode != 0:
        print >>sys.stderr, stanerr
    '''
#!/usr/bin/python

## Binary Analysis Tool
## Copyright 2012 Armijn Hemel for Tjaldur Software Governance Solutions
## Licensed under Apache 2.0, see LICENSE file for details

'''
This is a plugin for the Binary Analysis Tool. It generates images of files,
both full files and thumbnails. The files can be used for informational
purposes, such as detecting roughly where offsets can be found, if data is
compressed or encrypted, etc.

This should be run as a postrun scan
'''

import os, os.path, sys, subprocess, array
from PIL import Image

def generateImages(filename, unpackreport, leafscans, envvars={}):
    if not unpackreport.has_key('sha256'):
        return
    scanenv = os.environ
    if envvars != None:
        for en in envvars.split(':'):
            try:
                (envname, envvalue) = en.split('=')
                scanenv[envname] = envvalue
            except Exception, e:
                pass

    ## TODO: check if BAT_IMAGEDIR exists
    imagedir = scanenv.get('BAT_IMAGEDIR', '.')

    fwfile = open(filename)

    ## this is very inefficient for large files, but we *really* need all the data :-(
    fwdata = fwfile.read()
    fwfile.close()

    fwlen = len(fwdata)

    if fwlen > 1024:
        height = 1024
    else:
        height = fwlen
    width = fwlen/height

    ## we might need to add some bytes so we can create a valid picture
    if fwlen%height > 0:
        width = width + 1
        for i in range(0, height - (fwlen%height)):
            fwdata = fwdata + chr(0)

    imgbuffer = buffer(bytearray(fwdata))

    im = Image.frombuffer("L", (height, width), imgbuffer, "raw", "L", 0, 1)
    im.save("%s/%s.png" % (imagedir, unpackreport['sha256']))

    if width > 100:
        imthumb = im.thumbnail((height/4, width/4))
        im.save("%s/%s-thumbnail.png" % (imagedir, unpackreport['sha256']))

    '''
    p = subprocess.Popen(['python', '/home/armijn/gpltool/trunk/bat-extratools/bat-visualisation/bat-generate-histogram.py', '-i', filename, '-o', '%s/%s-histogram.png' % (imagedir, unpackreport['sha256'])], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
    (stanout, stanerr) = p.communicate()
    if p.returncode != 0:
        print >>sys.stderr, stanerr
    '''
apache-2.0
Python
7a60bd74b3af40223553c64dafed07c46c5db639
add a --jit commandline option
cosmoharrigan/pyrolog
prolog/targetprologstandalone.py
prolog/targetprologstandalone.py
""" A simple standalone target for the prolog interpreter. """ import sys from prolog.interpreter.translatedmain import repl, execute # __________ Entry point __________ from prolog.interpreter.continuation import Engine, jitdriver from prolog.interpreter import term from prolog.interpreter import arithmetic # for side effects from prolog import builtin # for side effects e = Engine(load_system=True) term.DEBUG = False def entry_point(argv): e.clocks.startup() # XXX crappy argument handling for i in range(len(argv)): if argv[i] == "--jit": if len(argv) == i + 1: print "missing argument after --jit" return 2 jitarg = argv[i + 1] del argv[i:i+2] jitdriver.set_user_param(jitarg) break if len(argv) == 2: execute(e, argv[1]) if len(argv) > 2: print "too many arguments" return 2 try: repl(e) except SystemExit: return 1 return 0 # _____ Define and setup target ___ def target(driver, args): driver.exe_name = 'pyrolog-%(backend)s' return entry_point, None def portal(driver): from prolog.interpreter.portal import get_portal return get_portal(driver) def jitpolicy(self): from pypy.jit.codewriter.policy import JitPolicy return JitPolicy() if __name__ == '__main__': entry_point(sys.argv)
""" A simple standalone target for the prolog interpreter. """ import sys from prolog.interpreter.translatedmain import repl, execute # __________ Entry point __________ from prolog.interpreter.continuation import Engine from prolog.interpreter import term from prolog.interpreter import arithmetic # for side effects from prolog import builtin # for side effects e = Engine(load_system=True) term.DEBUG = False def entry_point(argv): e.clocks.startup() if len(argv) == 2: execute(e, argv[1]) try: repl(e) except SystemExit: return 1 return 0 # _____ Define and setup target ___ def target(driver, args): driver.exe_name = 'pyrolog-%(backend)s' return entry_point, None def portal(driver): from prolog.interpreter.portal import get_portal return get_portal(driver) def jitpolicy(self): from pypy.jit.codewriter.policy import JitPolicy return JitPolicy() if __name__ == '__main__': entry_point(sys.argv)
mit
Python
3f1f86c358efc6d38012191c4b613aa775861805
Fix 'graph3d.py' to read from VTKData directory
ashray/VTK-EVM,cjh1/VTK,gram526/VTK,demarle/VTK,johnkit/vtk-dev,demarle/VTK,jmerkow/VTK,candy7393/VTK,hendradarwin/VTK,SimVascular/VTK,gram526/VTK,cjh1/VTK,collects/VTK,keithroe/vtkoptix,candy7393/VTK,demarle/VTK,ashray/VTK-EVM,keithroe/vtkoptix,jmerkow/VTK,ashray/VTK-EVM,gram526/VTK,aashish24/VTK-old,candy7393/VTK,keithroe/vtkoptix,biddisco/VTK,sumedhasingla/VTK,sankhesh/VTK,sankhesh/VTK,johnkit/vtk-dev,jmerkow/VTK,keithroe/vtkoptix,candy7393/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,hendradarwin/VTK,sumedhasingla/VTK,aashish24/VTK-old,biddisco/VTK,msmolens/VTK,johnkit/vtk-dev,collects/VTK,msmolens/VTK,keithroe/vtkoptix,hendradarwin/VTK,collects/VTK,demarle/VTK,SimVascular/VTK,johnkit/vtk-dev,SimVascular/VTK,ashray/VTK-EVM,demarle/VTK,johnkit/vtk-dev,candy7393/VTK,gram526/VTK,berendkleinhaneveld/VTK,sankhesh/VTK,sankhesh/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,mspark93/VTK,ashray/VTK-EVM,demarle/VTK,aashish24/VTK-old,mspark93/VTK,mspark93/VTK,berendkleinhaneveld/VTK,cjh1/VTK,jmerkow/VTK,SimVascular/VTK,msmolens/VTK,candy7393/VTK,SimVascular/VTK,biddisco/VTK,jmerkow/VTK,candy7393/VTK,sumedhasingla/VTK,biddisco/VTK,sumedhasingla/VTK,mspark93/VTK,ashray/VTK-EVM,demarle/VTK,sankhesh/VTK,gram526/VTK,ashray/VTK-EVM,sumedhasingla/VTK,gram526/VTK,cjh1/VTK,hendradarwin/VTK,SimVascular/VTK,candy7393/VTK,cjh1/VTK,msmolens/VTK,sumedhasingla/VTK,sankhesh/VTK,aashish24/VTK-old,collects/VTK,mspark93/VTK,berendkleinhaneveld/VTK,msmolens/VTK,gram526/VTK,sankhesh/VTK,SimVascular/VTK,hendradarwin/VTK,cjh1/VTK,collects/VTK,keithroe/vtkoptix,mspark93/VTK,biddisco/VTK,msmolens/VTK,hendradarwin/VTK,berendkleinhaneveld/VTK,mspark93/VTK,demarle/VTK,berendkleinhaneveld/VTK,sumedhasingla/VTK,msmolens/VTK,collects/VTK,ashray/VTK-EVM,johnkit/vtk-dev,aashish24/VTK-old,gram526/VTK,johnkit/vtk-dev,sumedhasingla/VTK,biddisco/VTK,keithroe/vtkoptix,SimVascular/VTK,hendradarwin/VTK,mspark93/VTK,msmolens/VTK,sankhesh/VTK,aashish24/VTK-old,keithroe/vtkoptix,jmerkow/VTK,biddisco/VTK
Examples/Infovis/Python/graph3d.py
Examples/Infovis/Python/graph3d.py
from vtk import *
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

reader = vtkXGMLReader()
reader.SetFileName(VTK_DATA_ROOT + "/Data/Infovis/fsm.gml")
reader.Update()

strategy = vtkSpanTreeLayoutStrategy()
strategy.DepthFirstSpanningTreeOn()

view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("vertex id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategy( strategy )
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom

theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()

view.GetRenderWindow().SetSize(600, 600)
view.ResetCamera()
view.Render()

#Here's the window with David's original layout methodology
# Aside from the theme elements in the view above, the notable
# difference between the two views is the angling on the edges.

layout = vtkGraphLayout()
layout.SetLayoutStrategy(strategy)
layout.SetInputConnection(reader.GetOutputPort())

edge_geom = vtkGraphToPolyData()
edge_geom.SetInputConnection(layout.GetOutputPort())

vertex_geom = vtkGraphToPoints()
vertex_geom.SetInputConnection(layout.GetOutputPort())

# Vertex pipeline - mark each vertex with a cube glyph
cube = vtkCubeSource()
cube.SetXLength(0.3)
cube.SetYLength(0.3)
cube.SetZLength(0.3)

glyph = vtkGlyph3D()
glyph.SetInputConnection(vertex_geom.GetOutputPort())
glyph.SetSourceConnection(0, cube.GetOutputPort())

gmap = vtkPolyDataMapper()
gmap.SetInputConnection(glyph.GetOutputPort())

gact = vtkActor()
gact.SetMapper(gmap)
gact.GetProperty().SetColor(0,0,1)

# Edge pipeline - map edges to lines
mapper = vtkPolyDataMapper()
mapper.SetInputConnection(edge_geom.GetOutputPort())

actor = vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(0.4,0.4,0.6)

# Renderer, window, and interaction
ren = vtkRenderer()
ren.AddActor(actor)
ren.AddActor(gact)
ren.ResetCamera()

renWin = vtkRenderWindow()
renWin.AddRenderer(ren)
renWin.SetSize(800,550)

iren = vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
iren.Initialize()
#iren.Start()

view.GetInteractor().Start()
from vtk import *

reader = vtkXGMLReader()
reader.SetFileName("fsm.gml")
reader.Update()

strategy = vtkSpanTreeLayoutStrategy()
strategy.DepthFirstSpanningTreeOn()

view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("vertex id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategy( strategy )
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom

theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()

view.GetRenderWindow().SetSize(600, 600)
view.ResetCamera()
view.Render()

#Here's the window with David's original layout methodology
# Aside from the theme elements in the view above, the notable
# difference between the two views is the angling on the edges.

layout = vtkGraphLayout()
layout.SetLayoutStrategy(strategy)
layout.SetInputConnection(reader.GetOutputPort())

edge_geom = vtkGraphToPolyData()
edge_geom.SetInputConnection(layout.GetOutputPort())

vertex_geom = vtkGraphToPoints()
vertex_geom.SetInputConnection(layout.GetOutputPort())

# Vertex pipeline - mark each vertex with a cube glyph
cube = vtkCubeSource()
cube.SetXLength(0.3)
cube.SetYLength(0.3)
cube.SetZLength(0.3)

glyph = vtkGlyph3D()
glyph.SetInputConnection(vertex_geom.GetOutputPort())
glyph.SetSourceConnection(0, cube.GetOutputPort())

gmap = vtkPolyDataMapper()
gmap.SetInputConnection(glyph.GetOutputPort())

gact = vtkActor()
gact.SetMapper(gmap)
gact.GetProperty().SetColor(0,0,1)

# Edge pipeline - map edges to lines
mapper = vtkPolyDataMapper()
mapper.SetInputConnection(edge_geom.GetOutputPort())

actor = vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(0.4,0.4,0.6)

# Renderer, window, and interaction
ren = vtkRenderer()
ren.AddActor(actor)
ren.AddActor(gact)
ren.ResetCamera()

renWin = vtkRenderWindow()
renWin.AddRenderer(ren)
renWin.SetSize(800,550)

iren = vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
iren.Initialize()
#iren.Start()

view.GetInteractor().Start()
bsd-3-clause
Python
0cd2af0f20b6b544f0d36140a098ca8e3058d8fa
Update constants
bankonme/OpenBazaar,must-/OpenBazaar,must-/OpenBazaar,saltduck/OpenBazaar,bglassy/OpenBazaar,atsuyim/OpenBazaar,STRML/OpenBazaar,hoffmabc/OpenBazaar,im0rtel/OpenBazaar,atsuyim/OpenBazaar,kordless/OpenBazaar,bankonme/OpenBazaar,dlcorporation/openbazaar,habibmasuro/OpenBazaar,STRML/OpenBazaar,Renelvon/OpenBazaar,must-/OpenBazaar,atsuyim/OpenBazaar,freebazaar/FreeBazaar,saltduck/OpenBazaar,dlcorporation/openbazaar,NolanZhao/OpenBazaar,kordless/OpenBazaar,dionyziz/OpenBazaar,bankonme/OpenBazaar,dlcorporation/openbazaar,rllola/OpenBazaar,zenhacklab/OpenBazaar,dionyziz/OpenBazaar,rllola/OpenBazaar,freebazaar/FreeBazaar,bglassy/OpenBazaar,habibmasuro/OpenBazaar,akhavr/OpenBazaar,mirrax/OpenBazaar,hoffmabc/OpenBazaar,yagoulas/OpenBazaar,akhavr/OpenBazaar,matiasbastos/OpenBazaar,kordless/OpenBazaar,yagoulas/OpenBazaar,matiasbastos/OpenBazaar,dlcorporation/openbazaar,mirrax/OpenBazaar,NolanZhao/OpenBazaar,Renelvon/OpenBazaar,yagoulas/OpenBazaar,tortxof/OpenBazaar,yagoulas/OpenBazaar,akhavr/OpenBazaar,bglassy/OpenBazaar,zenhacklab/OpenBazaar,matiasbastos/OpenBazaar,dionyziz/OpenBazaar,STRML/OpenBazaar,mirrax/OpenBazaar,im0rtel/OpenBazaar,tortxof/OpenBazaar,dlcorporation/openbazaar,freebazaar/FreeBazaar,im0rtel/OpenBazaar,dionyziz/OpenBazaar,hoffmabc/OpenBazaar,freebazaar/FreeBazaar,atsuyim/OpenBazaar,Renelvon/OpenBazaar,habibmasuro/OpenBazaar,rllola/OpenBazaar,zenhacklab/OpenBazaar,blakejakopovic/OpenBazaar,NolanZhao/OpenBazaar,hoffmabc/OpenBazaar,saltduck/OpenBazaar,habibmasuro/OpenBazaar,hoffmabc/OpenBazaar,zenhacklab/OpenBazaar,akhavr/OpenBazaar,hoffmabc/OpenBazaar,akhavr/OpenBazaar,bankonme/OpenBazaar,blakejakopovic/OpenBazaar,dionyziz/OpenBazaar,zenhacklab/OpenBazaar,dlcorporation/openbazaar,STRML/OpenBazaar,bglassy/OpenBazaar,NolanZhao/OpenBazaar,kordless/OpenBazaar,tortxof/OpenBazaar,mirrax/OpenBazaar,must-/OpenBazaar,matiasbastos/OpenBazaar,tortxof/OpenBazaar,rllola/OpenBazaar,im0rtel/OpenBazaar,Renelvon/OpenBazaar,blakejakopovic/OpenBazaar,freebazaar/FreeBazaar,blakejakopovic/OpenBazaar,saltduck/OpenBazaar
node/constants.py
node/constants.py
######### KADEMLIA CONSTANTS ###########

#: Small number Representing the degree of parallelism in network calls
alpha = 3

#: Maximum number of contacts stored in a bucket; this should be an even number
k = 8

#: Timeout for network operations (in seconds)
rpcTimeout = 5

# Delay between iterations of iterative node lookups (for loose parallelism) (in seconds)
iterativeLookupDelay = rpcTimeout / 2

#: If a k-bucket has not been used for this amount of time, refresh it (in seconds)
refreshTimeout = 3600 # 1 hour

#: The interval at which nodes replicate (republish/refresh) data they are holding
replicateInterval = refreshTimeout

# The time it takes for data to expire in the network; the original publisher of the data
# will also republish the data at this time if it is still valid
dataExpireTimeout = 86400 # 24 hours

######## IMPLEMENTATION-SPECIFIC CONSTANTS ###########

#: The interval in which the node should check its whether any buckets need refreshing,
#: or whether any data needs to be republished (in seconds)
checkRefreshInterval = refreshTimeout/5

#: Max size of a single UDP datagram, in bytes. If a message is larger than this, it will
#: be spread accross several UDP packets.
udpDatagramMaxSize = 8192 # 8 KB
######### KADEMLIA CONSTANTS ###########

#: Small number Representing the degree of parallelism in network calls
alpha = 3

#: Maximum number of contacts stored in a bucket; this should be an even number
k = 8

# Delay between iterations of iterative node lookups (for loose parallelism) (in seconds)
iterativeLookupDelay = rpcTimeout / 2

#: If a k-bucket has not been used for this amount of time, refresh it (in seconds)
refreshTimeout = 3600 # 1 hour

#: The interval at which nodes replicate (republish/refresh) data they are holding
replicateInterval = refreshTimeout

# The time it takes for data to expire in the network; the original publisher of the data
# will also republish the data at this time if it is still valid
dataExpireTimeout = 86400 # 24 hours

######## IMPLEMENTATION-SPECIFIC CONSTANTS ###########

#: The interval in which the node should check its whether any buckets need refreshing,
#: or whether any data needs to be republished (in seconds)
checkRefreshInterval = refreshTimeout/5
mit
Python
8765ac953047ba1c63eb2eb2eb087ba92e9213bc
fix switch template
zpriddy/Firefly,zpriddy/Firefly,zpriddy/Firefly,zpriddy/Firefly
Firefly/core/templates/__init__.py
Firefly/core/templates/__init__.py
# -*- coding: utf-8 -*-
# @Author: Zachary Priddy
# @Date:   2016-04-12 13:33:30
# @Last Modified by:   Zachary Priddy
# @Last Modified time: 2016-04-12 13:33:30

class Templates(object):
  def __init__(self):
    self._filepath = 'core/templates/'
    self._switch_template = self.get_template('switch')

  def get_template(self, template):
    with open('%s%s.html' % (self._filepath, template)) as template_file:
      return template_file.read().replace('\n', '')

  @property
  def switch(self):
    """
    Builds a switch template from switch.html.

    Returns:
      template (str): string of switch template
    """
    return self._switch_template

ffTemplates = Templates()
# -*- coding: utf-8 -*-
# @Author: Zachary Priddy
# @Date:   2016-04-12 13:33:30
# @Last Modified by:   Zachary Priddy
# @Last Modified time: 2016-04-12 13:33:30

class Templates(object):
  def __init__(self):
    self._filepath = 'core/templates/'
    self._switch_template = self.get_template('switch')

  def get_template(self, template):
    with open('%s%s.html' % (self._filepath, template)) as template_file:
      return template_file.read().replace('\n', '')

  @property
  def switch(self):
    """
    Builds a switch template from switch.html.

    Returns:
      template (str): string of switch template
    """
    return self._switch

ffTemplates = Templates()
apache-2.0
Python
38de795103748ca757a03a62da8ef3d89b0bf682
Fix bug that prevent commands with no values from being added
Nzaga/GoProController,joshvillbrandt/GoProController,Nzaga/GoProController,joshvillbrandt/GoProController
GoProController/models.py
GoProController/models.py
from django.db import models


class Camera(models.Model):
    ssid = models.CharField(max_length=255)
    password = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    last_attempt = models.DateTimeField(auto_now=True)
    last_update = models.DateTimeField(null=True, blank=True)
    image_last_update = models.DateTimeField(null=True, blank=True)
    image = models.TextField(blank=True)
    summary = models.TextField(blank=True)
    status = models.TextField(blank=True)
    connection_attempts = models.IntegerField(default=0)
    connection_failures = models.IntegerField(default=0)

    def __unicode__(self):
        return self.ssid


class Command(models.Model):
    camera = models.ForeignKey(Camera)
    command = models.CharField(max_length=255)
    value = models.CharField(blank=True, max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    time_completed = models.DateTimeField(null=True, blank=True)

    def __unicode__(self):
        return self.camera.__unicode__() + ' > ' + self.command
from django.db import models


class Camera(models.Model):
    ssid = models.CharField(max_length=255)
    password = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    last_attempt = models.DateTimeField(auto_now=True)
    last_update = models.DateTimeField(null=True, blank=True)
    image_last_update = models.DateTimeField(null=True, blank=True)
    image = models.TextField(blank=True)
    summary = models.TextField(blank=True)
    status = models.TextField(blank=True)
    connection_attempts = models.IntegerField(default=0)
    connection_failures = models.IntegerField(default=0)

    def __unicode__(self):
        return self.ssid


class Command(models.Model):
    camera = models.ForeignKey(Camera)
    command = models.CharField(max_length=255)
    value = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    time_completed = models.DateTimeField(null=True, blank=True)

    def __unicode__(self):
        return self.camera.__unicode__() + ' > ' + self.command
apache-2.0
Python
e1ad05fb19577aa108b94ea500106e36b29915fc
update indentation
Statistica/676-candidates
amount_raised_by_candidate.py
amount_raised_by_candidate.py
# Written by Jonathan Saewitz, released May 24th, 2016 for Statisti.ca
# Released under the MIT License (https://opensource.org/licenses/MIT)

import csv, plotly.plotly as plotly, plotly.graph_objs as go, requests
from bs4 import BeautifulSoup

candidates=[]

with open('presidential_candidates.csv', 'r') as f:
    reader=csv.reader(f)
    reader.next() #skip the headers row

    for row in reader: #loop through the candidates
        c_id=row[15] #row[15] is the candidate's FEC id
        html=requests.get('https://beta.fec.gov/data/candidate/' + c_id).text #get the candidate's FEC page
        b=BeautifulSoup(html, 'html.parser')

        if len(b.find_all(class_='t-big-data'))==0:
            #if this class isn't found on the candidate's FEC page,
            #the candidate raised $0
            amt=0.0
        else:
            amt=float(b.find_all(class_="t-big-data")[0].text.strip().replace("$", "").replace(",", ""))
            #class "t-big-data" contains the money data
            #the 0th element contains the total receipts
            #.text gets only the text (i.e. amount raised)
            #.strip() removes all whitespace
            #.replace("$", "") removes the dollar sign
            #.replace(",", "") removes all commas
            #we should be left with the total amount raised in the form 0.00

        name=row[14] #row[14] is the candidate's name
        candidates.append({'name': name, 'amount': amt})

candidates=sorted(candidates, key=lambda k: k['amount']) #sort the candidates by amount raised

trace=go.Bar(
    x=[candidate['name'] for candidate in candidates],
    y=[candidate['amount'] for candidate in candidates]
)

layout=go.Layout(
    title="Presidential Candidates by Money Raised",
    xaxis=dict(
        title="Candidates",
    ),
    yaxis=dict(
        title="Amount raised ($)",
    )
)

data=[trace]
fig=dict(data=data, layout=layout)
plotly.plot(fig)
# Written by Jonathan Saewitz, released May 24th, 2016 for Statisti.ca
# Released under the MIT License (https://opensource.org/licenses/MIT)

import csv, plotly.plotly as plotly, plotly.graph_objs as go, requests
from bs4 import BeautifulSoup

candidates=[]

with open('presidential_candidates.csv', 'r') as f:
    reader=csv.reader(f)
    reader.next() #skip the headers row

    for row in reader: #loop through the candidates
        c_id=row[15] #row[15] is the candidate's FEC id
        html=requests.get('https://beta.fec.gov/data/candidate/' + c_id).text #get the candidate's FEC page
        b=BeautifulSoup(html, 'html.parser')

        if len(b.find_all(class_='t-big-data'))==0:
            #if this class isn't found on the candidate's FEC page,
            #the candidate raised $0
            amt=0.0
        else:
            amt=float(b.find_all(class_="t-big-data")[0].text.strip().replace("$", "").replace(",", ""))
            #class "t-big-data" contains the money data
            #the 0th element contains the total receipts
            #.text gets only the text (i.e. amount raised)
            #.strip() removes all whitespace
            #.replace("$", "") removes the dollar sign
            #.replace(",", "") removes all commas
            #we should be left with the total amount raised in the form 0.00

        name=row[14] #row[14] is the candidate's name
        candidates.append({'name': name, 'amount': amt})

candidates=sorted(candidates, key=lambda k: k['amount']) #sort the candidates by amount raised

trace=go.Bar(
    x=[candidate['name'] for candidate in candidates],
    y=[candidate['amount'] for candidate in candidates]
)

layout=go.Layout(
    title="Presidential Candidates by Money Raised",
    xaxis=dict(
        title="Candidates",
    ),
    yaxis=dict(
        title="Amount raised ($)",
    )
)

data=[trace]
fig=dict(data=data, layout=layout)
plotly.plot(fig)
mit
Python
caff96633ce29a2139bc61bb5ee333efd69d50ef
Remove default classifier path from default config
ruipgil/ProcessMySteps,ruipgil/ProcessMySteps
processmysteps/default_config.py
processmysteps/default_config.py
""" Base line settings """ CONFIG = { 'input_path': None, 'backup_path': None, 'dest_path': None, 'life_all': None, 'db': { 'host': None, 'port': None, 'name': None, 'user': None, 'pass': None }, # 'preprocess': { # 'max_acc': 30.0 # }, 'smoothing': { 'use': True, 'algorithm': 'inverse', 'noise': 10 }, 'segmentation': { 'use': True, 'epsilon': 1.0, 'min_time': 80 }, 'simplification': { 'max_dist_error': 2.0, 'max_speed_error': 1.0, 'eps': 0.15 }, 'location': { 'max_distance': 20, 'min_samples': 2, 'limit': 5, 'google_key': '' }, 'transportation': { 'remove_stops': False, 'min_time': 60, 'classifier_path': None#'classifier.data'# None }, 'trip_learning': { 'epsilon': 0.0, 'classifier_path': None, }, 'trip_name_format': '%Y-%m-%d' }
""" Base line settings """ CONFIG = { 'input_path': None, 'backup_path': None, 'dest_path': None, 'life_all': None, 'db': { 'host': None, 'port': None, 'name': None, 'user': None, 'pass': None }, # 'preprocess': { # 'max_acc': 30.0 # }, 'smoothing': { 'use': True, 'algorithm': 'inverse', 'noise': 10 }, 'segmentation': { 'use': True, 'epsilon': 1.0, 'min_time': 80 }, 'simplification': { 'max_dist_error': 2.0, 'max_speed_error': 1.0, 'eps': 0.15 }, 'location': { 'max_distance': 20, 'min_samples': 2, 'limit': 5, 'google_key': '' }, 'transportation': { 'remove_stops': False, 'min_time': 60, 'classifier_path': 'classifier.data'# None }, 'trip_learning': { 'epsilon': 0.0, 'classifier_path': None, }, 'trip_name_format': '%Y-%m-%d' }
mit
Python
d8fc3888f0b40a8b7a476fc3fec0ca3dfe7a2416
make API able to work with single names
block8437/gender.py
gender.py
gender.py
import requests, json

def getGenders(names):
    url = ""
    cnt = 0
    if not isinstance(names,list):
        names = [names,]

    for name in names:
        if url == "":
            url = "name[0]=" + name
        else:
            cnt += 1
            url = url + "&name[" + str(cnt) + "]=" + name

    req = requests.get("http://api.genderize.io?" + url)
    results = json.loads(req.text)
    if len(names)==1 :
        results = [ results, ]

    retrn = []
    for result in results:
        if result["gender"] is not None:
            retrn.append((result["gender"], result["probability"], result["count"]))
        else:
            retrn.append((u'None',u'0.0',0.0))
    return retrn

if __name__ == '__main__':
    print(getGenders(["Brian","Apple","Jessica","Zaeem","NotAName"]))
import requests, json

def getGenders(names):
    url = ""
    cnt = 0
    for name in names:
        if url == "":
            url = "name[0]=" + name
        else:
            cnt += 1
            url = url + "&name[" + str(cnt) + "]=" + name

    req = requests.get("http://api.genderize.io?" + url)
    results = json.loads(req.text)

    retrn = []
    for result in results:
        if result["gender"] is not None:
            retrn.append((result["gender"], result["probability"], result["count"]))
        else:
            retrn.append((u'None',u'0.0',0.0))
    return retrn

if __name__ == '__main__':
    print getGenders(["Brian","Apple","Jessica","Zaeem","NotAName"])
mit
Python
fc6c6f9ecbf694198c650cf86151423226304c51
put import statement in try
alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,TaiSakuma/AlphaTwirl
alphatwirl/delphes/load_delphes.py
alphatwirl/delphes/load_delphes.py
# Tai Sakuma <[email protected]>

try:
    import ROOT
except ImportError:
    pass

_loaded = False

##__________________________________________________________________||
def load_delphes():

    global _loaded
    if _loaded: return

    # https://root.cern.ch/phpBB3/viewtopic.php?t=21603
    ROOT.gInterpreter.Declare('#include "classes/DelphesClasses.h"')

    # https://cp3.irmp.ucl.ac.be/projects/delphes/ticket/1039
    ROOT.gInterpreter.Declare('#include "external/ExRootAnalysis/ExRootTreeReader.h"')

    ROOT.gSystem.Load("libDelphes.so")

    _loaded = True

##__________________________________________________________________||
# Tai Sakuma <[email protected]>

import ROOT

_loaded = False

##__________________________________________________________________||
def load_delphes():

    global _loaded
    if _loaded: return

    # https://root.cern.ch/phpBB3/viewtopic.php?t=21603
    ROOT.gInterpreter.Declare('#include "classes/DelphesClasses.h"')

    # https://cp3.irmp.ucl.ac.be/projects/delphes/ticket/1039
    ROOT.gInterpreter.Declare('#include "external/ExRootAnalysis/ExRootTreeReader.h"')

    ROOT.gSystem.Load("libDelphes.so")

    _loaded = True

##__________________________________________________________________||
bsd-3-clause
Python
1eb648b14c52c9a2e715774ec71b2c8e6228efc4
add vtkNumpy.numpyToImageData() function
patmarion/director,patmarion/director,patmarion/director,patmarion/director,patmarion/director
src/python/director/vtkNumpy.py
src/python/director/vtkNumpy.py
from director.shallowCopy import shallowCopy
import director.vtkAll as vtk
from vtk.util import numpy_support
import numpy as np


def numpyToPolyData(pts, pointData=None, createVertexCells=True):
    pd = vtk.vtkPolyData()
    pd.SetPoints(getVtkPointsFromNumpy(pts.copy()))
    if pointData is not None:
        for key, value in pointData.iteritems():
            addNumpyToVtk(pd, value.copy(), key)
    if createVertexCells:
        f = vtk.vtkVertexGlyphFilter()
        f.SetInputData(pd)
        f.Update()
        pd = shallowCopy(f.GetOutput())
    return pd


def numpyToImageData(img, flip=True, vtktype=vtk.VTK_UNSIGNED_CHAR):
    if flip:
        img = np.flipud(img)
    height, width, numChannels = img.shape
    image = vtk.vtkImageData()
    image.SetDimensions(width, height, 1)
    image.AllocateScalars(vtktype, numChannels)
    scalars = getNumpyFromVtk(image, 'ImageScalars')
    scalars[:] = img.reshape(width*height, numChannels)[:]
    return image


def getNumpyFromVtk(dataObj, arrayName='Points', arrayType='points'):
    assert arrayType in ('points', 'cells')

    if arrayName == 'Points':
        vtkArray = dataObj.GetPoints().GetData()
    elif arrayType == 'points':
        vtkArray = dataObj.GetPointData().GetArray(arrayName)
    else:
        vtkArray = dataObj.GetCellData().GetArray(arrayName)

    if not vtkArray:
        raise KeyError('Array not found')

    return numpy_support.vtk_to_numpy(vtkArray)


def getVtkPointsFromNumpy(numpyArray):
    points = vtk.vtkPoints()
    points.SetData(getVtkFromNumpy(numpyArray))
    return points


def getVtkPolyDataFromNumpyPoints(points):
    return numpyToPolyData(points)


def getVtkFromNumpy(numpyArray):

    def MakeCallback(numpyArray):
        def Closure(caller, event):
            closureArray = numpyArray
        return Closure

    vtkArray = numpy_support.numpy_to_vtk(numpyArray)
    vtkArray.AddObserver('DeleteEvent', MakeCallback(numpyArray))
    return vtkArray


def addNumpyToVtk(dataObj, numpyArray, arrayName, arrayType='points'):
    assert arrayType in ('points', 'cells')

    vtkArray = getVtkFromNumpy(numpyArray)
    vtkArray.SetName(arrayName)

    if arrayType == 'points':
        assert dataObj.GetNumberOfPoints() == numpyArray.shape[0]
        dataObj.GetPointData().AddArray(vtkArray)
    else:
        assert dataObj.GetNumberOfCells() == numpyArray.shape[0]
        dataObj.GetCellData().AddArray(vtkArray)
from director.shallowCopy import shallowCopy
import director.vtkAll as vtk
from vtk.util import numpy_support
import numpy as np


def numpyToPolyData(pts, pointData=None, createVertexCells=True):
    pd = vtk.vtkPolyData()
    pd.SetPoints(getVtkPointsFromNumpy(pts.copy()))
    if pointData is not None:
        for key, value in pointData.iteritems():
            addNumpyToVtk(pd, value.copy(), key)
    if createVertexCells:
        f = vtk.vtkVertexGlyphFilter()
        f.SetInputData(pd)
        f.Update()
        pd = shallowCopy(f.GetOutput())
    return pd


def getNumpyFromVtk(dataObj, arrayName='Points', arrayType='points'):
    assert arrayType in ('points', 'cells')

    if arrayName == 'Points':
        vtkArray = dataObj.GetPoints().GetData()
    elif arrayType == 'points':
        vtkArray = dataObj.GetPointData().GetArray(arrayName)
    else:
        vtkArray = dataObj.GetCellData().GetArray(arrayName)

    if not vtkArray:
        raise KeyError('Array not found')

    return numpy_support.vtk_to_numpy(vtkArray)


def getVtkPointsFromNumpy(numpyArray):
    points = vtk.vtkPoints()
    points.SetData(getVtkFromNumpy(numpyArray))
    return points


def getVtkPolyDataFromNumpyPoints(points):
    return numpyToPolyData(points)


def getVtkFromNumpy(numpyArray):

    def MakeCallback(numpyArray):
        def Closure(caller, event):
            closureArray = numpyArray
        return Closure

    vtkArray = numpy_support.numpy_to_vtk(numpyArray)
    vtkArray.AddObserver('DeleteEvent', MakeCallback(numpyArray))
    return vtkArray


def addNumpyToVtk(dataObj, numpyArray, arrayName, arrayType='points'):
    assert arrayType in ('points', 'cells')

    vtkArray = getVtkFromNumpy(numpyArray)
    vtkArray.SetName(arrayName)

    if arrayType == 'points':
        assert dataObj.GetNumberOfPoints() == numpyArray.shape[0]
        dataObj.GetPointData().AddArray(vtkArray)
    else:
        assert dataObj.GetNumberOfCells() == numpyArray.shape[0]
        dataObj.GetCellData().AddArray(vtkArray)
bsd-3-clause
Python
5f522cf58a1566513e874002bdaeb063e8a02497
Update model and add TODO
harunurhan/repologist,harunurhan/repodoctor,harunurhan/repologist,harunurhan/repodoctor
server/models/checkup.py
server/models/checkup.py
# -*- coding: utf-8 -*-
from datetime import datetime

from app import db


class Checkup(db.Model):
    __tablename__ = 'checkup'
    id = db.Column(db.Integer, primary_key=True)
    created = db.Column(db.DateTime, default=datetime.utcnow)
    # TODO: add one unique constraint on the column group of owner and repo
    owner = db.Column(db.String)
    repo = db.Column(db.String)
    criteria = db.relationship('Criterion', backref='criterion', lazy='dynamic')
# -*- coding: utf-8 -*-
from datetime import datetime

from app import db


class Checkup(db.Model):
    __tablename__ = 'checkup'
    id = db.Column(db.Integer, primary_key=True)
    created = db.Column(db.DateTime, default=datetime.utcnow)
    repo_name = db.Column(db.String, unique=True)  # github-user/repo-name
    criteria = db.relationship('Criterion', backref='criterion', lazy='dynamic')
mit
Python
fb1ddcdd789d1c1be02a9f6d63a21548a8cf584e
Fix undo of PlatformPhysicsOperation after the SceneNode changes
onitake/Uranium,onitake/Uranium
printer/PlatformPhysicsOperation.py
printer/PlatformPhysicsOperation.py
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation

##  A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
    def __init__(self, node, translation):
        super().__init__()
        self._node = node
        self._old_position = node.getPosition()
        self._new_position = node.getPosition() + translation
        self._always_merge = True

    def undo(self):
        self._node.setPosition(self._old_position)

    def redo(self):
        self._node.setPosition(self._new_position)

    def mergeWith(self, other):
        group = GroupedOperation()

        group.addOperation(self)
        group.addOperation(other)

        return group

    def __repr__(self):
        return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation

##  A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
    def __init__(self, node, translation):
        super().__init__()
        self._node = node
        self._transform = node.getLocalTransformation()
        self._position = node.getPosition() + translation
        self._always_merge = True

    def undo(self):
        self._node.setLocalTransformation(self._transform)

    def redo(self):
        self._node.setPosition(self._position)

    def mergeWith(self, other):
        group = GroupedOperation()

        group.addOperation(self)
        group.addOperation(other)

        return group

    def __repr__(self):
        return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
agpl-3.0
Python
e89c20e1ecfadb7e63a1fe80d821afafb8860352
add missing import
tensorflow/tfx,tensorflow/tfx
tfx/experimental/templates/taxi/launcher/stub_component_launcher.py
tfx/experimental/templates/taxi/launcher/stub_component_launcher.py
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stub component launcher for launching stub executors in KFP."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tfx.experimental.pipeline_testing import stub_component_launcher
from tfx.experimental.templates.taxi.pipeline import configs


class StubComponentLauncher(stub_component_launcher.StubComponentLauncher):
  """Responsible for launching stub executors in KFP Template.

  This stub component launcher cannot be defined in the kubeflow_dag_runner.py
  because launcher class is imported by the module path.
  """
  pass

# GCS directory where KFP outputs are recorded
test_data_dir = "gs://{}/testdata".format(configs.GCS_BUCKET_NAME)
# TODO(StubExecutor): customize self.stubbed_component_ids to replace components
# with BaseStubExecutor
stubbed_component_ids = ['CsvExampleGen', 'StatisticsGen', 'SchemaGen',
                         'ExampleValidator', 'Trainer', 'Transform',
                         'Evaluator', 'Pusher']
# TODO(StubExecutor): (Optional) Use stubbed_component_map to insert custom stub
# executor class as a value and component id as a key.
stubbed_component_map = {}

StubComponentLauncher.get_stub_launcher_class(
    test_data_dir, stubbed_component_ids, stubbed_component_map)
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stub component launcher for launching stub executors in KFP."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tfx.experimental.pipeline_testing import stub_component_launcher


class StubComponentLauncher(stub_component_launcher.StubComponentLauncher):
  """Responsible for launching stub executors in KFP Template.

  This stub component launcher cannot be defined in the kubeflow_dag_runner.py
  because launcher class is imported by the module path.
  """
  pass

# GCS directory where KFP outputs are recorded
test_data_dir = "gs://{}/testdata".format(configs.GCS_BUCKET_NAME)
# TODO(StubExecutor): customize self.stubbed_component_ids to replace components
# with BaseStubExecutor
stubbed_component_ids = ['CsvExampleGen', 'StatisticsGen', 'SchemaGen',
                         'ExampleValidator', 'Trainer', 'Transform',
                         'Evaluator', 'Pusher']
# TODO(StubExecutor): (Optional) Use stubbed_component_map to insert custom stub
# executor class as a value and component id as a key.
stubbed_component_map = {}

StubComponentLauncher.get_stub_launcher_class(
    test_data_dir, stubbed_component_ids, stubbed_component_map)
apache-2.0
Python
7f4a02f7058c4e7dfd4bbb01ba847e6990b5e391
update admin
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
corehq/apps/userreports/admin.py
corehq/apps/userreports/admin.py
from __future__ import absolute_import, unicode_literals

from django.contrib import admin

from .models import AsyncIndicator, DataSourceActionLog, InvalidUCRData


@admin.register(AsyncIndicator)
class AsyncIndicatorAdmin(admin.ModelAdmin):
    model = AsyncIndicator
    list_display = [
        'doc_id',
        'doc_type',
        'domain',
        'indicator_config_ids',
        'date_created',
        'date_queued',
        'unsuccessful_attempts'
    ]
    list_filter = ('doc_type', 'domain', 'unsuccessful_attempts')
    search_fields = ('doc_id',)


@admin.register(InvalidUCRData)
class InvalidUCRDataAdmin(admin.ModelAdmin):
    model = InvalidUCRData
    list_display = [
        'doc_id',
        'doc_type',
        'domain',
        'indicator_config_id',
        'validation_name',
    ]
    list_filter = ('doc_type', 'domain', 'indicator_config_id', 'validation_name')
    search_fields = ('doc_id',)


@admin.register(DataSourceActionLog)
class DataSourceActionLogAdmin(admin.ModelAdmin):
    model = DataSourceActionLog
    list_display = [
        'date_created',
        'domain',
        'indicator_config_id',
        'initiated_by',
        'action_source',
        'action',
        'skip_destructive'
    ]
    list_filter = ('action_source', 'action', 'skip_destructive')
    search_fields = ('domain', 'indicator_config_id',)
from __future__ import absolute_import, unicode_literals

from django.contrib import admin

from .models import AsyncIndicator, DataSourceActionLog, InvalidUCRData


@admin.register(AsyncIndicator)
class AsyncIndicatorAdmin(admin.ModelAdmin):
    model = AsyncIndicator
    list_display = [
        'doc_id',
        'doc_type',
        'domain',
        'indicator_config_ids',
        'date_created',
        'date_queued',
        'unsuccessful_attempts'
    ]
    list_filter = ('doc_type', 'domain', 'unsuccessful_attempts')
    search_fields = ('doc_id',)


@admin.register(InvalidUCRData)
class InvalidUCRDataAdmin(admin.ModelAdmin):
    model = InvalidUCRData
    list_display = [
        'doc_id',
        'doc_type',
        'domain',
        'indicator_config_id',
        'validation_name',
    ]
    list_filter = ('doc_type', 'domain', 'indicator_config_id', 'validation_name')
    search_fields = ('doc_id',)


@admin.register(DataSourceActionLog)
class DataSourceActionLogAdmin(admin.ModelAdmin):
    model = DataSourceActionLog
    list_display = [
        'date_created',
        'domain',
        'indicator_config_id',
        'initiated_by',
        'action_source',
        'action',
    ]
    list_filter = ('action_source', 'action')
    search_fields = ('domain', 'indicator_config_id',)
bsd-3-clause
Python
5450303c975e34265f6fda3c014b9aed7d002a3c
Fix download path, the existing one has been removed from nvidia's site (#10253)
iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack
var/spack/repos/builtin/packages/cudnn/package.py
var/spack/repos/builtin/packages/cudnn/package.py
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Cudnn(Package):
    """NVIDIA cuDNN is a GPU-accelerated library of primitives for deep
    neural networks"""

    homepage = "https://developer.nvidia.com/cudnn"

    version('7.3', '72666d3532850752612706601258a0b2',
            url='http://developer.download.nvidia.com/compute/redist/cudnn/v7.3.0/cudnn-9.0-linux-x64-v7.3.0.29.tgz')
    version('6.0', 'a08ca487f88774e39eb6b0ef6507451d',
            url='http://developer.download.nvidia.com/compute/redist/cudnn/v6.0/cudnn-8.0-linux-x64-v6.0.tgz')
    version('5.1', '406f4ac7f7ee8aa9e41304c143461a69',
            url='http://developer.download.nvidia.com/compute/redist/cudnn/v5.1/cudnn-8.0-linux-x64-v5.1.tgz')

    depends_on('cuda@8:')

    def install(self, spec, prefix):
        install_tree('.', prefix)

# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Cudnn(Package):
    """NVIDIA cuDNN is a GPU-accelerated library of primitives for deep
    neural networks"""

    homepage = "https://developer.nvidia.com/cudnn"

    version('7.3', '72666d3532850752612706601258a0b2',
            url='https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v7.3.0/prod/9.0_2018920/cudnn-9.0-linux-x64-v7.3.0.29.tgz')
    version('6.0', 'a08ca487f88774e39eb6b0ef6507451d',
            url='http://developer.download.nvidia.com/compute/redist/cudnn/v6.0/cudnn-8.0-linux-x64-v6.0.tgz')
    version('5.1', '406f4ac7f7ee8aa9e41304c143461a69',
            url='http://developer.download.nvidia.com/compute/redist/cudnn/v5.1/cudnn-8.0-linux-x64-v5.1.tgz')

    depends_on('cuda@8:')

    def install(self, spec, prefix):
        install_tree('.', prefix)
lgpl-2.1
Python
bb042f7bd76e364c3be6791c580b9426a4007627
fix url and add shared variant (#5358)
LLNL/spack,iulian787/spack,EmreAtes/spack,tmerrick1/spack,TheTimmy/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,mfherbst/spack,krafczyk/spack,lgarren/spack,lgarren/spack,LLNL/spack,lgarren/spack,lgarren/spack,LLNL/spack,TheTimmy/spack,mfherbst/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,LLNL/spack,mfherbst/spack,matthiasdiener/spack,TheTimmy/spack,EmreAtes/spack,TheTimmy/spack,iulian787/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,TheTimmy/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack
var/spack/repos/builtin/packages/latte/package.py
var/spack/repos/builtin/packages/latte/package.py
##############################################################################
# Copyright (c) 2017, Los Alamos National Security, LLC
# Produced at the Los Alamos National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Latte(CMakePackage):
    """Open source density functional tight binding molecular dynamics."""

    homepage = "https://github.com/lanl/latte"
    url = "https://github.com/lanl/latte/tarball/v1.0"

    version('develop', git='https://github.com/lanl/latte', branch='master')

    variant('mpi', default=True, description='Build with mpi')
    variant('progress', default=False, description='Use progress for fast')
    variant('shared', default=True, description='Build shared libs')

    depends_on("[email protected]:", type='build')
    depends_on('blas')
    depends_on('lapack')
    depends_on('mpi', when='+mpi')
    depends_on('qmd-progress', when='+progress')

    root_cmakelists_dir = 'cmake'

    def cmake_args(self):
        options = []
        if '+shared' in self.spec:
            options.append('-DBUILD_SHARED_LIBS=ON')
        else:
            options.append('-DBUILD_SHARED_LIBS=OFF')
        if '+mpi' in self.spec:
            options.append('-DO_MPI=yes')
        if '+progress' in self.spec:
            options.append('-DPROGRESS=yes')
        return options

##############################################################################
# Copyright (c) 2017, Los Alamos National Security, LLC
# Produced at the Los Alamos National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Latte(CMakePackage):
    """Open source density functional tight binding molecular dynamics."""

    homepage = "https://gitlab.com/exaalt/latte"
    url = "https://gitlab.com/exaalt/latte/tags/v1.0"

    version('develop', git='https://gitlab.com/exaalt/latte', branch='cmake')

    depends_on("[email protected]:", type='build')
    depends_on('blas')
    depends_on('lapack')

    root_cmakelists_dir = 'cmake'

    def cmake_args(self):
        options = ['-DBUILD_SHARED_LIBS=ON']
        return options
lgpl-2.1
Python
08b5b565666d42a6802e136fc8e7cf8d355929b0
add v2019.1 and v2020.1 (#17648)
iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack
var/spack/repos/builtin/packages/qhull/package.py
var/spack/repos/builtin/packages/qhull/package.py
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Qhull(CMakePackage):
    """Qhull computes the convex hull, Delaunay triangulation, Voronoi
       diagram, halfspace intersection about a point, furt
       hest-site Delaunay triangulation, and furthest-site Voronoi
       diagram. The source code runs in 2-d, 3-d, 4-d, and higher
       dimensions. Qhull implements the Quickhull algorithm for
       computing the convex hull. It handles roundoff errors from
       floating point arithmetic. It computes volumes, surface areas,
       and approximations to the convex hull."""

    homepage = "http://www.qhull.org"

    version('2020.1', sha256='1ac92a5538f61e297c72aebe4d4ffd731ceb3e6045d6d15faf1c212713798df4',
            url="http://www.qhull.org/download/qhull-2020-src-8.0.0.tgz")

    version('2019.1', sha256='2b7990558c363076261564f61b74db4d0d73b71869755108a469038c07dc43fb',
            url="http://www.qhull.org/download/qhull-2019-src-7.3.2.tgz")

    version('2015.2', sha256='78b010925c3b577adc3d58278787d7df08f7c8fb02c3490e375eab91bb58a436',
            url="http://www.qhull.org/download/qhull-2015-src-7.2.0.tgz")

    version('2012.1', sha256='a35ecaa610550b7f05c3ce373d89c30cf74b059a69880f03080c556daebcff88',
            url="http://www.qhull.org/download/qhull-2012.1-src.tgz")

    patch('qhull-unused-intel-17.02.patch', when='@2015.2')

    depends_on('[email protected]:', type='build')

    def flag_handler(self, name, flags):
        # See https://github.com/qhull/qhull/issues/65
        if name == 'cxxflags' and self.version == Version('2020.1'):
            flags.append(self.compiler.cxx11_flag)
        return (flags, None, None)

# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Qhull(CMakePackage):
    """Qhull computes the convex hull, Delaunay triangulation, Voronoi
       diagram, halfspace intersection about a point, furt
       hest-site Delaunay triangulation, and furthest-site Voronoi
       diagram. The source code runs in 2-d, 3-d, 4-d, and higher
       dimensions. Qhull implements the Quickhull algorithm for
       computing the convex hull. It handles roundoff errors from
       floating point arithmetic. It computes volumes, surface areas,
       and approximations to the convex hull."""

    homepage = "http://www.qhull.org"

    version('2015.2', sha256='78b010925c3b577adc3d58278787d7df08f7c8fb02c3490e375eab91bb58a436',
            url="http://www.qhull.org/download/qhull-2015-src-7.2.0.tgz")

    version('2012.1', sha256='a35ecaa610550b7f05c3ce373d89c30cf74b059a69880f03080c556daebcff88',
            url="http://www.qhull.org/download/qhull-2012.1-src.tgz")

    patch('qhull-unused-intel-17.02.patch', when='@2015.2')

    depends_on('[email protected]:', type='build')
lgpl-2.1
Python
1f6b1d2aca3995a4ac295f7e6a8ab6bf84d6e79b
add logging for ShotDetectorPlotService
w495/python-video-shot-detector,w495/python-video-shot-detector
shot_detector/services/shot_detector_service.py
shot_detector/services/shot_detector_service.py
# -*- coding: utf8 -*-

from __future__ import absolute_import, division, print_function

import logging

from shot_detector.detectors import SimpleDetector

from .base_detector_service import BaseDetectorService
from .plot_service import PlotService

from shot_detector.utils.common import yes_no
from shot_detector.utils.log_meta import log_method_call_with


class ShotDetectorPlotService(PlotService, BaseDetectorService):
    """
        Simple Shot Detector Service.
    """

    def add_arguments(self, parser, **kwargs):
        parser = super(ShotDetectorPlotService, self) \
            .add_arguments(parser, **kwargs)
        parser = self.add_video_arguments(parser, **kwargs)
        parser = self.add_plot_arguments(parser, **kwargs)
        return parser

    def add_video_arguments(self, parser, **kwargs):
        parser.add_argument(
            '--ff', '--first-frame',
            metavar='sec',
            dest='first_frame',
            type=int,
            default=0,
        )
        parser.add_argument(
            '--lf', '--last-frame',
            metavar='sec',
            dest='last_frame',
            type=int,
            default=60,
        )
        parser.add_argument(
            '--as', '--as-stream',
            default='no',
            dest='as_stream',
            type=yes_no,
        )
        return parser

    @log_method_call_with(
        level=logging.WARN,
        logger=logging.getLogger(__name__)
    )
    def run(self, *kwargs):
        options = self.options
        detector = SimpleDetector()
        detector.detect(
            input_uri=options.input_uri,
            format=options.format,
            service_options=vars(options)
        )

# -*- coding: utf8 -*-

from __future__ import absolute_import, division, print_function

import time

from shot_detector.detectors import SimpleDetector

from .base_detector_service import BaseDetectorService
from .plot_service import PlotService

from shot_detector.utils.common import yes_no


class ShotDetectorPlotService(PlotService, BaseDetectorService):
    """
        Simple Shot Detector Service.
    """

    def add_arguments(self, parser, **kwargs):
        parser = super(ShotDetectorPlotService, self) \
            .add_arguments(parser, **kwargs)
        parser = self.add_video_arguments(parser, **kwargs)
        parser = self.add_plot_arguments(parser, **kwargs)
        return parser

    def add_video_arguments(self, parser, **kwargs):
        parser.add_argument(
            '--ff', '--first-frame',
            metavar='sec',
            dest='first_frame',
            type=int,
            default=0,
        )
        parser.add_argument(
            '--lf', '--last-frame',
            metavar='sec',
            dest='last_frame',
            type=int,
            default=60,
        )
        parser.add_argument(
            '--as', '--as-stream',
            default='no',
            dest='as_stream',
            type=yes_no,
        )
        return parser

    def run(self, *kwargs):
        options = self.options
        detector = SimpleDetector()
        t1 = time.time()
        detector.detect(
            input_uri=options.input_uri,
            format=options.format,
            service_options=vars(options)
        )
        t2 = time.time()
        print(t2 - t1)
bsd-3-clause
Python
251e11ef777ece9542b21af1ed43fa580c2186b3
Bump to 2.1.2
OpenCanada/website,OpenCanada/website,OpenCanada/website,OpenCanada/website
opencanada/__init__.py
opencanada/__init__.py
from django.utils.version import get_version

VERSION = (2, 1, 2, 'final', 0)

__version__ = get_version(VERSION)

from django.utils.version import get_version

VERSION = (2, 1, 1, 'final', 0)

__version__ = get_version(VERSION)
mit
Python
baa024a9e09607f8295cfe526a9eb25906aca806
modify the filename
HengLin/PyStudyAlgorithms,HengLin/PyStudyAlgorithms
PyStudy/loadfile_speed.py
PyStudy/loadfile_speed.py
#!/usr/bin/env python
import datetime

count = 0
begin_time = datetime.datetime.now()

def readInChunks(fileObj, chunkSize=2048):
    """
    Lazy function to read a file piece by piece.
    Default chunk size: 2kB.
    """
    while True:
        data = fileObj.read(chunkSize)
        if not data:
            break
        yield data

f = open('bigfile')
for chuck in readInChunks(f):
    count = count + 1

end_time = datetime.datetime.now()
total_time = end_time - begin_time
print "chunk=%s, count=%i"%(total_time, count)
f.close()

count = 0
begin_time = datetime.datetime.now()

f = open('bigfile')
for line in f:
    count = count + 1

end_time = datetime.datetime.now()
total_time = end_time - begin_time
print "read=%s, count=%i"%(total_time, count)
f.close()

#!/usr/bin/env python
import datetime

count = 0
begin_time = datetime.datetime.now()

def readInChunks(fileObj, chunkSize=2048):
    """
    Lazy function to read a file piece by piece.
    Default chunk size: 2kB.
    """
    while True:
        data = fileObj.read(chunkSize)
        if not data:
            break
        yield data

f = open('fastapi-requests.log.1')
for chuck in readInChunks(f):
    count = count + 1

end_time = datetime.datetime.now()
total_time = end_time - begin_time
print "chunk=%s, count=%i"%(total_time, count)
f.close()

count = 0
begin_time = datetime.datetime.now()

f = open('fastapi-requests.log.1')
for line in f:
    count = count + 1

end_time = datetime.datetime.now()
total_time = end_time - begin_time
print "read=%s, count=%i"%(total_time, count)
f.close()
apache-2.0
Python
96e26b74851c0b54493f3c269ceefb6b2ae53e7d
implement fromXml, toXml and defaultInit methods of Resolution class
CaptainDesAstres/Simple-Blender-Render-Manager,CaptainDesAstres/Blender-Render-Manager
settingMod/Resolution.py
settingMod/Resolution.py
#!/usr/bin/python3.4
# -*-coding:Utf-8 -*

'''module to manage resolution settings'''
import xml.etree.ElementTree as xmlMod
from settingMod.Size import *
import os

class Resolution:
    '''class to manage resolution settings'''

    def __init__(self, xml= None):
        '''initialize resolution settings with default value or values extracted from an xml object'''
        if xml is None:
            self.defaultInit()
        else:
            self.fromXml(xml)

    def defaultInit(self):
        '''initialize resolution settings with default value'''
        self.pourcent = 100
        self.size = Size('1920x1080')

    def fromXml(self, xml):
        '''initialize resolution settings with values extracted from an xml object'''
        self.pourcent = int(xml.get('pourcent'))
        self.size = Size(xml = xml)

    def toXml(self):
        '''export resolution settings into xml syntaxed string'''
        return '<resolution pourcent="'+str(self.pourcent)+'" '+self.size.toXmlAttr()+' />'

    def see(self, log):
        '''menu to explore and edit resolution settings settings'''

    def print(self):
        '''a method to print preset'''

#!/usr/bin/python3.4
# -*-coding:Utf-8 -*

'''module to manage resolution settings'''
import xml.etree.ElementTree as xmlMod
from settingMod.Size import *
import os

class Resolution:
    '''class to manage resolution settings'''

    def __init__(self, xml= None):
        '''initialize resolution settings with default value or values extracted from an xml object'''
        if xml is None:
            self.defaultInit()
        else:
            self.fromXml(xml)

    def defaultInit(self):
        '''initialize resolution settings with default value'''

    def fromXml(self, xml):
        '''initialize resolution settings with values extracted from an xml object'''

    def toXml(self):
        '''export resolution settings into xml syntaxed string'''

    def see(self, log):
        '''menu to explore and edit resolution settings settings'''

    def print(self):
        '''a method to print preset'''
mit
Python
44dcbfe606377331a40777a7b387768c816b0e61
Increment to .2.11 for new package
cloudtools/nymms
nymms/__init__.py
nymms/__init__.py
__version__ = '0.2.11'
__version__ = '0.2.10'
bsd-2-clause
Python
7095380ff71947f76ff60765e699da8e31fde944
Build - remove dir directory argument - not used
molejar/project_generator,hwfwgrp/project_generator,ohagendorf/project_generator,0xc0170/project_generator,sarahmarshy/project_generator,project-generator/project_generator
project_generator/commands/build.py
project_generator/commands/build.py
# Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import logging

from ..tools_supported import ToolsSupported
from ..generate import Generator
from ..settings import ProjectSettings

help = 'Build a project'

def run(args):
    # Export if we know how, otherwise return
    if os.path.exists(args.file):
        generator = Generator(args.file)
        for project in generator.generate(args.project):
            export_result = project.export(args.tool, args.copy)
            build_result = project.build(args.tool)
            if build_result == 0 and export_result == 0:
                return 0
            else:
                return -1
    else:
        # not project known by pgen
        logging.warning("%s not found." % args.file)
        return -1

def setup(subparser):
    subparser.add_argument(
        "-f", "--file", help="YAML projects file", default='projects.yaml')
    subparser.add_argument("-p", "--project", help="Name of the project to build", default = '')
    subparser.add_argument(
        "-t", "--tool", help="Build a project files for provided tool")
    subparser.add_argument(
        "-c", "--copy", action="store_true", help="Copy all files to the exported directory")

# Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import logging

from ..tools_supported import ToolsSupported
from ..generate import Generator
from ..settings import ProjectSettings

help = 'Build a project'

def run(args):
    # Export if we know how, otherwise return
    if os.path.exists(args.file):
        generator = Generator(args.file)
        for project in generator.generate(args.project):
            export_result = project.export(args.tool, args.copy)
            build_result = project.build(args.tool)
            if build_result == 0 and export_result == 0:
                return 0
            else:
                return -1
    else:
        # not project known by pgen
        logging.warning("%s not found." % args.file)
        return -1

def setup(subparser):
    subparser.add_argument(
        "-f", "--file", help="YAML projects file", default='projects.yaml')
    subparser.add_argument("-p", "--project", help="Name of the project to build", default = '')
    subparser.add_argument(
        "-t", "--tool", help="Build a project files for provided tool")
    subparser.add_argument(
        "-dir", "--directory", help="The projects directory")
    subparser.add_argument(
        "-c", "--copy", action="store_true", help="Copy all files to the exported directory")
apache-2.0
Python
9ec02a7cc31766d2b0d46547addddc0ca350e8ed
make pylint even more happy
juliakreutzer/bandit-neuralmonkey,bastings/neuralmonkey,bastings/neuralmonkey,ufal/neuralmonkey,ufal/neuralmonkey,ufal/neuralmonkey,bastings/neuralmonkey,juliakreutzer/bandit-neuralmonkey,juliakreutzer/bandit-neuralmonkey,ufal/neuralmonkey,bastings/neuralmonkey,ufal/neuralmonkey,juliakreutzer/bandit-neuralmonkey,juliakreutzer/bandit-neuralmonkey,bastings/neuralmonkey
neuralmonkey/runners/perplexity_runner.py
neuralmonkey/runners/perplexity_runner.py

"""
This module contains an implementation of a runner that is supposed
to be used in case we train a language model. Instead of decoding
sentences in computes its perplexities given the decoder.
"""

#tests: lint

from neuralmonkey.learning_utils import feed_dicts

#pylint: disable=too-few-public-methods
class PerplexityRunner(object):
    def __init__(self, decoder, batch_size):
        self.decoder = decoder
        self.batch_size = batch_size
        self.vocabulary = decoder.vocabulary

    def __call__(self, sess, dataset, coders):
        if not dataset.has_series(self.decoder.data_id):
            raise Exception("Dataset must have the target values ({})"
                            "for computing perplexity."
                            .format(self.decoder.data_id))

        perplexities = []
        train_loss = 0.0
        runtime_loss = 0.0
        batch_count = 0
        for batch in dataset.batch_dataset(self.batch_size):
            batch_count += 1
            batch_feed_dict = feed_dicts(batch, coders, train=False)
            cross_entropies, opt_loss, dec_loss = sess.run(
                [self.decoder.cross_entropies,
                 self.decoder.train_loss,
                 self.decoder.runtime_loss],
                feed_dict=batch_feed_dict)
            perplexities.extend([2 ** xent for xent in cross_entropies])
            train_loss += opt_loss
            runtime_loss += dec_loss

        avg_train_loss = train_loss / batch_count
        avg_runtime_loss = runtime_loss / batch_count

        return perplexities, avg_train_loss, avg_runtime_loss

"""
This module contains an implementation of a runner that is supposed
to be used in case we train a language model. Instead of decoding
sentences in computes its perplexities given the decoder.
"""

#tests: lint

from neuralmonkey.learning_utils import feed_dicts

class PerplexityRunner(object):
    def __init__(self, decoder, batch_size):
        self.decoder = decoder
        self.batch_size = batch_size
        self.vocabulary = decoder.vocabulary

    def __call__(self, sess, dataset, coders):
        if not dataset.has_series(self.decoder.data_id):
            raise Exception("Dataset must have the target values ({})"
                            "for computing perplexity."
                            .format(self.decoder.data_id))

        batched_dataset = dataset.batch_dataset(self.batch_size)
        losses = [self.decoder.train_loss,
                  self.decoder.runtime_loss]
        perplexities = []
        train_loss = 0.0
        runtime_loss = 0.0
        batch_count = 0
        for batch in batched_dataset:
            batch_count += 1
            batch_feed_dict = feed_dicts(batch, coders, train=False)
            cross_entropies, opt_loss, dec_loss = sess.run(
                [self.decoder.cross_entropies] + losses,
                feed_dict=batch_feed_dict)
            perplexities.extend([2 ** xent for xent in cross_entropies])
            train_loss += opt_loss
            runtime_loss += dec_loss

        avg_train_loss = train_loss / batch_count
        avg_runtime_loss = runtime_loss / batch_count

        return perplexities, avg_train_loss, avg_runtime_loss
bsd-3-clause
Python
1fc9561148402c4eb558d183f4d8f3ecce0a0330
Set version to 0.4.1
Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend
alignak_backend/__init__.py
alignak_backend/__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
    Alignak REST backend
"""

# Application manifest
VERSION = (0, 4, 1)

__application__ = u"Alignak_Backend"
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"

# Application manifest
manifest = {
    'name': __application__,
    'version': __version__,
    'author': __author__,
    'description': __description__,
    'copyright': __copyright__,
    'license': __license__,
    'release': __releasenotes__,
    'doc': __doc_url__
}

#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
    Alignak REST backend
"""

# Application manifest
VERSION = (0, 4, 0)

__application__ = u"Alignak_Backend"
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"

# Application manifest
manifest = {
    'name': __application__,
    'version': __version__,
    'author': __author__,
    'description': __description__,
    'copyright': __copyright__,
    'license': __license__,
    'release': __releasenotes__,
    'doc': __doc_url__
}
agpl-3.0
Python
e77381b087acd935bc3dae1f6c2e809970506db9
remove SECRET_KEY, again
makefu/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server,bepasty/bepasty-server,bepasty/bepasty-server
bepasty/config.py
bepasty/config.py
# Copyright: 2013 Bastian Blank <[email protected]>
# License: BSD 2-clause, see LICENSE for details.


class Config(object):
    """This is the basic configuration class for bepasty."""

    #: name of this site (put YOUR bepasty fqdn here)
    SITENAME = 'bepasty.example.org'

    UPLOAD_UNLOCKED = True
    """
    .. warning:: Uploads are default unlocked. Actually the admin should
       manual unlock the uploaded files to avoid copyright issues.
       In hosted version you should set ``UPLOAD_UNLOCKED = False``.
    """

    #: Define storage module
    #: Available:
    #: - filesystem
    #: - ceph
    STORAGE = 'filesystem'

    #: Filesystem storage specific config
    STORAGE_FILESYSTEM_DIRECTORY = '/tmp/'

    #: Config file for CEPH storage
    STORAGE_CEPH_CONFIG_FILE = '/etc/ceph/ceph.conf'
    #: CEPH pool name for actually data
    STORAGE_CEPH_POOL_DATA = 'bepasty-data'
    #: CEPH pool name for meta data
    STORAGE_CEPH_POOL_META = 'bepasty-meta'

    #: server secret key needed for secure cookies
    #: you must set a very long, very random, very secret string here,
    #: otherwise bepasty will not work (and crash when trying to log in)!
    SECRET_KEY = ''

    #: not logged-in users get these permissions
    #: usually either nothing ('') or read-only ('read'):
    DEFAULT_PERMISSIONS = ''

    #: logged-in users may get more permissions
    #: you need a login secret to log in and, depending on that secret, you will
    #: get the configured permissions.
    #: you can use same secret / same permissions for all privileged users or
    #: set up different secrets / different permissions.
    #: PERMISSIONS is a dict that maps secrets to permissions, use it like:
    #: PERMISSIONS = {
    #:     'myadminsecret': 'admin,create,read,delete',
    #:     'myuploadersecret': 'create,read',
    #: }
    PERMISSIONS = {
    }

# Copyright: 2013 Bastian Blank <[email protected]>
# License: BSD 2-clause, see LICENSE for details.


class Config(object):
    """This is the basic configuration class for bepasty."""

    #: name of this site (put YOUR bepasty fqdn here)
    SITENAME = 'bepasty.example.org'

    UPLOAD_UNLOCKED = True
    """
    .. warning:: Uploads are default unlocked. Actually the admin should
       manual unlock the uploaded files to avoid copyright issues.
       In hosted version you should set ``UPLOAD_UNLOCKED = False``.
    """

    #: Define storage module
    #: Available:
    #: - filesystem
    #: - ceph
    STORAGE = 'filesystem'

    #: Filesystem storage specific config
    STORAGE_FILESYSTEM_DIRECTORY = '/tmp/'

    #: Config file for CEPH storage
    STORAGE_CEPH_CONFIG_FILE = '/etc/ceph/ceph.conf'
    #: CEPH pool name for actually data
    STORAGE_CEPH_POOL_DATA = 'bepasty-data'
    #: CEPH pool name for meta data
    STORAGE_CEPH_POOL_META = 'bepasty-meta'

    #: server secret key needed for secure cookies
    #: you must set a very long, very random, very secret string here,
    #: otherwise bepasty will not work (and crash when trying to log in)!
    SECRET_KEY = 'xx'

    #: not logged-in users get these permissions
    #: usually either nothing ('') or read-only ('read'):
    DEFAULT_PERMISSIONS = ''

    #: logged-in users may get more permissions
    #: you need a login secret to log in and, depending on that secret, you will
    #: get the configured permissions.
    #: you can use same secret / same permissions for all privileged users or
    #: set up different secrets / different permissions.
    #: PERMISSIONS is a dict that maps secrets to permissions, use it like:
    #: PERMISSIONS = {
    #:     'myadminsecret': 'admin,create,read,delete',
    #:     'myuploadersecret': 'create,read',
    #: }
    PERMISSIONS = {
    }
bsd-2-clause
Python
b8e53ed353bf28bc1e532ae1577bf4a8b4ce976f
Add missing import
hackeriet/nfcd,hackeriet/pyhackeriet,hackeriet/pyhackeriet,hackeriet/nfcd,hackeriet/nfcd,hackeriet/pyhackeriet
hackeriet/cardreaderd/__init__.py
hackeriet/cardreaderd/__init__.py
#!/usr/bin/env python
from hackeriet import mifare
from hackeriet.mqtt import MQTT
from hackeriet.door import users
import os, logging, time

logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')

door_name = os.getenv("DOOR_NAME", 'hackeriet')
door_topic = "hackeriet/door/%s/open" % door_name
door_timeout = int(os.getenv("DOOR_TIMEOUT", 2))

mqtt = MQTT()

def main():
    logging.debug('Starting main loop')
    while True:
        users.load()

        # Read data from card reader
        logging.debug('mifare: waiting for data...')
        data = mifare.try_read()

        if data:
            logging.debug('mifare: data read')
            user = users.auth(data[0:16])
            if user:
                ascii_user = user.encode('ascii', 'replace').decode('ascii')
                logging.info('auth: card read for user %s' % ascii_user)
                mqtt(door_topic, user)
            else:
                logging.debug('auth: card data does not belong to a user: %s' % data[0:16])
            # Avoid spewing messages every single ms while a card is in front of the reader
            time.sleep(door_timeout)
        else:
            logging.debug('mifare: no data read in last attempt')

if __name__ == "__main__":
    main()

#!/usr/bin/env python
from hackeriet import mifare
from hackeriet.mqtt import MQTT
from hackeriet.door import users
import os, logging

logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')

door_name = os.getenv("DOOR_NAME", 'hackeriet')
door_topic = "hackeriet/door/%s/open" % door_name
door_timeout = int(os.getenv("DOOR_TIMEOUT", 2))

mqtt = MQTT()

def main():
    logging.debug('Starting main loop')
    while True:
        users.load()

        # Read data from card reader
        logging.debug('mifare: waiting for data...')
        data = mifare.try_read()

        if data:
            logging.debug('mifare: data read')
            user = users.auth(data[0:16])
            if user:
                ascii_user = user.encode('ascii', 'replace').decode('ascii')
                logging.info('auth: card read for user %s' % ascii_user)
                mqtt(door_topic, user)
            else:
                logging.debug('auth: card data does not belong to a user: %s' % data[0:16])
            # Avoid spewing messages every single ms while a card is in front of the reader
            time.sleep(door_timeout)
        else:
            logging.debug('mifare: no data read in last attempt')

if __name__ == "__main__":
    main()
apache-2.0
Python
2e042201d6c0e0709d7056d399052389d1ea54b0
Move imports inside initialize() method so that we don’t break things on initial setup.
RafaAguilar/django-shopify-auth,discolabs/django-shopify-auth,RafaAguilar/django-shopify-auth,funkybob/django-shopify-auth,funkybob/django-shopify-auth,discolabs/django-shopify-auth
shopify_auth/__init__.py
shopify_auth/__init__.py
VERSION = (0, 1, 6)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Gavin Ballard'


def initialize():
    import shopify
    from django.conf import settings
    from django.core.exceptions import ImproperlyConfigured

    if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET:
        raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings")

    shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)

import shopify

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

VERSION = (0, 1, 5)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Gavin Ballard'


def initialize():
    if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET:
        raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings")

    shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET)
mit
Python
c33b23e1d5263321cc29e2fe1f9871e36d97c5e5
add get method to opps db redis
opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps,williamroot/opps,williamroot/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps
opps/db/_redis.py
opps/db/_redis.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.db.conf import settings

from redis import ConnectionPool
from redis import Redis as RedisClient


class Redis:

    def __init__(self, key_prefix, key_sufix):
        self.key_prefix = key_prefix
        self.key_sufix = key_sufix
        self.host = settings.OPPS_DB_HOST
        self.port = settings.OPPS_DB_PORT
        self.db = 0

        pool = ConnectionPool(host=self.host, port=self.port, db=self.db)
        self.conn = RedisClient(connection_pool=pool)

    def close(self):
        self.conn = None
        return True

    def key(self):
        return '{}_{}_{}'.format(settings.OPPS_DB_NAME,
                                 self.key_prefix,
                                 self.key_sufix)

    def save(self, document):
        return self.conn.set(self.key(), document)

    def get(self):
        self.conn.get(self.key())

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.db.conf import settings

from redis import ConnectionPool
from redis import Redis as RedisClient


class Redis:

    def __init__(self, key_prefix, key_sufix):
        self.key_prefix = key_prefix
        self.key_sufix = key_sufix
        self.host = settings.OPPS_DB_HOST
        self.port = settings.OPPS_DB_PORT
        self.db = 0

        pool = ConnectionPool(host=self.host, port=self.port, db=self.db)
        self.conn = RedisClient(connection_pool=pool)

    def close(self):
        self.conn = None
        return True

    def key(self):
        return '{}_{}_{}'.format(settings.OPPS_DB_NAME,
                                 self.key_prefix,
                                 self.key_sufix)

    def save(self, document):
        return self.conn.set(self.key(), document)
mit
Python
b03b168cd752d50f1091106d3f4fcc0a79b22203
Fix tests
Siyavula/siyavula.latex2image
siyavula/latex2image/tests/latex2image_tests.py
siyavula/latex2image/tests/latex2image_tests.py
# coding=utf-8
from unittest import TestCase

from lxml import etree, html

from siyavula.latex2image.imageutils import replace_latex_with_images


class TestBaseEquationToImageConversion(TestCase):
    """Test the equation to image conversion."""

    def setUp(self):
        self.element_input = etree.Element('xml')
        self.div_input = etree.SubElement(self.element_input, 'div')
        self.div_input.set('class', 'latex-math')

    def test_complex_equation_to_png(self):
        self.div_input.text = u'\\(\\begin{{aligned}} \\vec{{F}}_{{g}} & = m\\vec{{g}} \\\\ & = (\\text{{12,7}}\\ \\text{{kg}})(\\text{{9,8}}\\ \\text{{m·s$^{{-2}}$}}) \\\\ & = \\text{{124,46}}\\ \\text{{N}}\\text{{&#181; µ &#956; μ}} &#181; µ &#956; μ \\end{{aligned}}\\'.replace('{{', '{').replace('}}', '}')
        xml = html.tostring(replace_latex_with_images(self.element_input, 'latex-math', '', ''))
        self.assertEqual(xml, '<xml><div class="latex-math"><a href="/8996d7eee5c41cdf08aa8c0e9fe42e93.png"><img src="/8996d7eee5c41cdf08aa8c0e9fe42e93.png" srcset="/b0791f40d3207d55907aa0b7df78ca1e.png 2x"></a></div></xml>')

# coding=utf-8
from unittest import TestCase

from lxml import etree

from siyavula.latex2image.imageutils import replace_latex_with_images


class TestBaseEquationToImageConversion(TestCase):
    """Test the equation to image conversion."""

    def setUp(self):
        self.element_input = etree.Element('xml')
        self.div_input = etree.SubElement(self.element_input, 'div')
        self.div_input.set('class', 'latex-math')

    def test_complex_equation_to_png(self):
        self.div_input.text = u'\\(\\begin{{aligned}} \\vec{{F}}_{{g}} & = m\\vec{{g}} \\\\ & = (\\text{{12,7}}\\ \\text{{kg}})(\\text{{9,8}}\\ \\text{{m·s$^{{-2}}$}}) \\\\ & = \\text{{124,46}}\\ \\text{{N}}\\text{{&#181; µ &#956; μ}} &#181; µ &#956; μ \\end{{aligned}}\\'.replace('{{', '{').replace('}}', '}')
        self.assertEqual(replace_latex_with_images(self.element_input, 'latex-math', '', ''), None)
mit
Python
3989abf6de879af6982a76ea3522f11f789c6569
Increment version for speedup release
rhiever/MarkovNetwork,rhiever/MarkovNetwork
MarkovNetwork/_version.py
MarkovNetwork/_version.py
# -*- coding: utf-8 -*-

"""
Copyright 2016 Randal S. Olson

Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

__version__ = '1.3'

# -*- coding: utf-8 -*-

"""
Copyright 2016 Randal S. Olson

Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

__version__ = '1.2'
mit
Python
0a9bd97598bc63450bcf0956242d3b67e2a52d9b
Remove testing code
sustainableis/python-sis
pysis/reqs/buildings/__init__.py
pysis/reqs/buildings/__init__.py
# -*- encoding: utf-8 -*-

from pysis.reqs.base import Request
from pysis.resources.buildings import Buildings
from pysis.resources.outputs import Outputs
from pysis.resources.blastcells import Blastcells
from pysis.resources.metrics import Metrics


class Get(Request):
    uri = 'buildings/{id}'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings'


class GetOutputs(Request):
    uri = 'buildings/{id}/outputs'
    resource = Outputs


class GetBlastcells(Request):
    uri = 'buildings/{id}/blastcells'
    resource = Blastcells


class GetInfo(Request):
    uri = 'buildings/{id}/info'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/info'


class GetMetricsScores(Request):
    uri = 'buildings/{id}/metrics/energystar'
    resource = Metrics

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/metrics'


class GetMetrics(Request):
    uri = 'buildings/{id}/metrics'
    resource = Metrics

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/metrics'


class Set(Request):
    uri = 'buildings/{id}'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}'

# -*- encoding: utf-8 -*-

from pysis.reqs.base import Request
from pysis.resources.buildings import Buildings
from pysis.resources.outputs import Outputs
from pysis.resources.blastcells import Blastcells
from pysis.resources.metrics import Metrics


class Get(Request):
    uri = 'buildings/{id}'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings'


class GetOutputs(Request):
    uri = 'buildings/{id}/outputs'
    resource = Outputs


class GetBlastcells(Request):
    uri = 'buildings/{id}/blastcells'
    resource = Blastcells


class GetInfo(Request):
    uri = 'buildings/{id}/info'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/info'


class GetMetricsScores(Request):
    uri = 'buildings/{id}/metrics/energystar'
    resource = Metrics
    print vars(Request)

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/metrics'


class GetMetrics(Request):
    uri = 'buildings/{id}/metrics'
    resource = Metrics

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}/metrics'


class Set(Request):
    uri = 'buildings/{id}'
    resource = Buildings

    def clean_uri(self):
        if not self.id:
            return 'buildings/{id}'
isc
Python
d2ae65564c173789578c0119be7d1143d7c59641
Fix mistaken variable name.
chbrown/pybtex,chbrown/pybtex,andreas-h/pybtex,andreas-h/pybtex
pybtex/style/formatting/__init__.py
pybtex/style/formatting/__init__.py
# Copyright (C) 2006, 2007, 2008, 2009 Andrey Golovizin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from pybtex.core import FormattedEntry
from pybtex.style.template import node, join
from pybtex.richtext import Symbol, Text
from pybtex.plugin import find_plugin


@node
def toplevel(children, data):
    return join(sep=Symbol('newblock')) [children].format_data(data)


class FormatterBase:
    default_label_style = 'number'
    default_name_style = 'plain'

    def __init__(self, label_style=None, name_style=None, abbreviate_names=False, **kwargs):
        if name_style is None:
            name_style = find_plugin('pybtex.style.names', self.default_name_style)
        if label_style is None:
            label_style = find_plugin('pybtex.style.labels', self.default_label_style)
        self.format_label = label_style.LabelStyle().format
        self.format_name = name_style.NameStyle().format
        self.abbreviate_names = abbreviate_names

    def format_entries(self, entries):
        for number, (key, entry) in enumerate(entries):
            entry.number = number + 1
            for persons in entry.persons.itervalues():
                for person in persons:
                    person.text = self.format_name(person, self.abbreviate_names)
            f = getattr(self, "format_" + entry.type)
            text = f(entry)
            label = self.format_label(entry)
            yield FormattedEntry(key, text, label)

# Copyright (C) 2006, 2007, 2008, 2009 Andrey Golovizin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from pybtex.core import FormattedEntry
from pybtex.style.template import node, join
from pybtex.richtext import Symbol, Text
from pybtex.plugin import find_plugin


@node
def toplevel(children, data):
    return join(sep=Symbol('newblock')) [children].format_data(data)


class FormatterBase:
    default_label_style = 'number'
    default_name_style = 'plain'

    def __init__(self, label_style=None, name_style=None, abbreviate_names=False, **kwargs):
        if name_style is None:
            name_style = find_plugin('pybtex.style.names', self.default_name_style)
        if label_style is None:
            label_format = find_plugin('pybtex.style.labels', self.default_label_style)
        self.format_label = label_style.LabelStyle().format
        self.format_name = name_style.NameStyle().format
        self.abbreviate_names = abbreviate_names

    def format_entries(self, entries):
        for number, (key, entry) in enumerate(entries):
            entry.number = number + 1
            for persons in entry.persons.itervalues():
                for person in persons:
                    person.text = self.format_name(person, self.abbreviate_names)
            f = getattr(self, "format_" + entry.type)
            text = f(entry)
            label = self.format_label(entry)
            yield FormattedEntry(key, text, label)
mit
Python
43294bc83d013d79d909cadfcf2508aca0c575f6
Fix for bad y param.
charanpald/APGL
exp/sandbox/predictors/profile/DecisionTreeLearnerProfile.py
exp/sandbox/predictors/profile/DecisionTreeLearnerProfile.py
import numpy
import logging
import sys
from apgl.util.ProfileUtils import ProfileUtils
from exp.sandbox.predictors.DecisionTreeLearner import DecisionTreeLearner
from apgl.data.ExamplesGenerator import ExamplesGenerator
from sklearn.tree import DecisionTreeRegressor

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
numpy.random.seed(22)

class DecisionTreeLearnerProfile(object):
    def profileLearnModel(self):
        numExamples = 1000
        numFeatures = 50
        minSplit = 10
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)
        y = numpy.array(y, numpy.float)

        learner = DecisionTreeLearner(minSplit=minSplit, maxDepth=maxDepth, pruneType="REP-CV")
        #learner.learnModel(X, y)
        #print("Done")
        ProfileUtils.profile('learner.learnModel(X, y) ', globals(), locals())
        print(learner.getTree().getNumVertices())

    def profileDecisionTreeRegressor(self):
        numExamples = 1000
        numFeatures = 20
        minSplit = 10
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)

        regressor = DecisionTreeRegressor(min_split=minSplit, max_depth=maxDepth, min_density=0.0)
        ProfileUtils.profile('regressor.fit(X, y)', globals(), locals())

    def profilePredict(self):
        #Make the prdiction function faster
        numExamples = 1000
        numFeatures = 20
        minSplit = 1
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)
        learner = DecisionTreeLearner(minSplit=minSplit, maxDepth=maxDepth)
        learner.learnModel(X, y)
        print(learner.getTree().getNumVertices())

        ProfileUtils.profile('learner.predict(X)', globals(), locals())
        print(learner.getTree().getNumVertices())

profiler = DecisionTreeLearnerProfile()
profiler.profileLearnModel() #0.418
#profiler.profileDecisionTreeRegressor() #0.020
#profiler.profilePredict() #0.024

import numpy
import logging
import sys
from apgl.util.ProfileUtils import ProfileUtils
from exp.sandbox.predictors.DecisionTreeLearner import DecisionTreeLearner
from apgl.data.ExamplesGenerator import ExamplesGenerator
from sklearn.tree import DecisionTreeRegressor

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
numpy.random.seed(22)

class DecisionTreeLearnerProfile(object):
    def profileLearnModel(self):
        numExamples = 1000
        numFeatures = 20
        minSplit = 10
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)

        learner = DecisionTreeLearner(minSplit=minSplit, maxDepth=maxDepth, pruneType="REP-CV")
        #learner.learnModel(X, y)
        #print("Done")
        ProfileUtils.profile('learner.learnModel(X, y) ', globals(), locals())
        print(learner.getTree().getNumVertices())

    def profileDecisionTreeRegressor(self):
        numExamples = 1000
        numFeatures = 20
        minSplit = 10
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)

        regressor = DecisionTreeRegressor(min_split=minSplit, max_depth=maxDepth, min_density=0.0)
        ProfileUtils.profile('regressor.fit(X, y)', globals(), locals())

    def profilePredict(self):
        #Make the prdiction function faster
        numExamples = 1000
        numFeatures = 20
        minSplit = 1
        maxDepth = 20

        generator = ExamplesGenerator()
        X, y = generator.generateBinaryExamples(numExamples, numFeatures)
        learner = DecisionTreeLearner(minSplit=minSplit, maxDepth=maxDepth)
        learner.learnModel(X, y)
        print(learner.getTree().getNumVertices())

        ProfileUtils.profile('learner.predict(X)', globals(), locals())
        print(learner.getTree().getNumVertices())

profiler = DecisionTreeLearnerProfile()
profiler.profileLearnModel() #0.418
#profiler.profileDecisionTreeRegressor() #0.020
#profiler.profilePredict() #0.024
bsd-3-clause
Python
ca4f942656429021bc0ff9276dab70f28bc00023
reduce to 1 iteration.
daStrauss/subsurface
src/testOptRoutine.py
src/testOptRoutine.py
'''
Created on Oct 17, 2012

@author: dstrauss
'''

import numpy as np

D = {'solverType':'projection', 'flavor':'TE', 'numRuns':1, 'expt':'testSolver'}

def getMyVars(parseNumber, D):
    '''routine to return the parameters to test at the current iteration.'''
    D['rho'] = 0.00001
    D['xi'] = 1e-9
    D['freqs'] = np.array([1e3])
    D['inc'] = np.array([75*np.pi/180])
    D['bkgNo'] = 100
    D['numProcs'] = 1
    D['maxIter'] = 1

    return D

'''
Created on Oct 17, 2012

@author: dstrauss
'''

import numpy as np

D = {'solverType':'projection', 'flavor':'TE', 'numRuns':1, 'expt':'testSolver'}

def getMyVars(parseNumber, D):
    '''routine to return the parameters to test at the current iteration.'''
    D['rho'] = 0.00001
    D['xi'] = 1e-9
    D['freqs'] = np.array([1e3])
    D['inc'] = np.array([75*np.pi/180])
    D['bkgNo'] = 100
    D['numProcs'] = 1
    D['maxIter'] = 200

    return D
apache-2.0
Python
3de3e4bf2f0df0d602c2f69dd5a06016bf31eb9d
rebuild checkpoints when something breaks while updating group exports
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
couchexport/groupexports.py
couchexport/groupexports.py
from couchexport.models import GroupExportConfiguration, SavedBasicExport
from couchdbkit.exceptions import ResourceNotFound
from datetime import datetime
import os
import json
from couchexport.tasks import Temp, rebuild_schemas
from couchexport.export import SchemaMismatchException
from dimagi.utils.logging import notify_exception

def export_for_group(export_id, output_dir):
    try:
        config = GroupExportConfiguration.get(export_id)
    except ResourceNotFound:
        raise Exception("Couldn't find an export with id %s" % export_id)

    for config, schema in config.all_exports:
        try:
            tmp, _ = schema.get_export_files(format=config.format)
        except SchemaMismatchException, e:
            # fire off a delayed force update to prevent this from happening again
            rebuild_schemas.delay(config.index)
            msg = "Saved export failed for group export {index}. The specific error is {msg}."
            notify_exception(None, msg.format(index=config.index, msg=str(e)))
            # TODO: do we care enough to notify the user?
            # This is typically only called by things like celerybeat.
            continue
        payload = Temp(tmp).payload
        if output_dir == "couch":
            saved = SavedBasicExport.view("couchexport/saved_exports",
                                          key=json.dumps(config.index),
                                          include_docs=True,
                                          reduce=False).one()
            if not saved:
                saved = SavedBasicExport(configuration=config)
                saved.save()
            saved.put_attachment(payload, config.filename)
            saved.last_updated = datetime.utcnow()
            # force update the config in case it changed.
            # redundant in the create case
            saved.configuration = config
            saved.save()
        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)

from couchexport.models import GroupExportConfiguration, SavedBasicExport
from couchdbkit.exceptions import ResourceNotFound
from datetime import datetime
import os
import json
from couchexport.tasks import Temp

def export_for_group(export_id, output_dir):
    try:
        config = GroupExportConfiguration.get(export_id)
    except ResourceNotFound:
        raise Exception("Couldn't find an export with id %s" % export_id)

    for config, schema in config.all_exports:
        tmp, _ = schema.get_export_files(format=config.format)
        payload = Temp(tmp).payload
        if output_dir == "couch":
            saved = SavedBasicExport.view("couchexport/saved_exports",
                                          key=json.dumps(config.index),
                                          include_docs=True,
                                          reduce=False).one()
            if not saved:
                saved = SavedBasicExport(configuration=config)
                saved.save()
            saved.put_attachment(payload, config.filename)
            saved.last_updated = datetime.utcnow()
            # force update the config in case it changed.
            # redundant in the create case
            saved.configuration = config
            saved.save()
        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
bsd-3-clause
Python
1746dad3e5bb218aede86cdb38e458a3f7ce270c
Update Inputkey.py
gotankgo/practice
python/inputkeyboard/Inputkey.py
python/inputkeyboard/Inputkey.py
import sys, tty, termios

class _Getch:
    def __call__(self, a):
        return self._get_key(a)

    def _get_key(self, a):
        fd = sys.stdin.fileno()
        old = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(a)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old)
        return ch

def arrow_get():
    ARROW_KEY = {
        '\x1b[A' :'up',
        '\x1b[B' :'down',
        '\x1b[C' :'right',
        '\x1b[D' :'left',
        '\x1b\x1b\x1b' :'esc'
    }
    while True:
        raw_key = _Getch()
        while True:
            # Must read 3 characters to capture arrow keys.
            # Reason: an arrow key is 3 characters long, including the escape character.
            # But with 3, ordinary characters are only returned once 3 of them accumulate.
            input_key = raw_key(3)
            if input_key != '':
                break
        if input_key in ARROW_KEY.keys():
            return ARROW_KEY.get(input_key)
        else:
            continue

def get():
    while True:
        raw_key = _Getch()
        while True:
            input_key = raw_key(1)
            if input_key != '':
                break
        return input_key

import sys, tty, termios, time

class _Getch:
    def __call__(self, a):
        return self._get_key(a)

    def _get_key(self, a):
        fd = sys.stdin.fileno()
        old = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(a)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old)
        return ch

def get():
    while True:
        raw_key = _Getch()
        while True:
            input_key = raw_key(1)
            if input_key != '':
                break
        return input_key

def arrow_get():
    ARROW_KEY = {
        '\x1b[A' :'up',
        '\x1b[B' :'down',
        '\x1b[C' :'right',
        '\x1b[D' :'left'
    }
    while True:
        raw_key = _Getch()
        while True:
            # Must read 3 characters to capture arrow keys.
            # Reason: an arrow key is 3 characters long, including the escape character.
            # But with 3, ordinary characters are only returned once 3 of them accumulate.
            input_key = raw_key(3)
            if input_key != '':
                break
        if input_key in ARROW_KEY.keys():
            return ARROW_KEY.get(input_key)
        else:
            continue
mit
Python
2c43cf3368742d7bb0acb91118ff07aeb1fe4183
Fix comment typo.
ohsu-qin/qipipe
qipipe/staging/sarcoma_config.py
qipipe/staging/sarcoma_config.py
import os
from six.moves.configparser import ConfigParser as Config
from six.moves.configparser import NoOptionError

CFG_FILE = os.path.abspath(
    os.path.join(
        os.path.dirname(__file__), '..', 'conf', 'sarcoma.cfg')
)
"""
The Sarcoma Tumor Location configuration file. This file contains
properties that associate the subject name to the location, e.g.::

    Sarcoma004 = SHOULDER

The value is the SNOMED anatomy term.
"""


class ConfigError(Exception):
    pass


def sarcoma_location(subject):
    """
    :param subject: the XNAT Subject ID
    :return: the subject tumor location
    """
    try:
        return sarcoma_config().get('Tumor Location', subject)
    except NoOptionError:
        raise ConfigError("Tumor location for subject %s was not found in the"
                          " sarcoma configuration file %s" % (subject, CFG_FILE))


def sarcoma_config():
    """
    :return: the sarcoma configuration
    :rtype: ConfigParser
    """
    # Read the configuration file on demand.
    if not hasattr(sarcoma_config, 'instance'):
        sarcoma_config.instance = Config()
        sarcoma_config.instance.read(CFG_FILE)
    return sarcoma_config.instance

import os
from six.moves.configparser import ConfigParser as Config
from six.moves.configparser import NoOptionError

CFG_FILE = os.path.abspath(
    os.path.join(
        os.path.dirname(__file__), '..', 'conf', 'sarcoma.cfg')
)
"""
The Sarcoma Tumor Location configuration file. This file contains
properties that associat the subject name to the location, e.g.::

    Sarcoma004 = SHOULDER

The value is the SNOMED anatomy term.
"""


class ConfigError(Exception):
    pass


def sarcoma_location(subject):
    """
    :param subject: the XNAT Subject ID
    :return: the subject tumor location
    """
    try:
        return sarcoma_config().get('Tumor Location', subject)
    except NoOptionError:
        raise ConfigError("Tumor location for subject %s was not found in the"
                          " sarcoma configuration file %s" % (subject, CFG_FILE))


def sarcoma_config():
    """
    :return: the sarcoma configuration
    :rtype: ConfigParser
    """
    # Read the configuration file on demand.
    if not hasattr(sarcoma_config, 'instance'):
        sarcoma_config.instance = Config()
        sarcoma_config.instance.read(CFG_FILE)
    return sarcoma_config.instance
bsd-2-clause
Python
f0e07f97fd43a0f54c8b0996944038a07e9a0e96
Add error handling for when the meter name does not match the NEM file
aguinane/energyusage,aguinane/energyusage,aguinane/energyusage,aguinane/energyusage
metering/loader.py
metering/loader.py
""" metering.loader ~~~~~~~~~ Define the meter data models """ import logging from nemreader import read_nem_file from sqlalchemy.orm import sessionmaker from energy_shaper import split_into_daily_intervals from . import get_db_engine from . import save_energy_reading from . import refresh_daily_stats from . import refresh_monthly_stats def load_nem_data(meter_id, nmi, nem_file): """ Load data from NEM file and save to database """ engine = get_db_engine(meter_id) Session = sessionmaker(bind=engine) session = Session() m = read_nem_file(nem_file) try: channels = m.readings[nmi] except KeyError: first_nmi = list(m.readings.keys())[0] logging.warning('NMI of %s not found, using %s instead', nmi, first_nmi) channels = m.readings[first_nmi] for ch_name in channels.keys(): reads = split_into_daily_intervals(channels[ch_name]) for read in reads: try: quality_method = read[3] except IndexError: quality_method = None save_energy_reading(session, ch_name, read[0], read[1], read[2], quality_method) session.commit() refresh_daily_stats(meter_id) refresh_monthly_stats(meter_id)
""" metering.loader ~~~~~~~~~ Define the meter data models """ from nemreader import read_nem_file from sqlalchemy.orm import sessionmaker from energy_shaper import split_into_daily_intervals from . import get_db_engine from . import save_energy_reading from . import refresh_daily_stats from . import refresh_monthly_stats def load_nem_data(meter_id, nmi, nem_file): """ Load data from NEM file and save to database """ engine = get_db_engine(meter_id) Session = sessionmaker(bind=engine) session = Session() m = read_nem_file(nem_file) channels = m.readings[nmi] for ch_name in channels.keys(): reads = split_into_daily_intervals(channels[ch_name]) for read in reads: try: quality_method = read[3] except IndexError: quality_method = None save_energy_reading(session, ch_name, read[0], read[1], read[2], quality_method) session.commit() refresh_daily_stats(meter_id) refresh_monthly_stats(meter_id)
agpl-3.0
Python
6a1b5003547833ffb0cddea933594c0322ad1bf2
Add complete utils instead
frappe/frappe,vjFaLk/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,saurabh6790/frappe,saurabh6790/frappe,adityahase/frappe,mhbu50/frappe,mhbu50/frappe,adityahase/frappe,yashodhank/frappe,mhbu50/frappe,yashodhank/frappe,adityahase/frappe,adityahase/frappe,saurabh6790/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,mhbu50/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,frappe/frappe,vjFaLk/frappe,saurabh6790/frappe,yashodhank/frappe,vjFaLk/frappe,vjFaLk/frappe
frappe/social/doctype/energy_point_rule/energy_point_rule.py
frappe/social/doctype/energy_point_rule/energy_point_rule.py
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert


class EnergyPointRule(Document):
    def on_update(self):
        frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)

    def on_trash(self):
        frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)

    def apply(self, doc):
        whitelisted_globals = {
            "utils": frappe.utils
        }
        if frappe.safe_eval(self.condition, whitelisted_globals, {'doc': doc.as_dict()}):
            multiplier = 1
            if self.multiplier_field:
                multiplier = doc.get(self.multiplier_field) or 1
            points = round(self.points * multiplier)
            reference_doctype = doc.doctype
            reference_name = doc.name
            user = doc.get(self.user_field)
            rule = self.name

            # incase of zero as result after roundoff
            if not points:
                return

            # if user_field has no value
            if not user or user == 'Administrator':
                return

            try:
                create_energy_points_log(reference_doctype, reference_name, {
                    'points': points,
                    'user': user,
                    'rule': rule
                })
            except Exception as e:
                frappe.log_error(frappe.get_traceback(), 'apply_energy_point')


def process_energy_points(doc, state):
    if (frappe.flags.in_patch
            or frappe.flags.in_install
            or not is_energy_point_enabled()):
        return

    old_doc = doc.get_doc_before_save()

    # check if doc has been cancelled
    if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
        return revert_points_for_cancelled_doc(doc)

    for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
            dict(reference_doctype = doc.doctype, enabled=1)):
        frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)


def revert_points_for_cancelled_doc(doc):
    energy_point_logs = frappe.get_all('Energy Point Log', {
        'reference_doctype': doc.doctype,
        'reference_name': doc.name,
        'type': 'Auto'
    })
    for log in energy_point_logs:
        revert(log.name, _('Reference document has been cancelled'))


def get_energy_point_doctypes():
    return [
        d.reference_doctype for d in frappe.get_all('Energy Point Rule',
            ['reference_doctype'], {'enabled': 1})
    ]
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert


class EnergyPointRule(Document):
    def on_update(self):
        frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)

    def on_trash(self):
        frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)

    def apply(self, doc):
        whitelisted_globals = {
            "getdate": frappe.utils.getdate
        }
        if frappe.safe_eval(self.condition, whitelisted_globals, {'doc': doc.as_dict()}):
            multiplier = 1
            if self.multiplier_field:
                multiplier = doc.get(self.multiplier_field) or 1
            points = round(self.points * multiplier)
            reference_doctype = doc.doctype
            reference_name = doc.name
            user = doc.get(self.user_field)
            rule = self.name

            # incase of zero as result after roundoff
            if not points:
                return

            # if user_field has no value
            if not user or user == 'Administrator':
                return

            try:
                create_energy_points_log(reference_doctype, reference_name, {
                    'points': points,
                    'user': user,
                    'rule': rule
                })
            except Exception as e:
                frappe.log_error(frappe.get_traceback(), 'apply_energy_point')


def process_energy_points(doc, state):
    if (frappe.flags.in_patch
            or frappe.flags.in_install
            or not is_energy_point_enabled()):
        return

    old_doc = doc.get_doc_before_save()

    # check if doc has been cancelled
    if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
        return revert_points_for_cancelled_doc(doc)

    for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
            dict(reference_doctype = doc.doctype, enabled=1)):
        frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)


def revert_points_for_cancelled_doc(doc):
    energy_point_logs = frappe.get_all('Energy Point Log', {
        'reference_doctype': doc.doctype,
        'reference_name': doc.name,
        'type': 'Auto'
    })
    for log in energy_point_logs:
        revert(log.name, _('Reference document has been cancelled'))


def get_energy_point_doctypes():
    return [
        d.reference_doctype for d in frappe.get_all('Energy Point Rule',
            ['reference_doctype'], {'enabled': 1})
    ]
mit
Python
c906e675bb4c75286d98d78e4625d12a158652c7
Update accel.py
jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi
apps/accelerometer/accel.py
apps/accelerometer/accel.py
#!/usr/bin/python
# Author : ipmstyle, https://github.com/ipmstyle
#        : jeonghoonkang, https://github.com/jeonghoonkang
# for the detail of HW connection, see lcd_connect.py

import sys
from time import strftime, localtime
# beware the dir location, it should exist
sys.path.append("../lcd_berepi/lib")
sys.path.append("../sht20")
from lcd import *
from sht25class import *

def main():
    # Initialise display
    lcd_init()
    #print ip_chk(), wip_chk(), mac_chk(), wmac_chk(), stalk_chk(), time_chk()

    while True:
        str = ip_chk()
        str = str[:-1]
        lcd_string('%s ET' %str,LCD_LINE_1,1)
        str = mac_chk()
        str = str[:-1]
        lcd_string('%s' % (tstr),LCD_LINE_1,1)
        str = humi_chk()
        lcd_string('%.5s ' % (str),LCD_LINE_2,1)
        whiteLCDon()
        time.sleep(2)

def run_cmd(cmd):
    p = Popen(cmd, shell=True, stdout=PIPE)
    output = p.communicate()[0]
    return output

def temp_chk():
    temperature = getTemperature()
    return temperature

def humi_chk():
    humidity = getHumidity()
    return humidity

def time_chk():
    time = strftime("%Y-%m%d %H:%M", localtime())
    return time

def ip_chk():
    cmd = "ip addr show eth0 | grep inet | awk '$2 !~ /^169/ {print $2}' | cut -d/ -f1"
    ipAddr = run_cmd(cmd)
    return ipAddr

def wip_chk():
    cmd = "ip addr show wlan0 | grep inet | awk '{print $2}' | cut -d/ -f1"
    wipAddr = run_cmd(cmd)
    return wipAddr

def mac_chk():
    cmd = "ifconfig -a | grep ^eth | awk '{print $5}'"
    macAddr = run_cmd(cmd)
    return macAddr

def wmac_chk():
    cmd = "ifconfig -a | grep ^wlan | awk '{print $5}'"
    wmacAddr = run_cmd(cmd)
    return wmacAddr

def stalk_chk():
    cmd = "hostname"
    return run_cmd(cmd)

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
    finally:
        lcd_byte(0x01, LCD_CMD)
        lcd_string("Goodbye!",LCD_LINE_1,2)
        GPIO.cleanup()
#!/usr/bin/python
# Author : ipmstyle, https://github.com/ipmstyle
#        : jeonghoonkang, https://github.com/jeonghoonkang
# for the detail of HW connection, see lcd_connect.py

import sys
from time import strftime, localtime
# beware the dir location, it should exist
sys.path.append("../lcd_berepi/lib")
sys.path.append("../sht20")
from lcd import *
from sht25class import *

def main():
    # Initialise display
    lcd_init()
    #print ip_chk(), wip_chk(), mac_chk(), wmac_chk(), stalk_chk(), time_chk()

    while True:
        str = ip_chk()
        str = str[:-1]
        lcd_string('%s ET' %str,LCD_LINE_1,1)
        str = mac_chk()
        str = str[:-1]

        str = wip_chk()
        str = str[:-1]
        lcd_string('%s WL ' % (str),LCD_LINE_2,1)
        str = wmac_chk()
        str = str[:-1]
        # lcd_string('%s' % (str),LCD_LINE_2,1)
        blueLCDon()
        time.sleep(1.2)

        str = stalk_chk()
        str = str[:-1]
        lcd_string('%s' % (tstr),LCD_LINE_1,1)
        lcd_string('%s ' % (str),LCD_LINE_2,1)
        blueLCDon()
        time.sleep(1)

        lcd_string('%s' % (tstr),LCD_LINE_1,1)
        str = humi_chk()
        lcd_string('%.5s ' % (str),LCD_LINE_2,1)
        whiteLCDon()
        time.sleep(2)

def run_cmd(cmd):
    p = Popen(cmd, shell=True, stdout=PIPE)
    output = p.communicate()[0]
    return output

def temp_chk():
    temperature = getTemperature()
    return temperature

def humi_chk():
    humidity = getHumidity()
    return humidity

def time_chk():
    time = strftime("%Y-%m%d %H:%M", localtime())
    return time

def ip_chk():
    cmd = "ip addr show eth0 | grep inet | awk '$2 !~ /^169/ {print $2}' | cut -d/ -f1"
    ipAddr = run_cmd(cmd)
    return ipAddr

def wip_chk():
    cmd = "ip addr show wlan0 | grep inet | awk '{print $2}' | cut -d/ -f1"
    wipAddr = run_cmd(cmd)
    return wipAddr

def mac_chk():
    cmd = "ifconfig -a | grep ^eth | awk '{print $5}'"
    macAddr = run_cmd(cmd)
    return macAddr

def wmac_chk():
    cmd = "ifconfig -a | grep ^wlan | awk '{print $5}'"
    wmacAddr = run_cmd(cmd)
    return wmacAddr

def stalk_chk():
    cmd = "hostname"
    return run_cmd(cmd)

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
    finally:
        lcd_byte(0x01, LCD_CMD)
        lcd_string("Goodbye!",LCD_LINE_1,2)
        GPIO.cleanup()
bsd-2-clause
Python
1ed14e9231d295c6db83337f7cf2b586a39dc3dc
Add timestamp to payment log list display
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
apps/cowry_docdata/admin.py
apps/cowry_docdata/admin.py
from babel.numbers import format_currency
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils import translation

from .models import DocDataPaymentOrder, DocDataPayment, DocDataPaymentLogEntry


class DocDataPaymentLogEntryInine(admin.TabularInline):
    model = DocDataPaymentLogEntry
    can_delete = False
    extra = 0
    max_num = 0
    fields = ('timestamp', 'level', 'message')
    readonly_fields = fields


class DocDataPaymentInline(admin.TabularInline):
    model = DocDataPayment
    can_delete = False
    extra = 0
    max_num = 0
    fields = ('payment_method', 'status', 'created', 'updated')
    readonly_fields = fields


class DocDataPaymentOrderAdmin(admin.ModelAdmin):
    list_filter = ('status',)
    list_display = ('created', 'amount_override', 'status')
    raw_id_fields = ('order',)
    search_fields = ('payment_order_id', 'merchant_order_reference')
    inlines = (DocDataPaymentInline, DocDataPaymentLogEntryInine)

    def amount_override(self, obj):
        language = translation.get_language().split('-')[0]
        return format_currency(obj.amount / 100, obj.currency, locale=language)
    amount_override.short_description = 'amount'

admin.site.register(DocDataPaymentOrder, DocDataPaymentOrderAdmin)


class DocDataPaymentLogEntryAdmin(admin.ModelAdmin):
    # List view.
    list_display = ('payment', 'timestamp', 'level', 'message')
    list_filter = ('level', 'timestamp')
    search_fields = ('message',)

    def payment(self, obj):
        payment = obj.docdata_payment_order
        url = reverse('admin:%s_%s_change' % (payment._meta.app_label, payment._meta.module_name), args=[payment.id])
        return "<a href='%s'>%s</a>" % (str(url), payment)
    payment.allow_tags = True

    # Don't allow the detail view to be accessed.
    def has_change_permission(self, request, obj=None):
        if not obj:
            return True
        return False

admin.site.register(DocDataPaymentLogEntry, DocDataPaymentLogEntryAdmin)
from babel.numbers import format_currency
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils import translation

from .models import DocDataPaymentOrder, DocDataPayment, DocDataPaymentLogEntry


class DocDataPaymentLogEntryInine(admin.TabularInline):
    model = DocDataPaymentLogEntry
    can_delete = False
    extra = 0
    max_num = 0
    fields = ('timestamp', 'level', 'message')
    readonly_fields = fields


class DocDataPaymentInline(admin.TabularInline):
    model = DocDataPayment
    can_delete = False
    extra = 0
    max_num = 0
    fields = ('payment_method', 'status', 'created', 'updated')
    readonly_fields = fields


class DocDataPaymentOrderAdmin(admin.ModelAdmin):
    list_filter = ('status',)
    list_display = ('created', 'amount_override', 'status')
    raw_id_fields = ('order',)
    search_fields = ('payment_order_id', 'merchant_order_reference')
    inlines = (DocDataPaymentInline, DocDataPaymentLogEntryInine)

    def amount_override(self, obj):
        language = translation.get_language().split('-')[0]
        return format_currency(obj.amount / 100, obj.currency, locale=language)
    amount_override.short_description = 'amount'

admin.site.register(DocDataPaymentOrder, DocDataPaymentOrderAdmin)


class DocDataPaymentLogEntryAdmin(admin.ModelAdmin):
    # List view.
    list_display = ('payment', 'level', 'message')
    list_filter = ('level', 'timestamp')
    search_fields = ('message',)

    def payment(self, obj):
        payment = obj.docdata_payment_order
        url = reverse('admin:%s_%s_change' % (payment._meta.app_label, payment._meta.module_name), args=[payment.id])
        return "<a href='%s'>%s</a>" % (str(url), payment)
    payment.allow_tags = True

    # Don't allow the detail view to be accessed.
    def has_change_permission(self, request, obj=None):
        if not obj:
            return True
        return False

admin.site.register(DocDataPaymentLogEntry, DocDataPaymentLogEntryAdmin)
bsd-3-clause
Python
8064be72de340fca963da2cade2b73aa969fbdbd
Add string representation for Activity model
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
csunplugged/activities/models.py
csunplugged/activities/models.py
from django.db import models


class Activity(models.Model):
    name = models.CharField(max_length=200)
    description = models.TextField()

    def __str__(self):
        return self.name
from django.db import models


class Activity(models.Model):
    name = models.CharField(max_length=200)
    description = models.TextField()
mit
Python
dd4e62667da94469a8bbb6dd0ccd881124e7665f
Fix return value of terraform.render
elifesciences/builder,elifesciences/builder
src/buildercore/terraform.py
src/buildercore/terraform.py
import json
from buildercore.utils import ensure

RESOURCE_TYPE_FASTLY = 'fastly_service_v1'
RESOURCE_NAME_FASTLY = 'fastly-cdn'


def render(context):
    if not context['fastly']:
        return '{}'

    ensure(len(context['fastly']['subdomains']) == 1, "Only 1 subdomain for Fastly CDNs is supported")
    tf_file = {
        'resource': {
            RESOURCE_TYPE_FASTLY: {
                # must be unique but only in a certain context like this, use some constants
                RESOURCE_NAME_FASTLY: {
                    'name': context['stackname'],
                    'domain': {
                        'name': context['fastly']['subdomains'][0],
                    },
                    'backend': {
                        'address': context['full_hostname'],
                        'name': context['stackname'],
                        'port': 443,
                        'use_ssl': True,
                        'ssl_check_cert': False  # bad option
                        # it's for minimal fuss. Before we start customizing this, a lot of the risk to be tackled
                        # is integrating everything together with a good lifecycle for adding, modifying and removing
                        # CDNs that point to CloudFormation-managed resources.
                    },
                    'force_destroy': True
                }
            }
        },
    }
    return json.dumps(tf_file)
import json
from buildercore.utils import ensure

RESOURCE_TYPE_FASTLY = 'fastly_service_v1'
RESOURCE_NAME_FASTLY = 'fastly-cdn'


def render(context):
    if not context['fastly']:
        return None

    ensure(len(context['fastly']['subdomains']) == 1, "Only 1 subdomain for Fastly CDNs is supported")
    tf_file = {
        'resource': {
            RESOURCE_TYPE_FASTLY: {
                # must be unique but only in a certain context like this, use some constants
                RESOURCE_NAME_FASTLY: {
                    'name': context['stackname'],
                    'domain': {
                        'name': context['fastly']['subdomains'][0],
                    },
                    'backend': {
                        'address': context['full_hostname'],
                        'name': context['stackname'],
                        'port': 443,
                        'use_ssl': True,
                        'ssl_check_cert': False  # bad option
                        # it's for minimal fuss. Before we start customizing this, a lot of the risk to be tackled
                        # is integrating everything together with a good lifecycle for adding, modifying and removing
                        # CDNs that point to CloudFormation-managed resources.
                    },
                    'force_destroy': True
                }
            }
        },
    }
    return json.dumps(tf_file)
mit
Python
a98e536334eb3d3376efe93c1bdc639ecdc4a2a0
remove unused code
approvals/ApprovalTests.Python,approvals/ApprovalTests.Python,tdpreece/ApprovalTests.Python,approvals/ApprovalTests.Python
approvaltests/reporters/generic_diff_reporter_factory.py
approvaltests/reporters/generic_diff_reporter_factory.py
import json

from approvaltests.reporters.generic_diff_reporter import GenericDiffReporter
from approvaltests.utils import get_adjacent_file


class GenericDiffReporterFactory(object):
    reporters = []

    def __init__(self):
        self.load(get_adjacent_file('reporters.json'))
        self.add_fallback_reporter_config(["PythonNative", "python", [get_adjacent_file("python_native_reporter.py")]])

    def add_fallback_reporter_config(self, config):
        self.reporters.append(config)

    def list(self):
        return [r[0] for r in self.reporters]

    def get(self, reporter_name):
        config = next((r for r in self.reporters if r[0] == reporter_name), None)
        return self._create_reporter(config)

    @staticmethod
    def _create_reporter(config):
        if not config:
            return None
        return GenericDiffReporter(config)

    def save(self, file_name):
        with open(file_name, 'w') as f:
            json.dump(
                self.reporters,
                f,
                sort_keys=True,
                indent=2,
                separators=(',', ': ')
            )
        return file_name

    def load(self, file_name):
        with open(file_name, 'r') as f:
            self.reporters = json.load(f)
        return self.reporters

    def get_first_working(self):
        working = (i for i in self.get_all_reporters() if i.is_working())
        return next(working, None)

    def get_all_reporters(self):
        instances = (self._create_reporter(r) for r in self.reporters)
        return instances

    def remove(self, reporter_name):
        self.reporters = [r for r in self.reporters if r[0] != reporter_name]
import json

from approvaltests.reporters.generic_diff_reporter import GenericDiffReporter
from approvaltests.utils import get_adjacent_file


class GenericDiffReporterFactory(object):
    reporters = []

    def __init__(self):
        self.load(get_adjacent_file('reporters.json'))
        self.add_fallback_reporter_config(["PythonNative", "python", [get_adjacent_file("python_native_reporter.py")]])

    def add_default_reporter_config(self, config):
        self.reporters.insert(0, config)

    def add_fallback_reporter_config(self, config):
        self.reporters.append(config)

    def list(self):
        return [r[0] for r in self.reporters]

    def get(self, reporter_name):
        config = next((r for r in self.reporters if r[0] == reporter_name), None)
        return self._create_reporter(config)

    @staticmethod
    def _create_reporter(config):
        if not config:
            return None
        return GenericDiffReporter(config)

    def save(self, file_name):
        with open(file_name, 'w') as f:
            json.dump(
                self.reporters,
                f,
                sort_keys=True,
                indent=2,
                separators=(',', ': ')
            )
        return file_name

    def load(self, file_name):
        with open(file_name, 'r') as f:
            self.reporters = json.load(f)
        return self.reporters

    def get_first_working(self):
        working = (i for i in self.get_all_reporters() if i.is_working())
        return next(working, None)

    def get_all_reporters(self):
        instances = (self._create_reporter(r) for r in self.reporters)
        return instances

    def remove(self, reporter_name):
        self.reporters = [r for r in self.reporters if r[0] != reporter_name]
apache-2.0
Python
ae2981b26fce2641a9bae5af68a3d5043fdd8b46
Fix disapear exception message (#31)
arnaudmorin/python-ovh,arnaudmorin/python-ovh
ovh/exceptions.py
ovh/exceptions.py
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2013-2016, OVH SAS.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#  * Neither the name of OVH SAS nor the
#    names of its contributors may be used to endorse or promote products
#    derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY OVH SAS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL OVH SAS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
All exceptions used in OVH SDK derives from `APIError`
"""


class APIError(Exception):
    """Base OVH API exception, all specific exceptions inherits from it."""

    def __init__(self, *args, **kwargs):
        self.response = kwargs.pop('response', None)
        super(APIError, self).__init__(*args, **kwargs)


class HTTPError(APIError):
    """Raised when the request fails at a low level (DNS, network, ...)"""


class InvalidKey(APIError):
    """Raised when trying to sign request with invalid key"""


class InvalidCredential(APIError):
    """Raised when trying to sign request with invalid consumer key"""


class InvalidResponse(APIError):
    """Raised when api response is not valid json"""


class InvalidRegion(APIError):
    """Raised when region is not in `REGIONS`."""


class ReadOnlyError(APIError):
    """Raised when attempting to modify readonly data."""


class ResourceNotFoundError(APIError):
    """Raised when requested resource does not exist."""


class BadParametersError(APIError):
    """Raised when request contains bad parameters."""


class ResourceConflictError(APIError):
    """Raised when trying to create an already existing resource."""


class NetworkError(APIError):
    """Raised when there is an error from network layer."""


class NotGrantedCall(APIError):
    """Raised when there is an error from network layer."""


class NotCredential(APIError):
    """Raised when there is an error from network layer."""


class Forbidden(APIError):
    """Raised when there is an error from network layer."""
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2013-2016, OVH SAS.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#  * Neither the name of OVH SAS nor the
#    names of its contributors may be used to endorse or promote products
#    derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY OVH SAS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL OVH SAS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
All exceptions used in OVH SDK derives from `APIError`
"""


class APIError(Exception):
    """Base OVH API exception, all specific exceptions inherits from it."""

    def __init__(self, *args, **kwargs):
        self.response = kwargs.get('response')


class HTTPError(APIError):
    """Raised when the request fails at a low level (DNS, network, ...)"""


class InvalidKey(APIError):
    """Raised when trying to sign request with invalid key"""


class InvalidCredential(APIError):
    """Raised when trying to sign request with invalid consumer key"""


class InvalidResponse(APIError):
    """Raised when api response is not valid json"""


class InvalidRegion(APIError):
    """Raised when region is not in `REGIONS`."""


class ReadOnlyError(APIError):
    """Raised when attempting to modify readonly data."""


class ResourceNotFoundError(APIError):
    """Raised when requested resource does not exist."""


class BadParametersError(APIError):
    """Raised when request contains bad parameters."""


class ResourceConflictError(APIError):
    """Raised when trying to create an already existing resource."""


class NetworkError(APIError):
    """Raised when there is an error from network layer."""


class NotGrantedCall(APIError):
    """Raised when there is an error from network layer."""


class NotCredential(APIError):
    """Raised when there is an error from network layer."""


class Forbidden(APIError):
    """Raised when there is an error from network layer."""
bsd-3-clause
Python
63f6637228153b1f77ca860c297ff3554d802ce9
Fix order history sorting logic, #sort() should be called before #reverse().
supistar/OandaOrderbook,supistar/OandaOrderbook,supistar/OandaOrderbook
model/orderbook.py
model/orderbook.py
# -*- encoding:utf8 -*-

import os
from model.oandapy import oandapy


class OrderBook(object):

    def get_latest_orderbook(self, instrument, period, history):
        oanda_token = os.environ.get('OANDA_TOKEN')
        oanda = oandapy.API(environment="practice", access_token=oanda_token)
        orders = oanda.get_orderbook(instrument=instrument)
        try:
            timeset = orders.keys()
            timeset.sort()
            timeset.reverse()
            target_time = timeset[history]
        except:
            return None
        order = orders[target_time]
        order['time'] = target_time
        return order
# -*- encoding:utf8 -*-

import os
from model.oandapy import oandapy


class OrderBook(object):

    def get_latest_orderbook(self, instrument, period, history):
        oanda_token = os.environ.get('OANDA_TOKEN')
        oanda = oandapy.API(environment="practice", access_token=oanda_token)
        orders = oanda.get_orderbook(instrument=instrument)
        try:
            timeset = orders.keys()
            timeset.reverse()
            target_time = timeset[history]
        except:
            return None
        order = orders[target_time]
        order['time'] = target_time
        return order
mit
Python
6741c59d726f1ceaf6edba82b6e97f501fc265ee
fix zero shape bug!
yassersouri/omgh,yassersouri/omgh
src/scripts/make_parts_dataset.py
src/scripts/make_parts_dataset.py
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import settings
sys.path.append(settings.CAFFE_PYTHON_PATH)
import skimage.io
import caffe
import numpy as np
import click
from glob import glob
import utils
from dataset import CUB_200_2011
from parts import Parts


@click.command()
@click.argument('out-path', type=click.Path(exists=True))
def main(out_path):
    cub = CUB_200_2011(settings.CUB_ROOT)
    cub_images = cub.get_all_images()

    for image in cub_images:
        image_path = image['img_file']
        image_id = image['img_id']
        cub_parts = cub.get_parts()
        rel_image_path = image_path[len(settings.CUB_IMAGES_FOLDER):]

        o_image = caffe.io.load_image(image_path)

        parts = cub_parts.for_image(image_id)
        head_parts = parts.filter_by_name(Parts.HEAD_PART_NAMES)
        if len(head_parts) <= 2:
            print "#parts:%d \tID:%d \tName:%s" % (len(head_parts), int(image_id), rel_image_path)
        if len(head_parts) <= 1:
            continue

        part_image = head_parts.get_rect(o_image)
        if 0 in part_image.shape:
            print "#parts:%d \tID:%d \tName:%s + Shape:%s" % (len(head_parts), int(image_id), rel_image_path, str(part_image.shape))

        out_image_path = os.path.join(out_path, rel_image_path)
        utils.ensure_dir(os.path.dirname(out_image_path))
        skimage.io.imsave(out_image_path, part_image)

if __name__ == '__main__':
    main()
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import settings
sys.path.append(settings.CAFFE_PYTHON_PATH)
import skimage.io
import caffe
import numpy as np
import click
from glob import glob
import utils
from dataset import CUB_200_2011
from parts import Parts


@click.command()
@click.argument('out-path', type=click.Path(exists=True))
def main(out_path):
    cub = CUB_200_2011(settings.CUB_ROOT)
    cub_images = cub.get_all_images()

    for image in cub_images:
        image_path = image['img_file']
        image_id = image['img_id']
        cub_parts = cub.get_parts()
        rel_image_path = image_path[len(settings.CUB_IMAGES_FOLDER):]

        o_image = caffe.io.load_image(image_path)

        parts = cub_parts.for_image(image_id)
        head_parts = parts.filter_by_name(Parts.HEAD_PART_NAMES)
        if len(head_parts) <= 2:
            print "#parts:%d \tID:%d \tName:%s" % (len(head_parts), int(image_id), rel_image_path)
        if len(head_parts) <= 1:
            continue

        part_image = head_parts.get_rect(o_image)

        out_image_path = os.path.join(out_path, rel_image_path)
        utils.ensure_dir(os.path.dirname(out_image_path))
        skimage.io.imsave(out_image_path, part_image)

if __name__ == '__main__':
    main()
mit
Python
48cd6af0e138dd28b18ca3a71f41976c71483445
Add --forceuninstall option
boltomli/MyMacScripts,boltomli/MyMacScripts
Python/brewcaskupgrade.py
Python/brewcaskupgrade.py
#! /usr/bin/env python3
# -*- coding: utf8 -*-

import argparse
import shutil
from subprocess import check_output, run

parser = argparse.ArgumentParser(description='Update every entries found in cask folder.')
parser.add_argument('--pretend', dest='pretend', action='store_true',
                    help='Pretend to take action.')
parser.add_argument('--forceuninstall', dest='forceuninstall', action='store_true',
                    help='Force uninstall before install.')
parser.set_defaults(pretend=False, forceuninstall=False)
args = parser.parse_args()

brew_bin = 'brew'
if not shutil.which(brew_bin):
    raise FileExistsError(brew_bin + ' not exists')

list_command = [
    brew_bin,
    'cask',
    'list'
]
list_installed = str.split(check_output(list_command).decode(), '\n')
list_installed = [i for i in list_installed if i is not '']
print(str(len(list_installed)) + ' cask(s) installed')

updated_count = 0
for cask in list_installed:
    info_command = [
        brew_bin,
        'cask',
        'info',
        cask
    ]
    try:
        install_status = str.splitlines(check_output(info_command).decode())
    except:
        install_status = 'Not installed'
    version = str.strip(str.split(install_status[0], ':')[1])
    for line in install_status:
        if not line.startswith(cask) and cask in line and version in line:
            is_version_installed = True
    is_version_installed = False
    if not is_version_installed:
        print('Installing', cask)
        install_command = [
            brew_bin,
            'cask',
            'install',
            '--force',
            cask
        ]
        if args.pretend:
            print(' '.join(install_command))
        else:
            if args.forceuninstall:
                uninstall_command = [
                    brew_bin,
                    'cask',
                    'uninstall',
                    '--force',
                    cask
                ]
                run(uninstall_command)
            run(install_command)
        updated_count += 1

print(str(updated_count) + ' cask(s) updated')
#! /usr/bin/env python3
# -*- coding: utf8 -*-

import argparse
import shutil
from subprocess import check_output, run

parser = argparse.ArgumentParser(description='Update every entries found in cask folder.')
parser.add_argument('--pretend', dest='pretend', action='store_true',
                    help='Pretend to take action.')
parser.set_defaults(pretend=False)
args = parser.parse_args()

brew_bin = 'brew'
if not shutil.which(brew_bin):
    raise FileExistsError(brew_bin + ' not exists')

list_command = [
    brew_bin,
    'cask',
    'list'
]
list_installed = str.split(check_output(list_command).decode(), '\n')
list_installed = [i for i in list_installed if i is not '']
print(str(len(list_installed)) + ' cask(s) installed')

updated_count = 0
for cask in list_installed:
    info_command = [
        brew_bin,
        'cask',
        'info',
        cask
    ]
    try:
        install_status = str.splitlines(check_output(info_command).decode())
    except:
        install_status = 'Not installed'
    version = str.strip(str.split(install_status[0], ':')[1])
    is_version_installed = False
    for line in install_status:
        if not line.startswith(cask) and cask in line and version in line:
            is_version_installed = True
    if not is_version_installed:
        print('Installing', cask)
        install_command = [
            brew_bin,
            'cask',
            'install',
            '--force',
            cask
        ]
        if args.pretend:
            print(' '.join(install_command))
        else:
            run(install_command)
        updated_count += 1

print(str(updated_count) + ' cask(s) updated')
cc0-1.0
Python
a4bc6c0c4d13629dbdfef30edcba262efce0eaff
fix up config for heroku
Jaza/colorsearchtest,Jaza/colorsearchtest,Jaza/colorsearchtest
colorsearchtest/settings.py
colorsearchtest/settings.py
# -*- coding: utf-8 -*-
import os
os_env = os.environ


class Config(object):
    SECRET_KEY = os_env.get('COLORSEARCHTEST_SECRET', 'secret-key')  # TODO: Change me
    APP_DIR = os.path.abspath(os.path.dirname(__file__))  # This directory
    PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
    SQLALCHEMY_DATABASE_URI = ((os.environ.get('HEROKU') is not None)
                               and os_env.get(
                                   'DATABASE_URL',
                                   'postgresql://localhost/example')
                               or os_env.get(
                                   'COLORSEARCHTEST_DATABASE_URI',
                                   'postgresql://localhost/example'))  # TODO: Change me
    BCRYPT_LOG_ROUNDS = 13
    ASSETS_DEBUG = False
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar
    DEBUG_TB_INTERCEPT_REDIRECTS = False
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.

    IS_DELTA_E_COLORMATH_ENABLED = False
    IS_DELTA_E_DBQUERY_ENABLED = True
    MAX_COLORS = 100


class ProdConfig(Config):
    """Production configuration."""
    ENV = 'prod'
    DEBUG = False
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar


class DevConfig(Config):
    """Development configuration."""
    ENV = 'dev'
    DEBUG = True
    DEBUG_TB_ENABLED = True
    ASSETS_DEBUG = True  # Don't bundle/minify static assets
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.


class TestConfig(Config):
    TESTING = True
    DEBUG = True
    BCRYPT_LOG_ROUNDS = 1  # For faster tests
    WTF_CSRF_ENABLED = False  # Allows form testing
# -*- coding: utf-8 -*-
import os
os_env = os.environ


class Config(object):
    SECRET_KEY = os_env.get('COLORSEARCHTEST_SECRET', 'secret-key')  # TODO: Change me
    APP_DIR = os.path.abspath(os.path.dirname(__file__))  # This directory
    PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
    SQLALCHEMY_DATABASE_URI = os_env.get(
        'COLORSEARCHTEST_DATABASE_URI',
        'postgresql://localhost/example')  # TODO: Change me
    BCRYPT_LOG_ROUNDS = 13
    ASSETS_DEBUG = False
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar
    DEBUG_TB_INTERCEPT_REDIRECTS = False
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.

    IS_DELTA_E_COLORMATH_ENABLED = False
    IS_DELTA_E_DBQUERY_ENABLED = True
    MAX_COLORS = 100


class ProdConfig(Config):
    """Production configuration."""
    ENV = 'prod'
    DEBUG = False
    DEBUG_TB_ENABLED = False  # Disable Debug toolbar


class DevConfig(Config):
    """Development configuration."""
    ENV = 'dev'
    DEBUG = True
    DEBUG_TB_ENABLED = True
    ASSETS_DEBUG = True  # Don't bundle/minify static assets
    CACHE_TYPE = 'simple'  # Can be "memcached", "redis", etc.


class TestConfig(Config):
    TESTING = True
    DEBUG = True
    BCRYPT_LOG_ROUNDS = 1  # For faster tests
    WTF_CSRF_ENABLED = False  # Allows form testing
apache-2.0
Python
f20eb91dcf04bc8e33fbb48ebfbef1b56acbf02d
Make functions that pull a number of tweets and pics
samanehsan/spark_github,samanehsan/spark_github,samanehsan/learn-git,samanehsan/learn-git
web.py
web.py
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django""" import os import random import requests from flask import Flask import tweepy import settings app = Flask(__name__) @app.route('/') def home_page(): return 'Hello from the SPARK learn-a-thon!' def get_instagram_image(): instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID) data = requests.get(instagram_api_url).json()['data'] number_of_images = choose_number_of_images() images_returned = [] for image in number_of_images: images_returned.append(random.choice(data)['images']['low_resolution']['url']) return images_returned def get_tweets(): auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET) auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET) api = tweepy.API(auth) number_of_tweets = choose_number_of_tweets() tweets_text = [] tweets = tweepy.Cursor(api.search, q='#spark') for tweet in tweets.items(limit=number_of_tweets): tweets_text.append(tweet.text) return tweets_text def choose_number_of_images(): number = 3 return number def choose_number_of_tweets(): number = 3 return number if __name__ == '__main__': port = int(os.environ.get("PORT", 5000)) app.run(host='0.0.0.0', port=port)
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django""" import os from flask import Flask app = Flask(__name__) @app.route('/') def home_page(): return 'Hello from the SPARK learn-a-thon!' if __name__ == '__main__': port = int(os.environ.get("PORT", 5000)) app.run(host='0.0.0.0', port=port)
apache-2.0
Python
0d58c2ffc8ec6afc353a242f942f668b0b7f362c
Correct shipping repository method calls
enodyt/django-oscar-paypal,evonove/django-oscar-paypal,bharling/django-oscar-worldpay,FedeDR/django-oscar-paypal,nfletton/django-oscar-paypal,django-oscar/django-oscar-paypal,vintasoftware/django-oscar-paypal,bharling/django-oscar-worldpay,enodyt/django-oscar-paypal,embedded1/django-oscar-paypal,ZachGoldberg/django-oscar-paypal,lpakula/django-oscar-paypal,enodyt/django-oscar-paypal,britco/django-oscar-paypal,evonove/django-oscar-paypal,st8st8/django-oscar-paypal,britco/django-oscar-paypal,embedded1/django-oscar-paypal,phedoreanu/django-oscar-paypal,django-oscar/django-oscar-paypal,FedeDR/django-oscar-paypal,vintasoftware/django-oscar-paypal,django-oscar/django-oscar-paypal,phedoreanu/django-oscar-paypal,ZachGoldberg/django-oscar-paypal,st8st8/django-oscar-paypal,vintasoftware/django-oscar-paypal,lpakula/django-oscar-paypal,FedeDR/django-oscar-paypal,st8st8/django-oscar-paypal,lpakula/django-oscar-paypal,embedded1/django-oscar-paypal,britco/django-oscar-paypal,bharling/django-oscar-worldpay,bharling/django-oscar-worldpay,nfletton/django-oscar-paypal,ZachGoldberg/django-oscar-paypal,phedoreanu/django-oscar-paypal,evonove/django-oscar-paypal,nfletton/django-oscar-paypal
sandbox/apps/shipping/repository.py
sandbox/apps/shipping/repository.py
from decimal import Decimal as D

from oscar.apps.shipping.methods import Free, FixedPrice
from oscar.apps.shipping.repository import Repository as CoreRepository


class Repository(CoreRepository):
    """
    This class is included so that there is a choice of shipping methods.
    Oscar's default behaviour is to only have one which means you can't test
    the shipping features of PayPal.
    """

    def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
        methods = [Free(), FixedPrice(D('10.00')), FixedPrice(D('20.00'))]
        return self.prime_methods(basket, methods)
from decimal import Decimal as D

from oscar.apps.shipping.methods import Free, FixedPrice
from oscar.apps.shipping.repository import Repository as CoreRepository


class Repository(CoreRepository):
    """
    This class is included so that there is a choice of shipping methods.
    Oscar's default behaviour is to only have one which means you can't test
    the shipping features of PayPal.
    """

    def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
        methods = [Free(), FixedPrice(D('10.00')), FixedPrice(D('20.00'))]
        return self.add_basket_to_methods(basket, methods)
bsd-3-clause
Python
edec18a82d6027c8a011fbef84c8aa3b80e18826
Update forward_device1.py
VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot
Server/forward_device1.py
Server/forward_device1.py
import zmq


def main():
    print "\nServer for ProBot is running..."

    try:
        context = zmq.Context(1)
        # Socket facing clients
        frontend = context.socket(zmq.SUB)
        frontend.bind("tcp://*:5559")
        frontend.setsockopt(zmq.SUBSCRIBE, "")

        # Socket facing services
        backend = context.socket(zmq.PUB)
        backend.bind("tcp://*:5560")

        zmq.device(zmq.FORWARDER, frontend, backend)
    except Exception, e:
        print e
        print "bringing down zmq device"
    finally:
        pass
        frontend.close()
        backend.close()
        context.term()

if __name__ == "__main__":
    main()
import zmq


def main():
    print "\nServer for ProBot is running..."

    try:
        context = zmq.Context(1)
        # Socket facing clients
        frontend = context.socket(zmq.SUB)
        frontend.bind("tcp://*:5559")
        frontend.setsockopt(zmq.SUBSCRIBE, "")

        # Socket facing services
        backend = context.socket(zmq.PUB)
        backend.bind("tcp://*:5560")

        zmq.device(zmq.FORWARDER, frontend, backend)
    except Exception, e:
        print e
        print "bringing down zmq device"
    finally:
        pass
        frontend.close()
        backend.close()
        context.term()

if __name__ == "__main__":
    main()
agpl-3.0
Python
2100b512ffb188374e1d883cd2f359586182596b
ADD migration name
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
packages/grid/backend/alembic/versions/2021-09-20_916812f40fb4.py
packages/grid/backend/alembic/versions/2021-09-20_916812f40fb4.py
"""ADD daa_document column at setup table Revision ID: 916812f40fb4 Revises: 5796f6ceb314 Create Date: 2021-09-20 01:07:37.239186 """ # third party from alembic import op # type: ignore import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "916812f40fb4" down_revision = "5796f6ceb314" branch_labels = None depends_on = None def upgrade() -> None: op.add_column("setup", sa.Column("daa_document", sa.String(255), default="")) pass def downgrade() -> None: pass
"""empty message Revision ID: 916812f40fb4 Revises: 5796f6ceb314 Create Date: 2021-09-20 01:07:37.239186 """ # third party from alembic import op # type: ignore import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "916812f40fb4" down_revision = "5796f6ceb314" branch_labels = None depends_on = None def upgrade() -> None: op.add_column("setup", sa.Column("daa_document", sa.String(255), default="")) pass def downgrade() -> None: pass
apache-2.0
Python
80ede493f698395176d3c67dd1e4f3723b0d5859
Add initial pass at writing the git commit hook
EliRibble/mothermayi
mothermayi/hook.py
mothermayi/hook.py
import logging
import os

LOGGER = logging.getLogger(__name__)

class NoRepoFoundError(Exception):
    pass

class PreCommitExists(Exception):
    pass

def find_git_repo():
    location = os.path.abspath('.')
    while location != '/':
        check = os.path.join(location, '.git')
        if os.path.exists(check) and os.path.isdir(check):
            return check
        location = os.path.dirname(location)
    raise NoRepoFoundError("Could not find a git repository (.git) in {}".format(os.path.abspath('.')))

HOOK_CONTENT = """
mothermayi run
"""

def write_hook(pre_commit):
    with open(pre_commit, 'w') as f:
        f.write(HOOK_CONTENT)

def install():
    repo = find_git_repo()
    LOGGER.debug("Found git repo at %s", repo)
    hooks = os.path.join(repo, 'hooks')
    pre_commit = os.path.join(hooks, 'pre-commit')
    if os.path.exists(pre_commit):
        raise PreCommitExists("A git hook already exists at {}. Refusing to overwrite. Please remove it manually".format(pre_commit))
    write_hook(pre_commit)
import logging
import os

LOGGER = logging.getLogger(__name__)

class NoRepoFoundError(Exception):
    pass

def find_git_repo():
    location = os.path.abspath('.')
    while location != '/':
        check = os.path.join(location, '.git')
        if os.path.exists(check) and os.path.isdir(check):
            return check
        location = os.path.dirname(location)
    raise NoRepoFoundError("Could not find a git repository (.git) in {}".format(os.path.abspath('.')))

def install():
    repo = find_git_repo()
    LOGGER.debug("Found git repo at %s", repo)
mit
Python
5d5f73ac411873c0ec82e233b74ce70f4de4ab03
Optimize migration process
openprocurement/openprocurement.planning.api
openprocurement/planning/api/migration.py
openprocurement/planning/api/migration.py
# -*- coding: utf-8 -*-
import logging
from openprocurement.planning.api.traversal import Root
from openprocurement.planning.api.models import Plan

LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = 1
SCHEMA_DOC = 'openprocurement_plans_schema'


def get_db_schema_version(db):
    schema_doc = db.get(SCHEMA_DOC, {"_id": SCHEMA_DOC})
    return schema_doc.get("version", SCHEMA_VERSION - 1)


def set_db_schema_version(db, version):
    schema_doc = db.get(SCHEMA_DOC, {"_id": SCHEMA_DOC})
    schema_doc["version"] = version
    db.save(schema_doc)


def migrate_data(registry, destination=None):
    if registry.settings.get('plugins') and 'planning' not in registry.settings['plugins'].split(','):
        return
    cur_version = get_db_schema_version(registry.db)
    if cur_version == SCHEMA_VERSION:
        return cur_version
    for step in xrange(cur_version, destination or SCHEMA_VERSION):
        LOGGER.info("Migrate openprocurement plans schema from {} to {}".format(step, step + 1),
                    extra={'MESSAGE_ID': 'migrate_data'})
        migration_func = globals().get('from{}to{}'.format(step, step + 1))
        if migration_func:
            migration_func(registry)
        set_db_schema_version(registry.db, step + 1)


def from0to1(registry):
    class Request(object):
        def __init__(self, registry):
            self.registry = registry

    len(registry.db.view('plans/all', limit=1))
    results = registry.db.iterview('plans/all', 2 ** 10, include_docs=True, stale='update_after')
    docs = []
    request = Request(registry)
    root = Root(request)
    for i in results:
        doc = i.doc
        if not all([i.get('url', '').startswith(registry.docservice_url) for i in doc.get('documents', [])]):
            plan = Plan(doc)
            plan.__parent__ = root
            doc = plan.to_primitive()
            doc['dateModified'] = get_now().isoformat()
            docs.append(doc)
        if len(docs) >= 2 ** 7:
            registry.db.update(docs)
            docs = []
    if docs:
        registry.db.update(docs)
# -*- coding: utf-8 -*-
import logging
from openprocurement.planning.api.traversal import Root
from openprocurement.planning.api.models import Plan

LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = 1
SCHEMA_DOC = 'openprocurement_plans_schema'


def get_db_schema_version(db):
    schema_doc = db.get(SCHEMA_DOC, {"_id": SCHEMA_DOC})
    return schema_doc.get("version", SCHEMA_VERSION - 1)


def set_db_schema_version(db, version):
    schema_doc = db.get(SCHEMA_DOC, {"_id": SCHEMA_DOC})
    schema_doc["version"] = version
    db.save(schema_doc)


def migrate_data(registry, destination=None):
    if registry.settings.get('plugins') and 'planning' not in registry.settings['plugins'].split(','):
        return
    cur_version = get_db_schema_version(registry.db)
    if cur_version == SCHEMA_VERSION:
        return cur_version
    for step in xrange(cur_version, destination or SCHEMA_VERSION):
        LOGGER.info("Migrate openprocurement plans schema from {} to {}".format(step, step + 1),
                    extra={'MESSAGE_ID': 'migrate_data'})
        migration_func = globals().get('from{}to{}'.format(step, step + 1))
        if migration_func:
            migration_func(registry)
        set_db_schema_version(registry.db, step + 1)


def from0to1(registry):
    class Request(object):
        def __init__(self, registry):
            self.registry = registry

    len(registry.db.view('plans/all', limit=1))
    results = registry.db.iterview('plans/all', 2 ** 10, include_docs=True, stale='update_after')
    docs = []
    request = Request(registry)
    root = Root(request)
    for i in results:
        doc = i.doc
        if doc.get('documents'):
            plan = Plan(doc)
            plan.__parent__ = root
            doc = plan.to_primitive()
            docs.append(doc)
        if len(docs) >= 2 ** 7:
            registry.db.update(docs)
            docs = []
    if docs:
        registry.db.update(docs)
apache-2.0
Python
d0a9d10d0df25de670e8bf9a1e603ed1fbe5ca29
use helpers
alexoneill/py3status,valdur55/py3status,Andrwe/py3status,ultrabug/py3status,docwalter/py3status,tobes/py3status,tobes/py3status,valdur55/py3status,vvoland/py3status,ultrabug/py3status,guiniol/py3status,guiniol/py3status,ultrabug/py3status,valdur55/py3status,Andrwe/py3status
py3status/modules/taskwarrior.py
py3status/modules/taskwarrior.py
# -*- coding: utf-8 -*-
"""
Display tasks currently running in taskwarrior.

Configuration parameters:
    cache_timeout: refresh interval for this module (default 5)
    format: display format for this module (default '{task}')

Format placeholders:
    {task} active tasks

Requires
    task: https://taskwarrior.org/download/

@author James Smith http://jazmit.github.io/
@license BSD
"""

import json


class Py3status:
    """
    """
    # available configuration parameters
    cache_timeout = 5
    format = '{task}'

    def taskWarrior(self):
        def describeTask(taskObj):
            return str(taskObj['id']) + ' ' + taskObj['description']

        task_command = 'task start.before:tomorrow status:pending export'
        task_json = json.loads(self.py3.command_output(task_command))
        task_result = ', '.join(map(describeTask, task_json))

        return {
            'cached_until': self.py3.time_in(self.cache_timeout),
            'full_text': self.py3.safe_format(self.format, {'task': task_result})
        }


if __name__ == "__main__":
    """
    Run module in test mode.
    """
    from py3status.module_test import module_test
    module_test(Py3status)
# -*- coding: utf-8 -*-
"""
Display tasks currently running in taskwarrior.

Configuration parameters:
    cache_timeout: how often we refresh this module in seconds (default 5)
    format: display format for taskwarrior (default '{task}')

Format placeholders:
    {task} active tasks

Requires
    task: https://taskwarrior.org/download/

@author James Smith http://jazmit.github.io/
@license BSD
"""

# import your useful libs here
from subprocess import check_output
import json
import shlex


class Py3status:
    """
    """
    # available configuration parameters
    cache_timeout = 5
    format = '{task}'

    def taskWarrior(self):
        command = 'task start.before:tomorrow status:pending export'
        taskwarrior_output = check_output(shlex.split(command))
        tasks_json = json.loads(taskwarrior_output.decode('utf-8'))

        def describeTask(taskObj):
            return str(taskObj['id']) + ' ' + taskObj['description']

        result = ', '.join(map(describeTask, tasks_json))

        return {
            'cached_until': self.py3.time_in(self.cache_timeout),
            'full_text': self.py3.safe_format(self.format, {'task': result})
        }


if __name__ == "__main__":
    """
    Run module in test mode.
    """
    from py3status.module_test import module_test
    module_test(Py3status)
bsd-3-clause
Python
c9b7e886f9276079fc79fbe394f5b15595f04603
Test fixes
danjac/ownblock,danjac/ownblock,danjac/ownblock
ownblock/ownblock/apps/messaging/tests.py
ownblock/ownblock/apps/messaging/tests.py
from unittest.mock import Mock

from django.test import TestCase

from rest_framework import serializers

from apps.accounts.tests import ResidentFactory
from apps.buildings.tests import ApartmentFactory

from .serializers import MessageSerializer


class SerializerTests(TestCase):

    def test_validate_recipient_if_same_as_sender(self):
        apt = ApartmentFactory.create()
        req = Mock()
        req.user = ResidentFactory.create(apartment=apt)
        serializer = MessageSerializer(context={'request': req})
        attrs = {'recipient': req.user}
        self.assertRaises(serializers.ValidationError,
                          serializer.validate_recipient,
                          attrs, 'recipient')

    def test_validate_recipient_if_does_not_exist(self):
        apt = ApartmentFactory.create()
        req = Mock()
        req.user = ResidentFactory.create(apartment=apt)
        recipient = ResidentFactory.create()
        serializer = MessageSerializer(context={'request': req})
        attrs = {'recipient': recipient}
        self.assertRaises(serializers.ValidationError,
                          serializer.validate_recipient,
                          attrs, 'recipient')

    def test_validate_recipient_if_ok(self):
        pass
from unittest.mock import Mock

from django.test import TestCase

from rest_framework import serializers

from apps.accounts.tests import ResidentFactory
from apps.buildings.tests import ApartmentFactory

from .serializers import MessageSerializer


class SerializerTests(TestCase):

    def test_validate_recipient_if_same_as_sender(self):
        apt = ApartmentFactory.create()
        req = Mock()
        req.user = ResidentFactory.create(apartment=apt)
        serializer = MessageSerializer(context={'request': req})
        attrs = {'recipient': req.user}
        self.assertRaises(serializers.ValidationError,
                          serializer.validate_recipient,
                          attrs, 'recipient')

    def test_validate_recipient_if_does_not_exist(self):
        pass

    def test_validate_recipient_if_ok(self):
        pass
mit
Python
5e9eda407832d9b97e7f78219f20236e04306a32
fix test, probably broken by a epydoc change this code is dead though so i don't much care
chevah/pydoctor,chevah/pydoctor,hawkowl/pydoctor,hawkowl/pydoctor,jelmer/pydoctor,jelmer/pydoctor,jelmer/pydoctor
pydoctor/test/test_formatting.py
pydoctor/test/test_formatting.py
from pydoctor import html, model
from py import test


def test_signatures():
    argspec = [['a', 'b', 'c'], None, None, (1,2)]
    assert html.getBetterThanArgspec(argspec) == (['a'], [('b', 1), ('c', 2)])

def test_strsig():
    argspec = [['a', 'b', 'c'], None, None, (1,2)]
    assert html.signature(argspec) == "a, b=1, c=2"

def test_strsigvar():
    argspec = [['a', 'b', 'c'], 'args', 'kk', (1,2)]
    assert html.signature(argspec) == "a, *args, b=1, c=2, **kk"

def test_strsigargh():
    argspec = [['a', ['b','c']], None, None, ()]
    assert html.signature(argspec) == 'a, (b, c)'

def test_link():
    doc0 = model.Documentable(None, 'twisted', None)
    docco = model.Documentable(None, 'threadz', None, doc0)
    assert html.link(docco) == 'twisted.threadz.html'

def test_summaryDoc():
    docco = model.Documentable(None, 'threadz', 'Woot\nYeah')
    assert html.summaryDoc(docco) == html.doc2html(docco, 'Woot')

def test_boringDocstring():
    assert html.boringDocstring('Woot\nYeah') == '<pre>Woot\nYeah</pre>'

def test_reallyBoringDocstring():
    undocced = '<pre class="undocumented">Undocumented</pre>'
    assert html.boringDocstring('') == undocced
    assert html.boringDocstring(None) == undocced

def test_doc2htmlEpy():
    if not html.EPYTEXT:
        test.skip("Epytext not available")
    assert html.doc2html(None, 'Woot\nYeah') == '<div><p>Woot Yeah</p>\n</div>'

class TestEpyHackers:
    def setup_method(self, meth):
        self.orig = html.EPYTEXT

    def teardown_method(self, meth):
        html.EPYTEXT = self.orig

    def test_doc2htmlBoring(self):
        if html.EPYTEXT:
            html.EPYTEXT = False
        assert html.doc2html(object(), 'Woot\nYeah') == '<pre>Woot\nYeah</pre>'

    def test_generateModuleIndex(self):
        #This test is a bit un-unity
        # And *damnit* how do I write teardowners
        html.EPYTEXT = False
        sysw = html.SystemWriter(None)
        pack = model.Package(None, 'twisted', None)
        mod = model.Module(None, 'threadz', 'Woot\nYeah', pack)
        fun = model.Function(None, 'blat', 'HICKY HECK\nYEAH', mod)
        fun.argspec = [(), None, None, ()]
        out = sysw.getHTMLFor(fun)
        assert 'blat()' in out
        assert 'HICKY HECK\nYEAH' in out
from pydoctor import html, model
from py import test


def test_signatures():
    argspec = [['a', 'b', 'c'], None, None, (1,2)]
    assert html.getBetterThanArgspec(argspec) == (['a'], [('b', 1), ('c', 2)])

def test_strsig():
    argspec = [['a', 'b', 'c'], None, None, (1,2)]
    assert html.signature(argspec) == "a, b=1, c=2"

def test_strsigvar():
    argspec = [['a', 'b', 'c'], 'args', 'kk', (1,2)]
    assert html.signature(argspec) == "a, *args, b=1, c=2, **kk"

def test_strsigargh():
    argspec = [['a', ['b','c']], None, None, ()]
    assert html.signature(argspec) == 'a, (b, c)'

def test_link():
    doc0 = model.Documentable(None, 'twisted', None)
    docco = model.Documentable(None, 'threadz', None, doc0)
    assert html.link(docco) == 'twisted.threadz.html'

def test_summaryDoc():
    docco = model.Documentable(None, 'threadz', 'Woot\nYeah')
    assert html.summaryDoc(docco) == html.doc2html(docco, 'Woot')

def test_boringDocstring():
    assert html.boringDocstring('Woot\nYeah') == '<pre>Woot\nYeah</pre>'

def test_reallyBoringDocstring():
    undocced = '<pre class="undocumented">Undocumented</pre>'
    assert html.boringDocstring('') == undocced
    assert html.boringDocstring(None) == undocced

def test_doc2htmlEpy():
    if not html.EPYTEXT:
        test.skip("Epytext not available")
    assert html.doc2html(None, 'Woot\nYeah') == '<div>Woot Yeah\n</div>'

class TestEpyHackers:
    def setup_method(self, meth):
        self.orig = html.EPYTEXT

    def teardown_method(self, meth):
        html.EPYTEXT = self.orig

    def test_doc2htmlBoring(self):
        if html.EPYTEXT:
            html.EPYTEXT = False
        assert html.doc2html(object(), 'Woot\nYeah') == '<pre>Woot\nYeah</pre>'

    def test_generateModuleIndex(self):
        #This test is a bit un-unity
        # And *damnit* how do I write teardowners
        html.EPYTEXT = False
        sysw = html.SystemWriter(None)
        pack = model.Package(None, 'twisted', None)
        mod = model.Module(None, 'threadz', 'Woot\nYeah', pack)
        fun = model.Function(None, 'blat', 'HICKY HECK\nYEAH', mod)
        fun.argspec = [(), None, None, ()]
        out = sysw.getHTMLFor(fun)
        assert 'blat()' in out
        assert 'HICKY HECK\nYEAH' in out
isc
Python
d9189f91370abd1e20e5010bb70d9c47efd58215
Change read_chrom_sizes to read from a FAIDX index if available
NIEHS/muver
muver/reference.py
muver/reference.py
import os

from wrappers import bowtie2, picard, samtools


def create_reference_indices(ref_fn):
    '''
    For a given reference FASTA file, generate several indices.
    '''
    bowtie2.build(ref_fn)
    samtools.faidx_index(ref_fn)
    picard.create_sequence_dictionary(ref_fn)


def read_chrom_sizes(reference_assembly_fn):
    '''
    Iterate through a FASTA file to find the length of each chromosome. If
    a FAIDX index is available, it will read the lengths from there.
    '''
    chrom_sizes = dict()

    if os.path.exists(reference_assembly_fn + '.fai'):
        with open(reference_assembly_fn + '.fai') as f:
            for line in f:
                chromosome, size = line.strip().split('\t')[:2]
                chrom_sizes[chromosome] = int(size)
    else:
        last_chromosome = None

        with open(reference_assembly_fn) as f:
            for line in f:
                if line.startswith('>'):
                    last_chromosome = line.split('>')[1].strip()
                    chrom_sizes[last_chromosome] = 0
                else:
                    chrom_sizes[last_chromosome] += len(line.strip())

    return chrom_sizes


def read_chrom_sizes_from_file(chrom_sizes_fn):
    '''
    Read chromosome sizes from a UCSC chrom_sizes file.
    '''
    chrom_sizes = dict()

    with open(chrom_sizes_fn) as f:
        for line in f:
            chromosome, size = line.strip().split()
            chrom_sizes[chromosome] = int(size)

    return chrom_sizes
from wrappers import bowtie2, picard, samtools


def create_reference_indices(ref_fn):
    '''
    For a given reference FASTA file, generate several indices.
    '''
    bowtie2.build(ref_fn)
    samtools.faidx_index(ref_fn)
    picard.create_sequence_dictionary(ref_fn)


def read_chrom_sizes(reference_assembly_fn):
    '''
    Iterate through a FASTA file to find the length of each chromosome.
    '''
    chrom_sizes = dict()
    last_chromosome = None

    with open(reference_assembly_fn) as f:
        for line in f:
            if line.startswith('>'):
                last_chromosome = line.split('>')[1].strip()
                chrom_sizes[last_chromosome] = 0
            else:
                chrom_sizes[last_chromosome] += len(line.strip())

    return chrom_sizes


def read_chrom_sizes_from_file(chrom_sizes_fn):
    '''
    Read chromosome sizes from a UCSC chrom_sizes file.
    '''
    chrom_sizes = dict()

    with open(chrom_sizes_fn) as f:
        for line in f:
            chromosome, size = line.strip().split()
            chrom_sizes[chromosome] = int(size)

    return chrom_sizes
mit
Python
8e1610570a50282594a5516ee473cf13bec2ce71
fix typo
cmu-db/db-webcrawler,cmu-db/db-webcrawler,cmu-db/cmdbac,cmu-db/cmdbac,cmu-db/cmdbac,cmu-db/cmdbac,cmu-db/db-webcrawler,cmu-db/cmdbac,cmu-db/db-webcrawler,cmu-db/db-webcrawler
core/drivers/count/count.py
core/drivers/count/count.py
keywords = ['SELECT', 'INSERT', 'UPDATE', 'DELETE']


def count_query(queries):
    ret = {}
    for keyword in keywords:
        ret[keyword] = 0

    for query in queries:
        for keyword in keywords:
            if query.startswith(keyword):
                ret[keyword] += 1
                break

    return ret
keywords = ['SET', 'INSERT', 'UPDATE', 'DELETE']


def count_query(queries):
    ret = {}
    for keyword in keywords:
        ret[keyword] = 0

    for query in queries:
        for keyword in keywords:
            if query.startswith(keyword):
                ret[keyword] += 1
                break

    return ret
apache-2.0
Python
4657acf6408b2fb416e2c9577ac09d18d81f8a68
Remove unused NHS database mockup
jawrainey/sris
nameless/config.py
nameless/config.py
import os
_basedir = os.path.abspath(os.path.dirname(__file__))

# Plugin settings
DATABASE_NAMES = ['atc', 'sms']
# Using sqlite for local development, will be SQL on production.
SQLALCHEMY_BINDS = {
    'atc': 'sqlite:///' + os.path.join(_basedir, 'db/atc.db'),
    'sms': 'sqlite:///' + os.path.join(_basedir, 'db/sms.db')
}

# TxtLocal SMS settings
SENDER = '447786202240'
INBOX_ID = '498863'
API_KEY = 'Sap3A0EaE2k-xL6d4nLJuQdZriNxBByUjRhOCHM5X0'
API_URI = 'https://api.txtlocal.com/'
API_SEND_URI = API_URI + 'send/?'
API_RECEIVE_URI = API_URI + 'get_messages/?'
TEST_MODE = 1  # 1 (True) to enable test mode & 0 to disable.
import os
_basedir = os.path.abspath(os.path.dirname(__file__))

# Plugin settings
DATABASE_NAMES = ['atc', 'nhs', 'sms']
# Using sqlite for local development, will be SQL on production.
SQLALCHEMY_BINDS = {
    'atc': 'sqlite:///' + os.path.join(_basedir, 'db/atc.db'),
    'nhs': 'sqlite:///' + os.path.join(_basedir, 'db/nhs.db'),
    'sms': 'sqlite:///' + os.path.join(_basedir, 'db/sms.db')
}

# TxtLocal SMS settings
SENDER = '447786202240'
INBOX_ID = '498863'
API_KEY = 'Sap3A0EaE2k-xL6d4nLJuQdZriNxBByUjRhOCHM5X0'
API_URI = 'https://api.txtlocal.com/'
API_SEND_URI = API_URI + 'send/?'
API_RECEIVE_URI = API_URI + 'get_messages/?'
TEST_MODE = 1  # 1 (True) to enable test mode & 0 to disable.
mit
Python
10801bca03c03d6b6bb7b6108733178dcf5a8b53
Revert 87dbc5eb9665b5a145a3c2a190f64e2ce4c09fd4^..HEAD
mlabsnl/zengarden,mlabsnl/zengarden,mlabsnl/zengarden
shop/views.py
shop/views.py
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.views.generic.simple import direct_to_template

from shop.forms import OrderForm
from shop.models import EmailEntry, Order

from datetime import datetime
import urllib
from xml.dom import minidom


def index(request):
    print request.META['HTTP_HOST']
    if request.META['HTTP_HOST'] == 'localhost:8000':
        return HttpResponseRedirect('/opkikker')
    else:
        return HttpResponseRedirect('/rustgever')


def opkikker(request):
    if request.POST:
        form = EmailEntry.Form(request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            EmailEntry.objects.get_or_create(email=email, date_added=datetime.now())
            form.clean()
            return direct_to_template(request, 'opkikker.html', extra_context={'succes': True})
        else:
            return direct_to_template(request, 'opkikker.html', extra_context={'error': True, 'form': form,})
    else:
        form = EmailEntry.Form()
    return direct_to_template(request, 'opkikker.html', extra_context={'form': form})


def rustgever(request):
    return direct_to_template(request, 'rustgever.html')


def order(request):
    if request.POST:
        form = OrderForm(request.POST)
        if form.is_valid():
            order = form.save()
            order.save()
            total_amount = int(((order.product_price * order.product_amount) + order.product_shipment_cost) * 100)
            return HttpResponseRedirect(get_payment_url(total_amount, order.id))
        else:
            return direct_to_template(request, 'bestel-rustgever.html', extra_context={'error': True, 'form': form,})
    else:
        form = OrderForm()
    return direct_to_template(request, 'bestel-rustgever.html', extra_context={'form': form})


def get_payment_url(amount, id):
    URL = "https://secure.mollie.nl/xml/ideal?a=create-link&partnerid=705747&amount="+str(amount)+"&description=Zen%20Garden%20Rustgever(tm)%20order_id%20"+str(id)+"&profile_key=e510805f"
    print URL
    result = urllib.urlopen(URL).read()
    splits = result.split("<URL>")
    return splits[1].split("</URL>")[0]
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.views.generic.simple import direct_to_template

from shop.forms import OrderForm
from shop.models import EmailEntry, Order

from datetime import datetime
import urllib
from xml.dom import minidom


def index(request):
    print request.META['HTTP_HOST']
    if request.META['HTTP_HOST'] == 'www.opkikker.nl':
        return HttpResponseRedirect('/opkikker')
    else:
        return HttpResponseRedirect('/rustgever')


def opkikker(request):
    if request.POST:
        form = EmailEntry.Form(request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            EmailEntry.objects.get_or_create(email=email, date_added=datetime.now())
            form.clean()
            return direct_to_template(request, 'opkikker.html', extra_context={'succes': True})
        else:
            return direct_to_template(request, 'opkikker.html', extra_context={'error': True, 'form': form,})
    else:
        form = EmailEntry.Form()
    return direct_to_template(request, 'opkikker.html', extra_context={'form': form})


def rustgever(request):
    return direct_to_template(request, 'rustgever.html')


def order(request):
    if request.POST:
        form = OrderForm(request.POST)
        if form.is_valid():
            order = form.save()
            order.save()
            total_amount = int(((order.product_price * order.product_amount) + order.product_shipment_cost) * 100)
            return HttpResponseRedirect(get_payment_url(total_amount, order.id))
        else:
            return direct_to_template(request, 'bestel-rustgever.html', extra_context={'error': True, 'form': form,})
    else:
        form = OrderForm()
    return direct_to_template(request, 'bestel-rustgever.html', extra_context={'form': form})


def get_payment_url(amount, id):
    URL = "https://secure.mollie.nl/xml/ideal?a=create-link&partnerid=705747&amount="+str(amount)+"&description=Zen%20Garden%20Rustgever(tm)%20order_id%20"+str(id)+"&profile_key=e510805f"
    print URL
    result = urllib.urlopen(URL).read()
    splits = result.split("<URL>")
    return splits[1].split("</URL>")[0]
apache-2.0
Python
c81f4d0659366e1512a4b64f0cce65d50de25927
update to 3.29.0
DeadSix27/python_cross_compile_script
packages/dependencies/sqlite3.py
packages/dependencies/sqlite3.py
{
    'repo_type' : 'archive',
    'custom_cflag' : '-O2', # make sure we build it without -ffast-math
    'download_locations' : [
        {
            'url' : 'https://www.sqlite.org/2019/sqlite-autoconf-3290000.tar.gz',
            'hashes' : [
                { 'type' : 'sha256', 'sum' : '8e7c1e2950b5b04c5944a981cb31fffbf9d2ddda939d536838ebc854481afd5b' },
            ],
        },
        {
            'url' : 'https://fossies.org/linux/misc/sqlite-autoconf-3290000.tar.gz',
            'hashes' : [
                { 'type' : 'sha256', 'sum' : '8e7c1e2950b5b04c5944a981cb31fffbf9d2ddda939d536838ebc854481afd5b' },
            ],
        },
    ],
    'cflag_addition' : '-fexceptions -DSQLITE_ENABLE_COLUMN_METADATA=1 -DSQLITE_USE_MALLOC_H=1 -DSQLITE_USE_MSIZE=1 -DSQLITE_DISABLE_DIRSYNC=1 -DSQLITE_ENABLE_RTREE=1 -fno-strict-aliasing',
    'configure_options': '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-threadsafe --disable-editline --enable-readline --enable-json1 --enable-fts5 --enable-session',
    'depends_on': (
        'zlib',
    ),
    'update_check' : {
        'url' : 'https://www.sqlite.org/index.html',
        'type' : 'httpregex',
        'regex' : r'<a href="releaselog/.*\.html">Version (?P<version_num>[\d.]+)<\/a>'
    },
    '_info' : { 'version' : '3.29.0', 'fancy_name' : 'libsqlite3' },
}
{
    'repo_type' : 'archive',
    'custom_cflag' : '-O2', # make sure we build it without -ffast-math
    'download_locations' : [
        {
            'url' : 'https://www.sqlite.org/2019/sqlite-autoconf-3280000.tar.gz',
            'hashes' : [
                { 'type' : 'sha256', 'sum' : 'd61b5286f062adfce5125eaf544d495300656908e61fca143517afcc0a89b7c3' },
            ],
        },
        {
            'url' : 'https://fossies.org/linux/misc/sqlite-autoconf-3280000.tar.gz',
            'hashes' : [
                { 'type' : 'sha256', 'sum' : 'd61b5286f062adfce5125eaf544d495300656908e61fca143517afcc0a89b7c3' },
            ],
        },
    ],
    'cflag_addition' : '-fexceptions -DSQLITE_ENABLE_COLUMN_METADATA=1 -DSQLITE_USE_MALLOC_H=1 -DSQLITE_USE_MSIZE=1 -DSQLITE_DISABLE_DIRSYNC=1 -DSQLITE_ENABLE_RTREE=1 -fno-strict-aliasing',
    'configure_options': '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-threadsafe --disable-editline --enable-readline --enable-json1 --enable-fts5 --enable-session',
    'depends_on': (
        'zlib',
    ),
    'update_check' : {
        'url' : 'https://www.sqlite.org/index.html',
        'type' : 'httpregex',
        'regex' : r'<a href="releaselog/.*\.html">Version (?P<version_num>[\d.]+)<\/a>'
    },
    '_info' : { 'version' : '3.28.0', 'fancy_name' : 'libsqlite3' },
}
mpl-2.0
Python
0fee973ea7a4ca7b79c84ed55fa1d327c754beee
Add tests and some fixes for class extension pattern
pombredanne/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org
readthedocs/core/utils/extend.py
readthedocs/core/utils/extend.py
"""Patterns for extending Read the Docs""" import inspect from django.conf import settings from django.utils.module_loading import import_by_path from django.utils.functional import LazyObject class SettingsOverrideObject(LazyObject): """Base class for creating class that can be overridden This is used for extension points in the code, where we want to extend a class without monkey patching it. This abstract class allows for lazy inheritance, creating a class from the specified class or from a setting, but only once the class is called. Default to an instance of the class defined by :py:cvar:`_default_class`. Next, look for an override setting class path in ``settings.CLASS_OVERRIDES``, which should be a dictionary of class paths. The setting should be a dictionary keyed by the object path name:: CLASS_OVERRIDES = { 'readthedocs.core.resolver.Resolver': 'something.resolver.Resolver', } Lastly, if ``settings.CLASS_OVERRIDES`` is missing, or the key is not found, attempt to pull the key :py:cvar:`_override_setting` from ``settings``. """ _default_class = None _override_setting = None def _setup(self): """Set up wrapped object This is called when attributes are accessed on :py:cls:`LazyObject` and the underlying wrapped object does not yet exist. """ cls = self._default_class cls_path = (getattr(settings, 'CLASS_OVERRIDES', {}) .get(self._get_class_id())) if cls_path is None and self._override_setting is not None: cls_path = getattr(settings, self._override_setting, None) if cls_path is not None: cls = import_by_path(cls_path) self._wrapped = cls() def _get_class_id(self): # type() here, because LazyObject overrides some attribute access return '.'.join([inspect.getmodule(type(self)).__name__, type(self).__name__])
"""Patterns for extending Read the Docs""" from django.conf import settings from django.utils.module_loading import import_by_path from django.utils.functional import LazyObject class SettingsOverrideObject(LazyObject): """Base class for creating class that can be overridden This is used for extension points in the code, where we want to extend a class without monkey patching it. This abstract class allows for lazy inheritance, creating a class from the specified class or from a setting, but only once the class is called. Default to an instance of the class defined by :py:cvar:`_default_class`. Next, look for an override setting class path in ``settings.CLASS_OVERRIDES``, which should be a dictionary of class paths. The setting should be a dictionary keyed by the object path name:: CLASS_OVERRIDES = { 'readthedocs.core.resolver.Resolver': 'something.resolver.Resolver', } Lastly, if ``settings.CLASS_OVERRIDES`` is missing, or the key is not found, attempt to pull the key :py:cvar:`_override_setting` from ``settings``. """ _default_class = None _override_setting = None def _setup(self): """Set up wrapped object This is called when attributes are accessed on :py:cls:`LazyObject` and the underlying wrapped object does not yet exist. """ cls = self._default_class cls_path = (getattr(settings, 'CLASS_OVERRIDES', {}) .get(self.get_class_id())) if cls_path is None: cls_path = getattr(settings, self._override_setting, None) if cls_path is not None: cls = import_by_path(cls_path) self._wrapped = cls() def get_class_id(self): # type() here, because LazyObject overrides some attribute access return '.'.join([__name__, type(self).__name__])
mit
Python
f1e071957214e787521c7de887ca1fe369671bc7
Add constants
lakewik/storj-gui-client
UI/resources/constants.py
UI/resources/constants.py
# -*- coding: utf-8 -*-

SAVE_PASSWORD_HASHED = True

MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3
MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3
MAX_RETRIES_NEGOTIATE_CONTRACT = 1000
MAX_RETRIES_GET_FILE_POINTERS = 100

GET_DEFAULT_TMP_PATH_FROM_ENV_VARIABLES = True
GET_HOME_PATH_FROM_ENV_VARIABLES = True

FILE_POINTERS_REQUEST_DELAY = 1
FILE_POINTERS_ITERATION_DELAY = 0.2
CONTRACT_NEGOTIATION_ITERATION_DELAY = 0.2
MAX_POINTERS_RESOLVED_IN_ONE_PART = 50
MINIMAL_ALLOWED_BRIDGE_REQUEST_TIMEOUT = 5
# int: file pointers request delay, file pointers iteration delay, max pointers resolved in one part,
# minimal bridge request timeout, in seconds.

MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 4 * 1024
MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096

MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4
MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4

CONCURRENT_UPLOADING = False

DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 90
DEFAULT_MAX_FARMER_CONNECTION_TIMEOUT = 7
DEFAULT_MAX_FARMER_DOWNLOAD_READ_TIMEOUT = 17
# int: maximum bridge request timeout, in seconds.

MAX_ALLOWED_UPLOAD_CONCURRENCY = 9999
MAX_ALLOWED_DOWNLOAD_CONCURRENCY = 9999

DEFAULT_BRIDGE_API_URL = 'api.storj.io'

CONFIG_FILE_NAME = 'storj_client_config.xml'

# DESIGN
DISPLAY_FILE_CREATION_DATE_IN_MAIN = True
DISPLAY_FILE_ID_IN_MAIN = True
FILE_LIST_SORTING_MAIN_ENABLED = True
AUTO_SCROLL_UPLOAD_DOWNLOAD_QUEUE = True
SHOW_TRAY_ICON = False
BUCKETS_LIST_SORTING_ENABLED = True
MIRRORS_TREE_SORTING_ENABLED = True
FIXED_WINDOWS_SIZE = True
ALLOW_DOWNLOAD_FARMER_POINTER_CANCEL_BY_USER = True
ALLOW_UPLOAD_FARMER_CANCEL_BY_USER = True
DATA_TABLE_EDIT_ENABLED = False

# BLACKLISTING
FARMER_NODES_EXCLUSION_FOR_UPLOAD_ENABLED = True
FARMER_NODES_EXCLUSION_FOR_DOWNLOAD_ENABLED = True
BLACKLIST_MAX_LENGTH = 300
BLACKLISTING_MODE = 2
# 1 - blacklist all farmers to which shard have been recently uploaded
# 2 - blacklist only farmers to which transfer failed

# PATHS
USE_USER_ENV_PATH_FOR_TEMP = False
DEFAULT_ENCRYPTION_KEYS_DIRECTORY = ""

# SHARDING
DEFAULT_MAX_SHARD_SIZE = 4294967296  # 4Gb
DEFAULT_SHARD_SIZE = 2 * (1024 * 1024)  # 8Mb

# UPLOAD
REED_SOLOMON_ENCODING_ENABLED = True
# -*- coding: utf-8 -*-

SAVE_PASSWORD_HASHED = True

MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3
MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3
MAX_RETRIES_NEGOTIATE_CONTRACT = 1000
MAX_RETRIES_GET_FILE_POINTERS = 100

GET_DEFAULT_TMP_PATH_FROM_ENV_VARIABLES = True
GET_HOME_PATH_FROM_ENV_VARIABLES = True

FILE_POINTERS_REQUEST_DELAY = 1
FILE_POINTERS_ITERATION_DELAY = 0.2
CONTRACT_NEGOTIATION_ITERATION_DELAY = 0.2
MAX_POINTERS_RESOLVED_IN_ONE_PART = 50
MINIMAL_ALLOWED_BRIDGE_REQUEST_TIMEOUT = 5
# int: file pointers request delay, file pointers iteration delay, max pointers resolved in one part,
# minimal bridge request timeout, in seconds.

MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 4 * 1024
MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096

MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4
MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4

CONCURRENT_UPLOADING = False

DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 90
DEFAULT_MAX_FARMER_CONNECTION_TIMEOUT = 7
DEFAULT_MAX_FARMER_DOWNLOAD_READ_TIMEOUT = 17
# int: maximum bridge request timeout, in seconds.

MAX_ALLOWED_UPLOAD_CONCURRENCY = 9999
MAX_ALLOWED_DOWNLOAD_CONCURRENCY = 9999

DEFAULT_BRIDGE_API_URL = 'api.storj.io'

# DESIGN
DISPLAY_FILE_CREATION_DATE_IN_MAIN = True
DISPLAY_FILE_ID_IN_MAIN = True
FILE_LIST_SORTING_MAIN_ENABLED = True
AUTO_SCROLL_UPLOAD_DOWNLOAD_QUEUE = True
SHOW_TRAY_ICON = False
BUCKETS_LIST_SORTING_ENABLED = True
MIRRORS_TREE_SORTING_ENABLED = True
FIXED_WINDOWS_SIZE = True
ALLOW_DOWNLOAD_FARMER_POINTER_CANCEL_BY_USER = True
ALLOW_UPLOAD_FARMER_CANCEL_BY_USER = True
DATA_TABLE_EDIT_ENABLED = False

# BLACKLISTING
FARMER_NODES_EXCLUSION_FOR_UPLOAD_ENABLED = True
FARMER_NODES_EXCLUSION_FOR_DOWNLOAD_ENABLED = True
BLACKLIST_MAX_LENGTH = 300
BLACKLISTING_MODE = 2
# 1 - blacklist all farmers to which shard have been recently uploaded
# 2 - blacklist only farmers to which transfer failed

# PATHS
USE_USER_ENV_PATH_FOR_TEMP = False
DEFAULT_ENCRYPTION_KEYS_DIRECTORY = ""

# SHARDING
DEFAULT_MAX_SHARD_SIZE = 4294967296  # 4Gb
DEFAULT_SHARD_SIZE = 2 * (1024 * 1024)  # 8Mb

# UPLOAD
REED_SOLOMON_ENCODING_ENABLED = True
mit
Python
f2a0bbee61a144bf0d1de77dd4b41393fe7428bf
fix Ntests in simuNtests
plguhur/random-sets
simuNtests.py
simuNtests.py
# lance simulations pour different nombre d'electeurs

import multiprocessing
import os, sys
import shutil
import time
import numpy as np
from randomSets import *


def worker(((Ncandidats,q, Nwinners))):
    """worker function"""
    sys.stdout.write('\nSTART -- %i candidats -- \n' % Ncandidats)
    sys.stdout.flush()
    time.sleep(0.01) # being sure that simulation are differently initialized
    minNvoters = simulate(Ncandidats, q =q, Nwinners = Nwinners)
    with open('nmin-candidates-%i' % Ncandidats,'a') as f_handle:
        np.savetxt(f_handle,minNvoters)
    return


if __name__ == '__main__':

    print "Cette fois, c'est la bonne !"
    print (time.strftime("%H:%M:%S"))

    root = "simulations/"
    try:
        os.mkdir(root)
    except OSError:
        pass

    Ncandidates = int(sys.argv[1])
    Ntests = [sys.argv[1] if len(sys.argv) == 3 else 1000]
    Nwinners = 1

    args = []
    print Ncandidates
    for i in range(Ntests):
        arg = [Ncandidates,100,1]
        args.append(arg)

    if args == []:
        print "Rien a faire!"

    pool = multiprocessing.Pool(processes=20)
    pool.map(worker, args)

    print "Alors, ca marche ? :)"
# lance simulations pour different nombre d'electeurs

import multiprocessing
import os, sys
import shutil
import time
import numpy as np
from randomSets import *


def worker(((Ncandidats,q, Nwinners))):
    """worker function"""
    sys.stdout.write('\nSTART -- %i candidats -- \n' % Ncandidats)
    sys.stdout.flush()
    time.sleep(0.01) # being sure that simulation are differently initialized
    minNvoters = simulate(Ncandidats, q =q, Nwinners = Nwinners)
    with open('nmin-candidates-%i' % Ncandidats,'a') as f_handle:
        np.savetxt(f_handle,minNvoters)
    return


if __name__ == '__main__':

    print "Cette fois, c'est la bonne !"
    print (time.strftime("%H:%M:%S"))

    root = "simulations/"
    try:
        os.mkdir(root)
    except OSError:
        pass

    Ncandidates = int(sys.argv[1])
    Ntests = [sys.argv[1] if len(sys.argv) == 3 else 1000]
    Nwinners = 1

    args = []
    print Ncandidates
    for i in range(Ncandidates):
        arg = [Ncandidates,100,1]
        args.append(arg)

    if args == []:
        print "Rien a faire!"

    pool = multiprocessing.Pool(processes=1)
    pool.map(worker, args)

    print "Alors, ca marche ? :)"
apache-2.0
Python
3bf9ab0da4b06b8b0383fb6db64947886742899c
Add newline in log of builds after successful rebuild of website.
chebee7i/dit,Autoplectic/dit,dit/dit,dit/dit,Autoplectic/dit,Autoplectic/dit,dit/dit,Autoplectic/dit,chebee7i/dit,Autoplectic/dit,dit/dit,chebee7i/dit,chebee7i/dit,dit/dit
site/build.py
site/build.py
#!/usr/bin/env python
# -*- coding: ascii -*-
"""
This script can be used to build the website. It is also run on each commit
to github.

Example:

    ./build public_html

"""
from __future__ import print_function

import datetime
import os
import shutil
import subprocess
import sys
import time

BUILD_DIR = 'build'


def get_build_dir():
    try:
        build_dir = sys.argv[1]
    except IndexError:
        build_dir = BUILD_DIR

    basedir = os.path.abspath(os.path.curdir)
    build_dir = os.path.join(basedir, build_dir)
    return build_dir


def build(dest):
    source = os.path.split(os.path.abspath(__file__))[0]
    source = os.path.join(source, 'src')
    # We aren't doing anything fancy yet.
    shutil.copytree(source, dest)


def update_gitrepo():
    source = os.path.split(os.path.abspath(__file__))[0]
    initial = os.getcwd()
    try:
        os.chdir(source)
        subprocess.call(['git', 'pull'])
    finally:
        os.chdir(initial)


def main():
    try:
        min_delay = int(sys.argv[2]) * 60
    except:
        min_delay = 0

    # Build only if enough time has passed.
    build_dir = get_build_dir()
    if os.path.exists(build_dir):
        elapsed = time.time() - os.path.getmtime(build_dir)
        if elapsed < min_delay:
            print("Not enough time has elapsed since last build.")
            sys.exit(0)
        else:
            # Delete it all!
            if os.path.islink(build_dir):
                os.unlink(build_dir)
            else:
                shutil.rmtree(build_dir)
    elif os.path.islink(build_dir):
        # Then its a bad symlink.
        os.unlink(build_dir)

    #update_gitrepo()
    build(build_dir)
    subprocess.call(['touch', build_dir])
    print("Done.\n")


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: ascii -*-
"""
This script can be used to build the website. It is also run on each commit
to github.

Example:

    ./build public_html

"""
import datetime
import os
import shutil
import subprocess
import sys
import time

BUILD_DIR = 'build'


def get_build_dir():
    try:
        build_dir = sys.argv[1]
    except IndexError:
        build_dir = BUILD_DIR

    basedir = os.path.abspath(os.path.curdir)
    build_dir = os.path.join(basedir, build_dir)
    return build_dir


def build(dest):
    source = os.path.split(os.path.abspath(__file__))[0]
    source = os.path.join(source, 'src')
    # We aren't doing anything fancy yet.
    shutil.copytree(source, dest)


def update_gitrepo():
    source = os.path.split(os.path.abspath(__file__))[0]
    initial = os.getcwd()
    try:
        os.chdir(source)
        subprocess.call(['git', 'pull'])
    finally:
        os.chdir(initial)


def main():
    try:
        min_delay = int(sys.argv[2]) * 60
    except:
        min_delay = 0

    # Build only if enough time has passed.
    build_dir = get_build_dir()
    if os.path.exists(build_dir):
        elapsed = time.time() - os.path.getmtime(build_dir)
        if elapsed < min_delay:
            print "Not enough time has elapsed since last build."
            sys.exit(0)
        else:
            # Delete it all!
            if os.path.islink(build_dir):
                os.unlink(build_dir)
            else:
                shutil.rmtree(build_dir)
    elif os.path.islink(build_dir):
        # Then its a bad symlink.
        os.unlink(build_dir)

    #update_gitrepo()
    build(build_dir)
    subprocess.call(['touch', build_dir])
    print "Done."


if __name__ == '__main__':
    main()
bsd-3-clause
Python
798f80c3efe06869194adf7073af574cc94481b9
add to init
tamasgal/km3pipe,tamasgal/km3pipe
km3modules/__init__.py
km3modules/__init__.py
# coding=utf-8
# Filename: __init__.py
# pylint: disable=locally-disabled
"""
A collection of commonly used modules.

"""

from km3modules.common import (Dump, Delete, HitCounter, BlobIndexer, Keep,
                               StatusBar, MemoryObserver, Wrap, Cut)
from km3modules.reco import SvdFit as PrimFit
from km3modules.reco import SvdFit
# coding=utf-8
# Filename: __init__.py
# pylint: disable=locally-disabled
"""
A collection of commonly used modules.

"""

from km3modules.common import (Dump, Delete, HitCounter, BlobIndexer, Keep,
                               StatusBar, MemoryObserver, Wrap)
from km3modules.reco import SvdFit as PrimFit
from km3modules.reco import SvdFit
mit
Python
e5ed0e4e6dea58a1412e3c596612e647bd22c619
Update __init__.py
bittracker/krempelair,bittracker/krempelair,KrempelEv/krempelair,bittracker/krempelair,KrempelEv/krempelair,KrempelEv/krempelair
krempelair/__init__.py
krempelair/__init__.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import jinja2
import flask

import views


class Krempelair(flask.Flask):
    jinja_options = {
        'extensions': ['jinja2.ext.autoescape'],
        'undefined': jinja2.StrictUndefined
    }

    def __init__(self):
        """(See `make_app` for parameter descriptions.)"""
        flask.Flask.__init__(self, __name__)
        self.setup_routes()

    def create_jinja_environment(self):
        """Called by Flask.__init__"""
        env = super(Krempelair, self).create_jinja_environment()
        for func in [
            'force_unicode',
            'timesince',
            'shorten_sha1',
            'shorten_message',
            'extract_author_name',
            'formattimestamp',
        ]:
            env.filters[func] = getattr(utils, func)
        return env

    def setup_routes(self):
        for endpoint, rule in [
            ('air_get_status_betrieb', '/'),
            ('air_get_status_stoerung', '/stoerung'),
            ('air_set_status', '/<int:pin>/<int:state>'),
            ('air_set_level', '/lueftung/stufe/<int:level>'),
            ('air_set_timer', '/lueftung/timer/<int:time>'),
            ('air_set_temp', '/lueftung/temperatur/sollTemp/<int:temp>'),
            ('air_set_tempNAK', '/lueftung/temperatur/sollTempNAK/<int:temp>'),
            ('air_set_raucherraum_on', '/raucherraum/on'),
            ('air_set_raucherraum_off', '/raucherraum/off'),
            ('air_get_temperaturen', '/lueftung/temperatur'),
        ]:
            self.add_url_rule(rule, view_func=getattr(views, endpoint))


if __name__ == "__main__":
    app = Krempelair()
    app.run(host="0.0.0.0", debug=True)
else:
    application = Krempelair()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import jinja2
import flask

import views


class Krempelair(flask.Flask):
    jinja_options = {
        'extensions': ['jinja2.ext.autoescape'],
        'undefined': jinja2.StrictUndefined
    }

    def __init__(self):
        """(See `make_app` for parameter descriptions.)"""
        flask.Flask.__init__(self, __name__)
        self.setup_routes()

    def create_jinja_environment(self):
        """Called by Flask.__init__"""
        env = super(Krempelair, self).create_jinja_environment()
        for func in [
            'force_unicode',
            'timesince',
            'shorten_sha1',
            'shorten_message',
            'extract_author_name',
            'formattimestamp',
        ]:
            env.filters[func] = getattr(utils, func)
        return env

    def setup_routes(self):
        for endpoint, rule in [
            ('air_get_status_betrieb', '/'),
            ('air_get_status_stoerung', '/stoerung'),
            ('air_set_status', '/<int:pin>/<int:state>'),
            ('air_set_level', '/lueftung/stufe/<int:level>'),
            ('air_set_timer', '/lueftung/timer/<int:time>'),
            ('air_set_temp', '/lueftung/temperatur/<int:temp>'),
            ('air_set_raucherraum_on', '/raucherraum/on'),
            ('air_set_raucherraum_off', '/raucherraum/off'),
            ('air_get_temperaturen', '/lueftung/temperatur'),
        ]:
            self.add_url_rule(rule, view_func=getattr(views, endpoint))


if __name__ == "__main__":
    app = Krempelair()
    app.run(host="0.0.0.0", debug=True)
else:
    application = Krempelair()
agpl-3.0
Python