commit (string, 40 chars) | subject (string, 4-1.73k chars) | repos (string, 5-127k chars) | old_file (string, 2-751 chars) | new_file (string, 2-751 chars) | new_contents (string, 1-8.98k chars) | old_contents (string, 0-6.59k chars) | license (string, 13 classes) | lang (string, 23 classes) |
---|---|---|---|---|---|---|---|---|
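Each row that follows lists those nine fields in order, pairing a commit with the full contents of one changed file before and after the change. A minimal sketch of consuming such a table programmatically, assuming it is published on the Hugging Face Hub (the dataset identifier "user/commit-diffs" is a hypothetical placeholder, not the real name):

```python
# Minimal sketch, assuming the table is hosted on the Hugging Face Hub.
# "user/commit-diffs" is a hypothetical placeholder, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")

# Keep only the Python rows, mirroring the `lang` column below.
python_rows = ds.filter(lambda row: row["lang"] == "Python")

for row in python_rows:
    # `commit` is a 40-char SHA; `subject` is the commit message headline.
    print(row["commit"][:8], row["subject"])
    # `old_contents`/`new_contents` hold the file before and after the change.
    print("  changed:", row["old_file"], "->", row["new_file"])
```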
0a0d55a2a9aa07b0841b2a221e8b7bc9b844b976
|
update version numbers and project details
|
arkaitzj/python-butter,wdv4758h/butter,dasSOZO/python-butter
|
butter/__init__.py
|
butter/__init__.py
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "[email protected]"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
|
#!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.1"
__email__ = "[email protected]"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/"
__testsuite__ = "tests.testall"
|
bsd-3-clause
|
Python
|
39d4f9c0df535c13c6f37eaaccaaeabb0b92b8e0
|
Bump version number
|
fabric-colors/fabric-colors,fabric-colors/fabric-colors
|
fabric_colors/_version.py
|
fabric_colors/_version.py
|
__version__ = "0.9.42"
|
__version__ = "0.9.41"
|
bsd-2-clause
|
Python
|
4e09200b83f986ce333f5b1143e13a4b2d7df2ce
|
determine site activity on process_view
|
Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server
|
pykeg/src/pykeg/web/middleware.py
|
pykeg/src/pykeg/web/middleware.py
|
# Copyright 2011 Mike Wakerly <[email protected]>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
from pykeg.core import models
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
class KegbotSiteMiddleware:
def process_view(self, request, view_func, view_args, view_kwargs):
kbsite_name = view_kwargs.pop('kbsite_name', None)
if kbsite_name is not None:
if kbsite_name == '':
kbsite_name = 'default'
request.kbsite = get_object_or_404(models.KegbotSite, name=kbsite_name)
return None
class SiteActiveMiddleware:
"""Middleware which throws 503s when KegbotSite.is_active is false."""
ALLOWED_PATHS = (
'/accounts/login/',
'/admin/',
'/site_media/',
)
def _path_allowed(self, path):
for p in self.ALLOWED_PATHS:
if path.startswith(p):
return True
return False
def process_view(self, request, view_func, view_args, view_kwargs):
if not hasattr(request, 'kbsite'):
return None
kbsite = request.kbsite
# We have a KegbotSite, and that site is active: nothing to do.
if kbsite.is_active:
return None
# If the request is for a whitelisted path, allow it.
if self._path_allowed(request.path):
return None
# Allow staff/superusers access if inactive.
if request.user.is_staff or request.user.is_superuser:
return None
return HttpResponse('Site temporarily unavailable', status=503)
|
# Copyright 2011 Mike Wakerly <[email protected]>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
from pykeg.core import models
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
class KegbotSiteMiddleware:
def process_view(self, request, view_func, view_args, view_kwargs):
kbsite_name = view_kwargs.pop('kbsite_name', None)
if kbsite_name is not None:
if kbsite_name == '':
kbsite_name = 'default'
request.kbsite = get_object_or_404(models.KegbotSite, name=kbsite_name)
return None
class SiteActiveMiddleware:
"""Middleware which throws 503s when KegbotSite.is_active is false."""
ALLOWED_PATHS = (
'/accounts/login/',
'/admin/',
'/site_media/',
)
def _path_allowed(self, path):
for p in self.ALLOWED_PATHS:
if path.startswith(p):
return True
return False
def process_request(self, request):
kbsite = None
if hasattr(request, 'kbsite'):
kbsite = request.kbsite
# We have a KegbotSite, and that site is active: nothing to do.
if kbsite and kbsite.is_active:
return None
# If the request is for a whitelisted path, allow it.
if self._path_allowed(request.path):
return None
# Allow staff/superusers access if inactive.
if request.user.is_staff or request.user.is_superuser:
return None
else:
return HttpResponse('Site temporarily unavailable', status=503)
|
mit
|
Python
|
7b21270ca893e90790a0a60c8417df12052ea9a0
|
Add alternate MDP-ID aleph API if the first fails
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
falcom/api/reject_list.py
|
falcom/api/reject_list.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from os import environ
from urllib.request import urlopen
from .uri import URI, APIQuerier
from .marc import get_marc_data_from_xml
from .worldcat import get_worldcat_data_from_json
from .hathi import get_oclc_counts_from_json
from .common import ReadOnlyDataStructure
AlephURI = URI("http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta")
WorldCatURI = URI("http://www.worldcat.org/webservices/catalog"
"/content/libraries/{oclc}")
HathiURI = URI("http://catalog.hathitrust.org/api/volumes/brief"
"/oclc/{oclc}.json")
aleph_api = APIQuerier(AlephURI, url_opener=urlopen)
worldcat_api = APIQuerier(WorldCatURI, url_opener=urlopen)
hathi_api = APIQuerier(HathiURI, url_opener=urlopen)
wc_key = environ.get("MDP_REJECT_WC_KEY", "none")
class VolumeDataFromBarcode:
def __init__ (self, barcode):
self.barcode = barcode
self.marc = get_marc_data_from_xml(aleph_api.get(
id=barcode,
type="bc",
schema="marcxml"))
if not self.marc:
self.marc = get_marc_data_from_xml(aleph_api.get(
id="mdp." + barcode,
schema="marcxml"))
if self.marc.oclc is None:
worldcat, hathi = None, None
else:
worldcat = worldcat_api.get(
oclc=self.marc.oclc,
wskey=wc_key,
format="json",
maximumLibraries="50")
hathi = hathi_api.get(oclc=self.marc.oclc)
self.worldcat = get_worldcat_data_from_json(worldcat)
self.hathi = get_oclc_counts_from_json(hathi, "mdp." + barcode)
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from os import environ
from urllib.request import urlopen
from .uri import URI, APIQuerier
from .marc import get_marc_data_from_xml
from .worldcat import get_worldcat_data_from_json
from .hathi import get_oclc_counts_from_json
from .common import ReadOnlyDataStructure
AlephURI = URI("http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta")
WorldCatURI = URI("http://www.worldcat.org/webservices/catalog"
"/content/libraries/{oclc}")
HathiURI = URI("http://catalog.hathitrust.org/api/volumes/brief"
"/oclc/{oclc}.json")
aleph_api = APIQuerier(AlephURI, url_opener=urlopen)
worldcat_api = APIQuerier(WorldCatURI, url_opener=urlopen)
hathi_api = APIQuerier(HathiURI, url_opener=urlopen)
wc_key = environ.get("MDP_REJECT_WC_KEY", "none")
class VolumeDataFromBarcode:
def __init__ (self, barcode):
self.barcode = barcode
self.marc = get_marc_data_from_xml(aleph_api.get(
id=barcode,
type="bc",
schema="marcxml"))
if self.marc.oclc is None:
worldcat, hathi = None, None
else:
worldcat = worldcat_api.get(
oclc=self.marc.oclc,
wskey=wc_key,
format="json",
maximumLibraries="50")
hathi = hathi_api.get(oclc=self.marc.oclc)
self.worldcat = get_worldcat_data_from_json(worldcat)
self.hathi = get_oclc_counts_from_json(hathi, "mdp." + barcode)
|
bsd-3-clause
|
Python
|
fae13bf07e3b336f52911cb23291c6db029922cb
|
fix timing issues with new test
|
commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot
|
selfdrive/controls/tests/test_startup.py
|
selfdrive/controls/tests/test_startup.py
|
#!/usr/bin/env python3
import time
import unittest
from parameterized import parameterized
from cereal import log, car
import cereal.messaging as messaging
from common.params import Params
from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp # pylint: disable=no-name-in-module,import-error
from selfdrive.car.fingerprints import _FINGERPRINTS
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.controls.lib.events import EVENT_NAME
from selfdrive.test.helpers import with_processes
EventName = car.CarEvent.EventName
class TestStartup(unittest.TestCase):
@parameterized.expand([
# TODO: test EventName.startup for release branches
# officially supported car
(EventName.startupMaster, HYUNDAI.SONATA, False),
(EventName.startupMaster, HYUNDAI.SONATA, True),
# community supported car
(EventName.startupMaster, HYUNDAI.KIA_STINGER, True),
(EventName.startupMaster, HYUNDAI.KIA_STINGER, False),
# dashcamOnly car
(EventName.startupMaster, MAZDA.CX5, True),
(EventName.startupMaster, MAZDA.CX5, False),
# unrecognized car
(EventName.startupNoCar, None, True),
(EventName.startupNoCar, None, False),
])
@with_processes(['controlsd'])
def test_startup_alert(self, expected_event, car, toggle_enabled):
# TODO: this should be done without any real sockets
controls_sock = messaging.sub_sock("controlsState")
pm = messaging.PubMaster(['can', 'health'])
Params().put("CommunityFeaturesToggle", b"1" if toggle_enabled else b"0")
time.sleep(2) # wait for controlsd to be ready
health = messaging.new_message('health')
health.health.hwType = log.HealthData.HwType.uno
pm.send('health', health)
# fingerprint
if car is None:
finger = {addr: 1 for addr in range(1, 100)}
else:
finger = _FINGERPRINTS[car][0]
for _ in range(500):
msgs = [[addr, 0, b'\x00'*length, 0] for addr, length in finger.items()]
pm.send('can', can_list_to_can_capnp(msgs))
time.sleep(0.01)
msgs = messaging.drain_sock(controls_sock)
if len(msgs):
event_name = msgs[0].controlsState.alertType.split("/")[0]
self.assertEqual(EVENT_NAME[expected_event], event_name,
f"expected {EVENT_NAME[expected_event]} for '{car}', got {event_name}")
break
else:
self.fail(f"failed to fingerprint {car}")
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python3
import time
import unittest
from parameterized import parameterized
from cereal import log, car
import cereal.messaging as messaging
from common.params import Params
from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp # pylint: disable=no-name-in-module,import-error
from selfdrive.car.fingerprints import _FINGERPRINTS
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.controls.lib.events import EVENT_NAME
from selfdrive.test.helpers import with_processes
EventName = car.CarEvent.EventName
class TestStartup(unittest.TestCase):
@parameterized.expand([
# TODO: test EventName.startup for release branches
# officially supported car
(EventName.startupMaster, HYUNDAI.SONATA, False),
(EventName.startupMaster, HYUNDAI.SONATA, True),
# community supported car
(EventName.startupMaster, HYUNDAI.KIA_STINGER, True),
(EventName.startupMaster, HYUNDAI.KIA_STINGER, False),
# dashcamOnly car
(EventName.startupMaster, MAZDA.CX5, True),
(EventName.startupMaster, MAZDA.CX5, False),
# unrecognized car
(EventName.startupNoCar, None, True),
(EventName.startupNoCar, None, False),
])
@with_processes(['controlsd'])
def test_startup_alert(self, expected_event, car, toggle_enabled):
# TODO: this should be done without any real sockets
sm = messaging.SubMaster(['controlsState'])
pm = messaging.PubMaster(['can', 'health'])
Params().put("CommunityFeaturesToggle", b"1" if toggle_enabled else b"0")
time.sleep(2) # wait for controlsd to be ready
health = messaging.new_message('health')
health.health.hwType = log.HealthData.HwType.uno
pm.send('health', health)
# fingerprint
if car is None:
finger = {addr: 1 for addr in range(1, 100)}
else:
finger = _FINGERPRINTS[car][0]
for _ in range(500):
msgs = [[addr, 0, b'\x00'*length, 0] for addr, length in finger.items()]
pm.send('can', can_list_to_can_capnp(msgs))
time.sleep(0.05)
sm.update(0)
if sm.updated["controlsState"]:
event_name = sm["controlsState"].alertType.split("/")[0]
self.assertEqual(EVENT_NAME[expected_event], event_name,
f"expected {EVENT_NAME[expected_event]} for '{car}', got {event_name}")
break
else:
self.fail(f"failed to fingerprint {car}")
if __name__ == "__main__":
unittest.main()
|
mit
|
Python
|
22fed955ddab3ccb222148917a369002405aef5b
|
Reduce FPS
|
moggers87/isdabizda
|
isdabizda/__init__.py
|
isdabizda/__init__.py
|
import pygame
import sys
from pygame.locals import *
from isdabizda.grid import Grid, SMALL
pygame.init()
grid = Grid(SMALL)
## options
RES = grid.sizes[0] * grid.sizes[1]
RES = (RES,RES)
TITLE = "Isdabizda!"
FPS = 15
DISPLAY_SURF = pygame.display.set_mode(RES)
pygame.display.set_caption(TITLE)
clock = pygame.time.Clock()
def update_display():
grid.draw_grid(DISPLAY_SURF)
pygame.display.flip()
update_display()
# loop
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
grid.drop_block()
elif event.button == 3:
grid.rotate_block()
else:
continue
grid.draw_grid(DISPLAY_SURF)
pygame.display.flip()
clock.tick(FPS)
|
import pygame
import sys
from pygame.locals import *
from isdabizda.grid import Grid, SMALL
pygame.init()
grid = Grid(SMALL)
## options
RES = grid.sizes[0] * grid.sizes[1]
RES = (RES,RES)
TITLE = "Isdabizda!"
FPS = 30
DISPLAY_SURF = pygame.display.set_mode(RES)
pygame.display.set_caption(TITLE)
clock = pygame.time.Clock()
# draw prettiness :D
grid.draw_grid(DISPLAY_SURF)
pygame.display.flip()
# loop
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONDOWN:
if event.button == 1:
grid.drop_block()
elif event.button == 3:
grid.rotate_block()
else:
continue
grid.draw_grid(DISPLAY_SURF)
pygame.display.flip()
clock.tick(FPS)
|
isc
|
Python
|
8b78463ac8d8953dffb3c3ecd5e9e1e4396da106
|
Make sure set_mpl_backend works if qtpy is not installed
|
stscieisenhamer/glue,saimn/glue,saimn/glue,stscieisenhamer/glue
|
glue/_mpl_backend.py
|
glue/_mpl_backend.py
|
class MatplotlibBackendSetter(object):
"""
Import hook to make sure the proper Qt backend is set when importing
Matplotlib.
"""
enabled = True
def find_module(self, mod_name, pth):
if self.enabled and 'matplotlib' in mod_name:
self.enabled = False
set_mpl_backend()
def find_spec(self, name, import_path, target_module=None):
pass
def set_mpl_backend():
try:
from qtpy import PYQT5
except:
# If Qt isn't available, we don't have to worry about
# setting the backend
return
from matplotlib import rcParams, rcdefaults
# standardize mpl setup
rcdefaults()
if PYQT5:
rcParams['backend'] = 'Qt5Agg'
else:
rcParams['backend'] = 'Qt4Agg'
# The following is a workaround for the fact that Matplotlib checks the
# rcParams at import time, not at run-time. I have opened an issue with
# Matplotlib here: https://github.com/matplotlib/matplotlib/issues/5513
from matplotlib import get_backend
from matplotlib import backends
backends.backend = get_backend()
|
class MatplotlibBackendSetter(object):
"""
Import hook to make sure the proper Qt backend is set when importing
Matplotlib.
"""
enabled = True
def find_module(self, mod_name, pth):
if self.enabled and 'matplotlib' in mod_name:
self.enabled = False
set_mpl_backend()
def find_spec(self, name, import_path, target_module=None):
pass
def set_mpl_backend():
from matplotlib import rcParams, rcdefaults
# standardize mpl setup
rcdefaults()
from qtpy import PYQT5
if PYQT5:
rcParams['backend'] = 'Qt5Agg'
else:
rcParams['backend'] = 'Qt4Agg'
# The following is a workaround for the fact that Matplotlib checks the
# rcParams at import time, not at run-time. I have opened an issue with
# Matplotlib here: https://github.com/matplotlib/matplotlib/issues/5513
from matplotlib import get_backend
from matplotlib import backends
backends.backend = get_backend()
|
bsd-3-clause
|
Python
|
eb1fdf3419bdfd1d5920d73a877f707162b783b0
|
Drop unused and dangerous entrypoint `open_fileindex`
|
ecmwf/cfgrib
|
cfgrib/__init__.py
|
cfgrib/__init__.py
|
#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
|
apache-2.0
|
Python
|
ad8036e5a21fd29885dc7ebf201e599a0ca79563
|
add charliecloud 0.9.7 (#10661)
|
iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack
|
var/spack/repos/builtin/packages/charliecloud/package.py
|
var/spack/repos/builtin/packages/charliecloud/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
"""Lightweight user-defined software stacks for HPC."""
homepage = "https://hpc.github.io/charliecloud"
url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"
version('0.9.7', sha256='ec80a4b9bef3a2161a783e11d99cc58e09a32dfbc8a6234c8f7ce7fa76e2f62d')
version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
version('0.2.4', 'b112de661c2c360174b42c99022c1967')
@property
def install_targets(self):
return ['install', 'PREFIX=%s' % self.prefix]
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
"""Lightweight user-defined software stacks for HPC."""
homepage = "https://hpc.github.io/charliecloud"
url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"
version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
version('0.2.4', 'b112de661c2c360174b42c99022c1967')
@property
def install_targets(self):
return ['install', 'PREFIX=%s' % self.prefix]
|
lgpl-2.1
|
Python
|
c7322a1ff37c7f2d4c3dfb149c2e36daafae6043
|
Bump to version 0.11.3
|
reubano/ckanny,reubano/ckanny
|
ckanny/__init__.py
|
ckanny/__init__.py
|
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = '[email protected]'
__version__ = '0.11.3'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
@manager.command
def ver():
"""Show ckanny version"""
from . import __version__ as version
print('v%s' % version)
if __name__ == '__main__':
manager.main()
|
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = '[email protected]'
__version__ = '0.11.2'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
@manager.command
def ver():
"""Show ckanny version"""
from . import __version__ as version
print('v%s' % version)
if __name__ == '__main__':
manager.main()
|
mit
|
Python
|
07a74375fabddc9b6fa4de0c345949bfadb54504
|
Revert silly change
|
lkishline/expyfun,LABSN/expyfun,Eric89GXL/expyfun,rkmaddox/expyfun,drammock/expyfun
|
examples/sync_test.py
|
examples/sync_test.py
|
"""
=============
A-V sync test
=============
This example tests synchronization between the screen and the audio playback.
"""
# Author: Dan McCloy <[email protected]>
#
# License: BSD (3-clause)
print __doc__
import numpy as np
from expyfun import ExperimentController
rng = np.random.RandomState(0)
with ExperimentController('SyncTest', screen_num=0, window_size=[300, 300],
full_screen=False, stim_db=70, noise_db=-np.inf,
stim_fs=24414, participant='s', session='0',
output_dir=None) as ec:
ec.load_buffer(np.r_[0.1, np.zeros(2000)])
white = [1, 1, 1]
black = [-1, -1, -1]
while True:
ec.draw_background_color(white)
t1 = ec.flip_and_play()
ec.draw_background_color(black)
t2 = ec.flip() # expyfun
print 1. / (t2 - t1)
ec.wait_one_press(0.5)
|
"""
=============
A-V sync test
=============
This example tests synchronization between the screen and the audio playback.
"""
# Author: Dan McCloy <[email protected]>
#
# License: BSD (3-clause)
print __doc__
import numpy as np
from expyfun import ExperimentController
rng = np.random.RandomState(0)
with ExperimentController('SyncTest', screen_num=0, window_size=[300, 300],
full_screen=False, stim_db=70, noise_db=-np.inf,
stim_fs=44100, participant='s', session='0',
output_dir=None) as ec:
ec.load_buffer(np.r_[0.1, np.zeros(2000)])
white = [1, 1, 1]
black = [-1, -1, -1]
while True:
ec.draw_background_color(white)
t1 = ec.flip_and_play()
ec.draw_background_color(black)
t2 = ec.flip() # expyfun
print 1. / (t2 - t1)
ec.wait_one_press(0.5)
|
bsd-3-clause
|
Python
|
85880dbf68718737fa52535326163d9b40adf7f9
|
Add tags to event serializer
|
fuziontech/sentry,Kryz/sentry,ngonzalvez/sentry,pauloschilling/sentry,vperron/sentry,ewdurbin/sentry,wong2/sentry,mvaled/sentry,jokey2k/sentry,llonchj/sentry,JTCunning/sentry,wong2/sentry,jean/sentry,pauloschilling/sentry,BayanGroup/sentry,gencer/sentry,beeftornado/sentry,mvaled/sentry,Natim/sentry,imankulov/sentry,vperron/sentry,JamesMura/sentry,looker/sentry,gg7/sentry,Kryz/sentry,JamesMura/sentry,fuziontech/sentry,jokey2k/sentry,zenefits/sentry,boneyao/sentry,hongliang5623/sentry,felixbuenemann/sentry,gencer/sentry,korealerts1/sentry,zenefits/sentry,gg7/sentry,ngonzalvez/sentry,zenefits/sentry,argonemyth/sentry,daevaorn/sentry,nicholasserra/sentry,nicholasserra/sentry,imankulov/sentry,hongliang5623/sentry,beeftornado/sentry,looker/sentry,argonemyth/sentry,kevinastone/sentry,kevinlondon/sentry,kevinastone/sentry,mitsuhiko/sentry,beeftornado/sentry,BuildingLink/sentry,felixbuenemann/sentry,hongliang5623/sentry,ewdurbin/sentry,ifduyue/sentry,kevinastone/sentry,gg7/sentry,ewdurbin/sentry,alexm92/sentry,BuildingLink/sentry,TedaLIEz/sentry,felixbuenemann/sentry,songyi199111/sentry,drcapulet/sentry,jean/sentry,jean/sentry,gencer/sentry,TedaLIEz/sentry,jean/sentry,daevaorn/sentry,pauloschilling/sentry,BuildingLink/sentry,1tush/sentry,ifduyue/sentry,looker/sentry,Natim/sentry,mvaled/sentry,drcapulet/sentry,fotinakis/sentry,gencer/sentry,boneyao/sentry,JackDanger/sentry,imankulov/sentry,daevaorn/sentry,wujuguang/sentry,wujuguang/sentry,looker/sentry,kevinlondon/sentry,mvaled/sentry,jean/sentry,alexm92/sentry,1tush/sentry,nicholasserra/sentry,daevaorn/sentry,vperron/sentry,llonchj/sentry,JamesMura/sentry,BuildingLink/sentry,songyi199111/sentry,JTCunning/sentry,zenefits/sentry,1tush/sentry,drcapulet/sentry,fotinakis/sentry,JackDanger/sentry,fotinakis/sentry,BayanGroup/sentry,fuziontech/sentry,gencer/sentry,ifduyue/sentry,zenefits/sentry,mvaled/sentry,ifduyue/sentry,TedaLIEz/sentry,boneyao/sentry,korealerts1/sentry,wong2/sentry,llonchj/sentry,Natim/sentry,songyi199111/sentry,alexm92/sentry,wujuguang/sentry,JTCunning/sentry,JamesMura/sentry,fotinakis/sentry,korealerts1/sentry,mitsuhiko/sentry,ifduyue/sentry,looker/sentry,Kryz/sentry,BuildingLink/sentry,mvaled/sentry,JamesMura/sentry,argonemyth/sentry,jokey2k/sentry,BayanGroup/sentry,kevinlondon/sentry,ngonzalvez/sentry,JackDanger/sentry
|
src/sentry/api/serializers/models/event.py
|
src/sentry/api/serializers/models/event.py
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Event
@register(Event)
class EventSerializer(Serializer):
def _get_entries(self, event, user):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
if key == 'user':
continue
entry = {
'data': interface.to_json(),
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user):
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
results[item] = {
'entries': self._get_entries(item, user),
'user': user_data,
}
return results
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'eventID': str(obj.event_id),
'entries': attrs['entries'],
'message': obj.message,
'user': attrs['user'],
'tags': obj.get_tags(),
'platform': obj.platform,
'dateCreated': obj.datetime,
'timeSpent': obj.time_spent,
}
return d
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Event
@register(Event)
class EventSerializer(Serializer):
def _get_entries(self, event, user):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
if key == 'user':
continue
entry = {
'data': interface.to_json(),
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user):
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
results[item] = {
'entries': self._get_entries(item, user),
'user': user_data,
}
return results
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'eventID': str(obj.event_id),
'entries': attrs['entries'],
'message': obj.message,
'user': attrs['user'],
'platform': obj.platform,
'dateCreated': obj.datetime,
'timeSpent': obj.time_spent,
}
return d
|
bsd-3-clause
|
Python
|
71636292d089f16485691f242edf74fcbd72ff2b
|
Enforce PEP8 on readpdf.py
|
sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis
|
jarviscli/plugins/readpdf.py
|
jarviscli/plugins/readpdf.py
|
# importing the modules
import PyPDF2
import pyttsx3
from plugin import plugin
"""
A tool for reading out the pdf files using the jarvis.Uses PyPDF2 and pyttsx3 libraries
"""
@plugin('readpdf')
class readpdfjarvis():
def __init__(self):
self.path = None
def __call__(self, jarvis, s):
self.read_pdf(jarvis)
def read_pdf(self, jarvis):
filename = jarvis.input("Enter your file path with '/' separations:")
pdf = open(filename, 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
for i in range(pdfRead.getNumPages()):
page = pdfRead.getPage(i)
jarvis.say("Page No: " + str(1 + pdfRead.getPageNumber(page)))
pageContent = page.extractText()
jarvis.say(pageContent)
speak = pyttsx3.init()
speak.say(pageContent)
speak.runAndWait()
|
# importing the modules
import PyPDF2
import pyttsx3
from plugin import plugin
"""
A tool for reading out the pdf files using the jarvis.Uses PyPDF2 and pyttsx3 libraries
"""
@plugin('readpdf')
class readpdfjarvis():
def __init__(self):
self.path = None
def __call__(self, jarvis, s):
self.read_pdf(jarvis)
def read_pdf(self, jarvis):
filename = jarvis.input("Enter your file path with '/' seperations:")
pdf = open(filename, 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
for i in range(pdfRead.getNumPages()):
page = pdfRead.getPage(i)
jarvis.say("Page No: "+str(1 + pdfRead.getPageNumber(page)))
pageContent = page.extractText()
jarvis.say(pageContent)
speak = pyttsx3.init()
speak.say(pageContent)
speak.runAndWait()
|
mit
|
Python
|
bb7fa507a31901819dbc7712b13c4223fe6d3585
|
Correct p tags on system message output
|
wong2/sentry,1tush/sentry,boneyao/sentry,zenefits/sentry,fuziontech/sentry,camilonova/sentry,vperron/sentry,JamesMura/sentry,imankulov/sentry,JamesMura/sentry,mvaled/sentry,mvaled/sentry,drcapulet/sentry,fotinakis/sentry,rdio/sentry,mvaled/sentry,ifduyue/sentry,hongliang5623/sentry,Kryz/sentry,fotinakis/sentry,beeftornado/sentry,alexm92/sentry,ewdurbin/sentry,JTCunning/sentry,drcapulet/sentry,mitsuhiko/sentry,alexm92/sentry,SilentCircle/sentry,camilonova/sentry,daevaorn/sentry,BayanGroup/sentry,zenefits/sentry,SilentCircle/sentry,mvaled/sentry,camilonova/sentry,gencer/sentry,imankulov/sentry,songyi199111/sentry,mitsuhiko/sentry,ifduyue/sentry,wujuguang/sentry,JamesMura/sentry,BayanGroup/sentry,songyi199111/sentry,rdio/sentry,1tush/sentry,rdio/sentry,hongliang5623/sentry,BuildingLink/sentry,korealerts1/sentry,zenefits/sentry,jean/sentry,gencer/sentry,1tush/sentry,kevinastone/sentry,NickPresta/sentry,daevaorn/sentry,beni55/sentry,wujuguang/sentry,felixbuenemann/sentry,ngonzalvez/sentry,drcapulet/sentry,felixbuenemann/sentry,songyi199111/sentry,felixbuenemann/sentry,boneyao/sentry,zenefits/sentry,TedaLIEz/sentry,looker/sentry,JackDanger/sentry,vperron/sentry,looker/sentry,rdio/sentry,boneyao/sentry,kevinastone/sentry,Natim/sentry,gencer/sentry,ngonzalvez/sentry,NickPresta/sentry,argonemyth/sentry,beeftornado/sentry,argonemyth/sentry,Natim/sentry,gencer/sentry,SilentCircle/sentry,ewdurbin/sentry,zenefits/sentry,gencer/sentry,looker/sentry,argonemyth/sentry,alexm92/sentry,korealerts1/sentry,nicholasserra/sentry,wujuguang/sentry,looker/sentry,ifduyue/sentry,daevaorn/sentry,beeftornado/sentry,gg7/sentry,BuildingLink/sentry,nicholasserra/sentry,pauloschilling/sentry,jokey2k/sentry,JackDanger/sentry,JTCunning/sentry,ifduyue/sentry,BuildingLink/sentry,ifduyue/sentry,JTCunning/sentry,pauloschilling/sentry,ngonzalvez/sentry,imankulov/sentry,JackDanger/sentry,jokey2k/sentry,kevinlondon/sentry,mvaled/sentry,korealerts1/sentry,BuildingLink/sentry,nicholasserra/sentry,SilentCircle/sentry,BayanGroup/sentry,fuziontech/sentry,Natim/sentry,beni55/sentry,Kryz/sentry,looker/sentry,kevinlondon/sentry,wong2/sentry,NickPresta/sentry,jean/sentry,gg7/sentry,fotinakis/sentry,jean/sentry,pauloschilling/sentry,jean/sentry,daevaorn/sentry,TedaLIEz/sentry,fuziontech/sentry,fotinakis/sentry,NickPresta/sentry,beni55/sentry,wong2/sentry,hongliang5623/sentry,mvaled/sentry,kevinlondon/sentry,llonchj/sentry,JamesMura/sentry,llonchj/sentry,kevinastone/sentry,Kryz/sentry,vperron/sentry,gg7/sentry,llonchj/sentry,jean/sentry,JamesMura/sentry,TedaLIEz/sentry,BuildingLink/sentry,jokey2k/sentry,ewdurbin/sentry
|
src/sentry/templatetags/sentry_activity.py
|
src/sentry/templatetags/sentry_activity.py
|
"""
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from django.utils.html import escape, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.COMMENT: 'left a comment',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
action_str = ACTIVITY_ACTION_STRINGS[item.type]
output = '<p>'
if item.user:
name = item.user.first_name or item.user.email
output += '<strong>%s</strong> %s' % (escape(name), action_str)
else:
output += 'The system %s' % (action_str,)
output += ' — %s</p>' % (timesince(item.datetime),)
if item.type == Activity.COMMENT:
output += linebreaks(item.data['body'])
return mark_safe(output)
|
"""
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from django.utils.html import escape, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.COMMENT: 'left a comment',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
action_str = ACTIVITY_ACTION_STRINGS[item.type]
if item.user:
name = item.user.first_name or item.user.email
output = '<p><strong>%s</strong> %s' % (escape(name), action_str)
else:
output = 'The system %s' % (action_str,)
output += ' — %s</p>' % (timesince(item.datetime),)
if item.type == Activity.COMMENT:
output += linebreaks(item.data['body'])
return mark_safe(output)
|
bsd-3-clause
|
Python
|
736e1f7f4de56a57df3b51058c5b45455e577cf0
|
Fix flake8
|
jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk
|
busstops/management/commands/import_areas.py
|
busstops/management/commands/import_areas.py
|
"""
Import administrative areas from the NPTG.
Usage:
import_areas < AdminAreas.csv
"""
from ..import_from_csv import ImportFromCSVCommand
from ...models import AdminArea
class Command(ImportFromCSVCommand):
def handle_row(self, row):
AdminArea.objects.update_or_create(
id=row['AdministrativeAreaCode'],
defaults={
'atco_code': row['AtcoAreaCode'],
'name': row['AreaName'],
'short_name': row['ShortName'],
'country': row['Country'],
'region_id': row['RegionCode'],
}
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Move Cumbria to the North West.
# Necessary because of the confusing 'North East and Cumbria' Traveline
# region, but Cumbrian bus *services* are actually in the North West now
AdminArea.objects.filter(name='Cumbria').update(region_id='NW')
|
"""
Import administrative areas from the NPTG.
Usage:
import_areas < AdminAreas.csv
"""
from ..import_from_csv import ImportFromCSVCommand
from ...models import AdminArea
class Command(ImportFromCSVCommand):
def handle_row(self, row):
AdminArea.objects.update_or_create(
id=row['AdministrativeAreaCode'],
defaults={
'atco_code': row['AtcoAreaCode'],
'name': row['AreaName'],
'short_name': row['ShortName'],
'country': row['Country'],
'region_id': row['RegionCode'],
}
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Move Cumbria to the North West.
# There is the legacy of the confusing 'North East and Cumbria' Traveline region,
# but actually Cumbrian bus services are in the North West now
AdminArea.objects.filter(name='Cumbria').update(region_id='NW')
|
mpl-2.0
|
Python
|
8a870c6faf8aa50ad7f8c58458c4af9ddef7cfdc
|
Make authbind check graceful.
|
alex/braid,alex/braid
|
braid/authbind.py
|
braid/authbind.py
|
import os
from fabric.api import sudo, run, abort, quiet
from braid import package, hasSudoCapabilities
def install():
package.install('authbind')
def allow(user, port):
path = os.path.join('/etc/authbind/byport', str(port))
needsUpdate = True
with quiet():
state = run('stat -c %U:%a {}'.format(path))
needsUpdate = state.strip().split(':') != [user, '500']
if needsUpdate:
if not hasSudoCapabilities():
abort('Trying to give {} access to port {} but have insufficient '
'capabilities.'.format(user, port))
sudo('touch {}'.format(path))
sudo('chown {0}:{0} {1}'.format(user, path))
sudo('chmod 0500 {}'.format(path))
|
import os
from fabric.api import sudo, run, abort
from braid import package, hasSudoCapabilities
def install():
package.install('authbind')
def allow(user, port):
path = os.path.join('/etc/authbind/byport', str(port))
state = run('stat -c %U:%a {}'.format(path))
if state.strip().split(':') != (user, '500'):
if not hasSudoCapabilities():
abort('Trying to give {} access to port {} but have insufficient '
'capabilities.'.format(user, port))
sudo('touch {}'.format(path))
sudo('chown {0}:{0} {1}'.format(user, path))
sudo('chmod 0500 {}'.format(path))
|
mit
|
Python
|
ad2087daae138d3897fc47f0713c8955352ed6ae
|
add SecretBallotUserIdMiddleware
|
jamesturk/django-secretballot,jamesturk/django-secretballot
|
secretballot/middleware.py
|
secretballot/middleware.py
|
# -*- coding: utf-8 -*-
from hashlib import md5
from django.utils.deprecation import MiddlewareMixin
class SecretBallotMiddleware(MiddlewareMixin):
def process_request(self, request):
request.secretballot_token = self.generate_token(request)
def generate_token(self, request):
raise NotImplementedError
class SecretBallotIpMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
return request.META['REMOTE_ADDR']
class SecretBallotUserIdMiddleware(SecretBallotMiddleware):
"""
As the token is generated based on the user ID, this middleware
should only be used on pages where the user is logged in.
"""
def generate_token(self, request):
return request.user.id
class SecretBallotIpUseragentMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
s = u"".join((request.META['REMOTE_ADDR'], request.META.get('HTTP_USER_AGENT', '')))
return md5(s.encode('utf-8')).hexdigest()
|
# -*- coding: utf-8 -*-
from hashlib import md5
from django.utils.deprecation import MiddlewareMixin
class SecretBallotMiddleware(MiddlewareMixin):
def process_request(self, request):
request.secretballot_token = self.generate_token(request)
def generate_token(self, request):
raise NotImplementedError
class SecretBallotIpMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
return request.META['REMOTE_ADDR']
class SecretBallotIpUseragentMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
s = u"".join((request.META['REMOTE_ADDR'], request.META.get('HTTP_USER_AGENT', '')))
return md5(s.encode('utf-8')).hexdigest()
|
bsd-2-clause
|
Python
|
112cb1eb06034f5afb24f9f1c20052a87d8a6374
|
Update pir_test.py
|
wikkii/raspluonto,wikkii/raspluonto,wikkii/raspluonto,wikkii/raspluonto,wikkii/raspluonto
|
sensor_testing/pir_test.py
|
sensor_testing/pir_test.py
|
# parallax_pir_reva.py - write to screen when movement detected
# (c) BotBook.com - Karvinen, Karvinen, Valtokari
# 22.9.2017 modified from original
import time
import botbook_gpio as gpio
learningPeriod = 30
def main():
pirPin = 7
gpio.mode(pirPin,"in")
#Learning period
print ("learning... " + str(learningPeriod) + " seconds")
time.sleep(learningPeriod) # <1>
while (True):
movement = gpio.read(pirPin) # <2>
if(movement == gpio.HIGH):
print ("Movement detected " + time.ctime())
else:
print ("No movement detected " + time.ctime())
time.sleep(0.3)
if __name__ == "__main__":
main()
|
# parallax_pir_reva.py - write to screen when movement detected
# (c) BotBook.com - Karvinen, Karvinen, Valtokari
# 22.9.2017 modified by Vesa Valli
import time
import botbook_gpio as gpio
learningPeriod = 30
def main():
pirPin = 7
gpio.mode(pirPin,"in")
#Learning period
print ("learning... " + str(learningPeriod) + " seconds")
time.sleep(learningPeriod) # <1>
while (True):
movement = gpio.read(pirPin) # <2>
if(movement == gpio.HIGH):
print ("Movement detected " + time.ctime())
else:
print ("No movement detected " + time.ctime())
time.sleep(0.3)
if __name__ == "__main__":
main()
|
mit
|
Python
|
6c17a81685f4f1b24cefb4760b26e9a33298742c
|
Bump to v1.10.0
|
Cal-CS-61A-Staff/ok-client
|
client/__init__.py
|
client/__init__.py
|
__version__ = 'v1.10.0'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
__version__ = 'v1.9.6'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
|
apache-2.0
|
Python
|
4b54488dd2b40254f6217d98c37690dcb37cf783
|
fix false origin on replies
|
Halibot/halibot,Halibot/halibot
|
halibot/halmodule.py
|
halibot/halmodule.py
|
from .halobject import HalObject
from .message import Message
class HalModule(HalObject):
def reply(self, msg0=None, **kwargs):
# Create the reply message
body = kwargs.get('body', msg0.body)
mtype = kwargs.get('type', msg0.type)
author = kwargs.get('author', msg0.author)
origin = kwargs.get('origin', self.name)
msg = Message(body=body, type=mtype, author=author, origin=origin)
# Synchronous reply?
if msg0.sync:
self.sync_replies[msg0.uuid].append(msg)
else:
self.send_to(msg, [ msg0.origin ])
def hasPermission(self, msg, perm):
return self._hal.auth.hasPermission(msg.origin, msg.identity, perm)
|
from .halobject import HalObject
from .message import Message
class HalModule(HalObject):
def reply(self, msg0=None, **kwargs):
# Create the reply message
body = kwargs.get('body', msg0.body)
mtype = kwargs.get('type', msg0.type)
author = kwargs.get('author', msg0.author)
origin = kwargs.get('origin', msg0.origin)
msg = Message(body=body, type=mtype, author=author, origin=origin)
# Synchronous reply?
if msg0.sync:
self.sync_replies[msg0.uuid].append(msg)
else:
self.send_to(msg, [ msg.origin ])
def hasPermission(self, msg, perm):
return self._hal.auth.hasPermission(msg.origin, msg.identity, perm)
|
bsd-3-clause
|
Python
|
c802426e1c7e45ed456ad92a8b88ab18fba59aa3
|
Update the management command 'clone_metadata' in the ELOs module; add docstrings declaring the functions' behavior
|
yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo
|
commonrepo/elos/management/commands/clone_metadata.py
|
commonrepo/elos/management/commands/clone_metadata.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: [email protected]
# Maintained By: [email protected]
#
'''
Management command ``clone_metadata`` of ELOs in Common Repo projects.
This command will clone the metadata related with specefic ELOs.
'''
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from commonrepo.elos.models import ELO, ELOMetadata
class Command(BaseCommand):
help = 'Clone Metadata of ELOs'
def add_arguments(self, parser):
parser.add_argument('--source',
type=int,
help='Build Metadata of specific ELOs')
parser.add_argument('--target',
nargs='+',
type=int,
help='Build Metadata of specific ELOs')
def handle(self, *args, **options):
try:
elo_source = ELO.objects.get(id=options['source'])
except ELO.DoesNotExist:
raise CommandError(
'Source ELO "%s" does not exist' %
options['source'])
if not elo_source.metadata:
raise CommandError(
'Source Metadata of ELO "%s" does not exist' %
elo_source.id)
for target in options['target']:
try:
elo_target = ELO.objects.get(id=target)
except ELO.DoesNotExist:
raise CommandError('ELO "%s" does not exist' % target)
# Delete original metadata
if elo_target.metadata:
elo_target.metadata.delete()
metadata = elo_source.metadata
metadata.pk = None
metadata.save()
elo_target.metadata = metadata
elo_target.save()
self.stdout.write(
'Successfully clone Metadata to target ELO "%s"' %
elo_target.id)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from commonrepo.elos.models import ELO, ELOMetadata
class Command(BaseCommand):
help = 'Clone Metadata of ELOs'
def add_arguments(self, parser):
parser.add_argument('--source',
type=int,
help='Build Metadata of specific ELOs')
parser.add_argument('--target',
nargs='+',
type=int,
help='Build Metadata of specific ELOs')
def handle(self, *args, **options):
try:
elo_source = ELO.objects.get(id=options['source'])
except ELO.DoesNotExist:
raise CommandError('Source ELO "%s" does not exist' % options['source'])
if not elo_source.metadata:
raise CommandError('Source Metadata of ELO "%s" does not exist' % elo_source.id)
for target in options['target']:
try:
elo_target = ELO.objects.get(id=target)
except ELO.DoesNotExist:
raise CommandError('ELO "%s" does not exist' % target)
# Delete original metadata
if elo_target.metadata:
elo_target.metadata.delete()
metadata = elo_source.metadata
metadata.pk = None
metadata.save()
elo_target.metadata = metadata
elo_target.save()
self.stdout.write('Successfully clone Metadata to target ELO "%s"' % elo_target.id)
|
apache-2.0
|
Python
|
c01c97583e11bfe1c41dd41e7b39d19be22fbb7c
|
use the real paths
|
christopherjwang/rackspace-monitoring-agent,virgo-agent-toolkit/rackspace-monitoring-agent,cp16net/virgo-base,cp16net/virgo-base,AlphaStaxLLC/rackspace-monitoring-agent,cp16net/virgo-base,virgo-agent-toolkit/rackspace-monitoring-agent,cp16net/virgo-base,kaustavha/rackspace-monitoring-agent,cp16net/virgo-base,kaustavha/rackspace-monitoring-agent,AlphaStaxLLC/rackspace-monitoring-agent,christopherjwang/rackspace-monitoring-agent
|
tools/build.py
|
tools/build.py
|
#!/usr/bin/env python
import os
import subprocess
import sys
# TODO: release/debug
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
build_dir = os.path.join(root, 'out')
def build():
if sys.platform != "win32":
cmd = 'make -C %s' % build_dir
else:
cmd = 'tools\win_build.bat'
print cmd
sys.exit(subprocess.call(cmd, shell=True))
def test():
agent = os.path.join(root, 'out', 'Debug', 'monitoring-agent')
cmd = '%s --zip out/Debug/monitoring-test.zip -e tests -c docs/sample.state' % agent
print cmd
rc = subprocess.call(cmd, shell=True)
sys.exit(rc)
commands = {
'build': build,
'test': test,
}
def usage():
print('Usage: build.py [%s]' % ', '.join(commands.keys()))
sys.exit(1)
if len(sys.argv) != 2:
usage()
ins = sys.argv[1]
if not commands.has_key(ins):
print('Invalid command: %s' % ins)
sys.exit(1)
print('Running %s' % ins)
cmd = commands.get(ins)
cmd()
|
#!/usr/bin/env python
import os
import subprocess
import sys
# TODO: release/debug
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
build_dir = os.path.join(root, 'out')
def build():
if sys.platform != "win32":
cmd = 'make -C %s' % build_dir
else:
cmd = 'tools\win_build.bat'
print cmd
sys.exit(subprocess.call(cmd, shell=True))
def test():
agent = os.path.join(root, 'monitoring-agent')
cmd = '%s --zip monitoring-test.zip -e tests -c docs/sample.state' % agent
print cmd
rc = subprocess.call(cmd, shell=True)
sys.exit(rc)
commands = {
'build': build,
'test': test,
}
def usage():
print('Usage: build.py [%s]' % ', '.join(commands.keys()))
sys.exit(1)
if len(sys.argv) != 2:
usage()
ins = sys.argv[1]
if not commands.has_key(ins):
print('Invalid command: %s' % ins)
sys.exit(1)
print('Running %s' % ins)
cmd = commands.get(ins)
cmd()
|
apache-2.0
|
Python
|
1edac6151b4a730039e0782a5cb9777fe7f4a21d
|
Implement basic test
|
arcosta/sci-synergy,arcosta/sci-synergy,arcosta/sci-synergy
|
code/web/scisynergy_flask/tests/test_basic.py
|
code/web/scisynergy_flask/tests/test_basic.py
|
import os
import unittest
from scisynergy_flask import app
class BasicTests(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
def test_main_page(self):
response = self.app.get('/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
def test_maintenance(self):
response = self.app.get('/maintenance')
self.assertEqual(response.status_code, 200)
if __name__ == "__main__":
unittest.main()
|
import os
import unittest
from scisynergy import app
class BasicTests(unittest.TestCase):
def test_main_page(self):
response = self.app.get('/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
|
mit
|
Python
|
492ab05637b92f2decbd8fe60e25783ce63f9733
|
remove ignore from staging
|
Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok
|
server/settings/staging.py
|
server/settings/staging.py
|
""" Do not put secrets in this file. This file is public.
For staging environment (Using Dokku)
"""
import os
import sys
import binascii
from server.settings import RAVEN_IGNORE_EXCEPTIONS
default_secret = binascii.hexlify(os.urandom(24))
ENV = 'staging'
PREFERRED_URL_SCHEME = 'https'
SECRET_KEY = os.getenv('SECRET_KEY', default_secret)
CACHE_TYPE = 'simple'
DEBUG = False
ASSETS_DEBUG = False
TESTING_LOGIN = False
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
db_url = os.getenv('DATABASE_URL')
if db_url:
db_url = db_url.replace('mysql://', 'mysql+pymysql://')
db_url += "&sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
else:
db_url = os.getenv('SQLALCHEMY_URL', 'sqlite:///../oksqlite.db')
SQLALCHEMY_DATABASE_URI = db_url
WTF_CSRF_CHECK_DEFAULT = True
WTF_CSRF_ENABLED = True
try:
os.environ["GOOGLE_ID"]
os.environ["GOOGLE_SECRET"]
except KeyError:
print("Please set the google login variables. source secrets.sh")
sys.exit(1)
GOOGLE = {
'consumer_key': os.environ.get('GOOGLE_ID'),
'consumer_secret': os.environ.get('GOOGLE_SECRET')
}
SENDGRID_AUTH = {
'user': os.environ.get("SENDGRID_USER"),
'key': os.environ.get("SENDGRID_KEY")
}
|
""" Do not put secrets in this file. This file is public.
For staging environment (Using Dokku)
"""
import os
import sys
import binascii
from server.settings import RAVEN_IGNORE_EXCEPTIONS
default_secret = binascii.hexlify(os.urandom(24))
ENV = 'staging'
PREFERRED_URL_SCHEME = 'https'
SECRET_KEY = os.getenv('SECRET_KEY', default_secret)
CACHE_TYPE = 'simple'
DEBUG = False
ASSETS_DEBUG = False
TESTING_LOGIN = False
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
db_url = os.getenv('DATABASE_URL')
if db_url:
db_url = db_url.replace('mysql://', 'mysql+pymysql://')
db_url += "&sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
else:
db_url = os.getenv('SQLALCHEMY_URL', 'sqlite:///../oksqlite.db')
SQLALCHEMY_DATABASE_URI = db_url
WTF_CSRF_CHECK_DEFAULT = True
WTF_CSRF_ENABLED = True
RAVEN_IGNORE_EXCEPTIONS =['werkzeug.exceptions.Forbidden', 'werkzeug.exceptions.NotFound',
'werkzeug.exceptions.Unauthorized']
try:
os.environ["GOOGLE_ID"]
os.environ["GOOGLE_SECRET"]
except KeyError:
print("Please set the google login variables. source secrets.sh")
sys.exit(1)
GOOGLE = {
'consumer_key': os.environ.get('GOOGLE_ID'),
'consumer_secret': os.environ.get('GOOGLE_SECRET')
}
SENDGRID_AUTH = {
'user': os.environ.get("SENDGRID_USER"),
'key': os.environ.get("SENDGRID_KEY")
}
|
apache-2.0
|
Python
|
352583af500746b431d46d7efc3a0d3f931b43a0
|
Fix context processors
|
TamiaLab/PySkCodeOnlineTester,TamiaLab/PySkCodeOnlineTester,TamiaLab/PySkCodeOnlineTester,TamiaLab/PySkCodeOnlineTester
|
skcodeonlinetester/context_processors.py
|
skcodeonlinetester/context_processors.py
|
"""
Extra context processors for the SkCodeOnlineTester app.
"""
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.shortcuts import get_current_site
def app_constants(request):
"""
Constants context processor.
:param request: the current request.
:return: All constants for the app.
"""
site = get_current_site(request)
return {
'APP': {
'TITLE': _('PySkCode test console'),
'AUTHOR': 'Fabien Batteix',
'COPYRIGHT': _('TamiaLab 2016'),
'DESCRIPTION': _('Test console for the PySkCode project.'),
'GOOGLE_SITE_VERIFICATION_CODE': '',
'TWITTER_USERNAME': 'skywodd',
'TWITTER_ACCOUNT_ID': '250273994',
'FACEBOOK_URL': 'https://www.facebook.com/fabien.batteix',
},
'SITE': {
'NAME': site.name,
'DOMAIN': site.domain,
'PROTO': 'https' if request.is_secure() else 'http'
}
}
|
"""
Extra context processors for the SkCodeOnlineTester app.
"""
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.shortcuts import get_current_site
def app_constants(request):
"""
Constants context processor.
:param request: the current request.
:return: All constants for the app.
"""
site = get_current_site(request)
return {
'APP': {
'TITLE': _('Test console for the PySkCode project'),
'TITLE_SHORT': _('PySkCode test console'),
'AUTHOR': 'Fabien Batteix',
'COPYRIGHT': 'TamiaLab 2016',
'DESCRIPTION': _('Test console for the PySkCode project.'),
'GOOGLE_SITE_VERIFICATION_CODE': '',
'TWITTER_USERNAME': 'skywodd',
'TWITTER_ACCOUNT_ID': '250273994',
'FACEBOOK_URL': 'https://www.facebook.com/fabien.batteix',
},
'SITE': {
'NAME': site.name,
'DOMAIN': site.domain,
'PROTO': 'https' if request.is_secure() else 'http'
}
}
|
agpl-3.0
|
Python
|
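A context processor like app_constants only takes effect once it is registered with Django's template engine. A hypothetical settings.py fragment showing the usual wiring; the dotted path is an assumption based on the file path above:

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                # assumed module path for the processor defined above
                "skcodeonlinetester.context_processors.app_constants",
            ],
        },
    },
]

After this, every template render sees APP and SITE without each view passing them explicitly.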
e3548d62aa67472f291f6d3c0c8beca9813d6032
|
Make it possible to step() in a newly created env, rather than throwing AttributeError
|
d1hotpep/openai_gym,Farama-Foundation/Gymnasium,dianchen96/gym,machinaut/gym,dianchen96/gym,d1hotpep/openai_gym,machinaut/gym,Farama-Foundation/Gymnasium
|
gym/envs/toy_text/discrete.py
|
gym/envs/toy_text/discrete.py
|
from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
"""
Has the following members
- nS: number of states
- nA: number of actions
- P: transitions (*)
- isd: initial state distribution (**)
(*) dictionary dict of dicts of lists, where
P[s][a] == [(probability, nextstate, reward, done), ...]
(**) list or array of length nS
"""
def __init__(self, nS, nA, P, isd):
self.action_space = spaces.Discrete(nA)
self.observation_space = spaces.Discrete(nS)
self.nA = nA
self.P = P
self.isd = isd
self.lastaction=None # for rendering
self._reset()
@property
def nS(self):
return self.observation_space.n
def _reset(self):
self.s = categorical_sample(self.isd)
return self.s
def _step(self, a):
transitions = self.P[self.s][a]
i = categorical_sample([t[0] for t in transitions])
p, s, r, d= transitions[i]
self.s = s
self.lastaction=a
return (s, r, d, {"prob" : p})
|
from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
"""
Has the following members
- nS: number of states
- nA: number of actions
- P: transitions (*)
- isd: initial state distribution (**)
(*) dictionary dict of dicts of lists, where
P[s][a] == [(probability, nextstate, reward, done), ...]
(**) list or array of length nS
"""
def __init__(self, nS, nA, P, isd):
self.action_space = spaces.Discrete(nA)
self.observation_space = spaces.Discrete(nS)
self.nA = nA
self.P = P
self.isd = isd
self.lastaction=None # for rendering
@property
def nS(self):
return self.observation_space.n
def _reset(self):
self.s = categorical_sample(self.isd)
return self.s
def _step(self, a):
transitions = self.P[self.s][a]
i = categorical_sample([t[0] for t in transitions])
p, s, r, d= transitions[i]
self.s = s
self.lastaction=a
return (s, r, d, {"prob" : p})
|
mit
|
Python
|
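The gym fix above amounts to initializing state in the constructor so step() works on a brand-new env. A minimal reproduction of the failure mode, with illustrative names:

# Without state initialization, the first step() on a fresh object fails.
class Broken:
    def reset(self):
        self.s = 0
    def step(self, a):
        return self.s  # AttributeError if reset() was never called

env = Broken()
try:
    env.step(0)
except AttributeError as exc:
    print("fresh env:", exc)

env.reset()
print("after reset:", env.step(0))  # 0

Calling the reset logic from __init__, as the commit does, removes the footgun at the cost of sampling an initial state the caller may immediately discard.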
92737e3f95ff94129e52e1fab1f40a0f70550d46
|
Update the ParticleFilterSetOperations
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
hoomd/filter/set_.py
|
hoomd/filter/set_.py
|
from hoomd.filter.filter_ import ParticleFilter
from hoomd import _hoomd
class ParticleFilterSetOperations(ParticleFilter):
def __init__(self, f, g):
if f == g:
raise ValueError("Cannot use same filter for {}"
"".format(self.__class__.__name__))
else:
self._f = f
self._g = g
# Grab the C++ class constructor for the set operation using the class
# variable _cpp_cls_name
getattr(_hoomd, self._cpp_cls_name).__init__(self, f, g)
def __hash__(self):
return hash(hash(self._f) + hash(self._g))
def __eq__(self, other):
if self._symmetric:
return type(self) == type(other) and \
(self._f == other._f or self._f == other._g) and \
(self._g == other._g or self._g == other._f)
else:
return type(self) == type(other) and \
self._f == other._f and self._g == other._g
class SetDifference(_ParticleFilterSetOperations,
_hoomd.ParticleFilterSetDifference):
_cpp_cls_name = 'ParticleFilterSetDifference'
_symmetric = False
class Union(_ParticleFilterSetOperations, _hoomd.ParticleFilterUnion):
_cpp_cls_name = 'ParticleFilterUnion'
_symmetric = True
class Intersection(_ParticleFilterSetOperations,
_hoomd.ParticleFilterIntersection):
_cpp_cls_name = 'ParticleFilterIntersection'
_symmetric = True
|
from hoomd.filter.filter_ import ParticleFilter
from hoomd import _hoomd
class ParticleFilterSetOperations(ParticleFilter):
def __init__(self, f, g):
if f == g:
raise ValueError("Cannot use same filter for {}"
"".format(self.__class__.__name__))
else:
self._f = f
self._g = g
getattr(_hoomd, self._cpp_cls_name).__init__(self, f, g)
def __hash__(self):
return hash(hash(self._f) + hash(self._g))
def __eq__(self, other):
return type(self) == type(other) and \
self._f == other._f and \
self._g == other._g
class SetDifference(ParticleFilterSetOperations,
_hoomd.ParticleFilterSetDifference):
_cpp_cls_name = 'ParticleFilterSetDifference'
class Union(ParticleFilterSetOperations, _hoomd.ParticleFilterUnion):
_cpp_cls_name = 'ParticleFilterUnion'
class Intersection(ParticleFilterSetOperations,
_hoomd.ParticleFilterIntersection):
_cpp_cls_name = 'ParticleFilterIntersection'
|
bsd-3-clause
|
Python
|
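The hoomd change treats union and intersection as symmetric (operand order must not affect equality) while keeping set difference ordered. A small sketch of the same idea; it uses set equality, a simpler equivalent of the pairwise comparison above when the operands are hashable:

class Op:
    _symmetric = False
    def __init__(self, f, g):
        self._f, self._g = f, g
    def __eq__(self, other):
        if self._symmetric:
            # order-insensitive comparison for commutative operations
            return (type(self) == type(other)
                    and {self._f, self._g} == {other._f, other._g})
        return (type(self) == type(other)
                and self._f == other._f and self._g == other._g)

class Union(Op):
    _symmetric = True

class Difference(Op):
    _symmetric = False

print(Union("a", "b") == Union("b", "a"))            # True
print(Difference("a", "b") == Difference("b", "a"))  # False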
8d98fe5570ce37512128d46853000dc860f798b2
|
Update jupyterhub_config.py
|
simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote
|
jupyterhub/jupyterhub_config.py
|
jupyterhub/jupyterhub_config.py
|
# Configuration file for jupyterhub.
from jupyter_client.localinterfaces import public_ips
c = get_config() # noqa
c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'
c.JupyterHub.hub_ip = public_ips()[0]
# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]
if setting_mode == 'virtual_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.VirtualUserSpawner'
# Parent directory in which temporary directory is created for
# each virtual user
# Set this to a drive with well defined capacity quota
# If unset, no workspace would be available
c.Spawner.workspace_dir = '/tmp/remoteapp'
# FIXME: replace me with other authenticator (e.g. GitHub OAuth...)
c.JupyterHub.authenticator_class = (
'remoteappmanager.auth.WorldAuthenticator')
elif setting_mode == 'system_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.SystemUserSpawner'
|
# Configuration file for jupyterhub.
from jupyter_client.localinterfaces import public_ips
c = get_config() # noqa
c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'
c.JupyterHub.hub_ip = public_ips()[0]
# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]
if setting_mode == 'virtual_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.VirtualUserSpawner'
# Parent directory in which temporary directory is created for
# each virtual user
# Set this to a drive with well defined capacity quota
# If unset, no workspace would be available
c.Spawner.workspace_dir = '/tmp/remoteapp'
# FIXME: replace me with other authenticator (e.g. GitHub OAuth...)
c.JupyterHub.authenticator_class = (
'remoteappmanager.auth.WorldAuthenticator')
elif setting_mode == 'system_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.Spawner'
|
bsd-3-clause
|
Python
|
bca7f7f6ae870a0a307566ee1735e899596d3f99
|
Simplify the brightness calculation, in preparation for multi-LED drips
|
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
|
hardware/mote/mote_icicles.py
|
hardware/mote/mote_icicles.py
|
import time
from random import randint
from mote import Mote
mote = Mote()
mote.configure_channel(1, 16, False)
mote.configure_channel(2, 16, False)
mote.configure_channel(3, 16, False)
mote.configure_channel(4, 16, False)
full_brightness = 40
class Icicle:
def __init__(self, channel):
self.channel = channel
self.current_pixel = 0
self.start_random_wait_for_next_drip()
def step(self):
# Turn off previous pixel
mote.set_pixel(self.channel, self.previous_pixel(), 0, 0, 0)
# Check if we are pausing between drips
if self.frames_to_wait > 0:
self.frames_to_wait -= 1
return
# Advance to next pixel
brightness = full_brightness
mote.set_pixel(self.channel, self.current_pixel, brightness, brightness, brightness)
# Advance pixel number, ready for next frame
self.current_pixel = self.next_pixel()
# If the next pixel will be zero, set up a random wait before starting the
# next cycle:
if self.current_pixel == 0:
self.start_random_wait_for_next_drip()
def next_pixel(self, delta = 1):
new_pixel = self.current_pixel + delta
if not self.valid_pixel(new_pixel):
new_pixel -= 16
return new_pixel
def previous_pixel(self, delta = 1):
new_pixel = self.current_pixel - delta
if not self.valid_pixel(new_pixel):
new_pixel += 16
return new_pixel
def valid_pixel(self, pixel):
return pixel >=0 and pixel <= 15
def start_random_wait_for_next_drip(self):
self.frames_to_wait = randint(15, 30)
if __name__ == "__main__":
mote.clear()
icicles = [
Icicle(1),
Icicle(2),
Icicle(3),
Icicle(4)
]
while True:
for icicle in icicles:
icicle.step()
mote.show()
time.sleep(0.2)
|
import time
from random import randint
from mote import Mote
mote = Mote()
mote.configure_channel(1, 16, False)
mote.configure_channel(2, 16, False)
mote.configure_channel(3, 16, False)
mote.configure_channel(4, 16, False)
max_brightness = 40
class Icicle:
def __init__(self, channel):
self.channel = channel
self.current_pixel = 0
self.start_random_wait_for_next_drip()
def step(self):
# Turn off previous pixel
mote.set_pixel(self.channel, self.previous_pixel(), 0, 0, 0)
# Check if we are pausing between drips
if self.frames_to_wait > 0:
self.frames_to_wait -= 1
return
# Advance to next pixel
brightness = max_brightness -(2*self.current_pixel)
mote.set_pixel(self.channel, self.current_pixel, brightness, brightness, brightness)
# Advance pixel number, ready for next frame
self.current_pixel = self.next_pixel()
# If the next pixel will be zero, set up a random wait before starting the
# next cycle:
if self.current_pixel == 0:
self.start_random_wait_for_next_drip()
def next_pixel(self, delta = 1):
new_pixel = self.current_pixel + delta
if not self.valid_pixel(new_pixel):
new_pixel -= 16
return new_pixel
def previous_pixel(self, delta = 1):
new_pixel = self.current_pixel - delta
if not self.valid_pixel(new_pixel):
new_pixel += 16
return new_pixel
def valid_pixel(self, pixel):
return pixel >=0 and pixel <= 15
def start_random_wait_for_next_drip(self):
self.frames_to_wait = randint(15, 30)
if __name__ == "__main__":
mote.clear()
icicles = [
Icicle(1),
Icicle(2),
Icicle(3),
Icicle(4)
]
while True:
for icicle in icicles:
icicle.step()
mote.show()
time.sleep(0.2)
|
mit
|
Python
|
9718e6c216b8d5205a19f095593ec099004785a6
|
add app
|
qisanstudio/qstudio-launch,qisanstudio/qstudio-launch
|
src/studio/launch/commands/app_commands.py
|
src/studio/launch/commands/app_commands.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import json
import importlib
from sh import pip
from termcolor import colored
from studio.frame.config import common as common_config
from studio.launch.base import manager
app_manager = manager.subcommand('app')
VASSALS = common_config['UWSGI_EMPEROR']
def _get_app(appname):
try:
module = importlib.import_module(appname)
except ImportError:
print(colored('Can\'t import app %s.' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
for name in dir(module):
app = getattr(module, name)
if hasattr(app, 'config'):
return app
else:
print(colored('Can\'t find app %s\'s entry' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
def _iter_all():
for pkg in pip.freeze():
appname, _ = pkg.split('==')
if 'microsite' == appname:
yield appname
def _get_pkgs():
return [str(pkg.split('==')[0]) for pkg in pip.freeze()]
def _get_appnames():
pkgs = _get_pkgs()
return [pkg[6:] for pkg in pkgs if pkg.startswith('qsapp-')]
def _mk_uwsgi_config(config):
config_d = {}
for k, v in config.items():
if k.startswith('UWSGI_'):
k = k[6:].replace('_', '-')
config_d[k] = v
return config_d
def _register(appname, **config_d):
vassals_dir = VASSALS
try:
os.makedirs(vassals_dir)
except OSError:
pass
uwsgi_cfg = {}
uwsgi_cfg.setdefault('env', []).extend([
# 'STUDIO_ENVIRON=%s' % common_config['ENVIRON'],
'STUDIO_APPNAME=%s' % appname])
uwsgi_cfg.update(config_d)
print('Registering app %s:' % appname, end=' ')
with open(os.path.join(vassals_dir,
'%s.json' % appname), 'wb') as fp:
json.dump({'uwsgi': uwsgi_cfg}, fp)
print(colored('ok', 'green', attrs=['bold']) + '.')
@app_manager.command
def add(*appnames):
_names = _get_appnames()
for appname in appnames:
if appname in _names:
app = _get_app(appname)
config_d = _mk_uwsgi_config(app.config)
_register(appname, **config_d)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import importlib
from sh import pip
from termcolor import colored
from studio.frame.config import common as common_config
from studio.launch.base import manager
app_manager = manager.subcommand('app')
VASSAL = common_config['UWSGI_EMPEROR']
def _get_app(appname):
try:
module = importlib.import_module(appname)
except ImportError:
print(colored('Can\'t import app %s.' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
for name in dir(module):
app = getattr(module, name)
if hasattr(app, 'config'):
return app
else:
print(colored('Can\'t find app %s\'s entry' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
def _iter_all():
for pkg in pip.freeze():
appname, _ = pkg.split('==')
if 'microsite' == appname:
yield appname
def _get_pkgs():
return [str(pkg.split('==')[0]) for pkg in pip.freeze()]
def _get_appnames():
pkgs = _get_pkgs()
return [pkg[6:] for pkg in pkgs if pkg.startswith('qsapp-')]
def _mk_uwsgi_config(config):
conifg_d = {}
for k, v in config.items():
if k.startswith('UWSGI_'):
k = k[6:].replace('_', '-')
conifg_d[k] = v
print(VASSAL)
@app_manager.command
def add(*appnames):
_names = _get_appnames()
for appname in appnames:
if appname in _names:
app = _get_app(appname)
_mk_uwsgi_config(app.config)
|
mit
|
Python
|
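The _register helper above persists uwsgi options as a vassal JSON file for the emperor to pick up, renaming UWSGI_* config keys to dashed uwsgi option names. A rough, self-contained sketch of that flow; the config keys, app name, and temp directory are made up:

import json
import os
import tempfile

config = {"UWSGI_HTTP_SOCKET": ":8080", "UWSGI_PROCESSES": 4, "DEBUG": True}
# keep only UWSGI_* keys, strip the prefix, and dash-separate the rest
uwsgi_cfg = {k[6:].replace("_", "-"): v
             for k, v in config.items() if k.startswith("UWSGI_")}
uwsgi_cfg.setdefault("env", []).append("STUDIO_APPNAME=demo")

vassals_dir = tempfile.mkdtemp()
path = os.path.join(vassals_dir, "demo.json")
with open(path, "w") as fp:
    json.dump({"uwsgi": uwsgi_cfg}, fp)
with open(path) as fp:
    print(fp.read())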
6380aabe25e38d198b6c4e10d126d6fd97860c85
|
remove Simple.validate function
|
kolodziej/Flask-PAM,KujiraProject/Flask-PAM,KujiraProject/Flask-PAM,kolodziej/Flask-PAM
|
flask_pam/token/simple.py
|
flask_pam/token/simple.py
|
# -*- coding: utf-8 -*-
from hashlib import sha256
from token import Token
class Simple(Token):
"""Simple token implementation. It's not safe. Only for testing purposes!"""
def generate(self):
return sha256(self.username).hexdigest()
|
# -*- coding: utf-8 -*-
from hashlib import sha256
from token import Token
class Simple(Token):
"""Simple token implementation. It's not safe. Only for testing purposes!"""
def generate(self):
return sha256(self.username).hexdigest()
def validate(self, token):
return sha256(self.username).hexdigest() == token
|
mit
|
Python
|
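The removed Simple.validate merely regenerated the token and compared it, which suggests the check can live once on a base class. A plausible sketch of that refactoring (not necessarily how Flask-PAM's actual Token base is written):

from hashlib import sha256

class Token:
    def __init__(self, username):
        self.username = username
    def generate(self):
        raise NotImplementedError
    def validate(self, token):
        # valid for any subclass whose generate() is deterministic
        return self.generate() == token

class Simple(Token):
    def generate(self):
        return sha256(self.username.encode()).hexdigest()

t = Simple("alice")
print(t.validate(t.generate()))  # True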
eb57a07277f86fc90b7845dc48fb5cde1778c8d4
|
Test cut_by_number with words and normal chunk numbers
|
WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos
|
test/unit_test/test_cut_number.py
|
test/unit_test/test_cut_number.py
|
from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
def test_cut_by_number_normal(self):
assert cut_by_number("Text", 1) == ["Text"]
assert cut_by_number("This text has five words", 5) == \
["This ", "text ", "has ", "five ", "words"]
assert cut_by_number("Hanging space ", 2) == ["Hanging ", "space "]
|
from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
|
mit
|
Python
|
c05d0f2dd77678133af1bbf49915aeaf24efbedc
|
simplify line counting method
|
Max00355/HTTPLang
|
httplang/httplang.py
|
httplang/httplang.py
|
import parse
import sys
import utils
import repl
def main():
if len(sys.argv) < 2:
repl.enterREPL()
sys.exit()
inputFile = sys.argv[1]
run(inputFile)
def run(file_):
with open(file_, 'rb') as file:
#pass enumerated file so we can get line numbers starting at 1
parse.preParse(enumerate(file,1))
return utils.baseVariables
if __name__ == "__main__":
main()
|
import parse
import sys
import utils
import repl
def main():
if len(sys.argv) < 2:
repl.enterREPL()
sys.exit()
inputFile = sys.argv[1]
run(inputFile)
def run(file_):
with open(file_, 'rb') as file:
#pass enumerated file so we can get line numbers
parse.preParse(enumerate(file))
return utils.baseVariables
if __name__ == "__main__":
main()
|
mit
|
Python
|
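The one-line fix above relies on enumerate's start argument: enumerate(f, 1) yields 1-based indices, matching how editors number source lines, instead of the default 0-based count:

lines = ["first", "second", "third"]
for lineno, text in enumerate(lines, 1):
    print(lineno, text)
# 1 first
# 2 second
# 3 third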
cf84dfda73032a276b2d6f63f2c70f69e61f89fe
|
Check validity of the config to avoid silent errors.
|
delftrobotics/keras-retinanet
|
keras_retinanet/utils/config.py
|
keras_retinanet/utils/config.py
|
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import configparser
import numpy as np
import keras
from ..utils.anchors import AnchorParameters
def read_config_file(config_path):
config = configparser.ConfigParser()
config.read(config_path)
assert os.path.isfile(config_path), "Could not find {}.".format(config_path)
assert 'anchor_parameters' in config, \
"Malformed config file. Verify that it contains the anchor_parameters section."
assert {'sizes', 'strides', 'ratios', 'scales'} <= set(config['anchor_parameters']), \
"Malformed config file. Verify that it contains the following keys: sizes, strides, ratios and scales."
return config
def parse_anchor_parameters(config):
ratios = np.array(list(map(float, config['anchor_parameters']['ratios'].split(' '))), keras.backend.floatx())
scales = np.array(list(map(float, config['anchor_parameters']['scales'].split(' '))), keras.backend.floatx())
sizes = list(map(int, config['anchor_parameters']['sizes'].split(' ')))
strides = list(map(int, config['anchor_parameters']['strides'].split(' ')))
return AnchorParameters(sizes, strides, ratios, scales)
|
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import configparser
import numpy as np
import keras
from ..utils.anchors import AnchorParameters
def read_config_file(config_path):
config = configparser.ConfigParser()
config.read(config_path)
return config
def parse_anchor_parameters(config):
ratios = np.array(list(map(float, config['anchor_parameters']['ratios'].split(' '))), keras.backend.floatx())
scales = np.array(list(map(float, config['anchor_parameters']['scales'].split(' '))), keras.backend.floatx())
sizes = list(map(int, config['anchor_parameters']['sizes'].split(' ')))
strides = list(map(int, config['anchor_parameters']['strides'].split(' ')))
return AnchorParameters(sizes, strides, ratios, scales)
|
apache-2.0
|
Python
|
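The new assertions above pin down exactly what a valid config must contain: an anchor_parameters section with sizes, strides, ratios, and scales keys. A config that would pass the checks, parsed in memory; the numeric values are illustrative:

import configparser

text = """
[anchor_parameters]
sizes   = 32 64 128 256 512
strides = 8 16 32 64 128
ratios  = 0.5 1 2
scales  = 1 1.2599 1.5874
"""
config = configparser.ConfigParser()
config.read_string(text)
assert {'sizes', 'strides', 'ratios', 'scales'} <= set(config['anchor_parameters'])
print(config['anchor_parameters']['ratios'].split(' '))  # ['0.5', '1', '2']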
91e916cb67867db9ce835be28b31904e6efda832
|
Add comment to new test
|
aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy
|
spacy/tests/regression/test_issue1727.py
|
spacy/tests/regression/test_issue1727.py
|
'''Test that models with no pretrained vectors can be deserialized correctly
after vectors are added.'''
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
|
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
|
mit
|
Python
|
3826140004b0686f9f262756da20c5163fc5b80d
|
update icinga_simple format string handling
|
tobes/py3status,valdur55/py3status,vvoland/py3status,Andrwe/py3status,Spirotot/py3status,guiniol/py3status,docwalter/py3status,ultrabug/py3status,valdur55/py3status,ultrabug/py3status,ultrabug/py3status,Shir0kamii/py3status,tobes/py3status,Andrwe/py3status,guiniol/py3status,alexoneill/py3status,valdur55/py3status
|
py3status/modules/icinga_simple.py
|
py3status/modules/icinga_simple.py
|
# -*- coding: utf-8 -*-
"""
Display Icinga2 service status information
Configuration Parameters:
- cache_timeout: how often the data should be updated
- base_url: the base url to the icinga-web2 services list
- disable_acknowledge: enable or disable counting of acknowledged service problems
- user: username to authenticate against the icinga-web2 interface
- password: password to authenticate against the icinga-web2 interface
- format: define a format string like "CRITICAL: %d"
- color: define a color for the output
- status: set the status you want to obtain (0=OK,1=WARNING,2=CRITICAL,3=UNKNOWN)
@author Ben Oswald <[email protected]>
@license MIT License <https://opensource.org/licenses/MIT>
@source https://github.com/nazco/i3status-modules
"""
from time import time
import requests
class Py3status:
"""
"""
STATUS_NAMES = {
0: 'OK',
1: 'WARNING',
2: 'CRITICAL',
3: 'UNKNOWN'
}
# available configuration parameters
cache_timeout = 60
base_url = ''
disable_acknowledge = False
url_parameters = "?service_state={service_state}&format=json"
user = ''
password = ''
ca = True
format = '{status_name}: {count}'
color = '#ffffff'
status = 0
def get_status(self, i3s_output_list, i3s_config):
response = {
'color': self.color,
'cached_until': time() + self.cache_timeout,
'full_text': self.format.format(
status_name=self.STATUS_NAMES.get(self.status),
count=self._query_service_count(self.status)
)
}
return response
def _query_service_count(self, state):
if self.disable_acknowledge:
self.url_parameters = self.url_parameters + "&service_handled=0"
result = requests.get(
self.base_url + self.url_parameters.format(service_state=state),
auth=(self.user, self.password), verify=self.ca)
return len(result.json())
if __name__ == "__main__":
pass
|
# -*- coding: utf-8 -*-
"""
Display Icinga2 service status information
Configuration Parameters:
- cache_timeout: how often the data should be updated
- base_url: the base url to the icinga-web2 services list
- disable_acknowledge: enable or disable counting of acknowledged service problems
- user: username to authenticate against the icinga-web2 interface
- password: password to authenticate against the icinga-web2 interface
- format: define a format string like "CRITICAL: %d"
- color: define a color for the output
- status: set the status you want to obtain (0=OK,1=WARNING,2=CRITICAL,3=UNKNOWN)
@author Ben Oswald <[email protected]>
@license MIT License <https://opensource.org/licenses/MIT>
@source https://github.com/nazco/i3status-modules
"""
from time import time
import requests
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 60
base_url = ''
disable_acknowledge = False
url_parameters = "?service_state={service_state}&format=json"
user = ''
password = ''
ca = True
format = ''
color = '#ffffff'
status = 0
def get_status(self, i3s_output_list, i3s_config):
response = {
'color': self.color,
'cached_until': time() + self.cache_timeout,
'full_text': self.format % self._query_service_count(self.status)
}
return response
def _query_service_count(self, state):
if self.disable_acknowledge:
self.url_parameters = self.url_parameters + "&service_handled=0"
result = requests.get(
self.base_url + self.url_parameters.format(service_state=state),
auth=(self.user, self.password), verify=self.ca)
return len(result.json())
if __name__ == "__main__":
pass
|
bsd-3-clause
|
Python
|
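The module update swaps printf-style "%" substitution for str.format with named fields, so a user-supplied format can reference, reorder, or omit status_name and count freely:

STATUS_NAMES = {0: 'OK', 1: 'WARNING', 2: 'CRITICAL', 3: 'UNKNOWN'}
fmt = '{status_name}: {count}'
print(fmt.format(status_name=STATUS_NAMES[2], count=7))  # CRITICAL: 7

# extra keyword arguments are simply ignored, so partial formats work too
print('{count} problems'.format(status_name='unused', count=3))  # 3 problems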
30e984a0517e6443835f113c3a479aa8302ef14f
|
Update profile url on amazon tests
|
python-social-auth/social-core,python-social-auth/social-core
|
social_core/tests/backends/test_amazon.py
|
social_core/tests/backends/test_amazon.py
|
import json
from .oauth import OAuth2Test
class AmazonOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://api.amazon.com/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'user_id': 'amzn1.account.ABCDE1234',
'email': '[email protected]',
'name': 'Foo Bar'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
class AmazonOAuth2BrokenServerResponseTest(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'Request-Id': '02GGTU7CWMNFTV3KH3J6',
'Profile': {
'Name': 'Foo Bar',
'CustomerId': 'amzn1.account.ABCDE1234',
'PrimaryEmail': '[email protected]'
}
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
import json
from .oauth import OAuth2Test
class AmazonOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'user_id': 'amzn1.account.ABCDE1234',
'email': '[email protected]',
'name': 'Foo Bar'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
class AmazonOAuth2BrokenServerResponseTest(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'Request-Id': '02GGTU7CWMNFTV3KH3J6',
'Profile': {
'Name': 'Foo Bar',
'CustomerId': 'amzn1.account.ABCDE1234',
'PrimaryEmail': '[email protected]'
}
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
bsd-3-clause
|
Python
|
a85019e7c5e117467d0ce3bf30b9a7589cd17958
|
Update create_test_cutout
|
seung-lab/Julimaps,seung-lab/Julimaps
|
src/tasks/python/create_test_cutout.py
|
src/tasks/python/create_test_cutout.py
|
from cloudvolume import CloudVolume
image_in = 'gs://neuroglancer/pinky100_v0/image_single_slices'
image_out = 'gs://neuroglancer/pinky100_v0/test_image'
image_mip = 0
roi_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/roicc'
roi_out = 'gs://neuroglancer/pinky100_v0/test_image/roicc'
roi_mip = 6
cfsplit_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfsplit'
cfsplit_out = 'gs://neuroglancer/pinky100_v0/test_image/cfsplit'
cfsplit_mip = 2
cfmanual_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfmanual'
cfmanual_out = 'gs://neuroglancer/pinky100_v0/test_image/cfmanual'
cfmanual_mip = 5
match_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/nccnet'
match_out = 'gs://neuroglancer/pinky100_v0/test_image/nccnet'
match_mip = 2
dst_in = 'gs://neuroglancer/pinky100_v0/aligned_test_v5'
dst_mip = 0
src_dst = [(cfmanual_in, cfmanual_out, cfmanual_mip)]
z_slice = slice(199, 208)
src_mip = 0
def scale_slice(s, src_mip, dst_mip):
scale = 1/2**(dst_mip - src_mip)
return slice(int(s.start*scale), int(s.stop*scale))
def scale_slices(x_slice, y_slice, z_slice, src_mip, dst_mip):
return (scale_slice(x_slice, src_mip, dst_mip),
scale_slice(y_slice, src_mip, dst_mip),
scale_slice(z_slice, src_mip, dst_mip))
def get_cloudvolume(path, mip):
return CloudVolume(path, mip=mip)
def update_info_mips(cv, no_of_mips=6):
print("updating info mips")
for mip in range(1,no_of_mips+1):
factor = (2**mip, 2**mip, 1)
cv.add_scale(factor)
cv.commit_info()
def get_xy_slice(cv):
o = cv.voxel_offset
s = cv.shape
return slice(o[0], o[0]+s[0]), slice(o[1], o[1]+s[1])
for (src_path, dst_path, mip) in src_dst:
print(src_path)
print(dst_path)
print(mip)
cv = get_cloudvolume(dst_path, 0)
update_info_mips(cv, 6)
dst_cv = get_cloudvolume(dst_path, mip)
src_cv = get_cloudvolume(src_path, mip)
sl = get_xy_slice(dst_cv) + (z_slice,)
print(sl)
dst_cv[sl] = src_cv[sl]
|
from cloudvolume import CloudVolume
image_in = 'gs://neuroglancer/pinky100_v0/image_single_slices'
image_out = 'gs://neuroglancer/pinky100_v0/test_image'
image_mip = 0
roi_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/roicc'
roi_out = 'gs://neuroglancer/pinky100_v0/test_image/roicc'
roi_mip = 6
cfsplit_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfsplit'
cfsplit_out = 'gs://neuroglancer/pinky100_v0/test_image/cfsplit'
cfsplit_mip = 2
match_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/nccnet'
match_out = 'gs://neuroglancer/pinky100_v0/test_image/nccnet'
match_mip = 2
dst_in = 'gs://neuroglancer/pinky100_v0/aligned_test_v5'
dst_mip = 0
src_dst = [(cfsplit_in, cfsplit_out, cfsplit_mip),
(match_in, match_out, match_mip)]
z_slice = slice(199, 208)
src_mip = 0
def scale_slice(s, src_mip, dst_mip):
scale = 1/2**(dst_mip - src_mip)
return slice(int(s.start*scale), int(s.stop*scale))
def scale_slices(x_slice, y_slice, z_slice, src_mip, dst_mip):
return (scale_slice(x_slice, src_mip, dst_mip),
scale_slice(y_slice, src_mip, dst_mip),
scale_slice(z_slice, src_mip, dst_mip))
def get_cloudvolume(path, mip):
return CloudVolume(path, mip=mip)
def update_info_mips(cv, no_of_mips=6):
print("updating info mips")
for mip in range(1,no_of_mips+1):
factor = (2**mip, 2**mip, 1)
cv.add_scale(factor)
cv.commit_info()
def get_xy_slice(cv):
o = cv.voxel_offset
s = cv.shape
return slice(o[0], o[0]+s[0]), slice(o[1], o[1]+s[1])
for (src_path, dst_path, mip) in src_dst:
print(src_path)
print(dst_path)
print(mip)
cv = get_cloudvolume(dst_path, 0)
update_info_mips(cv, 6)
dst_cv = get_cloudvolume(dst_path, mip)
src_cv = get_cloudvolume(src_path, mip)
sl = get_xy_slice(dst_cv) + (z_slice,)
print(sl)
dst_cv[sl] = src_cv[sl]
|
mit
|
Python
|
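scale_slice above converts a pixel range between mip levels, where each mip level halves the resolution, so a mip-0 range shrinks by a factor of 2**dst_mip. A quick check of the arithmetic (Python 3 division assumed):

def scale_slice(s, src_mip, dst_mip):
    scale = 1 / 2 ** (dst_mip - src_mip)
    return slice(int(s.start * scale), int(s.stop * scale))

print(scale_slice(slice(0, 1024), 0, 6))  # slice(0, 16): 1024 px at mip 0 -> 16 px at mip 6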
6ae4f3a71a80d7fe5bb1abe6925a05c4fe811f3c
|
bump version
|
simas/django-forms-builder,simas/django-forms-builder
|
forms_builder/__init__.py
|
forms_builder/__init__.py
|
__version__ = "9.7.16"
|
__version__ = "0.12.2"
|
bsd-2-clause
|
Python
|
a58646ee72fc894a2f2b885b242cc283a0addd7c
|
remove args
|
derwolfe/teiler,derwolfe/teiler
|
src/app.py
|
src/app.py
|
import argparse
import os
from actions import server, client
# the main entry point for the application
# for simplicity, let's decide that the user decides at runtime to listen
# and the server decides to serve
# location from which files should be served
app_directory = '/home/chris/blaster'
def main():
# get the arguments
parser = argparse.ArgumentParser(description="Exchange files!")
parser.add_argument('action',
help="To be the server, type serve; to be the client, type listen",
)
args = parser.parse_args()
app_runner(args.action)
def app_runner(how):
if how == "serve":
if os.path.exists(app_directory) == False:
os.mkdir(app_directory)
server.main(app_directory)
elif how == "listen":
client.main()
else:
return u'Please specify either listen or serve'
if __name__ == '__main__':
main()
|
import argparse
import os
from actions import server, client
# the main entry point for the application
# for simplicity, let's decide that the user decides at runtime to listen
# and the server decides to serve
# location from which files should be served
app_directory = '/home/chris/blaster'
def main():
# get the arguments
parser = argparse.ArgumentParser(description="Exchange files!")
parser.add_argument('action',
help="To be the server, type serve; to be the client, type listen",
)
# parser.add_argument('directory',
# help="The top level directory from which to serve files, e.g. '~/Downloads'",
# )
args = parser.parse_args()
app_runner(args.action) #, args.directory)
def app_runner(how):
if how == "serve":
if os.path.exists(app_directory) == False:
os.mkdir(app_directory)
server.main(app_directory)
elif how == "listen":
client.main()
else:
return u'Please specify either listen or serve'
if __name__ == '__main__':
main()
|
mit
|
Python
|
0f216b43f42ebabedda701fafefe271a223798cb
|
Fix mcscf example
|
gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf
|
examples/mcscf/41-mcscf_with_given_densityfit_ints.py
|
examples/mcscf/41-mcscf_with_given_densityfit_ints.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
import tempfile
import h5py
from pyscf import gto, df, scf, mcscf
'''
Input Cholesky decomposed integrals for CASSCF
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
#
# Integrals in memory. The size of the integral array is (M,N*(N+1)/2), where
# the last two AO indices are compressed due to the symmetry
#
int3c = df.incore.cholesky_eri(mol, auxbasis='ccpvdz-fit')
mf = scf.density_fit(scf.RHF(mol))
mf.with_df._cderi = int3c
mf.kernel()
# 3-center DF or Cholesky decomposed integrals need to be initialized once in
# mf.with_df._cderi. The DFCASSCF method automatically uses the approximate integrals
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
#
# Integrals on disk
#
ftmp = tempfile.NamedTemporaryFile()
df.outcore.cholesky_eri(mol, ftmp.name, auxbasis='ccpvdz-fit')
with h5py.File(ftmp.name, 'r') as file1:
mf = scf.density_fit(scf.RHF(mol))
# Note, here the integral object file1['eri_mo'] are not loaded in memory.
# It is still the HDF5 array object held on disk. The HDF5 array can be used
# the same way as the regular numpy ndarray stored in memory.
mf.with_df._cderi = file1['eri_mo']
mf.kernel()
# Note the mc object must be put inside the "with" statement block because it
# still needs access the HDF5 integral array on disk
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
|
#!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
import tempfile
import h5py
from pyscf import gto, df, scf, mcscf
'''
Input Cholesky decomposed integrals for CASSCF
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
#
# Integrals in memory. The size of the integral array is (M,N*(N+1)/2), where
# the last two AO indices are compressed due to the symmetry
#
int3c = df.incore.cholesky_eri(mol, auxbasis='ccpvdz-fit')
mf = scf.density_fit(scf.RHF(mol))
mf._cderi = int3c
mf.kernel()
# 3-center DF or Cholesky decomposed integrals need to be initialized once in
# mf._cderi. The DFCASSCF method automatically uses the approximate integrals
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
#
# Integrals on disk
#
ftmp = tempfile.NamedTemporaryFile()
df.outcore.cholesky_eri(mol, ftmp.name, auxbasis='ccpvdz-fit')
with h5py.File(ftmp.name, 'r') as file1:
mf = scf.density_fit(scf.RHF(mol))
# Note, here the integral object file1['eri_mo'] are not loaded in memory.
# It is still the HDF5 array object held on disk. The HDF5 array can be used
# the same way as the regular numpy ndarray stored in memory.
mf._cderi = file1['eri_mo']
mf.kernel()
# Note the mc object must be put inside the "with" statement block because it
# still needs access the HDF5 integral array on disk
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
|
apache-2.0
|
Python
|
9a4f1da48e72627aa0ff358a3dafe8bb5639482a
|
refresh access token on each verification
|
rjw57/componentsdb,rjw57/componentsdb,rjw57/componentsdb
|
componentsdb/ui.py
|
componentsdb/ui.py
|
"""
Traditional Web UI.
"""
from functools import wraps
from flask import (
Blueprint, redirect, url_for, render_template, request, session, g
)
from werkzeug.exceptions import BadRequest, Unauthorized
from componentsdb.app import set_current_user_with_token
from componentsdb.auth import user_for_google_id_token
ui = Blueprint(
'ui', __name__, template_folder='ui/templates', static_folder='ui/static',
static_url_path='/ui_static',
)
AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'
def try_verify_session():
"""Like verify_session but return a boolean indicating success rather than
raising an exception."""
try:
verify_session()
except Unauthorized:
return False
return True
def verify_session():
"""Verify the authorisation in the current session. Raises Unauthorized if
the session is not authorised. Sets current_user if the session is
authorised.
"""
t = session.get(AUTH_TOKEN_SESSION_KEY)
if t is None:
raise Unauthorized('no user token provided')
set_current_user_with_token(t)
# Update the token in the session to make sure that the user always has a
# good long expiry windows
session[AUTH_TOKEN_SESSION_KEY] = g.current_user.token
def auth_or_signin(f):
"""Decorator for a view which re-directs to the sign in page if there is no
current user. The sign in page is given a query string which requests the
current URL as the redirect."""
@wraps(f)
def view(*args, **kwargs):
if not try_verify_session():
return redirect(url_for('ui.signin', target=request.url))
return f(*args, **kwargs)
return view
@ui.route('/')
@auth_or_signin
def index():
return render_template('index.html')
@ui.route('/auth/signin')
def signin():
redir_url = request.args.get('target', url_for('ui.index'))
# Already signed in?
if try_verify_session():
return redirect(redir_url)
# Have we been given a token?
token = request.args.get('token', None)
if token is not None:
set_current_user_with_token(token)
return redirect(redir_url)
# Show sign in
return render_template('signin.html')
@ui.route('/auth/google')
def signin_with_google_token():
redir_url = request.args.get('target', url_for('ui.index'))
token = request.args.get('token', None)
if token is None:
raise BadRequest('no token given')
# Get auth token and add to session
user = user_for_google_id_token(request.args['token'])
session[AUTH_TOKEN_SESSION_KEY] = user.token
return redirect(redir_url)
@ui.route('/auth/signout')
def signout():
redir_url = request.args.get('target', url_for('ui.index'))
# Clear token from user session
del session[AUTH_TOKEN_SESSION_KEY]
return redirect(redir_url)
|
"""
Traditional Web UI.
"""
from functools import wraps
from flask import (
Blueprint, redirect, url_for, render_template, request, session
)
from werkzeug.exceptions import BadRequest, Unauthorized
from componentsdb.app import set_current_user_with_token
from componentsdb.auth import user_for_google_id_token
ui = Blueprint(
'ui', __name__, template_folder='ui/templates', static_folder='ui/static',
static_url_path='/ui_static',
)
AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'
def try_verify_session():
"""Like verify_session but return a boolean indicating success rather than
raising an exception."""
try:
verify_session()
except Unauthorized:
return False
return True
def verify_session():
"""Verify the authorisation in the current session. Raises Unauthorized if
the session is not authorised. Sets current_user if the session is
authorised.
"""
t = session.get(AUTH_TOKEN_SESSION_KEY)
if t is None:
raise Unauthorized('no user token provided')
set_current_user_with_token(t)
def auth_or_signin(f):
"""Decorator for a view which re-directs to the sign in page if there is no
current user. The sign in page is given a query string which requests the
current URL as the redirect."""
@wraps(f)
def view(*args, **kwargs):
if not try_verify_session():
return redirect(url_for('ui.signin', target=request.url))
return f(*args, **kwargs)
return view
@ui.route('/')
@auth_or_signin
def index():
return render_template('index.html')
@ui.route('/auth/signin')
def signin():
redir_url = request.args.get('target', url_for('ui.index'))
# Already signed in?
if try_verify_session():
return redirect(redir_url)
# Have we been given a token?
token = request.args.get('token', None)
if token is not None:
set_current_user_with_token(token)
return redirect(redir_url)
# Show sign in
return render_template('signin.html')
@ui.route('/auth/google')
def signin_with_google_token():
redir_url = request.args.get('target', url_for('ui.index'))
token = request.args.get('token', None)
if token is None:
raise BadRequest('no token given')
# Get auth token and add to session
user = user_for_google_id_token(request.args['token'])
session[AUTH_TOKEN_SESSION_KEY] = user.token
return redirect(redir_url)
@ui.route('/auth/signout')
def signout():
redir_url = request.args.get('target', url_for('ui.index'))
# Clear token from user session
del session[AUTH_TOKEN_SESSION_KEY]
return redirect(redir_url)
|
mit
|
Python
|
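The ui.py change implements a sliding expiry: every successfully verified request writes a freshly minted token back into the session, so active users never hit the expiry window. A framework-free sketch of the idea, with illustrative names and TTL:

import time

SESSION = {}
TTL = 3600  # seconds a token stays valid without activity

def mint_token():
    return {"issued": time.time()}

def verify_session():
    t = SESSION.get("token")
    if t is None or time.time() - t["issued"] > TTL:
        raise PermissionError("no valid token")
    SESSION["token"] = mint_token()  # refresh on every successful check

SESSION["token"] = mint_token()
verify_session()
print("token re-issued at", SESSION["token"]["issued"])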
becef09e0680786343c581d984e7de5dcb961d16
|
Fix for handle failed html parse
|
yashodhank/frappe,frappe/frappe,frappe/frappe,almeidapaulopt/frappe,vjFaLk/frappe,adityahase/frappe,frappe/frappe,mhbu50/frappe,adityahase/frappe,saurabh6790/frappe,adityahase/frappe,saurabh6790/frappe,mhbu50/frappe,StrellaGroup/frappe,yashodhank/frappe,saurabh6790/frappe,vjFaLk/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,yashodhank/frappe,vjFaLk/frappe,almeidapaulopt/frappe,saurabh6790/frappe,yashodhank/frappe,mhbu50/frappe,StrellaGroup/frappe,adityahase/frappe,mhbu50/frappe,StrellaGroup/frappe,vjFaLk/frappe
|
frappe/utils/xlsxutils.py
|
frappe/utils/xlsxutils.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import openpyxl
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
from six import BytesIO, string_types
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None):
if wb is None:
wb = openpyxl.Workbook(write_only=True)
ws = wb.create_sheet(sheet_name, 0)
row1 = ws.row_dimensions[1]
row1.font = Font(name='Calibri',bold=True)
for row in data:
clean_row = []
for item in row:
if isinstance(item, string_types) and (sheet_name not in ['Data Import Template', 'Data Export']):
value = handle_html(item)
else:
value = item
if isinstance(item, string_types) and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
# Remove illegal characters from the string
value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
clean_row.append(value)
ws.append(clean_row)
xlsx_file = BytesIO()
wb.save(xlsx_file)
return xlsx_file
def handle_html(data):
# return if no html tags found
data = frappe.as_unicode(data)
if '<' not in data:
return data
if '>' not in data:
return data
from html2text import HTML2Text
h = HTML2Text()
h.unicode_snob = True
h = h.unescape(data or "")
obj = HTML2Text()
obj.ignore_links = True
obj.body_width = 0
try:
value = obj.handle(h)
except Exception:
# unable to parse html, send it raw
return data
value = ", ".join(value.split(' \n'))
value = " ".join(value.split('\n'))
value = ", ".join(value.split('# '))
return value
def read_xlsx_file_from_attached_file(file_id=None, fcontent=None, filepath=None):
if file_id:
from frappe.utils.file_manager import get_file_path
filename = get_file_path(file_id)
elif fcontent:
from io import BytesIO
filename = BytesIO(fcontent)
elif filepath:
filename = filepath
else:
return
rows = []
wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
ws1 = wb1.active
for row in ws1.iter_rows():
tmp_list = []
for cell in row:
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import openpyxl
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
from six import BytesIO, string_types
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None):
if wb is None:
wb = openpyxl.Workbook(write_only=True)
ws = wb.create_sheet(sheet_name, 0)
row1 = ws.row_dimensions[1]
row1.font = Font(name='Calibri',bold=True)
for row in data:
clean_row = []
for item in row:
if isinstance(item, string_types) and (sheet_name not in ['Data Import Template', 'Data Export']):
value = handle_html(item)
else:
value = item
if isinstance(item, string_types) and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
# Remove illegal characters from the string
value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
clean_row.append(value)
ws.append(clean_row)
xlsx_file = BytesIO()
wb.save(xlsx_file)
return xlsx_file
def handle_html(data):
# return if no html tags found
data = frappe.as_unicode(data)
if '<' not in data:
return data
if '>' not in data:
return data
from html2text import HTML2Text
h = HTML2Text()
h.unicode_snob = True
h = h.unescape(data or "")
obj = HTML2Text()
obj.ignore_links = True
obj.body_width = 0
try:
value = obj.handle(h)
except Exception:
# unable to parse html, send it raw
return value
value = ", ".join(value.split(' \n'))
value = " ".join(value.split('\n'))
value = ", ".join(value.split('# '))
return value
def read_xlsx_file_from_attached_file(file_id=None, fcontent=None, filepath=None):
if file_id:
from frappe.utils.file_manager import get_file_path
filename = get_file_path(file_id)
elif fcontent:
from io import BytesIO
filename = BytesIO(fcontent)
elif filepath:
filename = filepath
else:
return
rows = []
wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
ws1 = wb1.active
for row in ws1.iter_rows():
tmp_list = []
for cell in row:
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
|
mit
|
Python
|
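The one-word fix above matters because the except block runs precisely when the assignment to value failed, so "return value" raised NameError instead of falling back gracefully; returning the original input is the safe path. A minimal reproduction:

def parse(data):
    try:
        value = broken_parser(data)  # raises before `value` is bound
    except Exception:
        return data                  # the fix; `return value` would NameError here
    return value

def broken_parser(_):
    raise ValueError("cannot parse")

print(parse("<p>raw html</p>"))  # <p>raw html</p>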
f5a1e7f8e350a5f1b29c0e60caf178208946a2b1
|
Add more samples.
|
precompiler/python-101
|
learning-python/ch02/Looping.py
|
learning-python/ch02/Looping.py
|
for i in [1, 2, 3, 4]:
print(i)
for i in range(5):
print(i)
colors = ["red", "green", "blue"]
for i in range(len(colors)):
print(i, colors[i])
for color in colors:
print(color)
for idx, color in enumerate(colors):
print(idx, color)
people = ["Scott", "John", "Mike"]
ages = [50, 30, 25]
for person, age in zip(people, ages):
print(person, age)
for data in zip(people, ages):
print(data)
arr = [1, 2, 3, 4, 5, 6]
print(arr)
arr = arr[::-1]
print(arr)
empid = 2
class NoResourceFoundException(Exception):
pass
emps = {1: "Scott", 2: "John", 3: "Tiger"}
for emp in emps.items():
if emp.__contains__(empid):
print("Found")
break
else:
raise NoResourceFoundException("Not found")
from itertools import count
for n in count(5, 3):
if n > 20:
break
print(n, end=", ")
print()
from itertools import compress
ret = compress("abcdefg", (1, 0, 1, 1))
for x in ret:
print(x)
data = range(10)
even = [1, 0] * 10
odd = [0, 1] * 10
evenNumbers = compress(data, even)
oddNumbers = compress(data, odd)
print(list(data))
print(list(evenNumbers))
print(list(oddNumbers))
from itertools import permutations, combinations
print(list(permutations("ABC")))
print(list(combinations("ABC", 2)))
|
for i in [1, 2, 3, 4]:
print(i)
for i in range(5):
print(i)
colors = ["red", "green", "blue"]
for i in range(len(colors)):
print(i, colors[i])
for color in colors:
print(color)
for idx, color in enumerate(colors):
print(idx, color)
people = ["Scott", "John", "Mike"]
ages = [50, 30, 25]
for person, age in zip(people, ages):
print(person, age)
for data in zip(people, ages):
print(data)
|
apache-2.0
|
Python
|
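The for/else added in the sample above is easy to misread: the else suite runs only when the loop finishes without hitting break, which makes it a natural "not found" branch. A tiny demonstration:

for n in [1, 3, 5]:
    if n % 2 == 0:
        print("found even", n)
        break
else:
    print("no even number")  # printed: the loop never broke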
c0da9801f726ab3ac5c360f77598f1d14c615c2e
|
make sure windrose_utils._make_plot gets exercised!
|
akrherz/pyIEM
|
pyiem/tests/test_windrose_utils.py
|
pyiem/tests/test_windrose_utils.py
|
import unittest
import datetime
import psycopg2
from pyiem.windrose_utils import windrose, _get_timeinfo
class Test(unittest.TestCase):
def test_timeinfo(self):
"""Exercise the _get_timeinfo method"""
res = _get_timeinfo(range(1, 10), 'hour', 24)
self.assertEquals(res['labeltext'], '(1, 2, 3, 4, 5, 6, 7, 8, 9)')
res = _get_timeinfo([1], 'month', 1)
self.assertEquals(res['sqltext'],
' and extract(month from valid) = 1 ')
def test_windrose(self):
"""Exercise the windrose code"""
pgconn = psycopg2.connect(database='asos', host="iemdb")
cursor = pgconn.cursor()
v = datetime.datetime(2015, 1, 1, 6)
for s in range(100):
v += datetime.timedelta(hours=1)
cursor.execute("""INSERT into t2015(station, valid, sknt, drct)
VALUES (%s, %s, %s, %s)""", ('AMW2', v, s, s))
fig = windrose('AMW2', cursor=cursor, sname='Ames')
self.assertTrue(fig is not None)
fig = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2001, 1, 1),
ets=datetime.datetime(2016, 1, 1))
# fig.savefig('/tmp/test_plot_windrose.png')
self.assertTrue(fig is not None)
res = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2015, 1, 1),
ets=datetime.datetime(2015, 10, 2), justdata=True)
assert isinstance(res, str)
|
import unittest
import datetime
import psycopg2
from pyiem.windrose_utils import windrose, _get_timeinfo
class Test(unittest.TestCase):
def test_timeinfo(self):
"""Exercise the _get_timeinfo method"""
res = _get_timeinfo(range(1, 10), 'hour', 24)
self.assertEquals(res['labeltext'], '(1, 2, 3, 4, 5, 6, 7, 8, 9)')
res = _get_timeinfo([1], 'month', 1)
self.assertEquals(res['sqltext'],
' and extract(month from valid) = 1 ')
def test_windrose(self):
"""Exercise the windrose code"""
pgconn = psycopg2.connect(database='asos', host="iemdb")
cursor = pgconn.cursor()
v = datetime.datetime(2015, 1, 1, 6)
for s in range(100):
v += datetime.timedelta(hours=1)
cursor.execute("""INSERT into t2015(station, valid, sknt, drct)
VALUES (%s, %s, %s, %s)""", ('AMW2', v, s, s))
# plot.windrose('AMW2', fp='/tmp/test_plot_windrose.png',
# cursor=cursor)
fig = windrose('AMW2',
cursor=cursor, justdata=True)
self.assertTrue(fig is not None)
fig = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2001, 1, 1),
ets=datetime.datetime(2001, 1, 2))
self.assertTrue(fig is not None)
res = windrose('AMW2',
cursor=cursor, sts=datetime.datetime(2015, 1, 1),
ets=datetime.datetime(2015, 10, 2), justdata=True)
assert isinstance(res, str)
|
mit
|
Python
|
cf35695481b703e49fbc00e286ef6380a8aec394
|
Remove invalid test
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/apps/notifications/tests/test_views.py
|
corehq/apps/notifications/tests/test_views.py
|
from unittest.mock import patch
from corehq.apps.accounting.models import Subscription
from corehq.apps.groups.models import Group
from ..views import NotificationsServiceRMIView
def test_should_hide_feature_notifs_for_pro_with_groups():
with case_sharing_groups_patch(['agroupid']):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for pro domain with groups"
def test_should_hide_feature_notifs_for_pro_without_groups():
with case_sharing_groups_patch([]), active_service_type_patch("not_IMPLEMENTATION_or_SANDBOX"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert not hide, "notifications should not be hidden for pro domain without groups"
def test_should_hide_feature_notifs_for_implementation_subscription():
with case_sharing_groups_patch([]), active_service_type_patch("IMPLEMENTATION"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for IMPLEMENTATION subscription"
def test_should_hide_feature_notifs_for_sandbox_subscription():
with case_sharing_groups_patch([]), active_service_type_patch("SANDBOX"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for SANDBOX subscription"
def test_should_hide_feature_notifs_bug():
with case_sharing_groups_patch([]), active_service_type_patch():
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", None)
assert not hide, "notifications should not be hidden for null subscription"
def active_service_type_patch(service_type=None):
def getter(domain):
return sub
sub = None if service_type is None else Subscription(service_type=service_type)
return patch.object(Subscription, "get_active_subscription_by_domain", getter)
def case_sharing_groups_patch(groups):
# patch because quickcache makes this hard to test
def getter(domain, wrap):
assert not wrap, "expected wrap to be false"
return groups
return patch.object(Group, "get_case_sharing_groups", getter)
|
from unittest.mock import patch
from corehq.apps.accounting.models import Subscription
from corehq.apps.groups.models import Group
from ..views import NotificationsServiceRMIView
def test_should_hide_feature_notifs_for_pro_with_groups():
with case_sharing_groups_patch(['agroupid']):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for pro domain with groups"
def test_should_hide_feature_notifs_for_pro_without_groups():
with case_sharing_groups_patch([]), active_service_type_patch("not_IMPLEMENTATION_or_SANDBOX"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert not hide, "notifications should not be hidden for pro domain without groups"
def test_should_hide_feature_notifs_for_non_pro_with_groups():
with case_sharing_groups_patch(['agroupid']), active_service_type_patch("not_IMPLEMENTATION_or_SANDBOX"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", None)
assert not hide, "notifications should not be hidden for pro domain without groups"
def test_should_hide_feature_notifs_for_implementation_subscription():
with case_sharing_groups_patch([]), active_service_type_patch("IMPLEMENTATION"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for IMPLEMENTATION subscription"
def test_should_hide_feature_notifs_for_sandbox_subscription():
with case_sharing_groups_patch([]), active_service_type_patch("SANDBOX"):
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", "pro")
assert hide, "notifications should be hidden for SANDBOX subscription"
def test_should_hide_feature_notifs_bug():
with case_sharing_groups_patch([]), active_service_type_patch():
hide = NotificationsServiceRMIView._should_hide_feature_notifs("test", None)
assert not hide, "notifications should not be hidden for null subscription"
def active_service_type_patch(service_type=None):
def getter(domain):
return sub
sub = None if service_type is None else Subscription(service_type=service_type)
return patch.object(Subscription, "get_active_subscription_by_domain", getter)
def case_sharing_groups_patch(groups):
# patch because quickcache makes this hard to test
def getter(domain, wrap):
assert not wrap, "expected wrap to be false"
return groups
return patch.object(Group, "get_case_sharing_groups", getter)
|
bsd-3-clause
|
Python
|
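A note on the patching pattern above: patch.object swaps a class attribute for a plain function, which sidesteps the quickcache wrapper entirely instead of trying to invalidate it. A minimal self-contained sketch of the same idea (the Service class below is invented for illustration, not part of the record):

from unittest.mock import patch

class Service:
    @staticmethod
    def lookup(key):
        raise RuntimeError("would hit the real backend")

def test_lookup_is_stubbed():
    # The replacement lives only for the duration of the with-block.
    with patch.object(Service, "lookup", lambda key: "stubbed"):
        assert Service.lookup("anything") == "stubbed"

test_lookup_is_stubbed()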
9df00bbfa829006396c2a6718e4540410b27c4c6
|
Clear the job queue upon kolibri initialization.
|
MingDai/kolibri,mrpau/kolibri,benjaoming/kolibri,DXCanas/kolibri,lyw07/kolibri,learningequality/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,rtibbles/kolibri,DXCanas/kolibri,indirectlylit/kolibri,MingDai/kolibri,lyw07/kolibri,mrpau/kolibri,christianmemije/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,christianmemije/kolibri,christianmemije/kolibri,lyw07/kolibri,lyw07/kolibri,benjaoming/kolibri,jonboiser/kolibri,jonboiser/kolibri,indirectlylit/kolibri,christianmemije/kolibri,benjaoming/kolibri,learningequality/kolibri,rtibbles/kolibri,learningequality/kolibri,benjaoming/kolibri,MingDai/kolibri,indirectlylit/kolibri,MingDai/kolibri,indirectlylit/kolibri,rtibbles/kolibri,jonboiser/kolibri,DXCanas/kolibri
|
kolibri/tasks/apps.py
|
kolibri/tasks/apps.py
|
from __future__ import absolute_import, print_function, unicode_literals
from django.apps import AppConfig
class KolibriTasksConfig(AppConfig):
name = 'kolibri.tasks'
label = 'kolibritasks'
verbose_name = 'Kolibri Tasks'
def ready(self):
from kolibri.tasks.api import client
client.clear(force=True)
|
from __future__ import absolute_import, print_function, unicode_literals
from django.apps import AppConfig
class KolibriTasksConfig(AppConfig):
name = 'kolibri.tasks'
label = 'kolibritasks'
verbose_name = 'Kolibri Tasks'
def ready(self):
pass
|
mit
|
Python
|
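The change above moves queue clearing into AppConfig.ready, the hook Django calls once per process after the app registry is populated. A generic sketch of that hook, under the assumption that a client with a clear() method exists (the app name and queue module here are made up; only the structure mirrors the record):

from django.apps import AppConfig

class ExampleTasksConfig(AppConfig):
    name = 'example.tasks'
    label = 'exampletasks'

    def ready(self):
        # Import lazily so nothing touches models before the registry is ready.
        from example.tasks.api import client
        client.clear(force=True)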
6a6cb75ad2c29435d74768aa88c5d925570a6ad0
|
Add some meta
|
ligthyear/flask-environments,mattupstate/flask-environments
|
flask_environments.py
|
flask_environments.py
|
# -*- coding: utf-8 -*-
"""
flask_environments
~~~~~~~~~~~~~~~~~~
Environment tools and configuration for Flask applications
:copyright: (c) 2012 by Matt Wright.
:license: MIT, see LICENSE for more details.
"""
import os
import yaml
from flask import current_app
class Environments(object):
def __init__(self, app=None, var_name=None, default_env=None):
self.app = app
self.var_name = var_name or 'FLASK_ENV'
self.default_env = default_env or 'DEVELOPMENT'
self.env = os.environ.get(self.var_name, self.default_env)
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config['ENVIORNMENT'] = self.env
if app.extensions is None:
app.extensions = {}
app.extensions['environments'] = self
def get_app(self, reference_app=None):
if reference_app is not None:
return reference_app
if self.app is not None:
return self.app
return current_app
def from_object(self, config_obj):
app = self.get_app()
for name in self._possible_names():
try:
obj = '%s.%s' % (config_obj, name)
app.config.from_object(obj)
return
except:
pass
app.config.from_object(config_obj)
def from_yaml(self, path):
with open(path) as f:
c = yaml.load(f)
for name in self._possible_names():
try:
c = c[name]
except:
pass
app = self.get_app()
for key in c.iterkeys():
if key.isupper():
app.config[key] = c[key]
def _possible_names(self):
return (self.env, self.env.capitalize(), self.env.lower())
|
import os
import yaml
from flask import current_app
class Environments(object):
def __init__(self, app=None, var_name=None, default_env=None):
self.app = app
self.var_name = var_name or 'FLASK_ENV'
self.default_env = default_env or 'DEVELOPMENT'
self.env = os.environ.get(self.var_name, self.default_env)
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config['ENVIORNMENT'] = self.env
if app.extensions is None:
app.extensions = {}
app.extensions['environments'] = self
def get_app(self, reference_app=None):
if reference_app is not None:
return reference_app
if self.app is not None:
return self.app
return current_app
def from_object(self, config_obj):
app = self.get_app()
for name in self._possible_names():
try:
obj = '%s.%s' % (config_obj, name)
app.config.from_object(obj)
return
except:
pass
app.config.from_object(config_obj)
def from_yaml(self, path):
with open(path) as f:
c = yaml.load(f)
for name in self._possible_names():
try:
c = c[name]
except:
pass
app = self.get_app()
for key in c.iterkeys():
if key.isupper():
app.config[key] = c[key]
def _possible_names(self):
return (self.env, self.env.capitalize(), self.env.lower())
|
mit
|
Python
|
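The extension's _possible_names trick, trying the environment name in several casings, is the heart of from_yaml. The same lookup in isolation, using yaml.safe_load (a safer default than the bare yaml.load in the record; the YAML snippet is invented):

import yaml

def section_for(env, text):
    data = yaml.safe_load(text)
    for name in (env, env.capitalize(), env.lower()):
        if name in data:
            return data[name]
    return data  # fall back to the whole document

assert section_for('DEVELOPMENT', "Development:\n  DEBUG: true\n") == {'DEBUG': True}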
bb5cbae79ef8efb8d0b7dd3ee95e76955317d3d7
|
Fix for broken container security test
|
tenable/Tenable.io-SDK-for-Python
|
tests/integration/api/test_sc_test_jobs.py
|
tests/integration/api/test_sc_test_jobs.py
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
|
mit
|
Python
|
6594bb843998ee22b0a12036a0e16c1fd625fd03
|
Revert "Catch Validation error"
|
khchine5/django-shop,khchine5/django-shop,jrief/django-shop,rfleschenberg/django-shop,jrief/django-shop,nimbis/django-shop,nimbis/django-shop,divio/django-shop,awesto/django-shop,rfleschenberg/django-shop,jrief/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,rfleschenberg/django-shop,nimbis/django-shop,khchine5/django-shop,rfleschenberg/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,awesto/django-shop
|
shop/context_processors.py
|
shop/context_processors.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from shop.models.customer import CustomerModel
def customer(request):
"""
Add the customer to the RequestContext
"""
msg = "The request object does not contain a customer. Edit your MIDDLEWARE_CLASSES setting to insert 'shop.middlerware.CustomerMiddleware'."
assert hasattr(request, 'customer'), msg
context = {
'customer': request.customer,
'site_header': settings.SHOP_APP_LABEL.capitalize(),
}
if request.user.is_staff:
try:
context.update(customer=CustomerModel.objects.get(pk=request.session['emulate_user_id']))
except (CustomerModel.DoesNotExist, KeyError, AttributeError):
pass
return context
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.forms.utils import ValidationError
from shop.models.customer import CustomerModel
def customer(request):
"""
Add the customer to the RequestContext
"""
msg = "The request object does not contain a customer. Edit your MIDDLEWARE_CLASSES setting to insert 'shop.middlerware.CustomerMiddleware'."
assert hasattr(request, 'customer'), msg
context = {
'customer': request.customer,
'site_header': settings.SHOP_APP_LABEL.capitalize(),
}
if request.user.is_staff:
try:
context.update(customer=CustomerModel.objects.get(pk=request.session['emulate_user_id']))
except (CustomerModel.DoesNotExist, KeyError, AttributeError, ValidationError):
pass
return context
|
bsd-3-clause
|
Python
|
0cb7f9c41c7ae0a7f487188721f56adf2ff9999d
|
add type hints.
|
jonhadfield/acli,jonhadfield/acli
|
lib/acli/services/route53.py
|
lib/acli/services/route53.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from boto3.session import Session
from acli.output.route53 import (output_route53_list, output_route53_info)
import botocore.exceptions
def get_boto3_session(aws_config):
"""
@type aws_config: Config
"""
return Session(region_name=aws_config.region,
aws_access_key_id=aws_config.access_key_id,
aws_secret_access_key=aws_config.secret_access_key)
def route53_list(aws_config=None):
"""
@type aws_config: Config
"""
session = get_boto3_session(aws_config)
conn = session.client('route53')
output_route53_list(output_media='console', zones=conn.list_hosted_zones())
def route53_info(aws_config=None, zone_id=None):
"""
@type aws_config: Config
@type zone_id: unicode
"""
session = get_boto3_session(aws_config)
conn = session.client('route53')
try:
hosted_zone = conn.get_hosted_zone(Id=zone_id)
record_sets = conn.list_resource_record_sets(HostedZoneId=zone_id)
if hosted_zone['HostedZone']['Id']:
output_route53_info(output_media='console',
zone=hosted_zone,
record_sets=record_sets)
except AttributeError:
exit("Cannot find hosted zone: {0}".format(zone_id))
except botocore.exceptions.ClientError:
exit("Cannot request hosted zone: {0}".format(zone_id))
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, print_function, unicode_literals)
from boto3.session import Session
from acli.output.route53 import (output_route53_list, output_route53_info)
import botocore.exceptions
def get_boto3_session(aws_config):
return Session(region_name=aws_config.region,
aws_access_key_id=aws_config.access_key_id,
aws_secret_access_key=aws_config.secret_access_key)
def route53_list(aws_config=None):
session = get_boto3_session(aws_config)
conn = session.client('route53')
output_route53_list(output_media='console', zones=conn.list_hosted_zones())
def route53_info(aws_config=None, zone_id=None):
"""
@type aws_config: Config
@type zone_id: unicode
"""
session = get_boto3_session(aws_config)
conn = session.client('route53')
try:
hosted_zone = conn.get_hosted_zone(Id=zone_id)
record_sets = conn.list_resource_record_sets(HostedZoneId=zone_id)
if hosted_zone['HostedZone']['Id']:
output_route53_info(output_media='console',
zone=hosted_zone,
record_sets=record_sets)
except AttributeError:
exit("Cannot find hosted zone: {0}".format(zone_id))
except botocore.exceptions.ClientError:
exit("Cannot request hosted zone: {0}".format(zone_id))
|
mit
|
Python
|
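The commit's "type hints" are epytext @type fields in docstrings, which tools such as epydoc read. For comparison, the same information expressed as PEP 484 annotations would look like the sketch below (Config here is a bare stand-in, not acli's real class):

from typing import Optional

class Config:
    region = 'us-east-1'
    access_key_id = 'key-id'
    secret_access_key = 'secret'

def route53_info(aws_config: Optional[Config] = None,
                 zone_id: Optional[str] = None) -> None:
    # Annotations replace the @type docstring fields one-for-one.
    print(aws_config.region if aws_config else None, zone_id)

route53_info(Config(), 'Z123EXAMPLE')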
b747391c748c94cd8433dfacd935d131b484a29c
|
Improve error handling and refactor base path
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
java/ql/src/utils/model-generator/RegenerateModels.py
|
java/ql/src/utils/model-generator/RegenerateModels.py
|
#!/usr/bin/python3
# Tool to regenerate existing framework CSV models.
from pathlib import Path
import json
import os
import requests
import shutil
import subprocess
import tempfile
import sys
defaultModelPath = "java/ql/lib/semmle/code/java/frameworks"
lgtmSlugToModelFile = {
# "apache/commons-beanutils": "apache/BeanUtilsGenerated.qll",
# "apache/commons-codec": "apache/CodecGenerated.qll",
# "apache/commons-lang": "apache/Lang3Generated.qll",
"apache/commons-io": "apache/IOGenerated.qll",
}
def findGitRoot():
return subprocess.check_output(
["git", "rev-parse", "--show-toplevel"]).decode("utf-8").strip()
def regenerateModel(lgtmSlug, extractedDb):
tmpDir = tempfile.mkdtemp()
print("============================================================")
print("Generating models for " + lgtmSlug)
print("============================================================")
# check if lgtmSlug exists as key
if lgtmSlug not in lgtmSlugToModelFile:
print("ERROR: slug " + lgtmSlug +
" is not mapped to a model file in script " + sys.argv[0])
sys.exit(1)
modelFile = defaultModelPath + \
lgtmSlugToModelFile[lgtmSlug]
codeQlRoot = findGitRoot()
targetModel = codeQlRoot + "/" + modelFile
subprocess.check_call([codeQlRoot + "/java/ql/src/utils/model-generator/GenerateFlowModel.py", extractedDb,
targetModel])
print("Regenerated " + targetModel)
shutil.rmtree(tmpDir)
if len(sys.argv) == 3:
lgtmSlug = sys.argv[1]
db = sys.argv[2]
regenerateModel(lgtmSlug, db)
else:
print('error')
|
#!/usr/bin/python3
# Tool to regenerate existing framework CSV models.
from pathlib import Path
import json
import os
import requests
import shutil
import subprocess
import tempfile
import sys
lgtmSlugToModelFile = {
# "apache/commons-beanutils": "java/ql/lib/semmle/code/java/frameworks/apache/BeanUtilsGenerated.qll",
# "apache/commons-codec": "java/ql/lib/semmle/code/java/frameworks/apache/CodecGenerated.qll",
# "apache/commons-lang": "java/ql/lib/semmle/code/java/frameworks/apache/Lang3Generated.qll",
"apache/commons-io": "java/ql/lib/semmle/code/java/frameworks/apache/IOGenerated.qll",
}
def findGitRoot():
return subprocess.check_output(
["git", "rev-parse", "--show-toplevel"]).decode("utf-8").strip()
def regenerateModel(lgtmSlug, extractedDb):
tmpDir = tempfile.mkdtemp()
print("============================================================")
print("Generating models for " + lgtmSlug)
print("============================================================")
modelFile = lgtmSlugToModelFile[lgtmSlug]
codeQlRoot = findGitRoot()
targetModel = codeQlRoot + "/" + modelFile
subprocess.check_call([codeQlRoot + "/java/ql/src/utils/model-generator/GenerateFlowModel.py", extractedDb,
targetModel])
print("Regenerated " + targetModel)
shutil.rmtree(tmpDir)
if len(sys.argv) == 3:
lgtmSlug = sys.argv[1]
db = sys.argv[2]
regenerateModel(lgtmSlug, db)
else:
print('error')
|
mit
|
Python
|
7dffc7115b5e91ba13de8cb3e306832be7f8e185
|
print result in show components
|
matiasbargas/jiraffe,matiasbargas/jiraffe
|
client/jiraffe.py
|
client/jiraffe.py
|
import urllib
import os
SERVICE_URL = "http://jiraffe.cloudhub.io/api"
CREATE_SERVICE = SERVICE_URL + "/issues"
DEFAULT_SERVICE = SERVICE_URL + "/defaults"
COMPONENT_SERVICE = SERVICE_URL + "/components"
def get_valid_reporter(reporter):
if reporter == "":
return os.environ['JIRA_ID']
return reporter
def createIssue(project, summary, bug_type, sprint, reporter, assignee, priority, component):
query_args = {'summary': summary, 'reporter': get_valid_reporter(reporter)}
headers = {"content-type": "application/plain-text"}
if project != "":
query_args['project'] = project
if bug_type != "":
query_args['type'] = bug_type
if sprint != "":
query_args['sprint'] = sprint
valid_assignee = get_valid_reporter(assignee)
if valid_assignee != "":
query_args['assignee'] = valid_assignee
if priority != "":
query_args['priority'] = priority
encoded_args = urllib.urlencode(query_args)
#print(encoded_args)
print(urllib.urlopen(CREATE_SERVICE +"?"+ encoded_args, encoded_args).read())
def update_defaults(project, sprint, bug_type):
#{"sprint":"123","type":"Bug","project":"AUTOMATION","id":9}
query_args = {}
if project != "":
query_args['project'] = project
if bug_type != "":
query_args['type'] = bug_type
if sprint != "":
query_args['sprint'] = sprint
encoded_args = urllib.urlencode(query_args)
#print(encoded_args)
headers = {"content-type": "application/plain-text"}
print(urllib.urlopen(DEFAULT_SERVICE +"?"+ encoded_args, encoded_args).read())
def show_issue(issue_id):
response = urllib.urlopen(CREATE_SERVICE + "/" + issue_id)
print(response.read())
def show_components(project_id):
query_args = {}
if project_id != "":
query_args['project'] = project_id
encoded_args = urllib.urlencode(query_args)
response = urllib.urlopen(COMPONENT_SERVICE+ "?" + encoded_args)
print(response.read())
def show_defaults():
response = urllib.urlopen(DEFAULT_SERVICE)
print(response.read())
|
import urllib
import os
SERVICE_URL = "http://jiraffe.cloudhub.io/api"
CREATE_SERVICE = SERVICE_URL + "/issues"
DEFAULT_SERVICE = SERVICE_URL + "/defaults"
COMPONENT_SERVICE = SERVICE_URL + "/components"
def get_valid_reporter(reporter):
if reporter == "":
return os.environ['JIRA_ID']
return reporter
def createIssue(project, summary, bug_type, sprint, reporter, assignee, priority, component):
query_args = {'summary': summary, 'reporter': get_valid_reporter(reporter)}
headers = {"content-type": "application/plain-text"}
if project != "":
query_args['project'] = project
if bug_type != "":
query_args['type'] = bug_type
if sprint != "":
query_args['sprint'] = sprint
valid_assignee = get_valid_reporter(assignee)
if valid_assignee != "":
query_args['assignee'] = valid_assignee
if priority != "":
query_args['priority'] = priority
encoded_args = urllib.urlencode(query_args)
#print(encoded_args)
print(urllib.urlopen(CREATE_SERVICE +"?"+ encoded_args, encoded_args).read())
def update_defaults(project, sprint, bug_type):
#{"sprint":"123","type":"Bug","project":"AUTOMATION","id":9}
query_args = {}
if project != "":
query_args['project'] = project
if bug_type != "":
query_args['type'] = bug_type
if sprint != "":
query_args['sprint'] = sprint
encoded_args = urllib.urlencode(query_args)
#print(encoded_args)
headers = {"content-type": "application/plain-text"}
print(urllib.urlopen(DEFAULT_SERVICE +"?"+ encoded_args, encoded_args).read())
def show_issue(issue_id):
response = urllib.urlopen(CREATE_SERVICE + "/" + issue_id)
print(response.read())
def show_components(project_id):
query_args = {}
if project_id != "":
query_args['project'] = project_id
encoded_args = urllib.urlencode(query_args)
response = urllib.urlopen(COMPONENT_SERVICE+ "?" + encoded_args)
def show_defaults():
response = urllib.urlopen(DEFAULT_SERVICE)
print(response.read())
|
mit
|
Python
|
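The client above is Python 2 (urllib.urlopen, urllib.urlencode). A Python 3 rendering of its query-building pattern, collapsing the chain of `if x != ""` guards into one filter (the helper name is ours):

from urllib.parse import urlencode

def build_query(**params):
    # Keep only parameters that were actually supplied, as the record does
    # with its repeated empty-string checks.
    return urlencode({k: v for k, v in params.items() if v})

print(build_query(project='AUTOMATION', sprint='', type='Bug'))
# project=AUTOMATION&type=Bug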
f6be438e01a499dc2bde6abfa5a00fb281db7b83
|
Add account_id as the element of this class
|
henrysher/kamboo,henrysher/kamboo
|
kamboo/core.py
|
kamboo/core.py
|
import botocore
from kotocore.session import Session
class KambooConnection(object):
"""
Kamboo connection with botocore session initialized
"""
session = botocore.session.get_session()
def __init__(self, service_name="ec2", region_name="us-east-1",
account_id=None,
credentials=None):
self.region = region_name
self.account_id = account_id
self.credentials = credentials
if self.credentials:
self.session.set_credentials(**self.credentials)
Connection = Session(session=self.session).get_connection(service_name)
self.conn = Connection(region_name=self.region)
|
import botocore
from kotocore.session import Session
class KambooConnection(object):
"""
Kamboo connection with botocore session initialized
"""
session = botocore.session.get_session()
def __init__(self, service_name="ec2", region_name="us-east-1",
credentials=None):
self.region = region_name
self.credentials = credentials
if self.credentials:
self.session.set_credentials(**self.credentials)
Connection = Session(session=self.session).get_connection(service_name)
self.conn = Connection(region_name=self.region)
|
apache-2.0
|
Python
|
a76b866862874ce52c762b4e0381b233917a977a
|
Increment version
|
johnwlockwood/karl_data,johnwlockwood/stream_tap,johnwlockwood/iter_karld_tools,johnwlockwood/stream_tap
|
karld/_meta.py
|
karld/_meta.py
|
version_info = (0, 2, 7)
version = '.'.join(map(str, version_info))
|
version_info = (0, 2, 6)
version = '.'.join(map(str, version_info))
|
apache-2.0
|
Python
|
31d7df470dbaf996f4f3c7639107ec04afda1ec4
|
Update runcount.py
|
suzannerohrback/somaticCNVpipeline,suzannerohrback/somaticCNVpipeline
|
bin/runcount.py
|
bin/runcount.py
|
#!/usr/bin/python
import os
from count import countfile
import common
def runAll(args):
print('\n\n\nYou have requested to count unique sam files')
print('\tWARNING:')
print('\t\tIF USING ANY REFERENCES OTHER THAN THOSE I PROVIDE I CANNOT GUARANTEE RESULT ACCURACY')
print('\n')
#set up environment#
args.SamDirectory = common.fixDirName(args.SamDirectory)
countDir = os.path.dirname(args.SamDirectory[:-1]) + '/BinCounts/'
if args.output:
countDir = common.fixDirName(args.output)
statsDir = os.path.dirname(args.SamDirectory[:-1]) + '/PipelineStats/'
if args.statdir:
statsDir = common.fixDirName(args.statdir)
for i in [countDir, statsDir]:
common.makeDir(i)
samFiles = common.getSampleList(args.SamDirectory, args.samples, 'sam')
#run multiprocessing of all bin counting commands#
argList = [(x, countDir, statsDir, args.species) for x in samFiles]
common.daemon(countfile.runOne, argList, 'count sam files')
print('\nBin counts complete\n\n\n')
|
#!/usr/bin/python
import os
from count import countfile
import common
def runAll(args):
print('\n\n\nYou have requested to count unique sam files')
print('\tWARNING:')
print('\t\tIF USING ANY REFERENCES OTHER THAN THOSE I PROVIDE I CANNOT GUARANTEE RESULT ACCURACY')
print('\n')
#set up environment#
args.SamDirectory = common.fixDirName(args.SamDirectory)
countDir = os.path.dirname(args.SamDirectory[:-1]) + '/BinCounts/'
if args.output:
countDir = common.fixDirName(args.output)
statsDir = os.path.dirname(args.SamDirectory[:-1]) + '/PipelineStats/'
if args.statdir:
statsDir = common.fixDirName(args.statdir)
for i in [countDir, statsDir]:
common.makeDir(i)
samFiles = common.getSampleList(args.SamDirectory, args.samples, 'sam')
#run multiprocessing of all mapping commands#
argList = [(x, countDir, statsDir, args.species) for x in samFiles]
common.daemon(countfile.runOne, argList, 'count sam files')
print('\nBin counts complete\n\n\n')
|
mit
|
Python
|
c4a0a83fe4a028b1d571058aed755be5b4714531
|
fix logging
|
koteq/hlstats_inviter
|
includes/SteamGroupMembers.py
|
includes/SteamGroupMembers.py
|
import logging
import urllib2
import xml.etree.ElementTree as ElementTree
logger = logging.getLogger()
class SteamGroupMembers(object):
"""
Retrieves all members of the specified group.
"""
_members = None
def __init__(self, group_id):
self._group_id = group_id
def __len__(self):
return len(self._get_members())
def __contains__(self, item):
return item in self._get_members()
def __iter__(self):
return self._get_members().__iter__()
def _get_members(self):
if self._members is None:
self._members = []
url = 'http://steamcommunity.com/gid/%s/memberslistxml/?xml=1' % self._group_id
logger.debug('Loading steam group members %s', url)
while True:
responce = urllib2.urlopen(url)
xml = ElementTree.parse(responce).getroot()
members_elements = xml.findall('members/steamID64')
members = map(lambda e: e.text, members_elements)
self._members.extend(members)
next_page = xml.find('nextPageLink')
if next_page is not None:
url = next_page.text
logger.debug('Loading steam group members (next page) %s', url)
else:
break
logger.debug('Found %d members in group %s', len(self._members), self._group_id)
return self._members
|
import logging
import urllib2
import xml.etree.ElementTree as ElementTree
logger = logging.getLogger()
class SteamGroupMembers(object):
"""
Retrieves all members of the specified group.
"""
_members = None
def __init__(self, group_id):
self._group_id = group_id
def __len__(self):
return len(self._get_members())
def __contains__(self, item):
return item in self._get_members()
def __iter__(self):
return self._get_members().__iter__()
def _get_members(self):
if self._members is None:
self._members = []
url = 'http://steamcommunity.com/gid/%s/memberslistxml/?xml=1' % self._group_id
while True:
logger.debug('Requesting %s', url)
responce = urllib2.urlopen(url)
xml = ElementTree.parse(responce).getroot()
members_elements = xml.findall('members/steamID64')
logger.info('Found %d members in group %d', len(members_elements), self._group_id)
members = map(lambda e: e.text, members_elements)
self._members.extend(members)
next_page = xml.find('nextPageLink')
if next_page is not None:
logger.debug('Found next page link')
url = next_page.text
else:
break
return self._members
|
unlicense
|
Python
|
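The member fetch is a classic cursor-pagination loop: follow nextPageLink until it is absent. The loop shape on its own, against a toy in-memory pager instead of Steam's XML (fetch_page and the page names are invented):

def fetch_page(url):
    pages = {'p1': (['a', 'b'], 'p2'), 'p2': (['c'], None)}
    return pages[url]

def all_members(start):
    members, url = [], start
    while url:
        items, url = fetch_page(url)  # url becomes None on the last page
        members.extend(items)
    return members

assert all_members('p1') == ['a', 'b', 'c']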
0b7a5929208bddb9e850f10ff40f1521363283fd
|
decrease map_stats precision
|
therewillbecode/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea
|
ichnaea/map_stats.py
|
ichnaea/map_stats.py
|
import csv
from cStringIO import StringIO
from ichnaea.db import Measure
def map_stats_request(request):
session = request.database.session()
query = session.query(Measure.lat, Measure.lon)
unique = set()
for lat, lon in query:
unique.add(((lat // 100000) / 1000.0, (lon // 100000) / 1000.0))
rows = StringIO()
csvwriter = csv.writer(rows)
csvwriter.writerow(('lat', 'lon'))
for lat, lon in unique:
csvwriter.writerow((lat, lon))
return rows.getvalue()
|
import csv
from cStringIO import StringIO
from ichnaea.db import Measure
def map_stats_request(request):
session = request.database.session()
query = session.query(Measure.lat, Measure.lon)
unique = set()
for lat, lon in query:
unique.add(((lat // 10000) / 1000.0, (lon // 10000) / 1000.0))
rows = StringIO()
csvwriter = csv.writer(rows)
csvwriter.writerow(('lat', 'lon'))
for lat, lon in unique:
csvwriter.writerow((lat, lon))
return rows.getvalue()
|
apache-2.0
|
Python
|
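The precision change replaces `// 10000` with `// 100000`, so each bucket becomes ten times coarser. The floor-divide-then-scale idiom, worked through on two nearby integer coordinates (the unit convention is illustrative, not necessarily ichnaea's storage format):

def bucket(value, cell=100000):
    # Floor division collapses every value in a cell to one integer;
    # dividing by 1000.0 turns that into a compact float key.
    return (value // cell) / 1000.0

# Both land in the same 100000-wide cell:
assert bucket(377749012) == bucket(377712345) == 3.777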
3def5ee6b6ffbb60260130deedee65cfc0e186f0
|
add missing super() constructor in IosAccelerometer
|
johnbolia/plyer,kostyll/plyer,cleett/plyer,inclement/plyer,kostyll/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,cleett/plyer,kivy/plyer,johnbolia/plyer,KeyWeeUsr/plyer,kived/plyer,kived/plyer
|
plyer/platforms/ios/accelerometer.py
|
plyer/platforms/ios/accelerometer.py
|
'''
iOS accelerometer
-----------------
Taken from: https://pyobjus.readthedocs.org/en/latest/pyobjus_ios.html#accessing-accelerometer
'''
from plyer.facades import Accelerometer
from pyobjus import autoclass
class IosAccelerometer(Accelerometer):
def __init__(self):
super(IosAccelerometer, self).__init__()
self.bridge = autoclass('bridge').alloc().init()
self.bridge.motionManager.setAccelerometerUpdateInterval_(0.1)
def _enable(self):
self.bridge.startAccelerometer()
def _disable(self):
self.bridge.stopAccelerometer()
def _get_acceleration(self):
return (
self.bridge.ac_x,
self.bridge.ac_y,
self.bridge.ac_z)
def instance():
return IosAccelerometer()
|
'''
iOS accelerometer
-----------------
Taken from: https://pyobjus.readthedocs.org/en/latest/pyobjus_ios.html#accessing-accelerometer
'''
from plyer.facades import Accelerometer
from pyobjus import autoclass
class IosAccelerometer(Accelerometer):
def __init__(self):
self.bridge = autoclass('bridge').alloc().init()
self.bridge.motionManager.setAccelerometerUpdateInterval_(0.1)
def _enable(self):
self.bridge.startAccelerometer()
def _disable(self):
self.bridge.stopAccelerometer()
def _get_acceleration(self):
return (
self.bridge.ac_x,
self.bridge.ac_y,
self.bridge.ac_z)
def instance():
return IosAccelerometer()
|
mit
|
Python
|
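The one-line plyer fix matters because skipping the base-class __init__ leaves any state it sets up missing from the instance. The failure mode in miniature (Base and Child are illustrative names):

class Base(object):
    def __init__(self):
        self.enabled = False

class Child(Base):
    def __init__(self):
        super(Child, self).__init__()  # omit this and self.enabled never exists
        self.bridge = object()

c = Child()
assert c.enabled is False and c.bridge is not None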
a536da0d925201fc652b08ad27985f37c5bd4b6c
|
Fix relative_urls helper for call from initialization code
|
liqd/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,liqd/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,SysTheron/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,phihag/adhocracy,SysTheron/adhocracy,phihag/adhocracy,liqd/adhocracy,alkadis/vcv,alkadis/vcv
|
src/adhocracy/lib/helpers/site_helper.py
|
src/adhocracy/lib/helpers/site_helper.py
|
from pylons import config, app_globals as g
from pylons.i18n import _
from paste.deploy.converters import asbool
from adhocracy.model import instance_filter as ifilter
CURRENT_INSTANCE = object()
def get_domain_part(domain_with_port):
return domain_with_port.split(':')[0]
def domain():
return get_domain_part(config.get('adhocracy.domain'))
def name():
return config.get('adhocracy.site.name', _("Adhocracy"))
def relative_urls(config=config):
return asbool(config.get('adhocracy.relative_urls', 'false'))
def base_url(path='', instance=CURRENT_INSTANCE, absolute=False,
append_slash=False, config=config):
"""
Constructs an URL.
Path is expected to start with '/'. If not, a relative path to the current
object will be created.
If instance isn't defined, the current instance is assumed. Otherwise,
either an instance instance or None has to be passed.
If absolute is True, an absolute URL including the protocol part is
returned. Otherwise this is avoided, if relative_urls is set to True.
"""
if instance == CURRENT_INSTANCE:
instance = ifilter.get_instance()
if relative_urls(config):
if instance is None:
prefix = ''
else:
prefix = '/i/' + instance.key
if absolute:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
result = '%s://%s%s%s' % (protocol, domain, prefix, path)
else:
result = '%s%s' % (prefix, path)
else:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
if instance is None or g.single_instance:
subdomain = ''
else:
subdomain = '%s.' % instance.key
result = '%s://%s%s%s' % (protocol, subdomain, domain, path)
if result == '':
result = '/'
if append_slash and not result.endswith('/'):
result += '/'
return result
def shortlink_url(delegateable):
path = "/d/%s" % delegateable.id
return base_url(path, None, absolute=True)
|
from pylons import config, app_globals as g
from pylons.i18n import _
from paste.deploy.converters import asbool
from adhocracy.model import instance_filter as ifilter
CURRENT_INSTANCE = object()
def get_domain_part(domain_with_port):
return domain_with_port.split(':')[0]
def domain():
return get_domain_part(config.get('adhocracy.domain'))
def name():
return config.get('adhocracy.site.name', _("Adhocracy"))
def relative_urls():
return asbool(config.get('adhocracy.relative_urls', 'false'))
def base_url(path='', instance=CURRENT_INSTANCE, absolute=False,
append_slash=False, config=config):
"""
Constructs an URL.
Path is expected to start with '/'. If not, a relative path to the current
object will be created.
If instance isn't defined, the current instance is assumed. Otherwise,
either an instance instance or None has to be passed.
If absolute is True, an absolute URL including the protocol part is
returned. Otherwise this is avoided, if relative_urls is set to True.
"""
if instance == CURRENT_INSTANCE:
instance = ifilter.get_instance()
if relative_urls():
if instance is None:
prefix = ''
else:
prefix = '/i/' + instance.key
if absolute:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
result = '%s://%s%s%s' % (protocol, domain, prefix, path)
else:
result = '%s%s' % (prefix, path)
else:
protocol = config.get('adhocracy.protocol', 'http').strip()
domain = config.get('adhocracy.domain').strip()
if instance is None or g.single_instance:
subdomain = ''
else:
subdomain = '%s.' % instance.key
result = '%s://%s%s%s' % (protocol, subdomain, domain, path)
if result == '':
result = '/'
if append_slash and not result.endswith('/'):
result += '/'
return result
def shortlink_url(delegateable):
path = "/d/%s" % delegateable.id
return base_url(path, None, absolute=True)
|
agpl-3.0
|
Python
|
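The adhocracy fix turns the global pylons config into a default parameter so initialization code can pass its own, not-yet-installed config. The pattern in isolation (a plain dict stands in for the pylons config object):

DEFAULT_CONFIG = {'adhocracy.relative_urls': 'false'}

def relative_urls(config=DEFAULT_CONFIG):
    return config.get('adhocracy.relative_urls', 'false') == 'true'

# During startup a freshly parsed config can be supplied explicitly:
assert relative_urls({'adhocracy.relative_urls': 'true'}) is True
assert relative_urls() is False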
3f8a29efa3128f8167306b46e47e7ac18cf592ab
|
set broker pool limit
|
total-impact/total-impact-core,Impactstory/total-impact-core,Impactstory/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core,total-impact/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core
|
celeryconfig.py
|
celeryconfig.py
|
import os
import sys
import urlparse
from kombu import Exchange, Queue
sys.path.append('.')
redis_url = os.environ.get('REDIS_URL', "redis://127.0.0.1:6379/")
if not redis_url.endswith("/"):
redis_url += "/"
BROKER_URL = redis_url + "1" # REDIS_CELERY_TASKS_DATABASE_NUMBER = 1
CELERY_RESULT_BACKEND = redis_url + "2" # REDIS_CELERY_RESULTS_DATABASE_NUMBER = 2
REDIS_CONNECT_RETRY = True
# these options will be defaults in future as per http://celery.readthedocs.org/en/latest/getting-started/brokers/redis.html
BROKER_TRANSPORT_OPTIONS = {'fanout_prefix': True,
'fanout_patterns': True,
'visibility_timeout': 60 # one minute
}
CELERY_DEFAULT_QUEUE = 'core_high'
CELERY_QUEUES = [
Queue('core_high', routing_key='core_high'),
Queue('core_low', routing_key='core_low')
]
# added because https://github.com/celery/celery/issues/896
BROKER_POOL_LIMIT = 100
CELERY_CREATE_MISSING_QUEUES = True
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_ENABLE_UTC = True
CELERY_TASK_RESULT_EXPIRES = 60*60 # 1 hour
CELERY_ACKS_LATE = True
# remove this, might fix deadlocks as per https://github.com/celery/celery/issues/970
# CELERYD_MAX_TASKS_PER_CHILD = 100
CELERYD_FORCE_EXECV = True
CELERY_TRACK_STARTED = True
# https://groups.google.com/forum/#!topic/celery-users/Y_ifty2l6Fc
CELERYD_PREFETCH_MULTIPLIER=1
# List of modules to import when celery starts.
CELERY_IMPORTS = ("tasks",)
CELERY_ANNOTATIONS = {
'celery.chord_unlock': {'soft_time_limit': 60} # 1 minute
}
|
import os
import sys
import urlparse
from kombu import Exchange, Queue
sys.path.append('.')
redis_url = os.environ.get('REDIS_URL', "redis://127.0.0.1:6379/")
if not redis_url.endswith("/"):
redis_url += "/"
BROKER_URL = redis_url + "1" # REDIS_CELERY_TASKS_DATABASE_NUMBER = 1
CELERY_RESULT_BACKEND = redis_url + "2" # REDIS_CELERY_RESULTS_DATABASE_NUMBER = 2
REDIS_CONNECT_RETRY = True
# these options will be defaults in future as per http://celery.readthedocs.org/en/latest/getting-started/brokers/redis.html
BROKER_TRANSPORT_OPTIONS = {'fanout_prefix': True,
'fanout_patterns': True,
'visibility_timeout': 60 # one minute
}
CELERY_DEFAULT_QUEUE = 'core_high'
CELERY_QUEUES = [
Queue('core_high', routing_key='core_high'),
Queue('core_low', routing_key='core_low')
]
# added because https://github.com/celery/celery/issues/896
BROKER_POOL_LIMIT = None
CELERY_CREATE_MISSING_QUEUES = True
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_ENABLE_UTC = True
CELERY_TASK_RESULT_EXPIRES = 60*60 # 1 hour
CELERY_ACKS_LATE = True
# remove this, might fix deadlocks as per https://github.com/celery/celery/issues/970
# CELERYD_MAX_TASKS_PER_CHILD = 100
CELERYD_FORCE_EXECV = True
CELERY_TRACK_STARTED = True
# https://groups.google.com/forum/#!topic/celery-users/Y_ifty2l6Fc
CELERYD_PREFETCH_MULTIPLIER=1
# List of modules to import when celery starts.
CELERY_IMPORTS = ("tasks",)
CELERY_ANNOTATIONS = {
'celery.chord_unlock': {'soft_time_limit': 60} # 1 minute
}
|
mit
|
Python
|
3abbba864df16e06a768b761baefd3d705008114
|
Update vigenereCipher: fixed typo
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
books/CrackingCodesWithPython/Chapter18/vigenereCipher.py
|
books/CrackingCodesWithPython/Chapter18/vigenereCipher.py
|
# Vigenere Cipher (Polyalphabetic Substitution Cipher)
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def main():
# This text can be downloaded from https://www.nostarch.com/crackingcodes/:
myMessage = """Alan Mathison Turing was a British mathematician, logician, cryptanalyst, and computer scientist."""
myKey = 'ASIMOV'
myMode = 'encrypt' # Set to either 'encrypt' or 'decrypt'.
if myMode == 'encrypt':
translated = encryptMessage(myKey, myMessage)
elif myMode == 'decrypt':
translated = decryptMessage(myKey, myMessage)
print('%sed message:' % (myMode.title()))
print(translated)
copy(translated)
print()
print('The message has been copied to the clipboard.')
def encryptMessage(key, message):
return translateMessage(key, message, 'encrypt')
def decryptMessage(key, message):
return translateMessage(key, message, 'decrypt')
def translateMessage(key, message, mode):
translated = [] # Stores the encrypted/decrypted message string.
keyIndex = 0
key = key.upper()
for symbol in message: # Loop through each symbol in message.
num = LETTERS.find(symbol.upper())
if num != -1: # -1 means symbol.upper() was not found in LETTERS.
if mode == 'encrypt':
num += LETTERS.find(key[keyIndex]) # Add if encrypting.
elif mode == 'decrypt':
num -= LETTERS.find(key[keyIndex]) # Subtract if decrypting.
num %= len(LETTERS) # Handle any wraparound.
# Add the encrypted/decrypted symbol to the end of translated:
if symbol.isupper():
translated.append(LETTERS[num])
elif symbol.islower():
translated.append(LETTERS[num].lower())
keyIndex += 1 # Move to the next letter in the key.
if keyIndex == len(key):
keyIndex = 0
else:
# Append the symbol without encrypting/decrypting:
translated.append(symbol)
return ''.join(translated)
# If vigenereCipher.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
|
# Vigenere Cipher (Polyalphabetic Substitution Cipher)
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def main():
# This text can be downloaded from https://www.nostarch.com/crackingcodes/:
myMessage = """Alan Mathison Turing was a British mathematician, logician, cryptanalyst, and computer scientist."""
myKey = 'ASIMOV'
myMode = 'encrypt' # Set to either 'encrypt' or 'decrypt'.
if myMode == 'encrypt':
translated = encryptMessage(myKey, myMessage)
elif myMode == 'decrypt':
translated = decryptMessage(myKey, myMessage)
print('%sed message:' % (myMode.title()))
print(translated)
copy(translated)
print()
print('The message has been copied to the clipboard.')
def encryptMessage(key, message):
return translateMessage(key, message, 'encrypt')
def decryptMessage(key, message):
return translateMessage(key, message, 'decrypt')
def translateMessage(key, message, mode):
translated = [] # Stores the encrypted/decrypted message string.
keyIndex = 0
key = key.upper()
for symbol in message: # Loop through each symbol in message.
num = LETTERS.find(symbol.upper())
if num != -1: # -1 means symbol.upper() was not found in LETTERS.
if mode == 'encrypt':
num += LETTERS.find(key[keyIndex]) # Add if encrypting.
elif mode == 'decrypt':
num -= LETTERS.find(key[keyIndex]) # Subtract if decrypting.
num %= len(LETTERS) # Handle any wraparound.
# Add the encrypted/decrypted symbol to the end of translated:
if symbol.isupper():
translated.append(LETTERS[num])
elif symbol.islower():
translated.append(LETTERS[num].lower())
keyIndex += 1 # Move to the next letter in the key.
if keyIndex == len(key):
keyIndex = 0
else:
# Append the symbol without encrypting/decrypting:
translated.append(symbol)
return ''.join(translated)
# If vigenereCipher.py is run (instead of imported as a module), call
# the main() function:
if __name__ = '__main__':
main()
|
mit
|
Python
|
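The cipher's core is per-letter addition of key offsets modulo 26. Two steps of that arithmetic, worked by hand for the message "Alan..." and the key 'ASIMOV' from the record:

LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'

num = LETTERS.find('A') + LETTERS.find('A')   # 'A' + key 'A' -> 0 + 0
assert LETTERS[num % 26] == 'A'

num = LETTERS.find('L') + LETTERS.find('S')   # 'l' (upper-cased) + key 'S' -> 11 + 18
assert LETTERS[num % 26] == 'D'               # 29 % 26 = 3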
ae92abffcbe792d41ee7aafb08e59ba874f3a4c4
|
Fix migration dependencies
|
JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw
|
longclaw/basket/migrations/0003_auto_20170207_2053.py
|
longclaw/basket/migrations/0003_auto_20170207_2053.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-07 20:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('basket', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='basketitem',
old_name='product',
new_name='variant',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-07 20:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('basket', '0002_basketitem_product'),
]
operations = [
migrations.RenameField(
model_name='basketitem',
old_name='product',
new_name='variant',
),
]
|
mit
|
Python
|
20003796eb8f3949d931a4b8752fb07f2be39136
|
Update utils.py
|
lk-geimfari/mimesis,lk-geimfari/elizabeth,lk-geimfari/mimesis,lk-geimfari/church,wikkiewikkie/elizabeth
|
church/utils.py
|
church/utils.py
|
from functools import lru_cache
from os.path import (
join,
dirname,
abspath
)
PATH = abspath(join(dirname(__file__), 'data'))
@lru_cache(maxsize=None)
def pull(filename, lang='en_us'):
"""
Function for getting data from text files in data/
1. de_de - Folder for Germany.
2. en_us - Folder for United States
3. ru_ru - Folder for Russian Federation.
"""
with open(join(PATH + '/' + lang, filename), 'r') as f:
_result = f.readlines()
return _result
|
from functools import lru_cache
from os.path import (
join,
dirname,
abspath
)
PATH = abspath(join(dirname(__file__), 'data'))
__all__ = ['priest']
@lru_cache(maxsize=None)
def pull(filename, lang='en_us'):
"""
Function for getting data from text files in data/
1. de_de - Folder for Germany.
2. en_us - Folder for United States
3. ru_ru - Folder for Russian Federation.
"""
with open(join(PATH + '/' + lang, filename), 'r') as f:
_result = f.readlines()
return _result
|
mit
|
Python
|
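pull() leans on functools.lru_cache so each (filename, lang) pair is read from disk at most once. The caching behaviour in isolation, with a call counter standing in for file I/O:

from functools import lru_cache

calls = []

@lru_cache(maxsize=None)
def pull(filename, lang='en_us'):
    calls.append((filename, lang))
    return ['line\n']

pull('names', 'en_us')
pull('names', 'en_us')   # second call is served from the cache
assert len(calls) == 1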
3c978eab962ed8a6158df2266852a1b1a47c4ec7
|
add more terminal nodes
|
NCI-GDC/gdcdatamodel,NCI-GDC/gdcdatamodel
|
gdcdatamodel/query.py
|
gdcdatamodel/query.py
|
from psqlgraph import Node, Edge
traversals = {}
terminal_nodes = ['annotations', 'centers', 'archives', 'tissue_source_sites',
'files', 'related_files', 'describing_files',
'clinical_metadata_files', 'experiment_metadata_files', 'run_metadata_files',
'analysis_metadata_files', 'biospecimen_metadata_files', 'aligned_reads_metrics',
'read_group_metrics', 'pathology_reports', 'simple_germline_variations',
'aligned_reads_indexes', 'mirna_expressions', 'exon_expressions',
'simple_somatic_mutations', 'gene_expressions', 'aggregated_somatic_mutations',
]
def construct_traversals(root, node, visited, path):
recurse = lambda neighbor: (
neighbor
# no backtracking
and neighbor not in visited
and neighbor != node
# no traveling THROUGH terminal nodes
and (path[-1] not in terminal_nodes
if path else neighbor.label not in terminal_nodes)
and (not path[-1].startswith('_related')
if path else not neighbor.label.startswith('_related')))
for edge in Edge._get_edges_with_src(node.__name__):
neighbor = [n for n in Node.get_subclasses()
if n.__name__ == edge.__dst_class__][0]
if recurse(neighbor):
construct_traversals(
root, neighbor, visited+[node], path+[edge.__src_dst_assoc__])
for edge in Edge._get_edges_with_dst(node.__name__):
neighbor = [n for n in Node.get_subclasses()
if n.__name__ == edge.__src_class__][0]
if recurse(neighbor):
construct_traversals(
root, neighbor, visited+[node], path+[edge.__dst_src_assoc__])
traversals[root][node.label] = traversals[root].get(node.label) or set()
traversals[root][node.label].add('.'.join(path))
for node in Node.get_subclasses():
traversals[node.label] = {}
construct_traversals(node.label, node, [node], [])
def union_subq_without_path(q, *args, **kwargs):
return q.except_(union_subq_path(q, *args, **kwargs))
def union_subq_path(q, dst_label, post_filters=[]):
src_label = q.entity().label
if not traversals.get(src_label, {}).get(dst_label, {}):
return q
paths = list(traversals[src_label][dst_label])
base = q.subq_path(paths.pop(), post_filters)
while paths:
base = base.union(q.subq_path(paths.pop(), post_filters))
return base
|
from psqlgraph import Node, Edge
traversals = {}
terminal_nodes = ['annotations', 'centers', 'archives', 'tissue_source_sites',
'files', 'related_files', 'describing_files']
def construct_traversals(root, node, visited, path):
recurse = lambda neighbor: (
neighbor
# no backtracking
and neighbor not in visited
and neighbor != node
# no traveling THROUGH terminal nodes
and (path[-1] not in terminal_nodes
if path else neighbor.label not in terminal_nodes))
for edge in Edge._get_edges_with_src(node.__name__):
neighbor = [n for n in Node.get_subclasses()
if n.__name__ == edge.__dst_class__][0]
if recurse(neighbor):
construct_traversals(
root, neighbor, visited+[node], path+[edge.__src_dst_assoc__])
for edge in Edge._get_edges_with_dst(node.__name__):
neighbor = [n for n in Node.get_subclasses()
if n.__name__ == edge.__src_class__][0]
if recurse(neighbor):
construct_traversals(
root, neighbor, visited+[node], path+[edge.__dst_src_assoc__])
traversals[root][node.label] = traversals[root].get(node.label) or set()
traversals[root][node.label].add('.'.join(path))
for node in Node.get_subclasses():
traversals[node.label] = {}
construct_traversals(node.label, node, [node], [])
def union_subq_without_path(q, *args, **kwargs):
return q.except_(union_subq_path(q, *args, **kwargs))
def union_subq_path(q, dst_label, post_filters=[]):
src_label = q.entity().label
if not traversals.get(src_label, {}).get(dst_label, {}):
return q
paths = list(traversals[src_label][dst_label])
base = q.subq_path(paths.pop(), post_filters)
while paths:
base = base.union(q.subq_path(paths.pop(), post_filters))
return base
|
apache-2.0
|
Python
|
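construct_traversals is a depth-first walk over the schema graph that records the dotted association path to every reachable node and refuses to travel through "terminal" labels. A simplified, runnable sketch of that idea on a toy adjacency map (labels and associations invented; this is not the gdcdatamodel graph):

terminal = {'files'}
graph = {
    'case': [('samples', 'sample'), ('files', 'files')],
    'sample': [('files', 'files')],
    'files': [],
}

def walk(node, path, visited, out):
    out.setdefault(node, set()).add('.'.join(path))
    if path and path[-1] in terminal:
        return  # record terminal nodes but do not pass through them
    for assoc, neighbor in graph[node]:
        if neighbor not in visited:
            walk(neighbor, path + [assoc], visited | {neighbor}, out)

paths = {}
walk('case', [], {'case'}, paths)
assert paths['files'] == {'files', 'samples.files'}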
7c6c8e9ed2b89c7fa15992b5b68c793a53b327d8
|
fix test case to run on_commit hook before assertion
|
RegioHelden/django-datawatch,RegioHelden/django-datawatch
|
django_datawatch/tests/test_trigger_update.py
|
django_datawatch/tests/test_trigger_update.py
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals, print_function
try:
from unittest import mock
except ImportError:
import mock
from django.db import transaction
from django.test.testcases import TestCase, override_settings
from django_datawatch.backends.base import BaseBackend
from django_datawatch.datawatch import datawatch, run_checks
from django_datawatch.base import BaseCheck
from django_datawatch.models import Result
@datawatch.register
class CheckTriggerUpdate(BaseCheck):
model_class = Result
trigger_update = dict(foobar=Result)
def get_foobar_payload(self, instance):
return instance
def get_identifier(self, payload):
return payload.pk
def check(self, payload):
return payload
class TriggerUpdateTestCase(TestCase):
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
def test_setting_run_signals_true(self, mock_update):
run_checks(sender=None, instance=None, created=None, raw=None,
using=None)
self.assertTrue(mock_update.called)
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=False)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
def test_setting_run_signals_false(self, mock_update):
run_checks(sender=None, instance=None, created=None, raw=None,
using=None)
self.assertFalse(mock_update.called)
def run_commit_hooks(self):
"""
Fake transaction commit to run delayed on_commit functions
source: https://medium.com/@juan.madurga/speed-up-django-transaction-hooks-tests-6de4a558ef96
"""
for db_name in reversed(self._databases_names()):
with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.validate_no_atomic_block', lambda a: False):
transaction.get_connection(using=db_name).run_and_clear_commit_hooks()
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.get_backend')
def test_update_related_calls_backend(self, mock_get_backend):
backend = mock.Mock(spec=BaseBackend)
mock_get_backend.return_value = backend
datawatch.update_related(sender=Result, instance=Result())
# run our on_commit hook
self.run_commit_hooks()
# make sure that we called backend.run
self.assertTrue(backend.run.called)
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals, print_function
from django_datawatch.backends.base import BaseBackend
try:
from unittest import mock
except ImportError:
import mock
from django.test.testcases import TestCase, override_settings
from django_datawatch.datawatch import datawatch, run_checks
from django_datawatch.base import BaseCheck
from django_datawatch.models import Result
@datawatch.register
class CheckTriggerUpdate(BaseCheck):
model_class = Result
trigger_update = dict(foobar=Result)
def get_foobar_payload(self, instance):
return instance
def get_identifier(self, payload):
return payload.pk
def check(self, payload):
return payload
class TriggerUpdateTestCase(TestCase):
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
def test_setting_run_signals_true(self, mock_update):
run_checks(sender=None, instance=None, created=None, raw=None,
using=None)
self.assertTrue(mock_update.called)
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=False)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.update_related')
def test_setting_run_signals_false(self, mock_update):
run_checks(sender=None, instance=None, created=None, raw=None,
using=None)
self.assertFalse(mock_update.called)
@override_settings(DJANGO_DATAWATCH_RUN_SIGNALS=True)
@mock.patch('django_datawatch.datawatch.DatawatchHandler.get_backend')
def test_update_related_calls_backend(self, mock_get_backend):
backend = mock.Mock(spec=BaseBackend)
mock_get_backend.return_value = backend
datawatch.update_related(sender=Result, instance=Result())
self.assertTrue(backend.run.called)
|
mit
|
Python
|
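The new run_commit_hooks helper exists because TestCase keeps every test inside an uncommitted transaction, so transaction.on_commit callbacks never fire on their own. The deferral behaviour being tested, modelled with a toy transaction object (not Django's API):

class Txn(object):
    def __init__(self):
        self._hooks = []
    def on_commit(self, fn):
        self._hooks.append(fn)   # deferred, not executed yet
    def commit(self):
        for fn in self._hooks:
            fn()

fired = []
txn = Txn()
txn.on_commit(lambda: fired.append('ran'))
assert fired == []   # nothing happens before commit
txn.commit()
assert fired == ['ran']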
2323699ae6b266823b30784293b2d1d900d94700
|
Bump aioTV version.
|
aioTV/django-rest-swagger,aioTV/django-rest-swagger,aioTV/django-rest-swagger
|
rest_framework_swagger/__init__.py
|
rest_framework_swagger/__init__.py
|
VERSION = '0.3.5-aio-v3'
DEFAULT_SWAGGER_SETTINGS = {
'exclude_namespaces': [],
'api_version': '',
'api_key': '',
'token_type': 'Token',
'enabled_methods': ['get', 'post', 'put', 'patch', 'delete'],
'is_authenticated': False,
'is_superuser': False,
'permission_denied_handler': None,
'resource_access_handler': None,
'template_path': 'rest_framework_swagger/index.html',
'doc_expansion': 'none',
'base_path': ''
}
try:
from django.conf import settings
from django.test.signals import setting_changed
def load_settings(provided_settings):
global SWAGGER_SETTINGS
SWAGGER_SETTINGS = provided_settings
for key, value in DEFAULT_SWAGGER_SETTINGS.items():
if key not in SWAGGER_SETTINGS:
SWAGGER_SETTINGS[key] = value
def reload_settings(*args, **kwargs):
setting, value = kwargs['setting'], kwargs['value']
if setting == 'SWAGGER_SETTINGS':
load_settings(value)
load_settings(getattr(settings,
'SWAGGER_SETTINGS',
DEFAULT_SWAGGER_SETTINGS))
setting_changed.connect(reload_settings)
except:
SWAGGER_SETTINGS = DEFAULT_SWAGGER_SETTINGS
|
VERSION = '0.3.5-aio-v2'
DEFAULT_SWAGGER_SETTINGS = {
'exclude_namespaces': [],
'api_version': '',
'api_key': '',
'token_type': 'Token',
'enabled_methods': ['get', 'post', 'put', 'patch', 'delete'],
'is_authenticated': False,
'is_superuser': False,
'permission_denied_handler': None,
'resource_access_handler': None,
'template_path': 'rest_framework_swagger/index.html',
'doc_expansion': 'none',
'base_path': ''
}
try:
from django.conf import settings
from django.test.signals import setting_changed
def load_settings(provided_settings):
global SWAGGER_SETTINGS
SWAGGER_SETTINGS = provided_settings
for key, value in DEFAULT_SWAGGER_SETTINGS.items():
if key not in SWAGGER_SETTINGS:
SWAGGER_SETTINGS[key] = value
def reload_settings(*args, **kwargs):
setting, value = kwargs['setting'], kwargs['value']
if setting == 'SWAGGER_SETTINGS':
load_settings(value)
load_settings(getattr(settings,
'SWAGGER_SETTINGS',
DEFAULT_SWAGGER_SETTINGS))
setting_changed.connect(reload_settings)
except:
SWAGGER_SETTINGS = DEFAULT_SWAGGER_SETTINGS
|
bsd-2-clause
|
Python
|
00bf40ba386d7d1ffebcc1a41766250e0fc975ac
|
Add related name fields
|
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
|
src/core/models/base.py
|
src/core/models/base.py
|
from django.db import models
from django.contrib.auth.models import User
class Location(models.Model):
class Meta:
app_label = "core"
x = models.DecimalField(max_digits=10, decimal_places=5)
y = models.DecimalField(max_digits=10, decimal_places=5)
def __str__(self):
return "x:" + str(self.x) + ", y:" + str(self.y)
class Idea(models.Model):
class Meta:
app_label = "core"
time = models.DateTimeField(null=True, blank=True)
location = models.ForeignKey(Location, null=True, blank=True, related_name='ideas')
owner = models.ForeignKey(User, related_name='ideas')
users = models.ManyToManyField(User, through='Suggestion', related_name='idea_suggestions')
activity = models.CharField(max_length=50, blank=True)
def __str__(self):
return str(self.owner) + ":" + str(self.activity) + " @ " + str(self.location) + ", " + str(self.time)
class Suggestion(models.Model):
class Meta:
app_label = "core"
YES = 'Y'
NO = 'N'
MAYBE = 'M'
NONE = 'O'
RESPONSE_CHOICES = (
(YES, 'Yes'),
(NO, 'No'),
(MAYBE, 'Maybe'),
(NONE, 'No vote'),
)
response = models.CharField(max_length=1, choices=RESPONSE_CHOICES, default=NONE)
user = models.ForeignKey(User, related_name='suggestions')
idea = models.ForeignKey(Idea, related_name='suggestions')
def __str__(self):
return str(self.user) + ":" + str(self.response)
class Event(models.Model):
class Meta:
app_label = "core"
owner = models.ForeignKey(User, related_name='events')
invites = models.ManyToManyField(User, through='Invite', related_name='event_invites')
description = models.TextField()
location = models.ForeignKey(Location, related_name='events')
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True, blank=True)
def __str__(self):
return str(self.owner) + ":" + str(self.location) + "@" + str(self.start_time)
class Invite(models.Model):
class Meta:
app_label = "core"
YES = 'Y'
NO = 'N'
MAYBE_YES = 'MY'
MAYBE_NO = 'MN'
NONE = 'O'
RSVP_CHOICES = (
(YES, 'Yes'),
(NO, 'No'),
(MAYBE_YES, 'Maybe Yes'),
(MAYBE_NO, 'Maybe No'),
(NONE, 'No response'),
)
event = models.ForeignKey(Event, related_name='invites')
user = models.ForeignKey(User, related_name='invites')
rsvp = models.CharField(max_length=2, choices=RSVP_CHOICES, default=NONE)
def __str__(self):
return str(self.user) + ":" + str(self.event)
|
from django.db import models
from django.contrib.auth.models import User
class Location(models.Model):
class Meta:
app_label = "core"
x = models.DecimalField(max_digits=10, decimal_places=5)
y = models.DecimalField(max_digits=10, decimal_places=5)
def __str__(self):
return "x:" + str(self.x) + ", y:" + str(self.y)
class Idea(models.Model):
class Meta:
app_label = "core"
time = models.DateTimeField(null=True, blank=True)
location = models.ForeignKey(Location, null=True, blank=True)
owner = models.ForeignKey(User, related_name='idea_owner')
users = models.ManyToManyField(User, through='Suggestion')
activity = models.CharField(max_length=50, blank=True)
def __str__(self):
return str(self.owner) + ":" + str(self.activity) + " @ " + str(self.location) + ", " + str(self.time)
class Suggestion(models.Model):
class Meta:
app_label = "core"
YES = 'Y'
NO = 'N'
MAYBE = 'M'
NONE = 'O'
RESPONSE_CHOICES = (
(YES, 'Yes'),
(NO, 'No'),
(MAYBE, 'Maybe'),
(NONE, 'No vote'),
)
response = models.CharField(max_length=1, choices=RESPONSE_CHOICES, default=NONE)
user = models.ForeignKey(User)
idea = models.ForeignKey(Idea)
def __str__(self):
return str(self.user) + ":" + str(self.response)
class Event(models.Model):
class Meta:
app_label = "core"
owner = models.ForeignKey(User, related_name='event_owner')
invites = models.ManyToManyField(User, through='Invite')
description = models.TextField()
location = models.ForeignKey(Location)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True, blank=True)
def __str__(self):
return str(self.owner) + ":" + str(self.location) + "@" + str(self.start_time)
class Invite(models.Model):
class Meta:
app_label = "core"
YES = 'Y'
NO = 'N'
MAYBE_YES = 'MY'
MAYBE_NO = 'MN'
NONE = 'O'
RSVP_CHOICES = (
(YES, 'Yes'),
(NO, 'No'),
(MAYBE_YES, 'Maybe Yes'),
(MAYBE_NO, 'Maybe No'),
(NONE, 'No response'),
)
event = models.ForeignKey(Event)
user = models.ForeignKey(User)
rsvp = models.CharField(max_length=2, choices=RSVP_CHOICES, default=NONE)
def __str__(self):
return str(self.user) + ":" + str(self.event)
|
agpl-3.0
|
Python
|
2edb2145f6f7447a7c659d7eeb51c7b75aa0c6d4
|
Add generate username signal
|
allanlei/rhinocloud-utils
|
rhinocloud/contrib/auth/signals.py
|
rhinocloud/contrib/auth/signals.py
|
from django.contrib.auth.models import User
from rhinocloud.utils import random_generator
def generate_username_from_email(sender, instance, **kwargs):
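# Django's built-in User caps username at 30 characters, so fall back to a randomized 25-character prefix when the email is too long.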
if sender == User:
username = instance.email
if len(username) > 30:
username = random_generator(username[:25])
instance.username = username
def username_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.username) > 30:
instance.username = random_generator(instance.username[:25])
def first_name_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.first_name) > 30:
instance.first_name = instance.first_name[:30]
def last_name_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.last_name) > 30:
instance.last_name = instance.last_name[:30]
|
from django.contrib.auth.models import User
from rhinocloud.utils import random_generator
def username_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.username) > 30:
instance.username = random_generator(instance.username[:25])
def first_name_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.first_name) > 30:
instance.first_name = instance.first_name[:30]
def last_name_shorten(sender, instance, **kwargs):
if sender == User:
if len(instance.last_name) > 30:
instance.last_name = instance.last_name[:30]
|
bsd-3-clause
|
Python
|
a390800d3ee510a7e8d2d26bf1b44d1b735808cf
|
sort import
|
nansencenter/DAPPER,nansencenter/DAPPER
|
dapper/mods/VL20/demo.py
|
dapper/mods/VL20/demo.py
|
"""Demonstrate the Vissio-Lucarini-20 model.
Reproduce the Hovmoller diagram, Fig. 4 in `bib.vissio2020mechanics`.
"""
import numpy as np
from matplotlib import pyplot as plt
import dapper.mods as modelling
from dapper.mods.VL20 import model_instance
VL20 = model_instance(nX=36, F=10, G=0)
step = modelling.with_rk4(VL20.dxdt, autonom=True)
simulator = modelling.with_recursion(step, prog="Simulating")
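# 72 state components: 36 X variables plus 36 theta variables (nX=36).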
x0 = np.random.rand(72)
dt = 0.05
xx = simulator(x0, k=22000, t0=0, dt=dt)
plt.figure(1)
plt.clf()
plt.contourf(xx[-200:, :], levels=100, cmap='jet')
plt.colorbar()
plt.xticks([0, 9, 19, 29, 36, 45, 55, 65], [1, 10, 20, 30, 1, 10, 20, 30])
plt.yticks(np.arange(0, 220, 20), np.arange(0, 11, 1))
plt.xlabel(r'$X_k, k = 1, ..., 36$'+' '*15+r'$\theta_k, k = 1, ..., 36$')
plt.ylabel('time')
plt.show()
|
"""Demonstrate the Vissio-Lucarini-20 model.
Reproduce the Hovmoller diagram, Fig. 4 in `bib.vissio2020mechanics`.
"""
import numpy as np
from matplotlib import pyplot as plt
import dapper.mods as modelling
from dapper.mods.VL20 import model_instance
VL20 = model_instance(nX=36, F=10, G=0)
step = modelling.with_rk4(VL20.dxdt, autonom=True)
simulator = modelling.with_recursion(step, prog="Simulating")
x0 = np.random.rand(72)
dt = 0.05
xx = simulator(x0, k=22000, t0=0, dt=dt)
plt.figure(1)
plt.clf()
plt.contourf(xx[-200:, :], levels=100, cmap='jet')
plt.colorbar()
plt.xticks([0, 9, 19, 29, 36, 45, 55, 65], [1, 10, 20, 30, 1, 10, 20, 30])
plt.yticks(np.arange(0, 220, 20), np.arange(0, 11, 1))
plt.xlabel(r'$X_k, k = 1, ..., 36$'+' '*15+r'$\theta_k, k = 1, ..., 36$')
plt.ylabel('time')
plt.show()
|
mit
|
Python
|
916638e11ef20e2976c81f0e8230079cf96a3c3a
|
Set DJANGO_SETTINGS_MODULE env variable.
|
parksandwildlife/ibms,parksandwildlife/ibms,parksandwildlife/ibms
|
ibms_project/wsgi.py
|
ibms_project/wsgi.py
|
"""
WSGI config for IBMS project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import dotenv
from django.core.wsgi import get_wsgi_application
import os
from pathlib import Path
# These lines are required for interoperability between local and container environments.
d = Path(__file__).resolve().parents[1]
dot_env = os.path.join(str(d), '.env')
if os.path.exists(dot_env):
dotenv.read_dotenv(dot_env)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ibms_project.settings")
application = get_wsgi_application()
|
"""
WSGI config for IBMS project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import dotenv
from django.core.wsgi import get_wsgi_application
import os
from pathlib import Path
# These lines are required for interoperability between local and container environments.
d = Path(__file__).resolve().parents[1]
dot_env = os.path.join(str(d), '.env')
if os.path.exists(dot_env):
dotenv.read_dotenv(dot_env)
application = get_wsgi_application()
|
apache-2.0
|
Python
|
2636d76fa4d9dd820fd673bc6044f4c3ccdfd0b1
|
Fix permissions fixture problem.
|
4dn-dcic/fourfront,kidaa/encoded,T2DREAM/t2dream-portal,ClinGen/clincoded,ENCODE-DCC/encoded,4dn-dcic/fourfront,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,kidaa/encoded,kidaa/encoded,hms-dbmi/fourfront,ENCODE-DCC/snovault,philiptzou/clincoded,kidaa/encoded,ENCODE-DCC/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,ClinGen/clincoded,philiptzou/clincoded,T2DREAM/t2dream-portal,ClinGen/clincoded,ClinGen/clincoded,hms-dbmi/fourfront,ENCODE-DCC/snovault,ENCODE-DCC/encoded,hms-dbmi/fourfront,ENCODE-DCC/encoded,ClinGen/clincoded,4dn-dcic/fourfront,kidaa/encoded,T2DREAM/t2dream-portal,hms-dbmi/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,ENCODE-DCC/snovault,ENCODE-DCC/snovault
|
src/encoded/tests/test_permissions.py
|
src/encoded/tests/test_permissions.py
|
import pytest
@pytest.fixture
def users(testapp):
from .sample_data import URL_COLLECTION
url = '/labs/'
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
url = '/awards/'
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
url = '/users/'
users = []
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
users.append(res.json['@graph'][0])
return users
@pytest.fixture
def wrangler(users, app, external_tx, zsa_savepoints):
user = [u for u in users if 'wrangler' in u['groups']][0]
from webtest import TestApp
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': str(user['uuid']),
}
return TestApp(app, environ)
@pytest.fixture
def submitter(users, app, external_tx, zsa_savepoints):
user = [u for u in users if not u['groups']][0]
from webtest import TestApp
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': str(user['uuid']),
}
return TestApp(app, environ)
@pytest.fixture
def lab():
return 'b635b4ed-dba3-4672-ace9-11d76a8d03af'
@pytest.fixture
def award():
return 'Myers'
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_wrangler_post_non_lab_collection(wrangler, url):
from .sample_data import URL_COLLECTION
collection = URL_COLLECTION[url]
for item in collection:
res = wrangler.post_json(url, item, status=201)
assert item['name'] in res.location
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_submitter_post_non_lab_collection(submitter, url):
from .sample_data import URL_COLLECTION
collection = URL_COLLECTION[url]
for item in collection:
item = item.copy()
del item['uuid']
submitter.post_json(url, item, status=403)
def test_submitter_post_update_experiment(submitter, lab, award):
experiment = {'lab': lab, 'award': award}
res = submitter.post_json('/experiment/', experiment, status=201)
location = res.location
res = submitter.get(location + '@@testing-allowed?permission=edit', status=200)
assert res.json['has_permission'] is True
assert 'submits_for.%s' % lab in res.json['principals_allowed_by_permission']
submitter.patch_json(location, {'description': 'My experiment'}, status=200)
|
import pytest
@pytest.datafixture
def users(app):
from webtest import TestApp
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'TEST',
}
testapp = TestApp(app, environ)
from .sample_data import URL_COLLECTION
url = '/labs/'
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
url = '/awards/'
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
url = '/users/'
users = []
for item in URL_COLLECTION[url]:
res = testapp.post_json(url, item, status=201)
users.append(res.json['@graph'][0])
return users
@pytest.fixture
def wrangler(users, app, external_tx, zsa_savepoints):
user = [u for u in users if 'wrangler' in u['groups']][0]
from webtest import TestApp
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': str(user['uuid']),
}
return TestApp(app, environ)
@pytest.fixture
def submitter(users, app, external_tx, zsa_savepoints):
user = [u for u in users if not u['groups']][0]
from webtest import TestApp
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': str(user['uuid']),
}
return TestApp(app, environ)
@pytest.fixture
def lab():
return 'b635b4ed-dba3-4672-ace9-11d76a8d03af'
@pytest.fixture
def award():
return 'Myers'
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_wrangler_post_non_lab_collection(wrangler, url):
from .sample_data import URL_COLLECTION
collection = URL_COLLECTION[url]
for item in collection:
res = wrangler.post_json(url, item, status=201)
assert item['name'] in res.location
@pytest.mark.parametrize('url', ['/organisms/', '/sources/'])
def test_submitter_post_non_lab_collection(submitter, url):
from .sample_data import URL_COLLECTION
collection = URL_COLLECTION[url]
for item in collection:
item = item.copy()
del item['uuid']
submitter.post_json(url, item, status=403)
def test_submitter_post_update_experiment(submitter, lab, award):
experiment = {'lab': lab, 'award': award}
res = submitter.post_json('/experiment/', experiment, status=201)
location = res.location
res = submitter.get(location + '@@testing-allowed?permission=edit', status=200)
assert res.json['has_permission'] is True
assert 'submits_for.%s' % lab in res.json['principals_allowed_by_permission']
submitter.patch_json(location, {'description': 'My experiment'}, status=200)
|
mit
|
Python
|
d51a13ed70c157d90c2d77461ad1747f7ce12e7c
|
Improve comment syntax
|
openfisca/country-template,openfisca/country-template
|
openfisca_country_template/variables/taxes.py
|
openfisca_country_template/variables/taxes.py
|
# -*- coding: utf-8 -*-
# This file defines the variables of our legislation.
# A variable is a property of a person, or of an entity (e.g. a household).
# See http://openfisca.org/doc/variables.html
# Import from openfisca-core the common python objects used to code the legislation in OpenFisca
from openfisca_core.model_api import *
# Import the entities specifically defined for this tax and benefit system
from openfisca_country_template.entities import *
class income_tax(Variable):
value_type = float
entity = Person
definition_period = MONTH
label = u"Income tax"
reference = "https://law.gov.example/income_tax" # Always use the most official source
# The formula to compute the income tax for a given person at a given period
def formula(person, period, parameters):
return person('salary', period) * parameters(period).taxes.income_tax_rate
class social_security_contribution(Variable):
value_type = float
entity = Person
definition_period = MONTH
label = u"Progressive contribution paid on salaries to finance social security"
reference = "https://law.gov.example/social_security_contribution" # Always use the most official source
def formula(person, period, parameters):
salary = person('salary', period)
# The social_security_contribution is computed according to a marginal scale.
scale = parameters(period).taxes.social_security_contribution
return scale.calc(salary)
class housing_tax(Variable):
value_type = float
entity = Household
definition_period = YEAR # This housing tax is defined for a year.
label = u"Tax paid by each household proportionally to the size of its accommodation"
reference = "https://law.gov.example/housing_tax" # Always use the most official source
def formula(household, period, parameters):
# The housing tax is defined for a year, but depends on the `accomodation_size` and `housing_occupancy_status` on the first month of the year.
# Here period is a year. We can get the first month of a year with the following shortcut.
# To build different periods, see http://openfisca.org/doc/coding-the-legislation/35_periods.html#calculating-dependencies-for-a-specific-period
january = period.first_month
accommodation_size = household('accomodation_size', january)
# `housing_occupancy_status` is an Enum variable
occupancy_status = household('housing_occupancy_status', january)
HousingOccupancyStatus = occupancy_status.possible_values # Get the enum associated with the variable
# To access an enum element, we use the `.` notation.
tenant = (occupancy_status == HousingOccupancyStatus.tenant)
owner = (occupancy_status == HousingOccupancyStatus.owner)
# The tax is applied only if the household owns or rents its main residency
return (owner + tenant) * accommodation_size * 10
|
# -*- coding: utf-8 -*-
# This file defines the variables of our legislation.
# A variable is a property of a person, or of an entity (e.g. a household).
# See http://openfisca.org/doc/variables.html
# Import from openfisca-core the common python objects used to code the legislation in OpenFisca
from openfisca_core.model_api import *
# Import the entities specifically defined for this tax and benefit system
from openfisca_country_template.entities import *
class income_tax(Variable):
value_type = float
entity = Person
definition_period = MONTH
label = u"Income tax"
reference = "https://law.gov.example/income_tax" # Always use the most official source
# The formula to compute the income tax for a given person at a given period
def formula(person, period, parameters):
return person('salary', period) * parameters(period).taxes.income_tax_rate
class social_security_contribution(Variable):
value_type = float
entity = Person
definition_period = MONTH
label = u"Progressive contribution paid on salaries to finance social security"
reference = "https://law.gov.example/social_security_contribution" # Always use the most official source
def formula(person, period, parameters):
salary = person('salary', period)
# The social_security_contribution is computed according to a marginal scale.
scale = parameters(period).taxes.social_security_contribution
return scale.calc(salary)
class housing_tax(Variable):
value_type = float
entity = Household
definition_period = YEAR # This housing tax is defined for a year.
label = u"Tax paid by each household proportionally to the size of its accommodation"
reference = "https://law.gov.example/housing_tax" # Always use the most official source
def formula(household, period, parameters):
# The housing tax is defined for a year, but depends on the `accomodation_size` and `housing_occupancy_status` on the first month of the year.
# Here period is a year. We can get the first month of a year with the following shortcut.
# To build different periods, see http://openfisca.org/doc/coding-the-legislation/35_periods.html#calculating-dependencies-for-a-specific-period
january = period.first_month
accommodation_size = household('accomodation_size', january)
# `housing_occupancy_status` is an Enum variable
occupancy_status = household('housing_occupancy_status', january)
HousingOccupancyStatus = occupancy_status.possible_values # Get the enum associated with the variable
# To access an enum element, we use the . notation.
tenant = (occupancy_status == HousingOccupancyStatus.tenant)
owner = (occupancy_status == HousingOccupancyStatus.owner)
# The tax is applied only if the household owns or rents its main residency
return (owner + tenant) * accommodation_size * 10
|
agpl-3.0
|
Python
|
da1df870f5d5b7703c4c4c3a6b8cb7d140778469
|
Set default task target to 100.
|
VISTAS-IVES/pyvistas
|
source/vistas/core/task.py
|
source/vistas/core/task.py
|
from threading import RLock
class Task:
STOPPED = 'stopped'
RUNNING = 'running'
INDETERMINATE = 'indeterminate'
COMPLETE = 'complete'
SHOULD_STOP = 'should_stop'
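# Class-level registry of live tasks; a task removes itself once its status is set to COMPLETE (see the status setter).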
tasks = []
def __init__(self, name, description=None, target=100, progress=0):
self.name = name
self.description = description
self._target = target
self._progress = progress
self._status = self.STOPPED
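# A reentrant lock guards _target and _progress so other threads can update progress safely.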
self.lock = RLock()
Task.tasks.append(self)
@property
def stopped(self):
return self._status == self.STOPPED
@property
def running(self):
return self._status == self.RUNNING
@property
def indeterminate(self):
return self._status == self.INDETERMINATE
@property
def complete(self):
return self._status == self.COMPLETE
@property
def should_stop(self):
return self._status == self.SHOULD_STOP
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
if self.complete:
Task.tasks.remove(self)
@property
def target(self):
with self.lock:
return self._target
@target.setter
def target(self, value):
with self.lock:
self._target = value
@property
def progress(self):
with self.lock:
return self._progress
@progress.setter
def progress(self, value):
with self.lock:
self._progress = value
@property
def percent(self):
with self.lock:
return int(self._progress / self._target * 100)
def inc_target(self, increment=1):
with self.lock:
self._target += increment
def inc_progress(self, increment=1):
with self.lock:
self._progress += increment
|
from threading import RLock
class Task:
STOPPED = 'stopped'
RUNNING = 'running'
INDETERMINATE = 'indeterminate'
COMPLETE = 'complete'
SHOULD_STOP = 'should_stop'
tasks = []
def __init__(self, name, description=None, target=0, progress=0):
self.name = name
self.description = description
self._target = target
self._progress = progress
self._status = self.STOPPED
self.lock = RLock()
Task.tasks.append(self)
@property
def stopped(self):
return self._status == self.STOPPED
@property
def running(self):
return self._status == self.RUNNING
@property
def indeterminate(self):
return self._status == self.INDETERMINATE
@property
def complete(self):
return self._status == self.COMPLETE
@property
def should_stop(self):
return self._status == self.SHOULD_STOP
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
if self.complete:
Task.tasks.remove(self)
@property
def target(self):
with self.lock:
return self._target
@target.setter
def target(self, value):
with self.lock:
self._target = value
@property
def progress(self):
with self.lock:
return self._progress
@progress.setter
def progress(self, value):
with self.lock:
self._progress = value
@property
def percent(self):
with self.lock:
return int(self._progress / self._target * 100)
def inc_target(self, increment=1):
with self.lock:
self._target += increment
def inc_progress(self, increment=1):
with self.lock:
self._progress += increment
|
bsd-3-clause
|
Python
|
285d5f43b112354f1d5c05f9dd6b050e30f517e4
|
Remove country=DE parameter
|
DenisCarriere/geocoder
|
geocoder/gisgraphy.py
|
geocoder/gisgraphy.py
|
#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.location import BBox
from geocoder.base import OneResult, MultipleResultsQuery
class GisgraphyResult(OneResult):
@property
def lat(self):
return self.raw.get('lat')
@property
def lng(self):
return self.raw.get('lng')
@property
def address(self):
return self.raw.get('formatedFull', '')
@property
def country(self):
return self.raw.get('countryCode', '')
@property
def state(self):
return self.raw.get('state', '')
@property
def city(self):
return self.raw.get('city', '')
@property
def street(self):
return self.raw.get('streetName', '')
@property
def housenumber(self):
return self.raw.get('houseNumber', '')
@property
def postal(self):
return self.raw.get('zipCode', '')
class GisgraphyQuery(MultipleResultsQuery):
"""
Gisgraphy REST API
=======================
API Reference
-------------
http://www.gisgraphy.com/documentation/api/
"""
provider = 'gisgraphy'
method = 'geocode'
_URL = 'https://services.gisgraphy.com/geocoding/'
_RESULT_CLASS = GisgraphyResult
_KEY_MANDATORY = False
def _build_params(self, location, provider_key, **kwargs):
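# Gisgraphy's "to" parameter caps the number of results returned (mapped from maxRows, default 1).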
return {
'address': location,
'to': kwargs.get('maxRows', 1),
'format': 'json',
}
def _adapt_results(self, json_response):
return json_response['result']
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = GisgraphyQuery('Ottawa Ontario', maxRows=3)
g.debug()
|
#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.location import BBox
from geocoder.base import OneResult, MultipleResultsQuery
class GisgraphyResult(OneResult):
@property
def lat(self):
return self.raw.get('lat')
@property
def lng(self):
return self.raw.get('lng')
@property
def address(self):
return self.raw.get('formatedFull', '')
@property
def country(self):
return self.raw.get('countryCode', '')
@property
def state(self):
return self.raw.get('state', '')
@property
def city(self):
return self.raw.get('city', '')
@property
def street(self):
return self.raw.get('streetName', '')
@property
def housenumber(self):
return self.raw.get('houseNumber', '')
@property
def postal(self):
return self.raw.get('zipCode', '')
class GisgraphyQuery(MultipleResultsQuery):
"""
Gisgraphy REST API
=======================
API Reference
-------------
http://www.gisgraphy.com/documentation/api/
"""
provider = 'gisgraphy'
method = 'geocode'
_URL = 'https://services.gisgraphy.com/geocoding/'
_RESULT_CLASS = GisgraphyResult
_KEY_MANDATORY = False
def _build_params(self, location, provider_key, **kwargs):
return {
'address': location,
'to': kwargs.get('maxRows', 1),
'format': 'json',
'country': 'DE',
}
def _adapt_results(self, json_response):
return json_response['result']
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = GisgraphyQuery('Ottawa Ontario', maxRows=3)
g.debug()
|
mit
|
Python
|
6d9efe005e346aaef359f369c89d007da1b83189
|
add more untested changes for slack integration
|
nuxis/lamper,nuxis/lamper
|
lampeflaske.py
|
lampeflaske.py
|
#!/usr/bin/env python3
import pprint
import os
import lamper
from flask import Flask, request, jsonify
from flask_api import status
app = Flask(__name__)
@app.route("/", methods=['POST', 'GET'])
def hello():
pprint.pprint(request.form)
if request.form.get('command') != '/lamper':
return "wrong command" , status.HTTP_400_BAD_REQUEST
if request.form.get('team_id') != os.environ['SLACK_TEAMID']:
return "wrong team id" , status.HTTP_403_FORBIDDEN
if request.form.get('token') != os.environ['SLACK_TOKEN']:
return "wrong token", status.HTTP_403_FORBIDDEN
if request.form.get('channel_id') != os.environ['SLACK_CHANNELID']:
return "wrong channel id", status.HTTP_403_FORBIDDEN
if request.form.get('text') not in lamper.colors.keys():
return "wrong color" , status.HTTP_400_BAD_REQUEST
lamper.set_dmx(lamper.colors[request.form.get('text')])
#return "Hello World! " + request.form.get('text')
r = {
'response_type': 'in_channel',
'text': 'Light switched to {}'.format(request.form.get('text')),
}
return jsonify(r)
|
#!/usr/bin/env python3
import pprint
import os
import lamper
from flask import Flask, request
from flask_api import status
app = Flask(__name__)
@app.route("/", methods=['POST', 'GET'])
def hello():
pprint.pprint(request.form)
if request.form.get('command') != '/lamper':
return "wrong command" , status.HTTP_400_BAD_REQUEST
if request.form.get('team_id') != os.environ['SLACK_TEAMID']:
return "wrong team id" , status.HTTP_403_FORBIDDEN
if request.form.get('token') != os.environ['SLACK_TOKEN']:
return "wrong token", status.HTTP_403_FORBIDDEN
if request.form.get('channel_id') != os.environ['SLACK_CHANNELID']:
return "wrong channel id", status.HTTP_403_FORBIDDEN
if request.form.get('text') not in lamper.colors.keys():
return "wrong color" , status.HTTP_400_BAD_REQUEST
lamper.set_dmx(lamper.colors[request.form.get('text')])
#return "Hello World! " + request.form.get('text')
return """
{
"response_type": "in_channel",
"text": "Light switched to {}",
"attachments": [
{
"text":"Light switched to {}"
}
]
}
""".format(request.form.get('text'))
|
mit
|
Python
|
1f9dea20b433e5b2a69f348d1a842d71a99bc56e
|
Modify tests
|
chainer/chainer,hvy/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,wkentaro/chainer,pfnet/chainer,wkentaro/chainer,niboshi/chainer,niboshi/chainer,chainer/chainer,chainer/chainer
|
tests/chainerx_tests/unit_tests/routines_tests/test_evaluation.py
|
tests/chainerx_tests/unit_tests/routines_tests/test_evaluation.py
|
import chainer
from chainer import functions as F
import numpy
import chainerx
from chainerx_tests import dtype_utils
from chainerx_tests import op_utils
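# Each entry maps (x dtype, target dtype) to the expected output dtype; targets are integer class labels.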
_in_out_eval_dtypes = [
(('float16', 'int16'), 'float32'),
(('float32', 'int32'), 'float32'),
(('float64', 'int64'), 'float64'),
(('float32', 'int16'), 'float32'),
(('float64', 'int16'), 'float64'),
(('float64', 'int32'), 'float64'),
]
class EvalBase(op_utils.ChainerOpTest):
def generate_inputs(self):
x_dtype, t_dtype = self.in_dtypes
y = numpy.random.uniform(-1, 1, self.x_shape).astype(x_dtype)
targ = numpy.random.randint(
3, size=self.t_shape).astype(t_dtype)
return y, targ
def forward_chainerx(self, inputs):
return self.forward_xp(inputs, chainerx)
def forward_chainer(self, inputs):
return self.forward_xp(inputs, F)
def forward_xp(self, inputs, xp):
raise NotImplementedError(
'Op test implementation must override `forward_xp`.')
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
chainer.testing.product([
chainer.testing.from_pytest_parameterize(
'x_shape,t_shape', [
((10, 3), (10,)),
((10, 3, 1), (10,)),
((10, 3, 1, 1), (10,)),
((10, 3, 5), (10, 5)),
((10, 3, 5, 4), (10, 5, 4)),
((10, 3, 5, 4, 1), (10, 5, 4)),
((10, 3, 5, 4, 1, 1), (10, 5, 4))
]),
chainer.testing.from_pytest_parameterize(
'in_dtypes,out_dtype', _in_out_eval_dtypes),
chainer.testing.from_pytest_parameterize(
'ignore_label', [None, 0])
])
))
class TestAccuracy(EvalBase):
def forward_xp(self, inputs, xp):
x, t = inputs
out = xp.accuracy(x, t, self.ignore_label)
return out,
|
import chainer
from chainer import functions as F
import numpy
import chainerx
from chainerx_tests import dtype_utils
from chainerx_tests import op_utils
_in_out_eval_dtypes = dtype_utils._permutate_dtype_mapping([
(('float16', 'float16'), 'float16'),
(('float32', 'float32'), 'float32'),
(('float64', 'float64'), 'float64'),
(('float32', 'float16'), 'float32'),
(('float64', 'float16'), 'float64'),
(('float64', 'float32'), 'float64'),
])
class EvalBase(op_utils.ChainerOpTest):
def generate_inputs(self):
x_dtype, t_dtype = self.in_dtypes
y = numpy.random.uniform(-1, 1, self.x_shape).astype(x_dtype)
targ = numpy.random.randint(
3, size=self.t_shape).astype(t_dtype)
return y, targ
def forward_chainerx(self, inputs):
return self.forward_xp(inputs, chainerx)
def forward_chainer(self, inputs):
return self.forward_xp(inputs, F)
def forward_xp(self, inputs, xp):
raise NotImplementedError(
'Op test implementation must override `forward_xp`.')
@op_utils.op_test(['native:0', 'cuda:0'])
@chainer.testing.parameterize(*(
chainer.testing.product([
chainer.testing.from_pytest_parameterize(
'x_shape,t_shape', [
((10, 3), (10,)),
((10, 3, 1), (10,)),
((10, 3, 1, 1), (10,)),
((10, 3, 5), (10, 5)),
((10, 3, 5, 4), (10, 5, 4)),
((10, 3, 5, 4, 1), (10, 5, 4)),
((10, 3, 5, 4, 1, 1), (10, 5, 4))
]),
chainer.testing.from_pytest_parameterize(
'in_dtypes,out_dtype', _in_out_eval_dtypes),
chainer.testing.from_pytest_parameterize(
'ignore_label', [None, 0])
])
))
class TestAccuracy(EvalBase):
def forward_xp(self, inputs, xp):
x, t = inputs
t = t.astype(numpy.int64)
if xp is chainerx:
out = xp.accuracy(x, t, self.ignore_label)
else:
out = xp.accuracy(x, t, self.ignore_label)
return out,
|
mit
|
Python
|
9963642c1cc05fb6d9dfe397b9ed811d4f7e3d26
|
add 4.6.1 and 3.10.1 (#24701)
|
LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack
|
var/spack/repos/builtin/packages/py-importlib-metadata/package.py
|
var/spack/repos/builtin/packages/py-importlib-metadata/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyImportlibMetadata(PythonPackage):
"""Read metadata from Python packages."""
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
version('4.6.1', sha256='079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac')
version('3.10.1', sha256='c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1')
version('3.10.0', sha256='c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a')
version('2.0.0', sha256='77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da')
version('1.2.0', sha256='41e688146d000891f32b1669e8573c57e39e5060e7f5f647aa617cd9a9568278')
version('0.23', sha256='aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')
version('0.19', sha256='23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8')
version('0.18', sha256='cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db')
depends_on('[email protected]:', type=('build', 'run'), when='@3:')
depends_on('[email protected]:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('[email protected]:+toml', type='build', when='@3:')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-pathlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-contextlib2', when='^python@:2', type=('build', 'run'))
depends_on('[email protected]:', when='^python@:2', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'), when='@3: ^python@:3.7.999')
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyImportlibMetadata(PythonPackage):
"""Read metadata from Python packages."""
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
version('3.10.0', sha256='c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a')
version('2.0.0', sha256='77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da')
version('1.2.0', sha256='41e688146d000891f32b1669e8573c57e39e5060e7f5f647aa617cd9a9568278')
version('0.23', sha256='aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')
version('0.19', sha256='23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8')
version('0.18', sha256='cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db')
depends_on('[email protected]:', type=('build', 'run'), when='@3:')
depends_on('[email protected]:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('[email protected]:+toml', type='build', when='@3:')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-pathlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-contextlib2', when='^python@:2', type=('build', 'run'))
depends_on('[email protected]:', when='^python@:2', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'), when='@3: ^python@:3.7.999')
|
lgpl-2.1
|
Python
|
a2749190545a6765a479777b1ea97d2f9090593f
|
clean up project config a bit
|
propublica/cookcountyjail2,propublica/cookcountyjail2,propublica/cookcountyjail2
|
jailscraper/project_config.py
|
jailscraper/project_config.py
|
"""ProPublica specific configuration and utilities"""
import boto3
import botocore
import os
# Prefix for this project's environment variables; get_secrets() below depends on it.
PROJECT_SLUG = 'cookcountyjail2'
### Helpers
def get_secrets():
"""Get all environment variables associated with this project.
Reads environment variables that start with PROJECT_SLUG, strips out the slug
and adds them to a dictionary.
"""
secrets = {}
for k, v in os.environ.items():
if k.startswith(PROJECT_SLUG):
new_k = k[len(PROJECT_SLUG) + 1:]
secrets[new_k] = v
return secrets
SECRETS = get_secrets()
S3_BUCKET = SECRETS['S3_BUCKET']
TARGET = SECRETS['TARGET']
S3_URL = 's3://{0}/{1}'.format(SECRETS['S3_BUCKET'], SECRETS['TARGET'])
|
"""ProPublica specific configuration and utilities"""
import os
PROJECT_SLUG = 'cookcountyjail2'
INMATE_URL_TEMPLATE = 'http://www2.cookcountysheriff.org/search2/details.asp?jailnumber={0}'
"""Sets the maximum jail number to scan for by default.
If the subsequent jail number returns a 2xx status code, it will be incremented
until an error code is sent. [@TODO: Not implemented, see
https://github.com/propublica/cookcountyjail2/issues/9]
"""
MAX_DEFAULT_JAIL_NUMBER = 400
def get_secrets():
"""Get all environment variables associated with this project.
Reads environment variables that start with PROJECT_SLUG, strips out the slug
and adds them to a dictionary.
"""
secrets = {}
for k, v in os.environ.items():
if k.startswith(PROJECT_SLUG):
new_k = k[len(PROJECT_SLUG) + 1:]
secrets[new_k] = v
return secrets
SECRETS = get_secrets()
S3_BUCKET = SECRETS['S3_BUCKET']
TARGET = SECRETS['TARGET']
S3_URL = 's3://{0}/{1}'.format(SECRETS['S3_BUCKET'], SECRETS['TARGET'])
|
mit
|
Python
|
9f531eec31e141b458c4c7896bebb16611cc7b00
|
Refactor calories plugin (#503)
|
sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,appi147/Jarvis,sukeesh/Jarvis,appi147/Jarvis
|
jarviscli/plugins/calories.py
|
jarviscli/plugins/calories.py
|
from plugin import plugin
from colorama import Back, Fore, Style
@plugin("calories")
class calories:
"""
Tells the recommended daily calorie intake, and also recommends
calorie targets for weight gain and loss. (Source 1)
It is based on gender, age, height and weight.
Uses the Mifflin-St Jeor Equation, as it is considered the
most accurate when we don't know our body fat percentage. (Source 2)
Add gender(man/woman), age(15 - 80 recommended), metric height(cm),
weight(kg), workout level(1-4). No decimal weight for now.
Workout Levels:
[1] Little or no exercise
[2] Light 1-3 per week
[3] Moderate 4-5 per week
[4] Active daily exercise or physical job
#Example: health calories woman 27 164 60 3
^Sources:
1) https://en.wikipedia.org/wiki/Basal_metabolic_rate
2) https://jandonline.org/article/S0002-8223(05)00149-5/fulltext
"""
def __call__(self, jarvis, s):
jarvis.say("Welcome!")
info = input("Please enter the information about you following this order(gender age height weight level): ")
self.calories(jarvis, info)
def calories(self, jarvis, info):
strings = info.split()
if len(strings) == 5:
gender = strings[0]
age = int(strings[1])
height = int(strings[2])
weight = float(strings[3])
level = int(strings[4])
else:
jarvis.say("You wrote less or more arguments than it needed.")
return None
gender_no = 0
if(gender == 'man'):
gender_no = 5
elif(gender == 'woman'):
gender_no = -161
if gender_no != 0 and age > 14 and height > 0.0 and weight > 0.0 and level > 0 and level < 5:
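# Mifflin-St Jeor: BMR = 10*weight + 6.25*height - 5*age + s, where s is +5 for men and -161 for women, scaled by the activity multiplier.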
brm = float(10 * weight + 6.25 * height - 5
* age + gender_no) * self.exercise_level(level)
brm_loss = brm - 500.0
brm_put_on = brm + 500.0
jarvis.say("Daily caloric intake : " + str(brm))
jarvis.say("Loss weight calories : " + str(brm_loss))
jarvis.say("Put on weight calories : " + str(brm_put_on))
else:
jarvis.say("Please add correct input!")
return None
def exercise_level(self, level):
multipliers = {1: 1.2, 2: 1.4, 3: 1.6, 4: 1.95}
multiplier = multipliers.get(level, 1)
return multiplier
|
from plugin import plugin
@plugin("calories")
def calories(jarvis, s):
"""
Tells the recommended daily calorie intake, and also recommends
calorie targets for weight gain and loss. (Source 1)
It is based on gender, age, height and weight.
Uses the Mifflin-St Jeor Equation, as it is considered the
most accurate when we don't know our body fat percentage. (Source 2)
Add gender(man/woman), age(15 - 80 recommended), metric height(cm),
weight(kg), workout level(1-4). No decimal weight for now.
Workout Levels:
[1] Little or no exercise
[2] Light 1-3 per week
[3] Moderate 4-5 per week
[4] Active daily exercise or physical job
#Example: health calories woman 27 164 60 3
^Sources:
1) https://en.wikipedia.org/wiki/Basal_metabolic_rate
2) https://jandonline.org/article/S0002-8223(05)00149-5/fulltext
"""
strings = s.split()
if len(strings) == 5:
gender = strings[0]
age = int(strings[1])
height = int(strings[2])
weight = float(strings[3])
level = int(strings[4])
else:
jarvis.say("You wrote less or more arguments than it needed.")
return None
gender_no = 0
if(gender == 'man'):
gender_no = 5
elif(gender == 'woman'):
gender_no = -161
if gender_no != 0 and age > 14 and height > 0.0 and weight > 0.0 and level > 0 and level < 5:
brm = float(10 * weight + 6.25 * height - 5
* age + gender_no) * exercise_level(level)
brm_loss = brm - 500.0
brm_put_on = brm + 500.0
jarvis.say("Daily caloric intake : " + str(brm))
jarvis.say("Loss weight calories : " + str(brm_loss))
jarvis.say("Put on weight calories : " + str(brm_put_on))
else:
jarvis.say("Please add correct input!")
return None
def exercise_level(level):
multiplier = 1
if(level == 1):
multiplier = 1.2
elif(level == 2):
multiplier = 1.4
elif(level == 3):
multiplier = 1.6
else:
multiplier = 1.95
return multiplier
|
mit
|
Python
|
dde62362955ca4b10f3c1fec4e3b7777b03141f5
|
remove ContextDict since std has ChainMap
|
Jasily/jasily-python,Cologler/py.jasily.cologler
|
jasily/collection/__init__.py
|
jasily/collection/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <[email protected]>
# ----------
#
# ----------
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <[email protected]>
# ----------
#
# ----------
from collections import KeysView, ValuesView, ItemsView, MutableMapping
_NO_VALUE = object()
class ContextDict(MutableMapping):
'''context dict can override base_dict.'''
def __init__(self, base_dict: dict, *args, **kwargs):
if base_dict is None:
raise ValueError('base_dict cannot be None')
self._base_dict = base_dict
self._data = dict(*args, **kwargs) # data maybe not empty.
def __setitem__(self, key, value):
self._data[key] = value
def __delitem__(self, key):
del self._data[key]
def __getitem__(self, key):
value = self._data.get(key, _NO_VALUE)
if value is _NO_VALUE:
value = self._base_dict[key]
return value
def __iter__(self):
for k in self._data:
yield k
for k in self._base_dict:
if k not in self._data:
yield k
def __len__(self):
# base dict may change, so we cannot cache the size.
d1 = self._data
d2 = self._base_dict
d1_len = len(d1)
d2_len = len(d2)
if d1_len > d2_len: # ensure d1 < d2
d1, d2 = d2, d1
total_size = d1_len + d2_len
for k in d1:
if k in d2:
total_size -= 1
return total_size
def scope(self):
'''create a scoped dict.'''
return ContextDict(self)
def __enter__(self):
'''return a new context dict.'''
return self.scope()
def __exit__(self, *args):
pass
|
mit
|
Python
|
bca2ea9c72669c4877d6c9be74a2c58f8341ce61
|
Update Portuguese lexical attributes
|
spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,honnibal/spaCy
|
spacy/lang/pt/lex_attrs.py
|
spacy/lang/pt/lex_attrs.py
|
# coding: utf8
from __future__ import unicode_literals
from ...attrs import LIKE_NUM
_num_words = ['zero', 'um', 'dois', 'três', 'quatro', 'cinco', 'seis', 'sete',
'oito', 'nove', 'dez', 'onze', 'doze', 'treze', 'catorze',
'quinze', 'dezasseis', 'dezassete', 'dezoito', 'dezanove', 'vinte',
'trinta', 'quarenta', 'cinquenta', 'sessenta', 'setenta',
'oitenta', 'noventa', 'cem', 'mil', 'milhão', 'bilião', 'trilião',
'quadrilião']
_ord_words = ['primeiro', 'segundo', 'terceiro', 'quarto', 'quinto', 'sexto',
'sétimo', 'oitavo', 'nono', 'décimo', 'vigésimo', 'trigésimo',
'quadragésimo', 'quinquagésimo', 'sexagésimo', 'septuagésimo',
'octogésimo', 'nonagésimo', 'centésimo', 'ducentésimo',
'trecentésimo', 'quadringentésimo', 'quingentésimo', 'sexcentésimo',
'septingentésimo', 'octingentésimo', 'nongentésimo', 'milésimo',
'milionésimo', 'bilionésimo']
def like_num(text):
text = text.replace(',', '').replace('.', '')
if text.isdigit():
return True
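# Also accept simple fractions such as "3/4".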
if text.count('/') == 1:
num, denom = text.split('/')
if num.isdigit() and denom.isdigit():
return True
if text in _num_words:
return True
return False
LEX_ATTRS = {
LIKE_NUM: like_num
}
|
# coding: utf8
from __future__ import unicode_literals
# Number words
NUM_WORDS = set("""
zero um dois três quatro cinco seis sete oito nove dez onze doze treze catorze
quinze dezasseis dezassete dezoito dezanove vinte trinta quarenta cinquenta
sessenta setenta oitenta noventa cem mil milhão bilião trilião quadrilião
""".split())
# Ordinal words
ORDINAL_WORDS = set("""
primeiro segundo terceiro quarto quinto sexto sétimo oitavo nono décimo
vigésimo trigésimo quadragésimo quinquagésimo sexagésimo septuagésimo
octogésimo nonagésimo centésimo ducentésimo trecentésimo quadringentésimo
quingentésimo sexcentésimo septingentésimo octingentésimo nongentésimo
milésimo milionésimo bilionésimo
""".split())
|
mit
|
Python
|
e635d6a1c4ca8c138a5bd288250f94bcd82bb8a8
|
Remove unnecessary imports.
|
hjanime/VisTrails,Nikea/VisTrails,hjanime/VisTrails,hjanime/VisTrails,VisTrails/VisTrails,celiafish/VisTrails,celiafish/VisTrails,minesense/VisTrails,hjanime/VisTrails,minesense/VisTrails,Nikea/VisTrails,minesense/VisTrails,celiafish/VisTrails,VisTrails/VisTrails,VisTrails/VisTrails,Nikea/VisTrails,VisTrails/VisTrails,hjanime/VisTrails,celiafish/VisTrails,VisTrails/VisTrails,minesense/VisTrails,minesense/VisTrails,Nikea/VisTrails
|
vistrails/tests/resources/upgrades/init.py
|
vistrails/tests/resources/upgrades/init.py
|
from vistrails.core.modules.vistrails_module import Module
from vistrails.core.modules.config import IPort, OPort
from vistrails.core.upgradeworkflow import UpgradeModuleRemap
class TestUpgradeA(Module):
_input_ports = [IPort("aaa", "basic:String")]
_output_ports = [OPort("zzz", "basic:Integer")]
class TestUpgradeB(Module):
_input_ports = [IPort("b", "basic:Integer")]
_modules = [TestUpgradeA, TestUpgradeB]
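# Chained remaps: 0.8->0.9 renames port a->aa and z->zz, then 0.9->1.0 renames aa->aaa and zz->zzz.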
_upgrades = {"TestUpgradeA":
[UpgradeModuleRemap('0.8', '0.9', '0.9', None,
function_remap={'a': 'aa'},
src_port_remap={'z': 'zz'}),
UpgradeModuleRemap('0.9', '1.0', '1.0', None,
function_remap={'aa': 'aaa'},
src_port_remap={'zz': 'zzz'})]}
|
from vistrails.core.modules.vistrails_module import Module
from vistrails.core.modules.config import IPort, OPort
from vistrails.core.upgradeworkflow import UpgradeWorkflowHandler, \
UpgradePackageRemap, UpgradeModuleRemap
class TestUpgradeA(Module):
_input_ports = [IPort("aaa", "basic:String")]
_output_ports = [OPort("zzz", "basic:Integer")]
class TestUpgradeB(Module):
_input_ports = [IPort("b", "basic:Integer")]
_modules = [TestUpgradeA, TestUpgradeB]
_upgrades = {"TestUpgradeA":
[UpgradeModuleRemap('0.8', '0.9', '0.9', None,
function_remap={'a': 'aa'},
src_port_remap={'z': 'zz'}),
UpgradeModuleRemap('0.9', '1.0', '1.0', None,
function_remap={'aa': 'aaa'},
src_port_remap={'zz': 'zzz'})]}
|
bsd-3-clause
|
Python
|
a2b19e7fd6b0004e4fa18b6d1b20f7347ca1964c
|
Fix wrong indentation
|
kelwang/pan-baidu-download,banbanchs/pan-baidu-download
|
command/export.py
|
command/export.py
|
#!/usr/bin/env python2
# coding=utf-8
import json
import urllib2
import logging
import base64
from config import global_config
from bddown_core import Pan, GetFilenameError
def export(links):
for link in links:
pan = Pan(link)
count = 1
while count != 0:
link, filename, count = pan.info
if not filename and not link:
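# Translation: "Unable to get the download link or filename!"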
raise GetFilenameError("无法获取下载地址或文件名!")
export_single(filename, link)
def export_single(filename, link):
jsonrpc_path = global_config.jsonrpc
jsonrpc_user = global_config.jsonrpc_user
jsonrpc_pass = global_config.jsonrpc_pass
if not jsonrpc_path:
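# Translation: "Please set the jsonrpc option in config.ini"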
print "请设置config.ini中的jsonrpc选项"
exit(1)
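# Build an aria2 JSON-RPC aria2.addUri request: params are [[uris], options], and "out" names the downloaded file.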
jsonreq = json.dumps(
[{
"jsonrpc": "2.0",
"method": "aria2.addUri",
"id": "qwer",
"params": [
[link],
{
"out": filename,
"header": "User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"
"\r\nReferer:http://pan.baidu.com/disk/home"
}]
}]
)
logging.debug(jsonreq)
try:
request = urllib2.Request(jsonrpc_path)
if jsonrpc_user and jsonrpc_pass:
base64string = base64.encodestring('%s:%s' % (jsonrpc_user, jsonrpc_pass)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
request.add_data(jsonreq)
req = urllib2.urlopen(request)
except urllib2.URLError as urle:
print urle
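# Translation: "Cannot connect to jsonrpc; please check whether the jsonrpc address is wrong!"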
raise JsonrpcError("jsonrpc无法连接,请检查jsonrpc地址是否有误!")
if req.code == 200:
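# Translation: "Successfully added to jsonrpc"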
print "已成功添加到jsonrpc\n"
class JsonrpcError(Exception):
pass
|
#!/usr/bin/env python2
# coding=utf-8
import json
import urllib2
import logging
import base64
from config import global_config
from bddown_core import Pan, GetFilenameError
def export(links):
for link in links:
pan = Pan(link)
count = 1
while count != 0:
link, filename, count = pan.info
if not filename and not link:
raise GetFilenameError("无法获取下载地址或文件名!")
export_single(filename, link)
def export_single(filename, link):
jsonrpc_path = global_config.jsonrpc
jsonrpc_user = global_config.jsonrpc_user
jsonrpc_pass = global_config.jsonrpc_pass
if not jsonrpc_path:
print "请设置config.ini中的jsonrpc选项"
exit(1)
jsonreq = json.dumps(
[{
"jsonrpc": "2.0",
"method": "aria2.addUri",
"id": "qwer",
"params": [
[link],
{
"out": filename,
"header": "User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"
"\r\nReferer:http://pan.baidu.com/disk/home"
}]
}]
)
logging.debug(jsonreq)
try:
request = urllib2.Request(jsonrpc_path)
if jsonrpc_user and jsonrpc_pass:
base64string = base64.encodestring('%s:%s' % (jsonrpc_user, jsonrpc_pass)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
request.add_data(jsonreq)
req = urllib2.urlopen(request)
except urllib2.URLError as urle:
print urle
raise JsonrpcError("jsonrpc无法连接,请检查jsonrpc地址是否有误!")
if req.code == 200:
print "已成功添加到jsonrpc\n"
class JsonrpcError(Exception):
pass
|
mit
|
Python
|
920872db456987e5bd5002b3bf3fc2168dcbdff4
|
fix name
|
tomi77/python-t77-django
|
django_extra_tools/conf/defaults.py
|
django_extra_tools/conf/defaults.py
|
"""Default configuration"""
# auth.backends.ThroughSuperuserModelBackend username separator
AUTH_BACKEND_USERNAME_SEPARATOR = ':'
XHR_MIDDLEWARE_ALLOWED_ORIGINS = '*'
XHR_MIDDLEWARE_ALLOWED_METHODS = ['POST', 'GET', 'OPTIONS', 'PUT', 'DELETE']
XHR_MIDDLEWARE_ALLOWED_HEADERS = ['Content-Type', 'Authorization', 'Location', '*']
XHR_MIDDLEWARE_ALLOWED_CREDENTIALS = 'true'
XHR_MIDDLEWARE_EXPOSE_HEADERS = ['Location']
PRIVATE_IPS_PREFIX = ('10.', '172.', '192.', )
|
"""Default configuration"""
# auth.backends.SuperUserAuthenticateMixin username separator
AUTH_BACKEND_USERNAME_SEPARATOR = ':'
XHR_MIDDLEWARE_ALLOWED_ORIGINS = '*'
XHR_MIDDLEWARE_ALLOWED_METHODS = ['POST', 'GET', 'OPTIONS', 'PUT', 'DELETE']
XHR_MIDDLEWARE_ALLOWED_HEADERS = ['Content-Type', 'Authorization', 'Location', '*']
XHR_MIDDLEWARE_ALLOWED_CREDENTIALS = 'true'
XHR_MIDDLEWARE_EXPOSE_HEADERS = ['Location']
PRIVATE_IPS_PREFIX = ('10.', '172.', '192.', )
|
mit
|
Python
|
a4a01c466c916f5c4ff44d40bc5e052e98951f1d
|
Bump version
|
thombashi/sqlitebiter,thombashi/sqlitebiter
|
sqlitebiter/__version__.py
|
sqlitebiter/__version__.py
|
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.29.1"
__maintainer__ = __author__
__email__ = "[email protected]"
|
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.29.0"
__maintainer__ = __author__
__email__ = "[email protected]"
|
mit
|
Python
|
05458457f12618cc69970cd2bda87e25e29384a4
|
simplify the code (Thx Stefan)
|
Midafi/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,keflavich/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,emon10005/scikit-image,warmspringwinds/scikit-image,dpshelio/scikit-image,robintw/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,keflavich/scikit-image,SamHames/scikit-image,blink1073/scikit-image,dpshelio/scikit-image,youprofit/scikit-image,jwiggins/scikit-image,vighneshbirodkar/scikit-image,chriscrosscutler/scikit-image,Britefury/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,warmspringwinds/scikit-image,newville/scikit-image,oew1v07/scikit-image,oew1v07/scikit-image,youprofit/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,bennlich/scikit-image,WarrenWeckesser/scikits-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,ClinicalGraphics/scikit-image,paalge/scikit-image,paalge/scikit-image,Britefury/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,WarrenWeckesser/scikits-image,GaZ3ll3/scikit-image,robintw/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,michaelpacer/scikit-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,bennlich/scikit-image,jwiggins/scikit-image,paalge/scikit-image,rjeli/scikit-image,rjeli/scikit-image,newville/scikit-image
|
doc/examples/plot_peak_local_max.py
|
doc/examples/plot_peak_local_max.py
|
"""
====================
Finding local maxima
====================
The ``peak_local_max`` function returns the coordinates of local peaks (maxima)
in an image. A maximum filter is used for finding local maxima. This operation
dilates the original image and merges neighboring local maxima closer than the
size of the dilation. Locations where the original image is equal to the
dilated image are returned as local maxima.
"""
from scipy import ndimage
import matplotlib.pyplot as plt
from skimage.feature import peak_local_max
from skimage import data, img_as_float
im = img_as_float(data.coins())
# image_max is the dilation of im with a 20*20 structuring element
# It is used within peak_local_max function
image_max = ndimage.maximum_filter(im, size=20, mode='constant')
# Comparison between image_max and im to find the coordinates of local maxima
coordinates = peak_local_max(im, min_distance=20)
# display results
fig, ax = plt.subplots(1, 3, figsize=(8, 3))
ax1, ax2, ax3 = ax.ravel()
ax1.imshow(im, cmap=plt.cm.gray)
ax1.axis('off')
ax1.set_title('Original')
ax2.imshow(image_max, cmap=plt.cm.gray)
ax2.axis('off')
ax2.set_title('Maximum filter')
ax3.imshow(im, cmap=plt.cm.gray)
ax3.autoscale(False)
ax3.plot(coordinates[:, 1], coordinates[:, 0], 'r.')
ax3.axis('off')
ax3.set_title('Peak local max')
fig.subplots_adjust(wspace=0.02, hspace=0.02, top=0.9,
bottom=0.02, left=0.02, right=0.98)
plt.show()
|
"""
====================
Finding local maxima
====================
The ``peak_local_max`` function returns the coordinates of local peaks (maxima)
in an image. A maximum filter is used for finding local maxima. This operation
dilates the original image and merges neighboring local maxima closer than the
size of the dilation. Locations where the original image is equal to the
dilated image are returned as local maxima.
"""
from scipy import ndimage
import matplotlib.pyplot as plt
from skimage.feature import peak_local_max
from skimage import data, img_as_float
im = img_as_float(data.coins())
# image_max is the dilation of im with a 20*20 structuring element
# It is used within peak_local_max function
image_max = ndimage.maximum_filter(im, size=20, mode='constant')
# Comparison between image_max and im to find the coordinates of local maxima
coordinates = peak_local_max(im, min_distance=20)
# display results
fig, ax = plt.subplots(1, 3, figsize=(8, 3))
ax1, ax2, ax3 = ax.ravel()
ax1.imshow(im, cmap=plt.cm.gray)
ax1.axis('off')
ax1.set_title('Original')
ax2.imshow(image_max, cmap=plt.cm.gray)
ax2.axis('off')
ax2.set_title('Maximum filter')
ax3.imshow(im, cmap=plt.cm.gray)
ax3.autoscale(False)
ax3.plot([p[1] for p in coordinates], [p[0] for p in coordinates], 'r.')
ax3.axis('off')
ax3.set_title('Peak local max')
fig.subplots_adjust(wspace=0.02, hspace=0.02, top=0.9,
bottom=0.02, left=0.02, right=0.98)
plt.show()
|
bsd-3-clause
|
Python
|
e00a82a31de820f28474cb5de47c5715dafd8d18
|
use the largest remainder method for distributing change in ratio_split()
|
adamcharnock/django-hordak,adamcharnock/django-hordak
|
hordak/utilities/money.py
|
hordak/utilities/money.py
|
from decimal import Decimal
from hordak.defaults import DECIMAL_PLACES
def ratio_split(amount, ratios):
""" Split in_value according to the ratios specified in `ratios`
This is special in that it ensures the returned values always sum to
in_value (i.e. we avoid losses or gains due to rounding errors). As a
result, this method returns a list of `Decimal` values with length equal
to that of `ratios`.
Examples:
.. code-block:: python
>>> from hordak.utilities.money import ratio_split
>>> from decimal import Decimal
>>> ratio_split(Decimal('10'), [Decimal('1'), Decimal('2')])
[Decimal('3.33'), Decimal('6.67')]
Note the returned values sum to the original input of ``10``. If we were to
do this calculation in a naive fashion then the returned values would likely
be ``3.33`` and ``6.66``, which would sum to ``9.99``, thereby losing
``0.01``.
Args:
amount (Decimal): The amount to be split
ratios (list[Decimal]): The ratios that will determine the split
Returns: list(Decimal)
"""
precision = Decimal(10) ** Decimal(-DECIMAL_PLACES)
assert amount == amount.quantize(precision)
# Distribute the amount according to the ratios:
ratio_total = sum(ratios)
values = [amount * ratio / ratio_total for ratio in ratios]
# Now round the values to the desired number of decimal places:
rounded = [v.quantize(precision) for v in values]
# The rounded values may not add up to the exact amount.
# Use the Largest Remainder algorithm to distribute the
# difference between participants with non-zero ratios:
participants = [i for i in range(len(ratios)) if ratios[i] != Decimal(0)]
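# Visit participants in ascending order of (rounded - exact) and nudge one step at a time until the sum matches.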
for p in sorted(participants, key=lambda i: rounded[i] - values[i]):
total = sum(rounded)
if total < amount:
rounded[p] += precision
elif total > amount:
rounded[p] -= precision
else:
break
assert sum(rounded) == amount
return rounded
|
from decimal import Decimal
def ratio_split(amount, ratios):
""" Split in_value according to the ratios specified in `ratios`
This is special in that it ensures the returned values always sum to
in_value (i.e. we avoid losses or gains due to rounding errors). As a
result, this method returns a list of `Decimal` values with length equal
to that of `ratios`.
Examples:
.. code-block:: python
>>> from hordak.utilities.money import ratio_split
>>> from decimal import Decimal
>>> ratio_split(Decimal('10'), [Decimal('1'), Decimal('2')])
[Decimal('3.33'), Decimal('6.67')]
Note the returned values sum to the original input of ``10``. If we were to
do this calculation in a naive fashion then the returned values would likely
be ``3.33`` and ``6.66``, which would sum to ``9.99``, thereby losing
``0.01``.
Args:
amount (Decimal): The amount to be split
ratios (list[Decimal]): The ratios that will determine the split
Returns: list(Decimal)
"""
ratio_total = sum(ratios)
divided_value = amount / ratio_total
values = []
for ratio in ratios:
value = divided_value * ratio
values.append(value)
# Now round the values, keeping track of the bits we cut off
rounded = [v.quantize(Decimal("0.01")) for v in values]
remainders = [v - rounded[i] for i, v in enumerate(values)]
remainder = sum(remainders)
# Give the last person the (positive or negative) remainder
rounded[-1] = (rounded[-1] + remainder).quantize(Decimal("0.01"))
assert sum(rounded) == amount
return rounded
|
mit
|
Python
|
29d151366d186ed75da947f2861741ed87af902b
|
Add missing import to settings
|
samchrisinger/osf.io,himanshuo/osf.io,jinluyuan/osf.io,chrisseto/osf.io,zachjanicki/osf.io,njantrania/osf.io,chrisseto/osf.io,reinaH/osf.io,billyhunt/osf.io,RomanZWang/osf.io,aaxelb/osf.io,arpitar/osf.io,mattclark/osf.io,kwierman/osf.io,njantrania/osf.io,RomanZWang/osf.io,pattisdr/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,doublebits/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,KAsante95/osf.io,mattclark/osf.io,kwierman/osf.io,ckc6cz/osf.io,jmcarp/osf.io,haoyuchen1992/osf.io,chennan47/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,amyshi188/osf.io,kushG/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,cldershem/osf.io,lyndsysimon/osf.io,dplorimer/osf,petermalcolm/osf.io,pattisdr/osf.io,jmcarp/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,GaryKriebel/osf.io,ckc6cz/osf.io,kushG/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,aaxelb/osf.io,KAsante95/osf.io,dplorimer/osf,lyndsysimon/osf.io,abought/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,baylee-d/osf.io,reinaH/osf.io,himanshuo/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,GaryKriebel/osf.io,hms-dbmi/fourfront,samchrisinger/osf.io,adlius/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,pattisdr/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,erinspace/osf.io,amyshi188/osf.io,kwierman/osf.io,ckc6cz/osf.io,jinluyuan/osf.io,jmcarp/osf.io,cwisecarver/osf.io,chennan47/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,barbour-em/osf.io,icereval/osf.io,fabianvf/osf.io,wearpants/osf.io,sloria/osf.io,petermalcolm/osf.io,laurenrevere/osf.io,doublebits/osf.io,monikagrabowska/osf.io,jeffreyliu3230/osf.io,mfraezz/osf.io,zkraime/osf.io,haoyuchen1992/osf.io,cldershem/osf.io,revanthkolli/osf.io,alexschiller/osf.io,fabianvf/osf.io,kwierman/osf.io,ckc6cz/osf.io,mfraezz/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,mluo613/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,erinspace/osf.io,lyndsysimon/osf.io,cldershem/osf.io,jinluyuan/osf.io,acshi/osf.io,mluke93/osf.io,lamdnhan/osf.io,acshi/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,zamattiac/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,njantrania/osf.io,felliott/osf.io,DanielSBrown/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,KAsante95/osf.io,kch8qx/osf.io,bdyetton/prettychart,cslzchen/osf.io,mfraezz/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,icereval/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,cwisecarver/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,mluo613/osf.io,jnayak1/osf.io,chennan47/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,lamdnhan/osf.io,icereval/osf.io,Johnetordoff/osf.io,cosenal/osf.io,reinaH/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,kch8qx/osf.io,KAsante95/osf.io,billyhunt/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,zkraime/osf.io,arpitar/osf.io,MerlinZhang/osf.io,danielneis/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,chennan47/osf.io,jolene-esposito/osf.io,wearpants/osf.io,baylee-d/osf.io,doublebits/osf.io,hmoco/osf.io,acshi/osf.io,hmoco/osf.io,caneruguz/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,hmoco/osf.io,cwisecarver/osf.io,crcresearch/osf.io,HarryRybacki/osf.io,crcresearch/osf.io,mluo613/osf.io,chrisseto/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,mluke93/osf.io,leb2dg/osf.io,caseyrollins/osf.io,adlius/osf.io,danielneis/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,njantrania/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,revanthkolli/osf.io,GaryKriebel/osf.io,GaryKriebel/osf.io,doublebits/osf.io,Ghalko/osf.io,dplorimer/osf,brianjgeiger/osf.io,zkraime/osf.io,caseyrygt/osf.io,billyhunt/osf.io,jeffreyliu3230/osf.io,leb2dg/osf.io,chrisseto/osf.io,alexschiller/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,GageGaskins/osf.io
|
website/addons/badges/settings/__init__.py
|
website/addons/badges/settings/__init__.py
|
# -*- coding: utf-8 -*-
import logging
from .defaults import * # noqa
logger = logging.getLogger(__name__)
try:
from .local import * # noqa
except ImportError as error:
logger.warn('No local.py settings file found')
|
from .defaults import * # noqa
logger = logging.getLogger(__name__)
try:
from .local import * # noqa
except ImportError as error:
logger.warn('No local.py settings file found')
|
apache-2.0
|
Python
|
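The settings package above follows the common defaults-plus-local-override pattern: everything in defaults.py is star-imported first, then an optional local.py shadows individual values, and a missing local.py is merely logged. A minimal sketch of such an override file; the setting name is illustrative, not a real OSF badges setting:

# website/addons/badges/settings/local.py -- hypothetical local override
# BADGE_ISSUER_NAME is an illustrative name used only for this sketch.
BADGE_ISSUER_NAME = 'My Local Issuer'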
6c64674447bd988eef80a4a927acde2eabe04236
|
Modify error message
|
googkit/googkit,googkit/googkit,googkit/googkit
|
googkit/lib/plugin.py
|
googkit/lib/plugin.py
|
import os
import googkit.lib.path
from googkit.lib.error import GoogkitError
INIT_FILE = '__init__.py'
COMMAND_FILE = 'command.py'
def load(tree):
base_dir = googkit.lib.path.plugin()
for filename in os.listdir(base_dir):
plugin_dir = os.path.join(base_dir, filename)
if not os.path.isdir(plugin_dir):
continue
init_path = os.path.join(plugin_dir, INIT_FILE)
if not os.path.exists(init_path):
            raise GoogkitError('{init_path} not found.'.format(init_path=init_path))
command_path = os.path.join(plugin_dir, COMMAND_FILE)
if not os.path.exists(command_path):
continue
module_name = 'plugins.{filename}.command'.format(filename=filename)
module = __import__(module_name, fromlist=['command'])
if not hasattr(module, 'register'):
raise GoogkitError('No register method found for plugin: ' + module_name)
module.register(tree)
|
import os
import googkit.lib.path
from googkit.lib.error import GoogkitError
INIT_FILE = '__init__.py'
COMMAND_FILE = 'command.py'
def load(tree):
base_dir = googkit.lib.path.plugin()
for filename in os.listdir(base_dir):
plugin_dir = os.path.join(base_dir, filename)
if not os.path.isdir(plugin_dir):
continue
init_path = os.path.join(plugin_dir, INIT_FILE)
if not os.path.exists(init_path):
            raise GoogkitError('{init_path} not found.'.format(init_path=init_path))
command_path = os.path.join(plugin_dir, COMMAND_FILE)
if not os.path.exists(command_path):
continue
module_name = 'plugins.{filename}.command'.format(filename=filename)
module = __import__(module_name, fromlist=['command'])
if not hasattr(module, 'register'):
msg = 'Invalid plugin {module_name} do not have register method.'.format(
module_name=module_name)
raise GoogkitError(msg)
module.register(tree)
|
mit
|
Python
|
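The loader above imports plugins.<name>.command and calls its register(tree) hook, raising GoogkitError when the hook is missing. A minimal sketch of a conforming plugin module; the body is a stub because the command-tree API is not shown in this record:

# plugins/example/command.py -- hypothetical plugin satisfying load()
def register(tree):
    """Hook invoked by googkit.lib.plugin.load() with the command tree."""
    # Attach this plugin's commands to the tree here; the exact tree API
    # is not part of this record, so the body is left empty.
    pass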
1b84cc660848fdee7ed68c17772542956f47e89d
|
Add `lower` parameter to grab.tools.russian::slugify method
|
kevinlondon/grab,alihalabyah/grab,liorvh/grab,kevinlondon/grab,giserh/grab,lorien/grab,giserh/grab,huiyi1990/grab,DDShadoww/grab,huiyi1990/grab,subeax/grab,codevlabs/grab,DDShadoww/grab,shaunstanislaus/grab,shaunstanislaus/grab,raybuhr/grab,istinspring/grab,codevlabs/grab,lorien/grab,istinspring/grab,liorvh/grab,pombredanne/grab-1,maurobaraldi/grab,subeax/grab,subeax/grab,raybuhr/grab,pombredanne/grab-1,alihalabyah/grab,maurobaraldi/grab,SpaceAppsXploration/grab,SpaceAppsXploration/grab
|
grab/tools/russian.py
|
grab/tools/russian.py
|
# coding: utf-8
from __future__ import absolute_import
from ..tools.encoding import smart_unicode
from pytils.translit import translify
import re
MONTH_NAMES = u'января февраля марта апреля мая июня июля августа '\
u'сентября октября ноября декабря'.split()
RE_NOT_ENCHAR = re.compile(u'[^-a-zA-Z0-9]', re.U)
RE_NOT_ENRUCHAR = re.compile(u'[^-a-zA-Zа-яА-ЯёЁ0-9]', re.U)
RE_RUSSIAN_CHAR = re.compile(u'[а-яА-ЯёЁ]', re.U)
RE_DASH = re.compile(r'-+')
def slugify(value, limit=None, default='', lower=True):
value = smart_unicode(value)
# Replace all non russian/english chars with "-" char
# to help pytils not to crash
value = RE_NOT_ENRUCHAR.sub('-', value)
# Do transliteration
value = translify(value)
# Replace trash with safe "-" char
value = RE_NOT_ENCHAR.sub('-', value).strip('-')
if lower:
value = value.lower()
# Replace sequences of dashes
value = RE_DASH.sub('-', value)
if limit is not None:
value = value[:limit]
if value != "":
return value
else:
return default
def get_month_number(name):
return MONTH_NAMES.index(name) + 1
|
# coding: utf-8
from __future__ import absolute_import
from ..tools.encoding import smart_unicode
from pytils.translit import translify
import re
MONTH_NAMES = u'января февраля марта апреля мая июня июля августа '\
u'сентября октября ноября декабря'.split()
RE_NOT_ENCHAR = re.compile(u'[^-a-zA-Z0-9]', re.U)
RE_NOT_ENRUCHAR = re.compile(u'[^-a-zA-Zа-яА-ЯёЁ0-9]', re.U)
RE_RUSSIAN_CHAR = re.compile(u'[а-яА-ЯёЁ]', re.U)
RE_DASH = re.compile(r'-+')
def slugify(value, limit=None, default=''):
value = smart_unicode(value)
# Replace all non russian/english chars with "-" char
# to help pytils not to crash
value = RE_NOT_ENRUCHAR.sub('-', value)
# Do transliteration
value = translify(value)
# Replace trash with safe "-" char
value = RE_NOT_ENCHAR.sub('-', value).strip('-').lower()
# Replace sequences of dashes
value = RE_DASH.sub('-', value)
if limit is not None:
value = value[:limit]
if value != "":
return value
else:
return default
def get_month_number(name):
return MONTH_NAMES.index(name) + 1
|
mit
|
Python
|
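A usage sketch for the new lower flag; the transliterated outputs are what pytils' translify would be expected to produce, not verified results:

from grab.tools.russian import slugify

slugify(u'Привет Мир')               # expected: u'privet-mir' (lowered by default)
slugify(u'Привет Мир', lower=False)  # expected: u'Privet-Mir' (case preserved)
slugify(u'', default=u'empty')       # falls back to the default value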
959897478bbda18f02aa6e38f2ebdd837581f1f0
|
Fix test for changed SctVerificationResult
|
theno/ctutlz,theno/ctutlz
|
tests/test_sct_verify_signature.py
|
tests/test_sct_verify_signature.py
|
from os.path import join, dirname
from utlz import flo
from ctutlz.sct.verification import verify_signature
def test_verify_signature():
basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')
signature_input = \
open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
signature = open(flo('{basedir}/signature.der'), 'rb').read()
pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()
assert verify_signature(signature_input, signature, pubkey) is True
signature_input = b'some invalid signature input'
assert verify_signature(signature_input, signature, pubkey) is False
|
from os.path import join, dirname
from utlz import flo
from ctutlz.sct.verification import verify_signature
def test_verify_signature():
basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')
signature_input = \
open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
signature = open(flo('{basedir}/signature.der'), 'rb').read()
pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()
got_verified, got_output, got_cmd_res = \
verify_signature(signature_input, signature, pubkey)
assert got_verified is True
assert got_output == 'Verified OK\n'
assert got_cmd_res.exitcode == 0
signature_input = b'some invalid signature input'
got_verified, got_output, got_cmd_res = \
verify_signature(signature_input, signature, pubkey)
assert got_verified is False
assert got_output == 'Verification Failure\n'
assert got_cmd_res.exitcode == 1
|
mit
|
Python
|
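verify_signature now returns a (verified, output, cmd_res) triple instead of a bare boolean, so callers must unpack it. A hedged sketch of caller-side handling, assuming cmd_res keeps the exitcode attribute exercised in the test:

import sys
from ctutlz.sct.verification import verify_signature

# signature_input, signature and pubkey loaded as in the test above
verified, output, cmd_res = verify_signature(signature_input, signature, pubkey)
if not verified:
    sys.stderr.write(output)     # e.g. 'Verification Failure\n'
    sys.exit(cmd_res.exitcode)   # non-zero exit mirrors the failed verification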
9cb554c13ae3cec85fd2a3bf0afd9ae2b6cca96a
|
Refactor target.py
|
lstephen/construi
|
construi/target.py
|
construi/target.py
|
import construi.console as console
from compose.project import Project
from compose.cli.docker_client import docker_client
import dockerpty
import sys
class Target(object):
def __init__(self, config):
self.config = config
self.project = Project.from_dicts(
'construi', config.services, docker_client())
@property
def client(self):
return self.project.client
@property
def commands(self):
return self.config.construi['run']
@property
def name(self):
return self.config.construi['name']
@property
def service(self):
return self.project.get_service(self.name)
def run(self):
try:
self.setup()
for command in self.commands:
self.run_command(command)
console.progress('Done.')
except KeyboardInterrupt:
console.warn("\nBuild Interrupted.")
sys.exit(1)
finally:
self.cleanup()
def run_command(self, command):
console.progress("> %s" % command)
container = self.service.create_container(
one_off=True,
command=command,
tty=False,
stdin_open=True,
detach=False
)
try:
dockerpty.start(self.client, container.id, interactive=False)
if container.wait() != 0:
console.error("\nBuild Failed.")
sys.exit(1)
finally:
self.client.remove_container(container.id, force=True)
def setup(self):
console.progress('Building Images...')
self.project.build()
console.progress('Pulling Images...')
self.project.pull()
def cleanup(self):
console.progress('Cleaning up...')
self.project.kill()
self.project.remove_stopped(None, v=True)
|
import construi.console as console
from compose.project import Project
from compose.cli.docker_client import docker_client
import dockerpty
import sys
class Target(object):
def __init__(self, config):
self.config = config
self.project = Project.from_dicts(
'construi', config.services, docker_client())
def run(self):
try:
self.setup()
service = self.project.get_service(self.config.construi['name'])
for cmd in self.config.construi['run']:
console.progress("> %s" % cmd)
container = service.create_container(
one_off=True,
command=cmd,
tty=False,
stdin_open=True,
detach=False
)
dockerpty.start(
self.project.client, container.id, interactive=False)
exit_code = container.wait()
self.project.client.remove_container(container.id, force=True)
if exit_code != 0:
console.error("\nBuild Failed.")
sys.exit(1)
console.progress('Done.')
except KeyboardInterrupt:
console.warn("\nBuild Interrupted.")
sys.exit(1)
finally:
self.cleanup()
def setup(self):
console.progress('Building Images...')
self.project.build()
console.progress('Pulling Images...')
self.project.pull()
def cleanup(self):
console.progress('Cleaning up...')
self.project.kill()
self.project.remove_stopped(None, v=True)
|
apache-2.0
|
Python
|
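The refactor extracts client, commands, name and service as properties, which keeps run_command() small and makes each collaborator easy to stub in tests. A hedged usage sketch, assuming config comes from construi's own config loader:

from construi.target import Target

target = Target(config)   # config must expose .services and .construi['name'/'run']
target.run()              # builds and pulls images, runs each command, always cleans up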
6b6d3779cd23c188c808387b9f4095ea75da3284
|
Add a way to get the resources depended on by an output
|
openstack/heat,noironetworks/heat,openstack/heat,noironetworks/heat
|
heat/engine/output.py
|
heat/engine/output.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
from heat.common import exception
from heat.engine import function
class OutputDefinition(object):
"""A definition of a stack output, independent of any template format."""
def __init__(self, name, value, description=None):
self.name = name
self._value = value
self._resolved_value = None
self._description = description
self._deps = None
def validate(self, path=''):
"""Validate the output value without resolving it."""
function.validate(self._value, path)
def required_resource_names(self):
if self._deps is None:
try:
required_resources = function.dependencies(self._value)
self._deps = set(six.moves.map(lambda rp: rp.name,
required_resources))
except (exception.InvalidTemplateAttribute,
exception.InvalidTemplateReference):
# This output ain't gonna work anyway
self._deps = set()
return self._deps
def dep_attrs(self, resource_name):
"""Iterate over attributes of a given resource that this references.
Return an iterator over dependent attributes for specified
resource_name in the output's value field.
"""
return function.dep_attrs(self._value, resource_name)
def get_value(self):
"""Resolve the value of the output."""
if self._resolved_value is None:
self._resolved_value = function.resolve(self._value)
return self._resolved_value
def description(self):
"""Return a description of the output."""
if self._description is None:
return 'No description given'
return six.text_type(self._description)
def render_hot(self):
def items():
if self._description is not None:
yield 'description', self._description
yield 'value', copy.deepcopy(self._value)
return dict(items())
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
from heat.engine import function
class OutputDefinition(object):
"""A definition of a stack output, independent of any template format."""
def __init__(self, name, value, description=None):
self.name = name
self._value = value
self._resolved_value = None
self._description = description
def validate(self, path=''):
"""Validate the output value without resolving it."""
function.validate(self._value, path)
def dep_attrs(self, resource_name):
"""Iterate over attributes of a given resource that this references.
Return an iterator over dependent attributes for specified
resource_name in the output's value field.
"""
return function.dep_attrs(self._value, resource_name)
def get_value(self):
"""Resolve the value of the output."""
if self._resolved_value is None:
self._resolved_value = function.resolve(self._value)
return self._resolved_value
def description(self):
"""Return a description of the output."""
if self._description is None:
return 'No description given'
return six.text_type(self._description)
def render_hot(self):
def items():
if self._description is not None:
yield 'description', self._description
yield 'value', copy.deepcopy(self._value)
return dict(items())
|
apache-2.0
|
Python
|
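required_resource_names() computes the set of resource names an output's value refers to, swallows invalid-reference errors (such an output cannot work anyway), and caches the result in _deps so repeated calls are cheap. A hedged usage sketch; in a real stack the value is a parsed template function tree built by the template machinery, not a plain literal:

# defn is an OutputDefinition built by the template machinery
deps = defn.required_resource_names()            # e.g. {'my_server'}; computed once
assert defn.required_resource_names() is deps    # later calls reuse the cached set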
74e4d69a6ab501e11ff266d1ad77992d0203729f
|
Include os stuff
|
CodingNinja/thumbor_rackspace
|
thumbor_rackspace/loaders/cloudfiles_loader.py
|
thumbor_rackspace/loaders/cloudfiles_loader.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2013 theiconic.com.au [email protected]
from os.path import join, expanduser
import logging
import pyrax
logger = logging.getLogger(__name__)
def load(context, path, callback):
if(context.config.RACKSPACE_PYRAX_REGION):
pyrax.set_default_region(context.config.RACKSPACE_PYRAX_REGION)
pyrax.set_credential_file(expanduser(context.config.RACKSPACE_PYRAX_CFG))
cf = pyrax.connect_to_cloudfiles(public=context.config.RACKSPACE_PYRAX_PUBLIC)
cont = cf.get_container(context.config.RACKSPACE_LOADER_CONTAINER)
file_abspath = normalize_path(context)
try:
logger.debug("[LOADER] getting from %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath))
obj = cont.get_object(file_abspath)
if obj:
logger.debug("[LOADER] Found object at %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath))
else:
logger.warning("[LOADER] Unable to find object %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath ))
    except Exception:
        callback(None)
        return
    callback(obj.get())
def normalize_path(context):
    path = join(context.config.RACKSPACE_LOADER_CONTAINER_ROOT.rstrip('/'), context.request.url.lstrip('/'))
path = path.replace('http://', '')
return path
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2013 theiconic.com.au [email protected]
import pyrax
def load(context, path, callback):
if(context.config.RACKSPACE_PYRAX_REGION):
pyrax.set_default_region(context.config.RACKSPACE_PYRAX_REGION)
pyrax.set_credential_file(expanduser(context.config.RACKSPACE_PYRAX_CFG))
cf = pyrax.connect_to_cloudfiles(public=context.config.RACKSPACE_PYRAX_PUBLIC)
cont = cf.get_container(context.config.RACKSPACE_LOADER_CONTAINER)
file_abspath = normalize_path(context)
try:
logger.debug("[LOADER] getting from %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath))
obj = cont.get_object(file_abspath)
if obj:
logger.debug("[LOADER] Found object at %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath))
else:
logger.warning("[LOADER] Unable to find object %s/%s" % (context.config.RACKSPACE_LOADER_CONTAINER, file_abspath ))
except:
callback(None)
callback(obj.get())
def normalize_path(context):
    path = join(context.config.RACKSPACE_LOADER_CONTAINER_ROOT.rstrip('/'), context.request.url.lstrip('/'))
path = path.replace('http://', '')
return path
|
mit
|
Python
|
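The loader reads its settings off context.config; a hedged thumbor.conf sketch using the keys referenced above (all values are placeholders):

# thumbor.conf sketch -- key names taken from the loader, values illustrative
LOADER = 'thumbor_rackspace.loaders.cloudfiles_loader'
RACKSPACE_PYRAX_REGION = 'SYD'
RACKSPACE_PYRAX_CFG = '~/.pyrax.cfg'
RACKSPACE_PYRAX_PUBLIC = False
RACKSPACE_LOADER_CONTAINER = 'my-container'
RACKSPACE_LOADER_CONTAINER_ROOT = '/'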
d55210495fde133b8b76ee1f55e593dd43389e0e
|
Update to use new HTTP APIs.
|
wolftankk/livestreamer,mmetak/streamlink,wlerin/streamlink,jtsymon/livestreamer,Masaz-/livestreamer,streamlink/streamlink,melmorabity/streamlink,javiercantero/streamlink,Masaz-/livestreamer,chhe/livestreamer,Klaudit/livestreamer,wlerin/streamlink,caorong/livestreamer,ethanhlc/streamlink,fishscene/streamlink,caorong/livestreamer,intact/livestreamer,Dobatymo/livestreamer,Saturn/livestreamer,okaywit/livestreamer,derrod/livestreamer,lyhiving/livestreamer,Dobatymo/livestreamer,sbstp/streamlink,Saturn/livestreamer,Feverqwe/livestreamer,back-to/streamlink,chrippa/livestreamer,hmit/livestreamer,gravyboat/streamlink,wolftankk/livestreamer,intact/livestreamer,hmit/livestreamer,blxd/livestreamer,charmander/livestreamer,chrippa/livestreamer,jtsymon/livestreamer,melmorabity/streamlink,mmetak/streamlink,programming086/livestreamer,back-to/streamlink,charmander/livestreamer,lyhiving/livestreamer,bastimeyer/streamlink,flijloku/livestreamer,chhe/streamlink,chhe/livestreamer,programming086/livestreamer,Feverqwe/livestreamer,chhe/streamlink,beardypig/streamlink,Klaudit/livestreamer,gravyboat/streamlink,fishscene/streamlink,gtmanfred/livestreamer,flijloku/livestreamer,streamlink/streamlink,gtmanfred/livestreamer,okaywit/livestreamer,ethanhlc/streamlink,sbstp/streamlink,derrod/livestreamer,blxd/livestreamer,bastimeyer/streamlink,beardypig/streamlink,javiercantero/streamlink
|
src/livestreamer/plugins/ongamenet.py
|
src/livestreamer/plugins/ongamenet.py
|
from livestreamer.exceptions import NoStreamsError
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http
from livestreamer.stream import RTMPStream
import re
class Ongamenet(Plugin):
StreamURL = "http://dostream.lab.so/stream.php"
SWFURL = "http://www.ongamenet.com/front/ongame/live/CJPlayer.swf"
PageURL = "http://www.ongamenet.com"
@classmethod
def can_handle_url(self, url):
return "ongamenet.com" in url
def _get_streams(self):
res = http.get(self.StreamURL, data={"from": "ongamenet"})
match = re.search("var stream = \"(.+?)\";", res.text)
if not match:
raise NoStreamsError(self.url)
stream = match.group(1)
match = re.search("var server = \"(.+?)\";", res.text)
if not match:
raise NoStreamsError(self.url)
server = match.group(1)
streams = {}
streams["live"] = RTMPStream(self.session, {
"rtmp": server,
"playpath": stream,
"swfUrl": self.SWFURL,
"pageUrl": self.PageURL,
"live": True,
})
return streams
__plugin__ = Ongamenet
|
from livestreamer.compat import str, bytes
from livestreamer.exceptions import PluginError, NoStreamsError
from livestreamer.plugin import Plugin
from livestreamer.stream import RTMPStream
from livestreamer.utils import urlget
import re
class Ongamenet(Plugin):
StreamURL = "http://dostream.lab.so/stream.php"
SWFURL = "http://www.ongamenet.com/front/ongame/live/CJPlayer.swf"
PageURL = "http://www.ongamenet.com"
@classmethod
def can_handle_url(self, url):
return "ongamenet.com" in url
def _get_streams(self):
res = urlget(self.StreamURL, data={"from": "ongamenet"})
match = re.search("var stream = \"(.+?)\";", res.text)
if not match:
raise NoStreamsError(self.url)
stream = match.group(1)
match = re.search("var server = \"(.+?)\";", res.text)
if not match:
raise NoStreamsError(self.url)
server = match.group(1)
streams = {}
streams["live"] = RTMPStream(self.session, {
"rtmp": server,
"playpath": stream,
"swfUrl": self.SWFURL,
"pageUrl": self.PageURL,
"live": True,
})
return streams
__plugin__ = Ongamenet
|
bsd-2-clause
|
Python
|
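The change swaps the old livestreamer.utils.urlget helper for the plugin.api.http module, a requests-backed session exposing the same keyword style. A minimal sketch of the new call pattern:

from livestreamer.plugin.api import http

res = http.get("http://dostream.lab.so/stream.php", data={"from": "ongamenet"})
print(res.text)   # a requests-style Response body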
e78d613f66df5f10b59e47b6cfce619182d1297f
|
Update run.py
|
Terradue/dcs-python-ndvi
|
src/main/app-resources/py-ndvi/run.py
|
src/main/app-resources/py-ndvi/run.py
|
#!/usr/bin/env python
import site
import os
import sys
site.addsitedir('/application/share/python/lib/python2.6/site-packages')
#print sys.path
#os.environ['PYTHONUSERBASE'] = '/application/share/python'
#print 'Base:', site.USER_BASE
#print 'Site:', site.USER_SITE
import ndvi
sys.path.append('/usr/lib/ciop/python/')
import cioppy as ciop
ciop.log('INFO', 'Calculating NDVI')
# create an output folder for the results
output_path = os.environ['TMPDIR'] + '/output'
os.makedirs(output_path)
# input comes from STDIN (standard input)
for line in sys.stdin:
ciop.log('INFO', 'input: ' + line)
res = ciop.copy(line, os.environ['TMPDIR'])
ciop.log('DEBUG', 'local path:' + res[0].rstrip('\n'))
obj = ndvi.GDALCalcNDVI()
obj.calc_ndvi(res[0].rstrip(), '/tmp/pippo.tif')
pub = ciop.publish('/tmp/pippo.tif')
metadata = [ "ical:dtstart=2001-01-10T14:00:00",
"ical:dtend=2001-01-10T14:05:00",
"dc:identifier=mydataset",
"dct:spatial=MULTIPOLYGON(((25.55215 36.97701,24.740512 37.091395,24.496927 35.950137,25.284346 35.839142,25.55215 36.97701)))",
"dclite4g:onlineResource=" + pub[0].rstrip()]
metadata = [ "ical:dtstart=2001-01-10T14:00:00",
"ical:dtend=2001-01-10T14:05:00",
"dc:identifier=mydataset",
"dct:spatial=MULTIPOLYGON(((25.55215 36.97701,24.740512 37.091395,24.496927 35.950137,25.284346 35.839142,25.55215 36.97701)))",
"dclite4g:onlineResource=http://some.host.com/myproduct.tif"]
ciop.log('DEBUG', 'Going to register')
ciop.register('http://localhost/catalogue/sandbox/rdf',
'file:///application/py-ndvi/etc/series.rdf',
metadata)
ciop.publish('/tmp/pippo.tif', metalink = True)
ciop.log('INFO', 'Done my share of the work!')
|
#!/usr/bin/env python
import site
import os
import sys
site.addsitedir('/application/share/python/lib/python2.6/site-packages')
#print sys.path
#os.environ['PYTHONUSERBASE'] = '/application/share/python'
#print 'Base:', site.USER_BASE
#print 'Site:', site.USER_SITE
import ndvi
sys.path.append('/usr/lib/ciop/python/')
import cioppy as ciop
ciop.log('INFO', 'Hello World')
#myvar = ciop.getparam('param1')
#ciop.log('DEBUG', 'value is: ' + myvar)
# input comes from STDIN (standard input)
for line in sys.stdin:
ciop.log('INFO', 'input: ' + line)
res = ciop.copy(line, os.environ['TMPDIR'])
ciop.log('DEBUG', 'local path:' + res[0].rstrip('\n'))
obj = ndvi.GDALCalcNDVI()
obj.calc_ndvi(res[0].rstrip(), '/tmp/pippo.tif')
pub = ciop.publish('/tmp/pippo.tif')
metadata = [ "ical:dtstart=2001-01-10T14:00:00",
"ical:dtend=2001-01-10T14:05:00",
"dc:identifier=mydataset",
"dct:spatial=MULTIPOLYGON(((25.55215 36.97701,24.740512 37.091395,24.496927 35.950137,25.284346 35.839142,25.55215 36.97701)))",
"dclite4g:onlineResource=" + pub[0].rstrip()]
metadata = [ "ical:dtstart=2001-01-10T14:00:00",
"ical:dtend=2001-01-10T14:05:00",
"dc:identifier=mydataset",
"dct:spatial=MULTIPOLYGON(((25.55215 36.97701,24.740512 37.091395,24.496927 35.950137,25.284346 35.839142,25.55215 36.97701)))",
"dclite4g:onlineResource=http://some.host.com/myproduct.tif"]
ciop.log('DEBUG', 'Going to register')
ciop.register('http://localhost/catalogue/sandbox/rdf',
'file:///application/py-ndvi/etc/series.rdf',
metadata)
ciop.publish('/tmp/pippo.tif', metalink = True)
ciop.log('INFO', 'Done my share of the work!')
|
apache-2.0
|
Python
|
0048794fd6e71f58bf88d84ddefb1e9a0194efca
|
Fix the mock-image used in test-steps unittests.
|
transientskp/tkp,mkuiack/tkp,mkuiack/tkp,transientskp/tkp,bartscheers/tkp,bartscheers/tkp
|
tests/test_steps/test_source_extraction.py
|
tests/test_steps/test_source_extraction.py
|
import unittest
import numpy as np
from tkp.testutil import db_subs, data
from ConfigParser import SafeConfigParser
from tkp.config import parse_to_dict
from tkp.testutil.data import default_job_config
from tkp.testutil import Mock
import tkp.steps.source_extraction
import tkp.accessors
class MockImage(Mock):
def extract(self, *args, **kwargs):
return self.__call__(*args, **kwargs)
@property
def rmsmap(self, *args, **kwargs):
return np.zeros((1))
class TestSourceExtraction(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.dataset_id = db_subs.create_dataset_8images()
config = SafeConfigParser()
config.read(default_job_config)
config = parse_to_dict(config)
cls.parset = config['source_extraction']
def test_extract_sources(self):
image_path = data.fits_file
tkp.steps.source_extraction.extract_sources(image_path, self.parset)
def test_for_appropriate_arguments(self):
# sourcefinder_image_from_accessor() should get a single positional
# argument, which is the accessor, and four kwargs: back_sizex,
# back_sizey, margin and radius.
# The object it returns has an extract() method, which should have
# been called with det, anl, force_beam and deblend_nthresh kwargs.
image_path = data.fits_file
mock_method = Mock(MockImage([]))
orig_method = tkp.steps.source_extraction.sourcefinder_image_from_accessor
tkp.steps.source_extraction.sourcefinder_image_from_accessor = mock_method
tkp.steps.source_extraction.extract_sources(image_path, self.parset)
tkp.steps.source_extraction.sourcefinder_image_from_accessor = orig_method
# Arguments to sourcefinder_image_from_accessor()
self.assertIn('radius', mock_method.callvalues[0][1])
self.assertIn('margin', mock_method.callvalues[0][1])
self.assertIn('back_size_x', mock_method.callvalues[0][1])
self.assertIn('back_size_y', mock_method.callvalues[0][1])
# Arguments to extract()
self.assertIn('det', mock_method.returnvalue.callvalues[0][1])
self.assertIn('anl', mock_method.returnvalue.callvalues[0][1])
self.assertIn('force_beam', mock_method.returnvalue.callvalues[0][1])
self.assertIn('deblend_nthresh', mock_method.returnvalue.callvalues[0][1])
|
import unittest
from tkp.testutil import db_subs, data
from ConfigParser import SafeConfigParser
from tkp.config import parse_to_dict
from tkp.testutil.data import default_job_config
from tkp.testutil import Mock
import tkp.steps.source_extraction
import tkp.accessors
class MockImage(Mock):
def extract(self, *args, **kwargs):
return self.__call__(*args, **kwargs)
class TestSourceExtraction(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.dataset_id = db_subs.create_dataset_8images()
config = SafeConfigParser()
config.read(default_job_config)
config = parse_to_dict(config)
cls.parset = config['source_extraction']
def test_extract_sources(self):
image_path = data.fits_file
tkp.steps.source_extraction.extract_sources(image_path, self.parset)
def test_for_appropriate_arguments(self):
# sourcefinder_image_from_accessor() should get a single positional
# argument, which is the accessor, and four kwargs: back_sizex,
# back_sizey, margin and radius.
# The object it returns has an extract() method, which should have
# been called with det, anl, force_beam and deblend_nthresh kwargs.
image_path = data.fits_file
mock_method = Mock(MockImage([]))
orig_method = tkp.steps.source_extraction.sourcefinder_image_from_accessor
tkp.steps.source_extraction.sourcefinder_image_from_accessor = mock_method
tkp.steps.source_extraction.extract_sources(image_path, self.parset)
tkp.steps.source_extraction.sourcefinder_image_from_accessor = orig_method
# Arguments to sourcefinder_image_from_accessor()
self.assertIn('radius', mock_method.callvalues[0][1])
self.assertIn('margin', mock_method.callvalues[0][1])
self.assertIn('back_size_x', mock_method.callvalues[0][1])
self.assertIn('back_size_y', mock_method.callvalues[0][1])
# Arguments to extract()
self.assertIn('det', mock_method.returnvalue.callvalues[0][1])
self.assertIn('anl', mock_method.returnvalue.callvalues[0][1])
self.assertIn('force_beam', mock_method.returnvalue.callvalues[0][1])
self.assertIn('deblend_nthresh', mock_method.returnvalue.callvalues[0][1])
|
bsd-2-clause
|
Python
|
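The fix gives MockImage an rmsmap property because the extraction step reads .rmsmap off the sourcefinder image; returning a one-element zero array keeps downstream numpy code happy. A hedged check of the mock's shape:

import numpy as np

img = MockImage([])   # MockImage as defined in the test module above
assert isinstance(img.rmsmap, np.ndarray) and img.rmsmap.shape == (1,)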
edf1e96e56272a10ad767f13e6e8cc886f98055c
|
Test consecutive Coordinator.heartbeat calls #17
|
numberoverzero/bloop,numberoverzero/bloop
|
tests/unit/test_stream/test_coordinator.py
|
tests/unit/test_stream/test_coordinator.py
|
import functools
from bloop.stream.shard import Shard
from . import build_get_records_responses
def test_coordinator_repr(coordinator):
coordinator.stream_arn = "repr-stream-arn"
assert repr(coordinator) == "<Coordinator[repr-stream-arn]>"
def test_heartbeat(coordinator, session):
find_records_id = "id-find-records"
no_records_id = "id-no-records"
has_sequence_id = "id-has-sequence"
# When "id-finds-records" gets a response, it should only advance once and return 3 records.
records = build_get_records_responses(3, 1)[0]
def mock_get_records(iterator_id):
return {
find_records_id: records,
no_records_id: {},
has_sequence_id: {}
}[iterator_id]
session.get_stream_records.side_effect = mock_get_records
make_shard = functools.partial(Shard, stream_arn=coordinator.stream_arn, shard_id="shard-id", session=session)
coordinator.active.extend([
# Has a sequence number, should not be called during a heartbeat
make_shard(iterator_id=has_sequence_id, iterator_type="at_sequence", sequence_number="sequence-number"),
# No sequence number, should find records during a heartbeat
make_shard(iterator_id=find_records_id, iterator_type="trim_horizon"),
# No sequence number, should not find records during a heartbeat
make_shard(iterator_id=no_records_id, iterator_type="latest"),
])
coordinator.heartbeat()
assert session.get_stream_records.call_count == 2
session.get_stream_records.assert_any_call(find_records_id)
session.get_stream_records.assert_any_call(no_records_id)
assert len(coordinator.buffer) == 3
pairs = [coordinator.buffer.pop() for _ in range(len(coordinator.buffer))]
sequence_numbers = [record["meta"]["sequence_number"] for (record, _) in pairs]
assert sequence_numbers == [0, 1, 2]
def test_heartbeat_until_sequence_number(coordinator, session):
"""After heartbeat() finds records for a shard, the shard doens't check during the next heartbeat."""
shard = Shard(stream_arn=coordinator.stream_arn, shard_id="shard-id", session=session,
iterator_id="iterator-id", iterator_type="latest")
coordinator.active.append(shard)
session.get_stream_records.side_effect = build_get_records_responses(1)
# First call fetches records from DynamoDB
coordinator.heartbeat()
assert coordinator.buffer
assert shard.sequence_number is not None
session.get_stream_records.assert_called_once_with("iterator-id")
# Second call ships the shard, since it now has a sequence_number.
coordinator.heartbeat()
assert session.get_stream_records.call_count == 1
|
import functools
from bloop.stream.shard import Shard
from . import build_get_records_responses
def test_coordinator_repr(coordinator):
coordinator.stream_arn = "repr-stream-arn"
assert repr(coordinator) == "<Coordinator[repr-stream-arn]>"
def test_heartbeat_latest(coordinator, session):
find_records_id = "id-find-records"
no_records_id = "id-no-records"
has_sequence_id = "id-has-sequence"
# When "id-finds-records" gets a response, it should only advance once and return 3 records.
records = build_get_records_responses(3, 1)[0]
def mock_get_records(iterator_id):
return {
find_records_id: records,
no_records_id: {},
has_sequence_id: {}
}[iterator_id]
session.get_stream_records.side_effect = mock_get_records
make_shard = functools.partial(Shard, stream_arn=coordinator.stream_arn, shard_id="shard-id", session=session)
coordinator.active.extend([
# Has a sequence number, should not be called during a heartbeat
make_shard(iterator_id=has_sequence_id, iterator_type="at_sequence", sequence_number="sequence-number"),
# No sequence number, should find records during a heartbeat
make_shard(iterator_id=find_records_id, iterator_type="trim_horizon"),
# No sequence number, should not find records during a heartbeat
make_shard(iterator_id=no_records_id, iterator_type="latest"),
])
coordinator.heartbeat()
assert session.get_stream_records.call_count == 2
session.get_stream_records.assert_any_call(find_records_id)
session.get_stream_records.assert_any_call(no_records_id)
assert len(coordinator.buffer) == 3
pairs = [coordinator.buffer.pop() for _ in range(len(coordinator.buffer))]
sequence_numbers = [record["meta"]["sequence_number"] for (record, _) in pairs]
assert sequence_numbers == [0, 1, 2]
|
mit
|
Python
|
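Together the two tests pin down the heartbeat contract: only shards without a sequence_number are polled, and a shard that found records (and so gained a sequence number) is skipped on later heartbeats. A hedged sketch of that behaviour:

# coordinator set up as in the tests above
coordinator.heartbeat()   # polls only shards that still lack a sequence_number
coordinator.heartbeat()   # shards that found records last time are skipped now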
213d6a42d505fb7ca320873cafdc187cf65d10ed
|
add unit tests for escaping curlies
|
pypyr/pypyr-cli,pypyr/pypyr-cli
|
tests/unit/pypyr/format/string_test.py
|
tests/unit/pypyr/format/string_test.py
|
""""string.py unit tests."""
import pypyr.format.string
import pytest
def test_string_interpolate_works():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping {key1} the {key2} wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping down the valleys wild', (
"string interpolation incorrect")
def test_string_interpolate_works_with_no_swaps():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping down the valleys wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping down the valleys wild', (
"string interpolation incorrect")
def test_string_interpolate_escapes_double_curly():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping {{ down the valleys wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping { down the valleys wild', (
"string interpolation incorrect")
def test_string_interpolate_escapes_double_curly_pair():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping {{down}} the valleys wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping {down} the valleys wild', (
"string interpolation incorrect")
def test_single_curly_should_throw():
"""pycode error should raise up to caller."""
with pytest.raises(ValueError):
context = {'key1': 'value1'}
input_string = '{key1} this { is {key2} string'
pypyr.format.string.get_interpolated_string(input_string, context)
def test_tag_not_in_context_should_throw():
"""pycode error should raise up to caller."""
with pytest.raises(KeyError):
context = {'key1': 'value1'}
input_string = '{key1} this is {key2} string'
pypyr.format.string.get_interpolated_string(input_string, context)
|
""""string.py unit tests."""
import pypyr.format.string
import pytest
def test_string_interpolate_works():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping {key1} the {key2} wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping down the valleys wild', (
"string interpolation incorrect")
def test_string_interpolate_works_with_no_swaps():
context = {'key1': 'down', 'key2': 'valleys', 'key3': 'value3'}
input_string = 'Piping down the valleys wild'
output = pypyr.format.string.get_interpolated_string(input_string, context)
assert output == 'Piping down the valleys wild', (
"string interpolation incorrect")
def test_tag_not_in_context_should_throw():
"""pycode error should raise up to caller."""
with pytest.raises(KeyError):
context = {'key1': 'value1'}
input_string = '{key1} this is {key2} string'
pypyr.format.string.get_interpolated_string(input_string, context)
|
apache-2.0
|
Python
|
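The three behaviours the tests assert ({{ escaping, KeyError on a missing key, ValueError on an unmatched brace) are exactly str.format() semantics, so a minimal sketch of get_interpolated_string, assuming it simply delegates to format():

def get_interpolated_string(input_string, context):
    """Substitute {key} tags from context; {{ and }} escape literal braces."""
    return input_string.format(**context)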