commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
4c9e18f39908e9b1a36989b3e4097ca458d94af4
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013, Jaime Marquínez Ferrándiz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013-{now:%Y}, Jaime Marquínez Ferrándiz'.format(now=datetime.datetime.now())
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
|
Use the current year in the copyright
|
docs: Use the current year in the copyright
|
Python
|
unlicense
|
jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,jaimeMF/youtube-dl-api-server
|
eaf74f092e73dcb832d624d9f19e9eaee5fbc244
|
pyfakefs/pytest_plugin.py
|
pyfakefs/pytest_plugin.py
|
"""A pytest plugin for using pyfakefs as a fixture
When pyfakefs is installed, the "fs" fixture becomes available.
:Usage:
def my_fakefs_test(fs):
fs.create_file('/var/data/xx1.txt')
assert os.path.exists('/var/data/xx1.txt')
"""
import py
import pytest
from pyfakefs.fake_filesystem_unittest import Patcher
Patcher.SKIPMODULES.add(py) # Ignore pytest components when faking filesystem
@pytest.fixture
def fs(request):
""" Fake filesystem. """
patcher = Patcher()
patcher.setUp()
request.addfinalizer(patcher.tearDown)
return patcher.fs
|
"""A pytest plugin for using pyfakefs as a fixture
When pyfakefs is installed, the "fs" fixture becomes available.
:Usage:
def my_fakefs_test(fs):
fs.create_file('/var/data/xx1.txt')
assert os.path.exists('/var/data/xx1.txt')
"""
import linecache
import py
import pytest
from pyfakefs.fake_filesystem_unittest import Patcher
Patcher.SKIPMODULES.add(py) # Ignore pytest components when faking filesystem
Patcher.SKIPMODULES.add(linecache) # Seems to be used by pytest internally
@pytest.fixture
def fs(request):
""" Fake filesystem. """
patcher = Patcher()
patcher.setUp()
request.addfinalizer(patcher.tearDown)
return patcher.fs
|
Add linecache module to skipped modules for pytest plugin
|
Add linecache module to skipped modules for pytest plugin
- see #381
- fixes the problem under Python 3, but not under Python 2
|
Python
|
apache-2.0
|
mrbean-bremen/pyfakefs,pytest-dev/pyfakefs,mrbean-bremen/pyfakefs,jmcgeheeiv/pyfakefs
|
0770a8e77463ee70851404a37138da050aead5bb
|
pymatgen/core/__init__.py
|
pymatgen/core/__init__.py
|
"""
This package contains core modules and classes for representing structures and
operations on them.
"""
__author__ = "Shyue Ping Ong"
__date__ = "Dec 15, 2010 7:21:29 PM"
from .periodic_table import *
from .composition import *
from .structure import *
from .structure_modifier import *
from .bonds import *
from .lattice import *
from .sites import *
from .operations import *
|
"""
This package contains core modules and classes for representing structures and
operations on them.
"""
__author__ = "Shyue Ping Ong"
__date__ = "Dec 15, 2010 7:21:29 PM"
from .periodic_table import *
from .composition import *
from .structure import *
from .structure_modifier import *
from .bonds import *
from .lattice import *
from .sites import *
from .operations import *
from .units import *
|
Add units to Core import.
|
Add units to Core import.
Former-commit-id: 0f1c678c7da36ebc85827601645f6729a11e5f41 [formerly 80676409b706f3927b463afef6aa844d00aeb107]
Former-commit-id: f99f3956f55a26845ce5ce583545a0413e4f36ce
|
Python
|
mit
|
tallakahath/pymatgen,matk86/pymatgen,aykol/pymatgen,matk86/pymatgen,gpetretto/pymatgen,setten/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,gVallverdu/pymatgen,dongsenfo/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,nisse3000/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,vorwerkc/pymatgen,setten/pymatgen,aykol/pymatgen,gpetretto/pymatgen,setten/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,aykol/pymatgen,mbkumar/pymatgen,Bismarrck/pymatgen,mbkumar/pymatgen,czhengsci/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,richardtran415/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,xhqu1981/pymatgen,Bismarrck/pymatgen,ndardenne/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen,gmatteo/pymatgen,montoyjh/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,xhqu1981/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,matk86/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,fraricci/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,mbkumar/pymatgen,czhengsci/pymatgen,Bismarrck/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,setten/pymatgen,montoyjh/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,dongsenfo/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen
|
7255033298cad9a4a7c51bdceafe84c0536e78ba
|
pytopkapi/infiltration.py
|
pytopkapi/infiltration.py
|
"""Infiltration module.
"""
import numpy as np
from scipy.optimize import fsolve
def green_ampt_cum_infiltration(F, psi, dtheta, K, t):
"""The Green-Ampt cumulative infiltration equation.
"""
tmp = psi*dtheta
# np.log(x) computes ln(x)
return F - tmp*np.log(1 + F/tmp) - K*t
if __name__ == '__main__':
psi = 16.7
dtheta = 0.34
K = 0.65
t = 1
F = K*t # initial guess
print fsolve(green_ampt_cum_infiltration,
F, args=(psi, dtheta, K, t), full_output=True)
|
"""Infiltration module.
"""
import numpy as np
from scipy.optimize import fsolve
def _green_ampt_cum_eq(F, psi, dtheta, K, t):
"""The Green-Ampt cumulative infiltration equation
"""
tmp = psi*dtheta
# np.log(x) computes ln(x)
return F - tmp*np.log(1 + F/tmp) - K*t
def green_ampt_cum_infiltration(psi, dtheta, K, t):
"""Compute the Green-Ampt cumulative infiltration
Compute the potential cumulative infiltration up to time `t`,
using Green-Ampt.
Parameters
----------
psi : array_like
Soil suction head at wetting front.
dtheta : array_like
Ratio of initial effective saturation to effective porosity.
K : array_like
Saturated hydraulic conductivity.
t : array_like
Time since beginning of event
Returns
-------
soln : array_like
Cumulative infiltration up to time `t`.
Raises
------
ValueError - If no solution can be found.
"""
F = K*t # initial guess
soln, infodict, ierr, mesg = fsolve(_green_ampt_cum_eq, F,
args=(psi, dtheta, K, t),
full_output=True)
if ierr == 1:
return soln
else:
raise ValueError(mesg)
def test_basic_green_ampt():
"""Test the Green-Ampt cumulative infiltration solution"""
psi = 16.7
dtheta = 0.34
K = 0.65
t = 1
result = green_ampt_cum_infiltration(psi, dtheta, K, t)
assert np.allclose(result, [3.16641923])
|
Change the API and add a test and documentation
|
ENH: Change the API and add a test and documentation
|
Python
|
bsd-3-clause
|
scottza/PyTOPKAPI,sahg/PyTOPKAPI
|
1223c77fb3ada03d32e6c9da0a08dd43bfc5ad7b
|
docs/test.py
|
docs/test.py
|
import sys, os
if sys.version_info >= (2, 4):
import doctest
else:
raise ImportError("Python 2.4 doctest required")
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def test():
for doc in ['SQLObject.txt']:
doctest.testfile(doc, optionflags=doctest.ELLIPSIS)
if __name__ == '__main__':
test()
|
import sys, os
if sys.version_info >= (2, 4):
import doctest
else:
raise ImportError("Python 2.4 doctest required")
sys.path.insert(
0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def test():
for doc in ['SQLObject.txt']:
doctest.testfile(doc, optionflags=doctest.ELLIPSIS)
if __name__ == '__main__':
test()
|
Make sure checkout is first on sys.path
|
Make sure checkout is first on sys.path
git-svn-id: fe2f45b2405132b4a9af5caedfc153c2e6f542f4@894 95a46c32-92d2-0310-94a5-8d71aeb3d4b3
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
49897f091c159220a7aa1fac2d5e03f42236053f
|
tests/dotnetexample/conf.py
|
tests/dotnetexample/conf.py
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/Identity/src/'
autoapi_keep_files = True
|
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
autoapi_type = 'dotnet'
autoapi_dir = 'example/Identity/src/'
autoapi_keep_files = True
import os
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
DIR = os.path.join(SITE_ROOT, autoapi_dir)
if not os.path.exists(DIR):
os.system('git clone https://github.com/aspnet/Identity %s' % os.path.join(SITE_ROOT, 'example/Identity'))
|
Make sure example repo exists
|
Make sure example repo exists
|
Python
|
mit
|
rtfd/sphinx-autoapi,rtfd/sphinx-autoapi,rtfd/sphinx-autoapi,rtfd/sphinx-autoapi
|
bb3ec131261f0619a86f21f549d6b1cb47f2c9ad
|
graph/serializers.py
|
graph/serializers.py
|
from rest_framework import serializers
from measurement.models import Measurement
from threshold_value.models import ThresholdValue
from calendar import timegm
from alarm.models import Alarm
class GraphSeriesSerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField('get_time')
y = serializers.SerializerMethodField('get_value')
class Meta:
fields = ['x', 'y']
def get_time(self, obj):
return int(timegm(obj.time.utctimetuple())) * 1000 # Milliseconds since epoch, UTC
def get_value(self, obj):
return obj.value
class MeasurementGraphSeriesSerializer(GraphSeriesSerializer):
alarm = serializers.SerializerMethodField('get_alarm')
def __init__(self, *args, **kwargs):
self.alarm_dict = kwargs.pop('alarm_dict', None)
super(MeasurementGraphSeriesSerializer, self).__init__(*args, **kwargs)
if not self.alarm_dict:
self.fields.pop('alarm')
def get_alarm(self, obj):
if obj.id in self.alarm_dict:
alarm = self.alarm_dict[obj.id]
serializer = SimpleAlarmSerializer(alarm)
return serializer.data
return None
class Meta(GraphSeriesSerializer.Meta):
model = Measurement
fields = GraphSeriesSerializer.Meta.fields + ['alarm']
class ThresholdValueGraphSeriesSerializer(GraphSeriesSerializer):
class Meta(GraphSeriesSerializer.Meta):
model = ThresholdValue
class SimpleAlarmSerializer(serializers.ModelSerializer):
class Meta:
model = Alarm
fields = ('id', 'time_created', 'is_treated', 'treated_text')
|
from rest_framework import serializers
from measurement.models import Measurement
from threshold_value.models import ThresholdValue
from calendar import timegm
from alarm.models import Alarm
class GraphSeriesSerializer(serializers.ModelSerializer):
x = serializers.SerializerMethodField('get_time')
y = serializers.SerializerMethodField('get_value')
class Meta:
fields = ['x', 'y']
def get_time(self, obj):
return int(timegm(obj.time.utctimetuple())) * 1000 # Milliseconds since epoch, UTC
def get_value(self, obj):
return obj.value
class MeasurementGraphSeriesSerializer(GraphSeriesSerializer):
alarm = serializers.SerializerMethodField('get_alarm')
def __init__(self, *args, **kwargs):
self.alarm_dict = kwargs.pop('alarm_dict', None)
super(MeasurementGraphSeriesSerializer, self).__init__(*args, **kwargs)
if not self.alarm_dict:
self.fields.pop('alarm')
def get_alarm(self, obj):
if obj.id in self.alarm_dict:
alarm = self.alarm_dict[obj.id]
serializer = SimpleAlarmSerializer(alarm)
return serializer.data
return None
class Meta(GraphSeriesSerializer.Meta):
model = Measurement
fields = GraphSeriesSerializer.Meta.fields + ['alarm']
class ThresholdValueGraphSeriesSerializer(GraphSeriesSerializer):
class Meta(GraphSeriesSerializer.Meta):
model = ThresholdValue
class SimpleAlarmSerializer(serializers.ModelSerializer):
class Meta:
model = Alarm
fields = ['is_treated']
|
Simplify SimpleAlarmSerializer to improve the performance of the graph_data endpoint
|
Simplify SimpleAlarmSerializer to improve the performance of the graph_data endpoint
|
Python
|
mit
|
sigurdsa/angelika-api
|
34667b59161f0079715e9bffa11237a1a8c5500f
|
fedimg/uploader.py
|
fedimg/uploader.py
|
#!/bin/env python
# -*- coding: utf8 -*-
import koji
def upload(builds):
""" Takes a list of one or more Koji build IDs (passed to it from
consumer.py) and sends the appropriate image files off to cloud
services. """
if isinstance(builds, list):
for build in builds:
pass
else:
# TODO: Not sure if this is the proper way to handle this.
raise Exception("Build upload function must take a list.")
return # TODO: Does this need to go here?
|
#!/bin/env python
# -*- coding: utf8 -*-
import koji
from pprint import pprint
def upload(builds):
""" Takes a list of one or more Koji build IDs (passed to it from
consumer.py) and sends the appropriate image files off to cloud
services. """
if isinstance(builds, list):
# Create a Koji connection to the Fedora Koji instance
koji_server = "https://koji.fedoraproject.org/kojihub"
koji_session = koji.ClientSession(koji_server)
if len(builds) == 1:
print "\nRESULTS FOR listTaskOutput():\n"
pprint(koji_session.listTaskOutput())
print "\nRESULTS FOR getTaskResult():\n"
pprint(koji_session.getTaskResult())
elif len(builds) >= 2:
#koji.multicall = True
for build in builds:
print "\nPLACEHOLDER FOR MULTICALL BUILD SITUATION\n"
#results = koji.multiCall()
#koji.multicall = False # TODO: Is this needed?
else:
# TODO: Not sure if this is the proper way to handle this.
raise Exception("Build upload function must take a list.")
return # TODO: Does this need to go here?
|
Add some koji API code for testing getting build results/output.
|
Add some koji API code for testing getting build results/output.
|
Python
|
agpl-3.0
|
fedora-infra/fedimg,fedora-infra/fedimg
|
0f9168caa5085cb225ea04b725a2379eef8c3b8d
|
app/Voltage/voltage.py
|
app/Voltage/voltage.py
|
import smbus
import time
ADDRESS = 4
CMD_READ_ANALOG = 1
VOLT12 = 650
VOLT18 = 978
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
class Voltage:
def __init__(self):
self.bus = smbus.SMBus(1)
def get_data(self):
voltage_time = time.time();
voltage_raw = self.bus.read_word_data(ADDRESS, CMD_READ_ANALOG)
voltage = map_range(voltage_raw, VOLT12, VOLT18, 12, 18)
return [
(voltage_time, "voltage_raw", voltage_raw),
(voltage_time, "voltage", voltage)
]
|
import smbus
import time
ADDRESS = 4
CMD_READ_ANALOG = 1
VOLT12 = 650
VOLT18 = 978
def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
class Voltage:
def __init__(self):
self.bus = smbus.SMBus(1)
def get_data(self):
voltage_time = time.time()
voltage_raw = self.bus.read_word_data(ADDRESS, CMD_READ_ANALOG)
voltage = map_range(voltage_raw, VOLT12, VOLT18, 12, 18)
return [
(voltage_time, "voltage_raw", voltage_raw),
(voltage_time, "voltage", voltage)
]
|
Fix some minor formatting issues
|
Fix some minor formatting issues
|
Python
|
mit
|
gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,thelonious/g2x
|
dc82d59b739934d093ed0d704583e7edf1278fc3
|
core/management/commands/delete_old_sessions.py
|
core/management/commands/delete_old_sessions.py
|
from datetime import datetime
from django.core.management.base import BaseCommand
from django.contrib.sessions.models import Session
class Command(BaseCommand):
args = '<count count ...>'
help = "Delete old sessions"
def handle(self, *args, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions):
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
|
from datetime import datetime
from django.core.management.base import NoArgsCommand
from django.contrib.sessions.models import Session
class Command(NoArgsCommand):
help = "Delete old sessions"
def handle_noargs(self, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions)[:10000]:
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
|
Add delete old sessions command
|
Add delete old sessions command
|
Python
|
mit
|
QLGu/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,pydanny/djangopackages,pydanny/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages
|
591a40b6e1f4ac8b1d21050ccfa10779dc9dbf7c
|
analytic_code.py
|
analytic_code.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 XCG Consulting (www.xcg-consulting.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 XCG Consulting (www.xcg-consulting.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class analytic_code(osv.Model):
_name = "analytic.code"
_columns = dict(
name=fields.char("Name", size=128, translate=True, required=True),
nd_id=fields.many2one(
"analytic.dimension", "Dimensions", ondelete="restrict"),
active=fields.boolean('Active'),
nd_name=fields.related('nd_id', 'name', type="char",
string="Dimension Name", store=False),
description=fields.char('Description', size=512),
)
_defaults = {
'active': 1,
}
|
Add string to display the name of the field Dimension during the import
|
Add string to display the name of the field Dimension during the import
|
Python
|
agpl-3.0
|
xcgd/analytic_structure
|
50146d9e3e43ca4d9c50d044c52714cf4234cee1
|
tests/window/window_util.py
|
tests/window/window_util.py
|
#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
|
#!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
|
Fix window test border _again_ (more fixed).
|
Fix window test border _again_ (more fixed).
|
Python
|
bsd-3-clause
|
adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,seeminglee/pyglet64,adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,seeminglee/pyglet64,niklaskorz/pyglet,niklaskorz/pyglet,seeminglee/pyglet64,niklaskorz/pyglet,niklaskorz/pyglet
|
6782ad40a405f79f07fa1527131634f96944ffd6
|
apps/innovate/views.py
|
apps/innovate/views.py
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html')
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html')
|
import random
import jingo
from users.models import Profile
from projects.models import Project
from events.models import Event
from feeds.models import Entry
def splash(request):
"""Display splash page. With featured project, event, person, blog post."""
def get_random(cls, **kwargs):
choices = cls.objects.filter(**kwargs)
return choices and random.choice(choices) or None
return jingo.render(request, 'innovate/splash.html', {
'featured_project': get_random(Project, featured=True),
'featured_event': get_random(Event, featured=True),
'featured_user': get_random(Profile, featured=True),
'entry': get_random(Entry, link__featured=True)
})
def about(request):
"""Display the about page. Simple direct to template."""
# NOTE: can't use ``django.views.generic.simple.direct_to_template``
# because we use jinja2 templates instead of Django templates.
return jingo.render(request, 'innovate/about.html')
def handle404(request):
"""Handle 404 responses."""
return jingo.render(request, 'handlers/404.html', status=404)
def handle500(request):
"""Handle server errors."""
return jingo.render(request, 'handlers/500.html', status=500)
|
Add status codes to the 404/500 error handlers.
|
Add status codes to the 404/500 error handlers.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
e84ca65440bb6b38bf0630373766789d6915a20d
|
dataset/dataset/spiders/dataset_spider.py
|
dataset/dataset/spiders/dataset_spider.py
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
Add text selection inside 'li' element
|
Add text selection inside 'li' element
|
Python
|
mit
|
MaxLikelihood/CODE
|
8d651ed493d2787da478f0c7c120917d3335b4d5
|
email_from_template/utils.py
|
email_from_template/utils.py
|
from . import app_settings
_render_method = None
def get_render_method():
global _render_method
if _render_method is None:
_render_method = from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
return _render_method
_context_processors = None
def get_context_processors():
global _context_processors
if _context_processors is None:
_context_processors = [
from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS
]
return _context_processors
def from_dotted_path(fullpath):
"""
Returns the specified attribute of a module, specified by a string.
``from_dotted_path('a.b.c.d')`` is roughly equivalent to::
from a.b.c import d
except that ``d`` is returned and not entered into the current namespace.
"""
module, attr = fullpath.rsplit('.', 1)
return getattr(__import__(module, {}, {}, (attr,)), attr)
|
from django.utils.functional import memoize
from . import app_settings
def get_render_method():
return from_dotted_path(app_settings.EMAIL_RENDER_METHOD)
get_render_method = memoize(get_render_method, {}, 0)
def get_context_processors():
return [from_dotted_path(x) for x in app_settings.EMAIL_CONTEXT_PROCESSORS]
get_context_processors = memoize(get_context_processors, {}, 0)
def from_dotted_path(fullpath):
"""
Returns the specified attribute of a module, specified by a string.
``from_dotted_path('a.b.c.d')`` is roughly equivalent to::
from a.b.c import d
except that ``d`` is returned and not entered into the current namespace.
"""
module, attr = fullpath.rsplit('.', 1)
return getattr(__import__(module, {}, {}, (attr,)), attr)
|
Use Django's memoize over a custom one.
|
Use Django's memoize over a custom one.
Signed-off-by: Chris Lamb <[email protected]>
|
Python
|
bsd-3-clause
|
playfire/django-email-from-template,lamby/django-email-from-template
|
07d7eddac89d5ac54af62e185801cdbe71720b7c
|
hybridJaccardTest.py
|
hybridJaccardTest.py
|
import argparse
import sys
import hybridJaccard as hj
def main():
"Command line testinterface."
parser = argparse.ArgumentParser()
parser.add_argument('-c','--configFile', help="Configuration file (JSON).", required=False)
parser.add_argument('-i','--input', help="Input file of phrases to test.", required=True)
parser.add_argument('-r','--referenceFile', help="Reference file.", required=False)
args = parser.parse_args()
sm = hj.HybridJaccard(ref_path=args.referenceFile, config_path=args.configFile)
with open("input.txt") as input:
for line in input:
line = line.strip()
match = sm.findBestMatchStringCached(line)
if match is None:
match = "(NONE)"
print(line+" => "+match)
# call main() if this is run as standalone
if __name__ == "__main__":
sys.exit(main())
|
import argparse
import sys
import hybridJaccard as hj
def main():
"Command line testinterface."
parser = argparse.ArgumentParser()
parser.add_argument('-c','--configFile', help="Configuration file (JSON).", required=False)
parser.add_argument('-i','--input', help="Input file of phrases to test.", required=True)
parser.add_argument('-r','--referenceFile', help="Reference file.", required=False)
args = parser.parse_args()
sm = hj.HybridJaccard(ref_path=args.referenceFile, config_path=args.configFile)
with open(args.input) as input:
for line in input:
line = line.strip()
match = sm.findBestMatchStringCached(line)
if match is None:
match = "(NONE)"
print(line+" => "+match)
# call main() if this is run as standalone
if __name__ == "__main__":
sys.exit(main())
|
Read the intput file specified on the command line.
|
Read the intput file specified on the command line.
|
Python
|
apache-2.0
|
usc-isi-i2/hybrid-jaccard
|
fcbb2ec6ebceebea0012971a831f2941d1943708
|
src/knesset/links/managers.py
|
src/knesset/links/managers.py
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
class LinksManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all links for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().filter(active=True, content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_unicode(model._get_pk_val()))
return qs
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
class LinksManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all links for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().select_related('link_type').filter(
active=True, content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_unicode(model._get_pk_val()))
return qs
|
Use select related for link_type
|
Use select related for link_type
|
Python
|
bsd-3-clause
|
habeanf/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,navotsil/Open-Knesset,OriHoch/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,noamelf/Open-Knesset,daonb/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,MeirKriheli/Open-Knesset,DanaOshri/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,daonb/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,jspan/Open-Knesset,Shrulik/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,navotsil/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset
|
79dd629be9b858fd7bc73e7d16aecbb25de0d5db
|
fireplace/cards/wog/rogue.py
|
fireplace/cards/wog/rogue.py
|
from ..utils import *
##
# Minions
##
# Spells
class OG_073:
"Thistle Tea"
play = Draw(CONTROLLER).then(Give(CONTROLLER, Copy(Draw.CARD)) * 2)
|
from ..utils import *
##
# Minions
class OG_070:
"Bladed Cultist"
combo = Buff(SELF, "OG_070e")
OG_070e = buff(+1, +1)
class OG_267:
"Southsea Squidface"
deathrattle = Buff(FRIENDLY_WEAPON, "OG_267e")
OG_267e = buff(atk=2)
##
# Spells
class OG_073:
"Thistle Tea"
play = Draw(CONTROLLER).then(Give(CONTROLLER, Copy(Draw.CARD)) * 2)
class OG_176:
"Shadow Strike"
play = Hit(TARGET, 5)
|
Implement Bladed Cultist, Southsea Squidface, and Shadow Strike
|
Implement Bladed Cultist, Southsea Squidface, and Shadow Strike
|
Python
|
agpl-3.0
|
NightKev/fireplace,jleclanche/fireplace,beheh/fireplace
|
b8ad378a796ee867acfa3198e04d47a500dd90d3
|
mla/neuralnet/activations.py
|
mla/neuralnet/activations.py
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
import autograd.numpy as np
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def softmax(z):
# Avoid numerical overflow by removing max
e = np.exp(z - np.amax(z, axis=1, keepdims=True))
return e / np.sum(e, axis=1, keepdims=True)
def linear(z):
return z
def softplus(z):
"""Smooth relu."""
# Avoid numerical overflow, see:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
return np.logaddexp(0.0, z)
def softsign(z):
return z / (1 + np.abs(z))
def tanh(z):
return np.tanh(z)
def relu(z):
return np.maximum(0, z)
def leakyrelu(z, a=0.01):
return np.maximum(z * a, z)
def get_activation(name):
"""Return activation function by name"""
try:
return globals()[name]
except:
raise ValueError('Invalid activation function.')
|
Add Leaky ReLU activation. Differentiation with autograd package confirmed to work correctly.
|
Add Leaky ReLU activation.
Differentiation with autograd package confirmed to work correctly.
|
Python
|
mit
|
rushter/MLAlgorithms
|
f6cad3a2bfeb4238da359c882fe7cbbaedb5d8b7
|
setuptools/extension.py
|
setuptools/extension.py
|
from distutils.core import Extension as _Extension
from dist import _get_unpatched
_Extension = _get_unpatched(_Extension)
try:
from Pyrex.Distutils.build_ext import build_ext
except ImportError:
have_pyrex = False
else:
have_pyrex = True
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
if not have_pyrex:
# convert .pyx extensions to .c
def __init__(self,*args,**kw):
_Extension.__init__(self,*args,**kw)
sources = []
for s in self.sources:
if s.endswith('.pyx'):
sources.append(s[:-3]+'c')
else:
sources.append(s)
self.sources = sources
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
|
from distutils.core import Extension as _Extension
from setuptools.dist import _get_unpatched
_Extension = _get_unpatched(_Extension)
try:
from Pyrex.Distutils.build_ext import build_ext
except ImportError:
have_pyrex = False
else:
have_pyrex = True
class Extension(_Extension):
"""Extension that uses '.c' files in place of '.pyx' files"""
if not have_pyrex:
# convert .pyx extensions to .c
def __init__(self,*args,**kw):
_Extension.__init__(self,*args,**kw)
sources = []
for s in self.sources:
if s.endswith('.pyx'):
sources.append(s[:-3]+'c')
else:
sources.append(s)
self.sources = sources
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
sys.modules['distutils.command.build_ext'].Extension = Extension
|
Fix import that was breaking py3k
|
Fix import that was breaking py3k
--HG--
branch : distribute
extra : rebase_source : 76bf8f9213536189bce76a41e798c44c5f468cbd
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
bd78b8c1bab94b5f048f8bc4895657f1fd36ddfc
|
project_generator/commands/clean.py
|
project_generator/commands/clean.py
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by pgen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", required = True, help="Clean project files for specified tool")
|
# Copyright 2014-2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from ..generate import Generator
help = 'Clean generated projects'
def run(args):
if os.path.exists(args.file):
generator = Generator(args.file)
for project in generator.generate(args.project):
project.clean(args.tool)
else:
# not project known by pgen
logging.warning("%s not found." % args.file)
return -1
return 0
def setup(subparser):
subparser.add_argument("-f", "--file", help="YAML projects file", default='projects.yaml')
subparser.add_argument("-p", "--project", required = True, help="Specify which project to be removed")
subparser.add_argument(
"-t", "--tool", help="Clean project files")
|
Clean - tool is not required, as tool_supported are there
|
Clean - tool is not required, as tool_supported are there
|
Python
|
apache-2.0
|
0xc0170/project_generator,sarahmarshy/project_generator,hwfwgrp/project_generator,molejar/project_generator,project-generator/project_generator,ohagendorf/project_generator
|
2bf883741ce763bde729f2930af913c44a807cb5
|
jiraconfig-sample.py
|
jiraconfig-sample.py
|
JIRA = {
"server": "https://example.com/jira/",
"user": "user",
"password": "password"
}
|
import keyring
JIRA = {
"server": "https://example.com/jira/",
"user": "user",
"password": keyring.get_password("system", "user")
}
|
Add keyring to example config
|
Add keyring to example config
|
Python
|
mit
|
mrts/ask-jira
|
235f8061caa667f7c9bc1f424e14326c22932547
|
Examples/Infovis/Python/cone_layout.py
|
Examples/Infovis/Python/cone_layout.py
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
reader.Update()
print reader.GetOutput()
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
from vtk import *
reader = vtkXMLTreeReader()
reader.SetFileName("vtkclasses.xml")
view = vtkGraphLayoutView()
view.AddRepresentationFromInputConnection(reader.GetOutputPort())
view.SetVertexLabelArrayName("id")
view.SetVertexLabelVisibility(True)
view.SetVertexColorArrayName("vertex id")
view.SetColorVertices(True)
view.SetLayoutStrategyToCone()
view.SetInteractionModeTo3D() # Left mouse button causes 3D rotate instead of zoom
view.SetLabelPlacementModeToLabelPlacer()
theme = vtkViewTheme.CreateMellowTheme()
theme.SetCellColor(.2,.2,.6)
theme.SetLineWidth(2)
theme.SetPointSize(10)
view.ApplyViewTheme(theme)
theme.FastDelete()
window = vtkRenderWindow()
window.SetSize(600, 600)
view.SetupRenderWindow(window)
view.GetRenderer().ResetCamera()
window.Render()
window.GetInteractor().Start()
|
Remove errant printout in python cone layout example.
|
ENH: Remove errant printout in python cone layout example.
|
Python
|
bsd-3-clause
|
daviddoria/PointGraphsPhase1,jmerkow/VTK,mspark93/VTK,hendradarwin/VTK,aashish24/VTK-old,Wuteyan/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,msmolens/VTK,ashray/VTK-EVM,msmolens/VTK,hendradarwin/VTK,sumedhasingla/VTK,ashray/VTK-EVM,johnkit/vtk-dev,sumedhasingla/VTK,SimVascular/VTK,sankhesh/VTK,candy7393/VTK,jmerkow/VTK,spthaolt/VTK,candy7393/VTK,mspark93/VTK,mspark93/VTK,spthaolt/VTK,msmolens/VTK,Wuteyan/VTK,gram526/VTK,jeffbaumes/jeffbaumes-vtk,biddisco/VTK,sumedhasingla/VTK,demarle/VTK,aashish24/VTK-old,gram526/VTK,sankhesh/VTK,candy7393/VTK,cjh1/VTK,keithroe/vtkoptix,ashray/VTK-EVM,keithroe/vtkoptix,naucoin/VTKSlicerWidgets,biddisco/VTK,cjh1/VTK,msmolens/VTK,jeffbaumes/jeffbaumes-vtk,ashray/VTK-EVM,arnaudgelas/VTK,jeffbaumes/jeffbaumes-vtk,spthaolt/VTK,spthaolt/VTK,sankhesh/VTK,candy7393/VTK,keithroe/vtkoptix,spthaolt/VTK,SimVascular/VTK,collects/VTK,sankhesh/VTK,gram526/VTK,ashray/VTK-EVM,sumedhasingla/VTK,demarle/VTK,Wuteyan/VTK,jmerkow/VTK,candy7393/VTK,msmolens/VTK,berendkleinhaneveld/VTK,spthaolt/VTK,biddisco/VTK,SimVascular/VTK,collects/VTK,daviddoria/PointGraphsPhase1,jmerkow/VTK,hendradarwin/VTK,demarle/VTK,msmolens/VTK,biddisco/VTK,SimVascular/VTK,sumedhasingla/VTK,Wuteyan/VTK,johnkit/vtk-dev,msmolens/VTK,sankhesh/VTK,cjh1/VTK,aashish24/VTK-old,naucoin/VTKSlicerWidgets,keithroe/vtkoptix,johnkit/vtk-dev,sankhesh/VTK,jeffbaumes/jeffbaumes-vtk,daviddoria/PointGraphsPhase1,demarle/VTK,berendkleinhaneveld/VTK,cjh1/VTK,demarle/VTK,msmolens/VTK,aashish24/VTK-old,keithroe/vtkoptix,daviddoria/PointGraphsPhase1,biddisco/VTK,berendkleinhaneveld/VTK,naucoin/VTKSlicerWidgets,berendkleinhaneveld/VTK,cjh1/VTK,collects/VTK,johnkit/vtk-dev,naucoin/VTKSlicerWidgets,SimVascular/VTK,SimVascular/VTK,johnkit/vtk-dev,gram526/VTK,Wuteyan/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,ashray/VTK-EVM,candy7393/VTK,jmerkow/VTK,keithroe/vtkoptix,arnaudgelas/VTK,sumedhasingla/VTK,naucoin/VTKSlicerWidgets,arnaudgelas/VTK,daviddoria/PointGraphsPhase1,johnkit/vtk-dev,gram526/VTK,gram526/VT
K,jmerkow/VTK,sumedhasingla/VTK,arnaudgelas/VTK,hendradarwin/VTK,spthaolt/VTK,gram526/VTK,cjh1/VTK,biddisco/VTK,collects/VTK,berendkleinhaneveld/VTK,sankhesh/VTK,demarle/VTK,sumedhasingla/VTK,aashish24/VTK-old,aashish24/VTK-old,biddisco/VTK,mspark93/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,mspark93/VTK,candy7393/VTK,candy7393/VTK,jmerkow/VTK,collects/VTK,keithroe/vtkoptix,keithroe/vtkoptix,Wuteyan/VTK,mspark93/VTK,collects/VTK,gram526/VTK,johnkit/vtk-dev,SimVascular/VTK,arnaudgelas/VTK,ashray/VTK-EVM,mspark93/VTK,hendradarwin/VTK,Wuteyan/VTK,mspark93/VTK,berendkleinhaneveld/VTK,SimVascular/VTK,naucoin/VTKSlicerWidgets,demarle/VTK,arnaudgelas/VTK,sankhesh/VTK,demarle/VTK,ashray/VTK-EVM,daviddoria/PointGraphsPhase1
|
72d0ca4e2f4be7969498b226af4243315f2dff0c
|
tests/test_colors.py
|
tests/test_colors.py
|
"""Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(result["colors"]["color0"], "#0D191B")
if __name__ == "__main__":
unittest.main()
|
"""Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(len(result["colors"]["color0"]), 7)
if __name__ == "__main__":
unittest.main()
|
Check color length instead of value since the tests will fail on other versions of imageamgick
|
tests: Check color length instead of value since the tests will fail on other versions of imageamgick
|
Python
|
mit
|
dylanaraps/pywal,dylanaraps/pywal,dylanaraps/pywal
|
323cc3f50fa0bbd072bfe243443adf12e1b25220
|
bluebottle/projects/migrations/0019_auto_20170118_1537.py
|
bluebottle/projects/migrations/0019_auto_20170118_1537.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-18 14:37
from __future__ import unicode_literals
import binascii
import os
from django.db import migrations
def generate_key():
return binascii.hexlify(os.urandom(20)).decode()
def create_auth_token(apps, schema_editor):
Member = apps.get_model('members', 'member')
Token = apps.get_model('authtoken', 'token')
member = Member.objects.create(
email='[email protected]',
username='accounting'
)
token = Token.objects.create(
user=member,
key=generate_key()
)
class Migration(migrations.Migration):
dependencies = [
('projects', '0018_merge_20170118_1533'),
('authtoken', '0001_initial'),
]
operations = [
migrations.RunPython(create_auth_token)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-18 14:37
from __future__ import unicode_literals
import binascii
import os
from django.db import migrations
def generate_key():
return binascii.hexlify(os.urandom(20)).decode()
def create_auth_token(apps, schema_editor):
Member = apps.get_model('members', 'member')
Token = apps.get_model('authtoken', 'token')
member = Member.objects.create(
email='[email protected]',
username='accounting'
)
token = Token.objects.create(
user=member,
key=generate_key()
)
class Migration(migrations.Migration):
dependencies = [
('projects', '0018_merge_20170118_1533'),
('authtoken', '0001_initial'),
('quotes', '0005_auto_20180717_1017'),
('slides', '0006_auto_20180717_1017'),
]
operations = [
migrations.RunPython(create_auth_token)
]
|
Add dependency on different migrations
|
Add dependency on different migrations
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
23e1766731dbd08d3d6c55d9d1fe2bbf1be42614
|
sncosmo/tests/test_builtins.py
|
sncosmo/tests/test_builtins.py
|
import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
bp = sncosmo.get_bandpass('f087')
|
import pytest
import sncosmo
@pytest.mark.might_download
def test_hst_bands():
""" check that the HST and JWST bands are accessible """
for bandname in ['f606w', 'uvf606w', 'f125w', 'f127m',
'f115w']: # jwst nircam
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_jwst_miri_bands():
for bandname in ['f1130w']:
sncosmo.get_bandpass(bandname)
@pytest.mark.might_download
def test_ztf_bandpass():
bp = sncosmo.get_bandpass('ztfg')
@pytest.mark.might_download
def test_roman_bandpass():
sncosmo.get_bandpass('f062')
sncosmo.get_bandpass('f087')
sncosmo.get_bandpass('f106')
sncosmo.get_bandpass('f129')
sncosmo.get_bandpass('f158')
sncosmo.get_bandpass('f184')
sncosmo.get_bandpass('f213')
sncosmo.get_bandpass('f146')
|
Update tests to cover every Roman WFI filter
|
Update tests to cover every Roman WFI filter
|
Python
|
bsd-3-clause
|
sncosmo/sncosmo,sncosmo/sncosmo,sncosmo/sncosmo
|
d4033694f7686fe1ad48a185ae740c4d966d40d8
|
classes/dnsresolver.py
|
classes/dnsresolver.py
|
import dns
import dns.resolver
import dns.rdatatype
from typing import Union, List
class DNSResolver(dns.resolver.Resolver):
def __init__(self, filename='/etc/resolv.conf', configure=False,
nameservers: Union[str, List[str]] = None):
# Run the dns.resolver.Resolver superclass init call to configure
# the object. Then, depending on the value in configure argument,
# do something with the nameservers argument, which is unique to this
# class object instead.
super(DNSResolver, self).__init__(filename, configure)
if not configure:
if isinstance(nameservers, str):
self.nameservers = [nameservers]
elif isinstance(nameservers, list):
self.nameservers = nameservers
else:
self.nameservers = ['8.8.8.8, 8.8.4.4']
def dns_resolve(domain: str, resolver: DNSResolver = DNSResolver(configure=True)) -> list:
addrs = []
for answer in resolver.query(domain, 'A').response.answer:
for item in answer:
addrs.append(item.address)
for answer in resolver.query(domain, 'AAAA').response.answer:
for item in answer:
addrs.append(item.address)
return addrs
|
import dns
import dns.resolver
import dns.rdatatype
from typing import Union, List
class DNSResolver(dns.resolver.Resolver):
def __init__(self, filename='/etc/resolv.conf', configure=False,
nameservers: Union[str, List[str]] = None):
# Run the dns.resolver.Resolver superclass init call to configure
# the object. Then, depending on the value in configure argument,
# do something with the nameservers argument, which is unique to this
# class object instead.
super(DNSResolver, self).__init__(filename, configure)
if not configure:
if isinstance(nameservers, str):
self.nameservers = [nameservers]
elif isinstance(nameservers, list):
self.nameservers = nameservers
else:
self.nameservers = ['8.8.8.8, 8.8.4.4']
def dns_resolve(domain: str, resolver: DNSResolver = DNSResolver(configure=True)) -> list:
addrs = []
try:
for answer in resolver.query(domain, 'A').response.answer:
for item in answer:
if item.rdtype == dns.rdatatype.A:
addrs.append(item.address)
except dns.resolver.NoAnswer:
pass
try:
for answer in resolver.query(domain, 'AAAA').response.answer:
for item in answer:
if item.rdtype == dns.rdatatype.AAAA:
addrs.append(item.address)
except dns.resolver.NoAnswer:
pass
return addrs
|
Implement rdatatype-aware and NoAnswer-aware DNS handling
|
Implement rdatatype-aware and NoAnswer-aware DNS handling
This will work for CNAME entries because CNAMEs hit by A or AAAA lookups behave like `dig` does - they will trigger a second resultset for the CNAME entry in order to return the IP address.
This also is amended to handle a "NoAnswer" response - i.e. if there are no IPv4 or IPv6 addresses for a given CNAME or records lookup. The list will therefore have all the CNAME-resolved IP addresses as independent strings.
|
Python
|
apache-2.0
|
Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector
|
c81fff4ff4cccc51faf47c7ca9a63cd9eb6a2699
|
projects/tests/factories.py
|
projects/tests/factories.py
|
import factory
from django.contrib.auth.models import User
from accounts.tests.factories import UserFactory
from .. import models
class OrganizationFactory(factory.DjangoModelFactory):
"""Organization factory"""
FACTORY_FOR = models.Organization
name = factory.Sequence(lambda n: 'organization {}'.format(n))
@factory.post_generation
def users(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for user in extracted:
self.users.add(user)
else:
self.users = UserFactory.create_batch(10)
class ProjectFactory(factory.DjangoModelFactory):
"""Project factory"""
FACTORY_FOR = models.Project
name = factory.Sequence(lambda n: 'project{}'.format(n))
url = factory.Sequence(lambda n: 'http://test{}.com'.format(n))
organization = factory.SubFactory(OrganizationFactory)
@factory.sequence
def owner(n):
return User.objects.create_user('user{}'.format(n))
|
import factory
from django.contrib.auth.models import User
from accounts.tests.factories import UserFactory
from .. import models
class OrganizationFactory(factory.DjangoModelFactory):
"""Organization factory"""
FACTORY_FOR = models.Organization
name = factory.Sequence(lambda n: 'organization {}'.format(n))
@factory.post_generation
def users(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for user in extracted:
self.users.add(user)
else:
self.users = UserFactory.create_batch(10)
class ProjectFactory(factory.DjangoModelFactory):
"""Project factory"""
FACTORY_FOR = models.Project
name = factory.Sequence(lambda n: 'project{}'.format(n))
url = factory.Sequence(lambda n: 'http://test{}.com'.format(n))
organization = factory.SubFactory(OrganizationFactory)
run_here = False
@factory.sequence
def owner(n):
return User.objects.create_user('user{}'.format(n))
|
Change project factory default values
|
Change project factory default values
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
1c6fcd2e1ab02fef60e3507ba57cb9224b19d616
|
elephantblog/context_processors.py
|
elephantblog/context_processors.py
|
from django.utils import translation
from feincms.module.page.models import Page
def blog_page(request):
""" Used to get the feincms page navigation within the blog app. """
from feincms.module.page.models import Page
try:
return {'blog_page' : Page.objects.get(slug='blog', language=translation.get_language())}
except:
return {}
|
from feincms.module.page.models import Page
from feincms.translations import short_language_code
def blog_page(request):
""" Used to get the feincms page navigation within the blog app. """
from feincms.module.page.models import Page
return {'blog_page': Page.objects.get(slug='blog', language=short_language_code())}
except:
try:
return {'blog_page': Page.objects.get(slug='blog')}
except:
return {}
|
Handle page module without translations extension too
|
blog_page: Handle page module without translations extension too
|
Python
|
bsd-3-clause
|
matthiask/feincms-elephantblog,feincms/feincms-elephantblog,joshuajonah/feincms-elephantblog,matthiask/feincms-elephantblog,feincms/feincms-elephantblog,michaelkuty/feincms-elephantblog,matthiask/feincms-elephantblog,sbaechler/feincms-elephantblog,joshuajonah/feincms-elephantblog,sbaechler/feincms-elephantblog,sbaechler/feincms-elephantblog,michaelkuty/feincms-elephantblog,joshuajonah/feincms-elephantblog,michaelkuty/feincms-elephantblog
|
c9aa7b60e3e985883854e7aba38838c7a45aa6fa
|
matches/models.py
|
matches/models.py
|
from django.db import models
from wrestlers.models import WrestlingEntity
class Card(models.Model):
date = models.DateField()
def __unicode__(self):
return unicode(self.date)
class Match(models.Model):
card = models.ForeignKey(Card)
participants = models.ManyToManyField(WrestlingEntity)
winner = models.ForeignKey(WrestlingEntity, related_name="won_matches",
null=True, blank=True)
def __unicode__(self):
return " vs. ".join([p.name for p in self.participants.all()])
|
from django.contrib.auth.models import User
from django.db import models
from wrestlers.models import WrestlingEntity
class Review(models.Model):
reviewed_by = models.ForeignKey(User)
reviewed_at = models.DateTimeField()
class Meta:
abstract = True
class Card(models.Model):
date = models.DateField()
def __unicode__(self):
return unicode(self.date)
class Match(Review):
card = models.ForeignKey(Card)
participants = models.ManyToManyField(WrestlingEntity)
winner = models.ForeignKey(WrestlingEntity, related_name="won_matches",
null=True, blank=True)
def __unicode__(self):
return " vs. ".join([p.name for p in self.participants.all()])
|
Add basic Review model and use it for matches.
|
Add basic Review model and use it for matches.
|
Python
|
agpl-3.0
|
OddBloke/moore
|
74b1d24cde1e58a4829ce6ae0b7e3b52b8ced40f
|
nipy/modalities/fmri/__init__.py
|
nipy/modalities/fmri/__init__.py
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import fmri, hrf, utils
import fmristat
from nipy.testing import Tester
test = Tester().test
bench = Tester().bench
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import fmri, hrf, utils, formula
import fmristat
from nipy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Fix missing import of formula in fmri.
|
Fix missing import of formula in fmri.
|
Python
|
bsd-3-clause
|
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
|
11238c63240fa19b87fc478916bac3a4bdd86df5
|
django_project/realtime/tasks/test/test_celery_tasks.py
|
django_project/realtime/tasks/test/test_celery_tasks.py
|
# coding=utf-8
import logging
import unittest
from django import test
from timeout_decorator import timeout_decorator
from realtime.app_settings import LOGGER_NAME
from realtime.tasks import check_realtime_broker
from realtime.tasks.realtime.celery_app import app as realtime_app
from realtime.utils import celery_worker_connected
__author__ = 'Rizky Maulana Nugraha <[email protected]>'
__date__ = '12/4/15'
LOGGER = logging.getLogger(LOGGER_NAME)
# minutes test timeout
LOCAL_TIMEOUT = 10 * 60
class CeleryTaskTest(test.SimpleTestCase):
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-realtime'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = check_realtime_broker.delay()
self.assertTrue(result.get())
|
# coding=utf-8
import logging
import unittest
from django import test
from timeout_decorator import timeout_decorator
from realtime.app_settings import LOGGER_NAME
from realtime.tasks import check_realtime_broker, \
retrieve_felt_earthquake_list
from realtime.tasks.realtime.celery_app import app as realtime_app
from realtime.utils import celery_worker_connected
__author__ = 'Rizky Maulana Nugraha <[email protected]>'
__date__ = '12/4/15'
LOGGER = logging.getLogger(LOGGER_NAME)
# minutes test timeout
LOCAL_TIMEOUT = 10 * 60
class CeleryTaskTest(test.SimpleTestCase):
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-realtime'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = check_realtime_broker.delay()
self.assertTrue(result.get())
@timeout_decorator.timeout(LOCAL_TIMEOUT)
@unittest.skipUnless(
celery_worker_connected(realtime_app, 'inasafe-django'),
'Realtime Worker needs to be run')
def test_indicator(self):
"""Test broker connection."""
result = retrieve_felt_earthquake_list.delay()
self.assertTrue(result.get())
|
Add unittests for BMKG EQ List Scrapper
|
Add unittests for BMKG EQ List Scrapper
|
Python
|
bsd-2-clause
|
AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django
|
38ce0d6b0433a68787c18691407c815d4eb1fdb2
|
txscrypt/__init__.py
|
txscrypt/__init__.py
|
"""
A Twisted-friendly wrapper for scrypt.
"""
from txscrypt.wrapper import computeKey, verifyPassword
from txscrypt._version import __version__
__all__ = ["computeKey", "verifyPassword"]
|
"""
A Twisted-friendly wrapper for scrypt.
"""
from txscrypt.wrapper import checkPassword, computeKey
from txscrypt._version import __version__
__all__ = ["verifyPassword", "computeKey"]
|
Make checkPassword the only public API, remove verifyPassword
|
Make checkPassword the only public API, remove verifyPassword
|
Python
|
isc
|
lvh/txscrypt
|
f360c61cbe0a895ca3d8efe5be97f08ea7ff5682
|
packages/vic/git/__init__.py
|
packages/vic/git/__init__.py
|
from mykde import Action
class Action(Action):
name = 'git'
description = "Git with helper programs and custom settings"
packages = ['git', 'gitk', 'giggle']
def proceed(self):
# useful aliases
self.call('git config --global alias.ci "commit -a"')
self.call('git config --global alias.co checkout')
self.call('git config --global alias.st status')
self.call('git config --global alias.br branch')
# push only current branch
self.call('git config --global push.default current')
# colorize UI
self.call('git config --global color.ui true')
|
from mykde import Action
class Action(Action):
name = 'git'
description = "Git with helper programs and custom settings"
packages = ['git', 'gitk', 'giggle']
def proceed(self):
# useful aliases
self.call('git config --global alias.ci "commit -a"')
self.call('git config --global alias.co checkout')
self.call('git config --global alias.st status')
self.call('git config --global alias.br branch')
# push only current branch
self.call('git config --global push.default current')
# colorize UI
self.call('git config --global color.ui true')
# do not call pager for content less than one page
self.call('git config --global --add core.pager "less -F -X"')
|
Add one more default option for git.
|
Add one more default option for git.
|
Python
|
bsd-3-clause
|
warvariuc/mykde,warvariuc/mykde
|
b2f1f97000c8d3479e1df6778f0cc85ec0680571
|
garden-watering01/mybuddy.py
|
garden-watering01/mybuddy.py
|
import machine
def setntptime(maxretries=10):
# ntptime is a helper module which gets packaged into the firmware
# Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
import ntptime
for i in range (maxretries):
try:
ntptime.settime()
break
except:
if i+1 == maxretries:
raise
def deepsleep(sleeptime=15*60*1000):
# configure RTC.ALARM0 to be able to wake the device
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
# set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
rtc.alarm(rtc.ALARM0, sleeptime)
#Make sure you have GPIO16 connected RST to wake from deepSleep.
# put the device to sleep
print ("Going into Sleep now")
machine.deepsleep()
|
import machine
def have_internet():
import urequests
try:
resp = urequests.request("HEAD", "http://jsonip.com/")
return True
except:
return False
def setntptime(maxretries=10):
# ntptime is a helper module which gets packaged into the firmware
# Check https://raw.githubusercontent.com/micropython/micropython/master/esp8266/scripts/ntptime.py
import ntptime
for i in range (maxretries):
try:
ntptime.settime()
break
except:
if i+1 == maxretries:
raise
def deepsleep(sleeptime=15*60*1000):
# configure RTC.ALARM0 to be able to wake the device
rtc = machine.RTC()
rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
# set RTC.ALARM0 to fire after some time. Time is given in milliseconds here
rtc.alarm(rtc.ALARM0, sleeptime)
#Make sure you have GPIO16 connected RST to wake from deepSleep.
# put the device to sleep
print ("Going into Sleep now")
machine.deepsleep()
|
Add a function to check status of internet connectivity
|
Add a function to check status of internet connectivity
|
Python
|
mit
|
fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout
|
66d159fb1800b5e55c057316ac53fb62f1eb6b6e
|
metakernel/utils/kernel.py
|
metakernel/utils/kernel.py
|
import pkgutil
import os
def install_kernel_resources(destination):
"""
Copy the resource files to the kernelspec folder.
"""
for filename in ["logo-64x64.png", "logo-32x32.png"]:
data = pkgutil.get_data("metakernel", filename)
with open(os.path.join(destination, filename), "w") as fp:
fp.write(data)
|
import pkgutil
import os
def install_kernel_resources(destination):
"""
Copy the resource files to the kernelspec folder.
"""
for filename in ["logo-64x64.png", "logo-32x32.png"]:
data = pkgutil.get_data("metakernel", filename)
with open(os.path.join(destination, filename), "wb") as fp:
fp.write(data)
|
Write bytes; make Travis happy
|
Write bytes; make Travis happy
|
Python
|
bsd-3-clause
|
Calysto/metakernel
|
8db3ee0d6b73b864a91cd3617342138f05175d9d
|
accounts/models.py
|
accounts/models.py
|
# coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
"""
A user account. Used to store any information related to users.
"""
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
|
# coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
"""
A user account. Used to store any information related to users.
"""
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
def __unicode__(self):
return u'{}'.format(self.user.username)
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
|
Add __unicode__ method to UserAccount model
|
Add __unicode__ method to UserAccount model
|
Python
|
agpl-3.0
|
coders4help/volunteer_planner,alper/volunteer_planner,klinger/volunteer_planner,pitpalme/volunteer_planner,volunteer-planner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,flindenberg/volunteer_planner,klinger/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,coders4help/volunteer_planner,flindenberg/volunteer_planner,volunteer-planner/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,volunteer-planner/volunteer_planner,klinger/volunteer_planner,flindenberg/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner
|
ec1a25c541770a82953c743f13d525a447f3bd2d
|
syntacticframes_project/syntacticframes/management/commands/update_members_and_translations.py
|
syntacticframes_project/syntacticframes/management/commands/update_members_and_translations.py
|
"""
Updates members and translations for all classes
When LVF and LADL mappings change, everything under this change could change.
When a frameset is hidden or shown, everything in that class could change.
When the algorithm changes, everything in VerbeNet could change.
This command ensures that after an algorithmic change, everything is
consistent.
"""
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
|
"""
Updates members and translations for all classes
When LVF and LADL mappings change, everything under this change could change.
When a frameset is hidden or shown, everything in that class could change.
When the algorithm changes, everything in VerbeNet could change.
This command ensures that after an algorithmic change, everything is
consistent.
"""
from time import gmtime, strftime
from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
class Command(BaseCommand):
def handle(self, *args, **options):
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Start full update of verbs (members and translations)".format(when))
for vn_class in VerbNetClass.objects.all():
print(vn_class.name)
vn_class.update_members_and_translations()
when = strftime("%d/%m/%Y %H:%M:%S", gmtime())
verb_logger.info("{}: Ended full update of verbs (members and translations)".format(when))
|
Include time of update start/end
|
Include time of update start/end
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
743f4affcd89aa3d9fd37774e2e5f8e05525cb04
|
api/sync_wallet.py
|
api/sync_wallet.py
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
response_data = syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK', 'data': response_data }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
if not request_dict.has_key('type'):
return (None, 'No field type in response dict '+str(request_dict))
req_type = request_dict['type'][0].upper()
if req_type == "SYNCWALLET":
syncWallets(request_dict['masterWallets'][0])
else:
return (None, req_type + ' is not supported')
response = { 'status': 'OK' }
return (json.dumps(response), None)
def syncWallets(master_wallets_json):
master_wallets = json.loads(master_wallets_json)
for wallet in master_wallets:
uuid = wallet['uuid']
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet, f)
return "OK"
def sync_wallet_handler(environ, start_response):
return general_handler(environ, start_response, sync_wallet_response)
|
Clean up return value for API
|
Clean up return value for API
|
Python
|
agpl-3.0
|
ripper234/omniwallet,maran/omniwallet,maran/omniwallet,Nevtep/omniwallet,FuzzyBearBTC/omniwallet,FuzzyBearBTC/omniwallet,achamely/omniwallet,curtislacy/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,ripper234/omniwallet,habibmasuro/omniwallet,ripper234/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,curtislacy/omniwallet,OmniLayer/omniwallet,dexX7/omniwallet,arowser/omniwallet,habibmasuro/omniwallet,dexX7/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,arowser/omniwallet,achamely/omniwallet,FuzzyBearBTC/omniwallet,maran/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,achamely/omniwallet,arowser/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,dexX7/omniwallet,curtislacy/omniwallet
|
d81fe16eda36d3a5fa23d163de27bd46f84c4815
|
app.py
|
app.py
|
from flask import Flask, render_template
import os
app = Flask(__name__)
@app.route('/')
def webprint():
return(render_template('index.html'))
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
from flask import Flask, render_template
import os
app = Flask(__name__)
@app.route('/')
def webprint():
return 'Hello world!'
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Return text message on /
|
Return text message on /
|
Python
|
mit
|
fablabjoinville/groselha,fablabjoinville/groselha,fablabjoinville/groselha,fablabjoinville/groselha
|
cfcee83354f4917e719c3ef4236a2644dc98e153
|
ophyd/__init__.py
|
ophyd/__init__.py
|
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from . import *
# Signals
from .signal import (Signal, EpicsSignal, EpicsSignalRO)
# Positioners
from .positioner import Positioner
from .epics_motor import EpicsMotor
from .pv_positioner import (PVPositioner, PVPositionerPC)
from .pseudopos import (PseudoPositioner, PseudoSingle)
# Devices
from .scaler import EpicsScaler
from .device import (Device, Component, DynamicDeviceComponent)
from .mca import EpicsMCA, EpicsDXP
# Areadetector-related
from .areadetector import *
from ._version import get_versions
from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos,
log_pos_diff, log_pos_mov)
__version__ = get_versions()['version']
del get_versions
|
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
from . import *
# Signals
from .signal import (Signal, EpicsSignal, EpicsSignalRO)
# Positioners
from .positioner import Positioner
from .epics_motor import EpicsMotor
from .pv_positioner import (PVPositioner, PVPositionerPC)
from .pseudopos import (PseudoPositioner, PseudoSingle)
# Devices
from .scaler import EpicsScaler
from .device import (Device, Component, DynamicDeviceComponent)
from .ophydobj import StatusBase
from .mca import EpicsMCA, EpicsDXP
# Areadetector-related
from .areadetector import *
from ._version import get_versions
from .commands import (mov, movr, set_pos, wh_pos, set_lm, log_pos,
log_pos_diff, log_pos_mov)
__version__ = get_versions()['version']
del get_versions
|
Add StatusBase to top-level API.
|
MNT: Add StatusBase to top-level API.
|
Python
|
bsd-3-clause
|
dchabot/ophyd,dchabot/ophyd
|
17c90fd954441c2623495e50a2f89790e1ff5489
|
projects/tests/test_tools.py
|
projects/tests/test_tools.py
|
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.assertIsNone(self.mixin.check_can_access(
MagicMock(user=self.user),
))
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
with self.assertRaises(PermissionDenied):
self.mixin.check_can_access(MagicMock(user=self.user))
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
import sure
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.mixin.check_can_access(
MagicMock(user=self.user),
).should.be.none
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
self.mixin.check_can_access.when\
.called_with(MagicMock(user=self.user))\
.should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
Use sure in project tools cases
|
Use sure in project tools cases
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
db08c5ae962c2e66c8ad2e668f530d08934200af
|
geometry.py
|
geometry.py
|
from geom2d import *
l1 = []
for i in range(-5, 6):
l1.append(Point(i, i*i))
l2 = []
for el in l1:
l2.append(Point(el.x, -el.y))
print(l1)
print(l2)
# List comprehension
l1c = [Point(i, i*i) for i in range(-5, 6)]
l2c = [Point(el.x, -el.y) for el in l1c]
print("List comprehension")
print(l1c)
print(l2c)
|
from geom2d import *
l1 = list(map(lambda i: Point(i, i*i), range(-5, 6)))
# l2 = list(map(lambda p: Point(p.x, -p.y), l1))
# l2 = list(filter(lambda p: p.x > 0, l1))
l2 = list(filter(lambda p: p.x % 2 == 0, l1))
print(l1)
print(l2)
|
Work with lists in functional way (map, filter)
|
Work with lists in functional way (map, filter)
|
Python
|
apache-2.0
|
maciekp85/python-for-testers
|
98870a8dc7fe51936fcdcdceef3484fa28947eaf
|
get_data.py
|
get_data.py
|
#!/usr/bin/env python
from rajab_roza import RajabRoza
lat = 51.0 + 32.0/60.0
lng = -22.0/60.0
start_year = 1400
end_year = 1500
filename = "london-durations.yml"
if __name__ == '__main__':
rajab_roza = RajabRoza(lat, lng, start_year, end_year)
rajab_roza.get_roza_durations()
rajab_roza.save_to_yaml(filename)
|
#!/usr/bin/env python
from rajab_roza import RajabRoza
lat = 51.0 + 32.0/60.0
lng = -22.0/60.0
start_year = 1400
end_year = 1500
filename = "data/london-durations.yml"
if __name__ == '__main__':
rajab_roza = RajabRoza(lat, lng, start_year, end_year)
rajab_roza.get_roza_durations()
rajab_roza.save_to_yaml(filename)
|
Correct output filename for data.
|
Correct output filename for data.
|
Python
|
mit
|
mygulamali/rajab_roza
|
0636d474764c1dd6f795ebf5c4f73e2a101ae023
|
correlations/server.py
|
correlations/server.py
|
#!/usr/bin/python3
from flask import Flask, jsonify, request
from funds_correlations import correlations, parse_performances_from_dict
import traceback
app = Flask(__name__)
@app.route("/correlations", methods=['POST'])
def correlation_api():
req_json = request.get_json()
perf_list = parse_performances_from_dict(req_json)
if len(perf_list) < 2:
return jsonify({
'error': 'not enough valid data'
}), 400
try:
corr, min_size, limiting = correlations(perf_list)
except Exception:
traceback.print_exc()
return jsonify({
'error': 'Internal error'
}), 500
data = {
'correlations': corr,
'min_size': min_size,
'limiting': limiting
}
return jsonify(data)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
#!/usr/bin/python3
from flask import Flask, jsonify, request
from funds_correlations import correlations, parse_performances_from_dict
import traceback
app = Flask(__name__)
@app.route("/correlations", methods=['POST'])
def correlation_api():
try:
req_json = request.get_json()
valid_input = True
perf_list = []
if req_json:
perf_list = parse_performances_from_dict(req_json)
if len(perf_list) < 2:
valid_input = False
else:
valid_input = False
if not valid_input:
return jsonify({
'error': 'not enough valid data'
}), 400
corr, min_size, limiting = correlations(perf_list)
data = {
'correlations': corr,
'min_size': min_size,
'limiting': limiting
}
return jsonify(data)
except Exception:
traceback.print_exc()
return jsonify({
'error': 'Internal error'
}), 500
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
Improve API robustness Case where no JSON is sent
|
Improve API robustness
Case where no JSON is sent
|
Python
|
apache-2.0
|
egenerat/portfolio,egenerat/portfolio,egenerat/portfolio
|
2812f11bdc86495dd9ef62b4b45d90335bcbda7d
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
try:
from django import setup
except ImportError:
def setup():
pass
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests():
setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
try:
from django import setup
except ImportError:
def setup():
pass
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests():
setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Set middleware classes to suppress warning on 1.7+
|
Set middleware classes to suppress warning on 1.7+
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable
|
aa89bed3502e4a94ab41005dd9265bfee58fd784
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
from django.core.management import call_command
if __name__ == '__main__':
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
if hasattr(django, 'setup'):
django.setup()
call_command('test', nomigrations=True)
|
#!/usr/bin/env python
import os
from django.core.management import execute_from_command_line
if __name__ == '__main__':
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
execute_from_command_line(['manage.py', 'test', '--nomigrations'])
|
Use a higher level command line api
|
Use a higher level command line api
|
Python
|
mit
|
henriquebastos/django-test-without-migrations,henriquebastos/django-test-without-migrations
|
e98201ae47f3af4fe8756c69464986dc524206e5
|
corehq/apps/hqwebapp/management/commands/list_waf_allow_patterns.py
|
corehq/apps/hqwebapp/management/commands/list_waf_allow_patterns.py
|
import re
from django.core.management import BaseCommand
from django.urls import get_resolver
from corehq.apps.hqwebapp.decorators import waf_allow
class Command(BaseCommand):
def handle(self, *args, **options):
resolver = get_resolver()
for kind, views in waf_allow.views.items():
print(kind)
print('--------')
patterns = []
for view in views:
if isinstance(view, str):
# waf_allow(kind, hard_code_pattern=r'^/url/pattern/$')
patterns.append(view)
else:
# @waf_allow(kind)
for urlmatch in resolver.reverse_dict.getlist(view):
patterns.append(resolver.regex.pattern + urlmatch[1])
patterns = sorted(_remove_regex_groups(pattern) for pattern in patterns)
for pattern in patterns:
print(pattern)
def _remove_regex_groups(regex_string):
return re.sub(r'\?P<[^>]+>', '', regex_string)
|
import re
from django.core.management import BaseCommand
from django.urls import get_resolver
from corehq.apps.hqwebapp.decorators import waf_allow
class Command(BaseCommand):
def handle(self, *args, **options):
resolver = get_resolver()
for kind, views in waf_allow.views.items():
print(kind)
print('--------')
patterns = []
for view in views:
if isinstance(view, str):
# waf_allow(kind, hard_code_pattern=r'^/url/pattern/$')
patterns.append(view)
else:
# @waf_allow(kind)
for urlmatch in resolver.reverse_dict.getlist(view):
patterns.append(str(resolver.pattern) + urlmatch[1])
patterns = sorted(_remove_regex_groups(pattern) for pattern in patterns)
for pattern in patterns:
print(pattern)
def _remove_regex_groups(regex_string):
return re.sub(r'\?P<[^>]+>', '', regex_string)
|
Fix issue: 'URLResolver' object has no attribute 'regex'
|
Fix issue: 'URLResolver' object has no attribute 'regex'
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
04d85784eeeb619e0e273aa0ffb41f12ffeada43
|
ureport/polls/migrations/0051_auto_20180316_0912.py
|
ureport/polls/migrations/0051_auto_20180316_0912.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 09:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('polls', '0050_auto_20170615_1455'),
]
def populate_default_backend(apps, schema_editor):
PollResult = apps.get_model("polls", "PollResult")
PollResult.objects.all().update(backend='rapidpro')
operations = [
migrations.AddField(
model_name='poll',
name='backend',
field=models.CharField(default='rapidpro', max_length=16),
),
migrations.AddField(
model_name='pollresult',
name='backend',
field=models.CharField(null=True, max_length=16),
),
migrations.RunPython(populate_default_backend),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 09:12
from __future__ import unicode_literals
from django.db import migrations, models
from ureport.utils import chunk_list
class Migration(migrations.Migration):
dependencies = [
('polls', '0050_auto_20170615_1455'),
]
def populate_default_backend(apps, schema_editor):
PollResult = apps.get_model("polls", "PollResult")
result_ids = PollResult.objects.all().values_list('id', flat=True)
start = time.time()
i = 0
for res_id_batch in chunk_list(result_ids, 1000):
PollResult.objects.filter(id__in=res_id_batch).update(backend='rapidpro')
i += len(res_id_batch)
print "Processed poll results update %d in %ds" % (i, time.time() - start)
operations = [
migrations.AddField(
model_name='poll',
name='backend',
field=models.CharField(default='rapidpro', max_length=16),
),
migrations.AddField(
model_name='pollresult',
name='backend',
field=models.CharField(null=True, max_length=16),
),
migrations.RunPython(populate_default_backend),
]
|
Update pull results default value in batches
|
Update pull results default value in batches
|
Python
|
agpl-3.0
|
rapidpro/ureport,Ilhasoft/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport
|
606feda80b4631f9079021214c7b6078beb9a3f4
|
api/v2/views/maintenance_record.py
|
api/v2/views/maintenance_record.py
|
import django_filters
from rest_framework import filters
from rest_framework.serializers import ValidationError
from core.models import AtmosphereUser, MaintenanceRecord
from core.query import only_current
from api.permissions import CanEditOrReadOnly
from api.v2.serializers.details import MaintenanceRecordSerializer
from api.v2.views.base import AuthOptionalViewSet
class MaintenanceRecordFilterBackend(filters.BaseFilterBackend):
"""
Filter MaintenanceRecords using the request_user and 'query_params'
"""
def filter_queryset(self, request, queryset, view):
request_params = request.query_params
active = request_params.get('active')
if isinstance(active, basestring) and active.lower() == 'true'\
or isinstance(active, bool) and active:
queryset = MaintenanceRecord.active()
return queryset
class MaintenanceRecordViewSet(AuthOptionalViewSet):
"""
API endpoint that allows records to be viewed or edited.
"""
http_method_names = ['get', 'post', 'put', 'patch', 'head', 'options', 'trace']
queryset = MaintenanceRecord.objects.order_by('-start_date')
permission_classes = (CanEditOrReadOnly,)
serializer_class = MaintenanceRecordSerializer
filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
|
import django_filters
from rest_framework import filters
from rest_framework.serializers import ValidationError
from core.models import AtmosphereUser, MaintenanceRecord
from core.query import only_current
from api.permissions import CanEditOrReadOnly
from api.v2.serializers.details import MaintenanceRecordSerializer
from api.v2.views.base import AuthOptionalViewSet
class MaintenanceRecordFilterBackend(filters.BaseFilterBackend):
"""
Filter MaintenanceRecords using the request_user and 'query_params'
"""
def filter_queryset(self, request, queryset, view):
request_params = request.query_params
active = request_params.get('active')
if isinstance(active, basestring) and active.lower() == 'true'\
or isinstance(active, bool) and active:
queryset = MaintenanceRecord.active()
return queryset
class MaintenanceRecordViewSet(AuthOptionalViewSet):
"""
API endpoint that allows records to be viewed or edited.
"""
http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace']
queryset = MaintenanceRecord.objects.order_by('-start_date')
permission_classes = (CanEditOrReadOnly,)
serializer_class = MaintenanceRecordSerializer
filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
|
Add 'DELETE' operation to Maintenance Record
|
[ATMO-1201] Add 'DELETE' operation to Maintenance Record
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
65abd52d1bfd54097ca6bd01b1924e6ffcad8840
|
pytablewriter/_csv_writer.py
|
pytablewriter/_csv_writer.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import dataproperty
from ._text_writer import TextTableWriter
class CsvTableWriter(TextTableWriter):
"""
Concrete class of a table writer for CSV format.
:Examples:
:ref:`example-csv-table-writer`
"""
@property
def support_split_write(self):
return True
def __init__(self):
super(CsvTableWriter, self).__init__()
self.indent_string = u""
self.column_delimiter = u","
self.is_padding = False
self.is_write_header_separator_row = False
def _verify_header(self):
pass
def _write_header(self):
if dataproperty.is_empty_list_or_tuple(self.header_list):
return
super(CsvTableWriter, self)._write_header()
def _get_opening_row_item_list(self):
return []
def _get_value_row_separator_item_list(self):
return []
def _get_closing_row_item_list(self):
return []
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import dataproperty
from ._text_writer import TextTableWriter
class CsvTableWriter(TextTableWriter):
"""
Concrete class of a table writer for CSV format.
:Examples:
:ref:`example-csv-table-writer`
"""
@property
def support_split_write(self):
return True
def __init__(self):
super(CsvTableWriter, self).__init__()
self.indent_string = u""
self.column_delimiter = u","
self.is_padding = False
self.is_write_header_separator_row = False
def _write_header(self):
if dataproperty.is_empty_list_or_tuple(self.header_list):
return
super(CsvTableWriter, self)._write_header()
def _get_opening_row_item_list(self):
return []
def _get_value_row_separator_item_list(self):
return []
def _get_closing_row_item_list(self):
return []
|
Delete redundant lines of code
|
Delete redundant lines of code
|
Python
|
mit
|
thombashi/pytablewriter
|
a990362d80980b90b04907c0e7717a55c421bf9d
|
quran_tafseer/serializers.py
|
quran_tafseer/serializers.py
|
from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
class Meta:
model = Tafseer
fields = ['id', 'name']
class TafseerTextSerializer(serializers.ModelSerializer):
tafseer_id = serializers.IntegerField(source='tafseer.id')
tafseer_name = serializers.CharField(source='tafseer.name')
ayah_url = serializers.SerializerMethodField()
ayah_number = serializers.IntegerField(source='ayah.pk')
def get_ayah_url(self, obj):
return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
'sura_num': obj.ayah.sura.pk})
class Meta:
model = TafseerText
fields = ['tafseer_id', 'tafseer_name', 'ayah_url',
'ayah_number', 'text']
|
from django.urls import reverse
from rest_framework import serializers
from .models import Tafseer, TafseerText
class TafseerSerializer(serializers.ModelSerializer):
    """Serialize a Tafseer (exegesis book) as id + name."""

    class Meta:
        model = Tafseer
        fields = ['id', 'name']


class TafseerTextSerializer(serializers.ModelSerializer):
    """Serialize one ayah's tafseer text together with its source book."""

    tafseer_id = serializers.IntegerField(source='tafseer.id')
    tafseer_name = serializers.CharField(source='tafseer.name')
    ayah_url = serializers.SerializerMethodField()
    # The ayah's in-sura number (not its database pk).
    ayah_number = serializers.IntegerField(source='ayah.number')

    def get_ayah_url(self, obj):
        # Canonical URL of the ayah, built from its sura and number.
        return reverse('ayah-detail', kwargs={'number': obj.ayah.number,
                                              'sura_num': obj.ayah.sura.pk})

    class Meta:
        model = TafseerText
        fields = ['tafseer_id', 'tafseer_name', 'ayah_url',
                  'ayah_number', 'text']
|
Return Ayah's pk instead of number
|
[FIX] Return Ayah's pk instead of number
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
cdb55b385074d50a98f87027fd46021d663f9df8
|
bin/commands/utils/messages.py
|
bin/commands/utils/messages.py
|
from __future__ import print_function
import sys
def error(message, exit=True):
    """Report an error on stderr; terminate with status 1 unless exit=False."""
    assert isinstance(message, str), "message must be a str"
    assert isinstance(exit, bool), "exit must be a bool"

    sys.stderr.write('error: {}\n'.format(message))
    if exit:
        sys.exit(1)
def info(message, quiet=False):
    """Print an informational message; suppressed when quiet is truthy."""
    if quiet:
        return
    print(message)
|
from __future__ import print_function
import sys
def error(message, exit=True):
    """Write an error message to stderr and, by default, exit(1)."""
    assert isinstance(message, str), "message must be a str"
    assert isinstance(exit, bool), "exit must be a bool"

    sys.stderr.write('error: {}\n'.format(message))
    if not exit:
        return
    sys.exit(1)
def warn(message):
    """Emit a warning-prefixed info message."""
    info('warn: {}'.format(message))


def usage(message):
    """Emit a usage-prefixed info message."""
    info('usage: {}'.format(message))


def info(message, quiet=False):
    """Print an informational message; suppressed when quiet is truthy."""
    if quiet:
        return
    print(message)
|
Add warn and usage message options
|
Add warn and usage message options
|
Python
|
mit
|
Brickstertwo/git-commands
|
3cc1cb9894fdb1b88a84ad8315669ad2f0858fdb
|
cloud_logging.py
|
cloud_logging.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.cloud.logging as glog
import logging
import contextlib
import io
import sys
import os
LOGGING_PROJECT = os.environ.get('LOGGING_PROJECT', '')


def configure(project=LOGGING_PROJECT):
    """Set up Google Cloud logging for `project`.

    Writes a diagnostic to stderr and leaves cloud logging disabled when
    no project is configured or the client cannot be created.
    """
    if not project:
        # Diagnostics go to stderr so stdout stays clean for program output.
        sys.stderr.write('!! Error: The $LOGGING_PROJECT enviroment '
                         'variable is required in order to set up cloud logging. '
                         'Cloud logging is disabled.\n')
        return

    logging.basicConfig(level=logging.INFO)
    try:
        # if this fails, redirect stderr to /dev/null so no startup spam.
        with contextlib.redirect_stderr(io.StringIO()):
            client = glog.Client(project)
            client.setup_logging(logging.INFO)
    except Exception:
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        sys.stderr.write('!! Cloud logging disabled\n')
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.cloud.logging as glog
import logging
import contextlib
import io
import sys
import os
LOGGING_PROJECT = os.environ.get('LOGGING_PROJECT', '')


def configure(project=LOGGING_PROJECT):
    """Set up Google Cloud logging for `project`.

    Writes a diagnostic to stderr and leaves cloud logging disabled when
    no project is configured or the client cannot be created.
    """
    if not project:
        sys.stderr.write('!! Error: The $LOGGING_PROJECT enviroment '
                         'variable is required in order to set up cloud logging. '
                         'Cloud logging is disabled.\n')
        return

    logging.basicConfig(level=logging.INFO)
    try:
        # if this fails, redirect stderr to /dev/null so no startup spam.
        with contextlib.redirect_stderr(io.StringIO()):
            client = glog.Client(project)
            client.setup_logging(logging.INFO)
    except Exception:
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        sys.stderr.write('!! Cloud logging disabled\n')
|
Change some errors to go to stderr.
|
Change some errors to go to stderr.
These non-fatal errors violated GTP protocol.
|
Python
|
apache-2.0
|
tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo,tensorflow/minigo
|
6d13b3b041e3e6cd6089814ad3276a905aa10bc3
|
troposphere/fms.py
|
troposphere/fms.py
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
    """FMS Policy Include/Exclude map (account / organizational-unit scoping)."""

    props = {
        'ACCOUNT': ([basestring], False),
        # Added per the 2020-06-18 AWS::FMS::Policy update (OU scoping).
        'ORGUNIT': ([basestring], False),
    }
class Policy(AWSObject):
    """AWS::FMS::Policy — Firewall Manager policy."""

    resource_type = "AWS::FMS::Policy"

    props = {
        'DeleteAllPolicyResources': (boolean, False),
        'ExcludeMap': (IEMap, False),
        'ExcludeResourceTags': (boolean, True),
        'IncludeMap': (IEMap, False),
        'PolicyName': (basestring, True),
        'RemediationEnabled': (boolean, True),
        'ResourceTags': (Tags, False),
        'ResourceType': (basestring, True),
        'ResourceTypeList': ([basestring], True),
        # Arbitrary JSON payload; validated only as parseable JSON.
        'SecurityServicePolicyData': (json_checker, True),
        'Tags': (Tags, False),
    }


class NotificationChannel(AWSObject):
    """AWS::FMS::NotificationChannel — SNS channel for FMS notifications."""

    resource_type = "AWS::FMS::NotificationChannel"

    props = {
        'SnsRoleName': (basestring, True),
        'SnsTopicArn': (basestring, True),
    }
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
    """FMS Policy Include/Exclude map (account / organizational-unit scoping)."""

    props = {
        'ACCOUNT': ([basestring], False),
        'ORGUNIT': ([basestring], False),
    }


class Policy(AWSObject):
    """AWS::FMS::Policy — Firewall Manager policy."""

    resource_type = "AWS::FMS::Policy"

    props = {
        'DeleteAllPolicyResources': (boolean, False),
        'ExcludeMap': (IEMap, False),
        'ExcludeResourceTags': (boolean, True),
        'IncludeMap': (IEMap, False),
        'PolicyName': (basestring, True),
        'RemediationEnabled': (boolean, True),
        'ResourceTags': (Tags, False),
        'ResourceType': (basestring, True),
        'ResourceTypeList': ([basestring], True),
        # Arbitrary JSON payload; validated only as parseable JSON.
        'SecurityServicePolicyData': (json_checker, True),
        'Tags': (Tags, False),
    }


class NotificationChannel(AWSObject):
    """AWS::FMS::NotificationChannel — SNS channel for FMS notifications."""

    resource_type = "AWS::FMS::NotificationChannel"

    props = {
        'SnsRoleName': (basestring, True),
        'SnsTopicArn': (basestring, True),
    }
|
Update AWS::FMS::Policy per 2020-06-18 changes
|
Update AWS::FMS::Policy per 2020-06-18 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
fe4cc596e65f6dc5ec7f99d40f7346143b695633
|
slackbot.py
|
slackbot.py
|
#! /usr/bin/env python2.7
import requests
class Slackbot(object):
    """Minimal client for posting messages via Slack's legacy slackbot hook."""

    def __init__(self, slack_name, token):
        self.slack_name = slack_name
        self.token = token
        assert self.token, "Token should not be blank"
        self.url = self.sb_url()

    def sb_url(self):
        """Return the team-specific slackbot webhook endpoint."""
        url = "https://{}.slack.com/".format(self.slack_name)
        url += "services/hooks/slackbot"
        return url

    def say(self, channel, statement):
        """
        channel should not be preceded with '#'
        """
        assert channel  # not blank
        if channel[0] == '#':
            channel = channel[1:]
        nurl = self.url + "?token={}&channel=%23{}".format(self.token, channel)
        # Encode explicitly: posting a unicode body triggers an implicit
        # ascii encode and blows up on non-ASCII text.
        p = requests.post(nurl, data=statement.encode('utf-8'))
        return p.status_code
|
#! /usr/bin/env python2.7
import requests
class Slackbot(object):
    """Tiny wrapper around Slack's legacy slackbot webhook."""

    def __init__(self, slack_name, token):
        self.slack_name = slack_name
        self.token = token
        assert self.token, "Token should not be blank"
        self.url = self.sb_url()

    def sb_url(self):
        """Return the team-specific slackbot webhook endpoint."""
        return "https://{}.slack.com/services/hooks/slackbot".format(
            self.slack_name)

    def say(self, channel, statement):
        """
        channel should not be preceded with '#'
        """
        assert channel  # not blank
        if channel.startswith('#'):
            channel = channel[1:]
        target = "{}?token={}&channel=%23{}".format(
            self.url, self.token, channel)
        return requests.post(target, data=statement.encode('utf-8')).status_code
|
Fix unicode encoding of Slack message posts
|
Fix unicode encoding of Slack message posts
|
Python
|
apache-2.0
|
TheConnMan/destalinator,royrapoport/destalinator,randsleadershipslack/destalinator,royrapoport/destalinator,TheConnMan/destalinator,randsleadershipslack/destalinator,underarmour/destalinator
|
29c977a7f7293f1ce45f393a4c8464bbb9691f9e
|
linkedevents/urls.py
|
linkedevents/urls.py
|
from django.conf.urls import url, include
from django.views.generic import RedirectView
from .api import LinkedEventsAPIRouter
from django.contrib import admin
admin.autodiscover()
api_router = LinkedEventsAPIRouter()

urlpatterns = [
    # Versioned API endpoints.
    url(r'^(?P<version>(v0.1|v1))/', include(api_router.urls)),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('allauth.urls')),
    # Bare root redirects to the v1 API root (hard-coded absolute path).
    url(r'^$', RedirectView.as_view(url='/v1/', permanent=False)),
]
|
from django.core.urlresolvers import reverse
from django.conf.urls import url, include
from django.views.generic import RedirectView
from .api import LinkedEventsAPIRouter
from django.contrib import admin
admin.autodiscover()
api_router = LinkedEventsAPIRouter()


class RedirectToAPIRootView(RedirectView):
    """Redirect to the v1 API root via reverse(), honouring any URL prefix."""
    permanent = False

    def get_redirect_url(self, *args, **kwargs):
        return reverse('api-root', kwargs={'version': 'v1'})


urlpatterns = [
    # Versioned API endpoints.
    url(r'^(?P<version>(v0.1|v1))/', include(api_router.urls)),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('allauth.urls')),
    url(r'^$', RedirectToAPIRootView.as_view()),
]
|
Make redirect-to-API work even with URL prefix
|
Make redirect-to-API work even with URL prefix
|
Python
|
mit
|
aapris/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents
|
63ee144892ed0e740b87cb87895cf07d78b87d1f
|
lib/slack.py
|
lib/slack.py
|
from lib.config import Config
from slackclient import SlackClient
class Tubey():
    """Slack bot helper: caches a SlackClient and posts messages with it."""

    def __init__(self, **kwargs):
        ### Cache the client in memory ###
        self._client = None

    def get_client(self):
        ### Fetch a cached slack client or create one and return it ###
        if self._client is not None:
            return self._client
        token = Config.get_variable('tubey_credentials', 'bot_oauth_token')
        sc = SlackClient(token)
        self._client = sc
        return self._client

    def send_message(self, message):
        ### Sends message to the user/channel ###
        # NOTE(review): channel is hard-coded to 'tubeydev'.
        params = {'channel': 'tubeydev', 'text': message}
        #client = get_client(self)
        self.get_client().api_call("chat.postMessage", **params)
if __name__ == "__main__":
    # The guard body was entirely commented out, leaving an empty suite
    # (a SyntaxError); `pass` keeps the module importable.
    #tubey = Tubey()
    #tubey.send_message("This better work")
    # params = {'channel': 'tubeydev', 'text': "Hi everybody! I'm a faige!"}
    # client.api_call("chat.postMessage", **params)
    pass
|
from lib.config import Config
from slackclient import SlackClient
class Tubey():
    """Slack bot helper: lazily creates and caches a SlackClient."""

    def __init__(self, **kwargs):
        # cache the client in memory
        self._client = None

    def send_message(self, message):
        """Not implemented yet."""
        # `raise NotImplemented` raises TypeError (NotImplemented is a
        # sentinel value, not an exception); NotImplementedError is the
        # intended signal.
        raise NotImplementedError

    def get_client(self):
        """Fetch a cached slack client or create one and return it."""
        if self._client is not None:
            return self._client
        token = Config.get_variable('tubey_credentials', 'bot_oauth_token')
        sc = SlackClient(token)
        self._client = sc
        return self._client
if __name__ == "__main__":
    # The guard body was entirely commented out, leaving an empty suite
    # (a SyntaxError); `pass` keeps the module importable.
    # params = {'channel': 'tubeydev', 'text': "Hi everybody! I'm a faige!"}
    # client.api_call("chat.postMessage", **params)
    pass
|
Revert "Implement send message functionality"
|
Revert "Implement send message functionality"
|
Python
|
mit
|
ImShady/Tubey
|
6e1a1c2ac4eaa32e0af4fb9349844fe3ce7df7c0
|
matching/__init__.py
|
matching/__init__.py
|
import os
import logging
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from .health import Health
app = Flask(__name__)
# Settings module path is supplied via the SETTINGS environment variable.
app.config.from_object(os.environ.get('SETTINGS'))

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())

app.logger.debug("\nConfiguration\n%s\n" % app.config)
def health(self):
    """Database connectivity probe; returns (ok, 'DB').

    Monkey-patched onto SQLAlchemy below so the extension instance can
    report its own status to the Health check.
    """
    try:
        with self.engine.connect() as c:
            c.execute('select 1=1').fetchall()
        return True, 'DB'
    except Exception:
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any DB error means "unhealthy".
        return False, 'DB'
# Attach the probe to SQLAlchemy so `db.health` exists for the Health check.
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
import os
import logging
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from .health import Health
app = Flask(__name__)
# Settings module path is supplied via the SETTINGS environment variable.
app.config.from_object(os.environ.get('SETTINGS'))

# Runs behind a reverse proxy: trust X-Forwarded-* so generated URLs and
# remote addresses are correct.
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())

app.logger.debug("\nConfiguration\n%s\n" % app.config)
def health(self):
    """Database connectivity probe; returns (ok, 'DB').

    Monkey-patched onto SQLAlchemy below so the extension instance can
    report its own status to the Health check.
    """
    try:
        with self.engine.connect() as c:
            c.execute('select 1=1').fetchall()
        return True, 'DB'
    except Exception:
        # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any DB error means "unhealthy".
        return False, 'DB'
# Attach the probe to SQLAlchemy so `db.health` exists for the Health check.
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
Add proxy fix as in lr this will run with reverse proxy
|
Add proxy fix as in lr this will run with reverse proxy
|
Python
|
mit
|
LandRegistry-Attic/matching-alpha,LandRegistry-Attic/matching-alpha,LandRegistry-Attic/matching-alpha
|
4eab1fb42f58d6203a0862aa9caf304193d3442b
|
libcloud/common/maxihost.py
|
libcloud/common/maxihost.py
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
    # HTTP statuses treated as success.
    valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
                            httplib.NO_CONTENT]

    def parse_error(self):
        """Build an error string from the response body.

        Raises InvalidCredsError on 401 instead of returning.
        """
        if self.status == httplib.UNAUTHORIZED:
            body = self.parse_body()
            raise InvalidCredsError(body['message'])
        else:
            body = self.parse_body()
            if 'message' in body:
                error = '%s (code: %s)' % (body['message'], self.status)
            else:
                # No message field: surface the raw body.
                error = body
            return error

    def success(self):
        # True when the HTTP status is one of the accepted codes.
        return self.status in self.valid_response_codes


class MaxihostConnection(ConnectionKey):
    """
    Connection class for the Maxihost driver.
    """
    host = 'api.maxihost.com'
    responseCls = MaxihostResponse

    def add_default_headers(self, headers):
        """
        Add headers that are necessary for every request

        This method adds apikey to the request.
        """
        headers['Authorization'] = 'Bearer %s' % (self.key)
        headers['Content-Type'] = 'application/json'
        return headers
|
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.base import JsonResponse
from libcloud.common.base import ConnectionKey
class MaxihostResponse(JsonResponse):
    # HTTP statuses treated as success.
    valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
                            httplib.NO_CONTENT]

    def parse_error(self):
        """Build an error string from the response body.

        Raises InvalidCredsError on 401 instead of returning.
        """
        if self.status == httplib.UNAUTHORIZED:
            body = self.parse_body()
            raise InvalidCredsError(body['message'])
        else:
            body = self.parse_body()
            if 'message' in body:
                error = '%s (code: %s)' % (body['message'], self.status)
            else:
                # No message field: surface the raw body.
                error = body
            return error

    def success(self):
        # True when the HTTP status is one of the accepted codes.
        return self.status in self.valid_response_codes
class MaxihostConnection(ConnectionKey):
    """
    Connection class for the Maxihost driver.
    """
    host = 'api.maxihost.com'
    responseCls = MaxihostResponse

    def add_default_headers(self, headers):
        """
        Add headers that are necessary for every request

        This method adds apikey to the request, plus the Accept header
        selecting API version 1.1.
        """
        headers['Authorization'] = 'Bearer %s' % (self.key)
        headers['Content-Type'] = 'application/json'
        # Was `headers['Accept']: ...` — a no-op annotation statement, so
        # the version header was never actually set. Assign it instead.
        headers['Accept'] = 'application/vnd.maxihost.v1.1+json'
        return headers
|
Add Accept header to use version 1.1
|
Add Accept header to use version 1.1
|
Python
|
apache-2.0
|
ByteInternet/libcloud,andrewsomething/libcloud,ByteInternet/libcloud,Kami/libcloud,apache/libcloud,andrewsomething/libcloud,mistio/libcloud,Kami/libcloud,mistio/libcloud,andrewsomething/libcloud,apache/libcloud,apache/libcloud,ByteInternet/libcloud,Kami/libcloud,mistio/libcloud
|
6c4081f6c1fde79e9f14da98f0d670a8bdef8e13
|
pipeline/settings/production.py
|
pipeline/settings/production.py
|
from .base import *

# Production: debug must stay off.
DEBUG = False

try:
    from .local import *
except ImportError:
    # No machine-local overrides present.
    pass
|
from .base import *

# Production: debug must stay off.
DEBUG = False

# Don't phone home for Wagtail version checks in production.
WAGTAIL_ENABLE_UPDATE_CHECK = False

try:
    from .local import *
except ImportError:
    # No machine-local overrides present.
    pass
|
Disable Wagtail update check in prod
|
Disable Wagtail update check in prod
|
Python
|
mit
|
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
|
f576004e7d1352c7e8c1e203ae0a8b6769ce1b08
|
cla_backend/apps/core/views.py
|
cla_backend/apps/core/views.py
|
from django.views import defaults
from sentry_sdk import capture_message
def page_not_found(*args, **kwargs):
    """404 handler that reports the miss to Sentry before rendering."""
    capture_message("Page not found", level="error")
    return defaults.page_not_found(*args, **kwargs)
|
from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
    """404 handler that reports the miss to Sentry with request context."""
    with push_scope() as scope:
        # Tag + path make 404 events filterable/searchable in Sentry.
        scope.set_tag("type", "404")
        scope.set_extra("path", request.path)
        capture_message("Page not found", level="error")
    return defaults.page_not_found(request, *args, **kwargs)
|
Set some event data on 404 logging
|
Set some event data on 404 logging
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
55c7681304d66ad076372be7aa8f319baef153eb
|
polyaxon/runner/management/commands/clean_project_jobs.py
|
polyaxon/runner/management/commands/clean_project_jobs.py
|
from django.core.management import BaseCommand
from django.db import ProgrammingError
from django.db.models import Q
from projects.models import Project
from runner.schedulers import notebook_scheduler, tensorboard_scheduler
class Command(BaseCommand):
    """Stop running notebook/tensorboard jobs across projects."""

    @staticmethod
    def _clean():
        # NOTE(review): exclude(Q(tensorboard=None) | Q(notebook=None))
        # drops projects missing *either* relation — confirm intended filter.
        for project in Project.objects.exclude(Q(tensorboard=None) | Q(notebook=None)):
            if project.has_notebook:
                notebook_scheduler.stop_notebook(project, update_status=False)
            if project.has_tensorboard:
                tensorboard_scheduler.stop_tensorboard(project, update_status=False)

    def handle(self, *args, **options):
        try:
            self._clean()
        except ProgrammingError:
            # Tables may not exist yet (e.g. before migrations); ignore.
            pass
|
from django.core.management import BaseCommand
from django.db import ProgrammingError
from django.db.models import Q
from projects.models import Project
from runner.schedulers import notebook_scheduler, tensorboard_scheduler
class Command(BaseCommand):
    """Stop running notebook/tensorboard jobs across projects."""

    @staticmethod
    def _clean():
        # NOTE(review): exclude(... | ...) drops projects missing *either*
        # job relation — confirm intended filter.
        filters = Q(tensorboard_jobs=None) | Q(notebook_jobs=None)
        for project in Project.objects.exclude(filters):
            if project.has_notebook:
                notebook_scheduler.stop_notebook(project, update_status=False)
            if project.has_tensorboard:
                tensorboard_scheduler.stop_tensorboard(project, update_status=False)

    def handle(self, *args, **options):
        try:
            self._clean()
        except ProgrammingError:
            # Tables may not exist yet (e.g. before migrations); ignore.
            pass
|
Update clean project jobs command
|
Update clean project jobs command
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
1aa121daa3c99849173d5cd4c6a80d6bf94f5186
|
saleor/attribute/__init__.py
|
saleor/attribute/__init__.py
|
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    # (value, human-readable label) pairs for choice fields.
    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    """Whether an attribute attaches to product types or page types."""

    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type."""

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
|
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    # (value, human-readable label) pairs for choice fields.
    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    """Whether an attribute attaches to product types or page types."""

    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type.

    After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
    and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
    must be updated.
    """

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
|
Add info about required updates in AttributeEntityType
|
Add info about required updates in AttributeEntityType
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
ceb848021d5323b5bad8518ac7ed850a51fc89ca
|
raco/myrial/myrial_test.py
|
raco/myrial/myrial_test.py
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
    """Base fixture: parses and evaluates Myrial queries on a fake database."""

    def setUp(self):
        self.db = raco.fakedb.FakeDatabase()
        self.parser = parser.Parser()
        self.processor = interpreter.StatementProcessor(self.db)

    def execute_query(self, query, test_logical=False):
        '''Run a test query against the fake database'''
        statements = self.parser.parse(query)
        self.processor.evaluate(statements)
        # Evaluate either the logical or the physical plan.
        if test_logical:
            plan = self.processor.get_logical_plan()
        else:
            plan = self.processor.get_physical_plan()
        self.db.evaluate(plan)
        return self.db.get_temp_table('__OUTPUT0__')

    def run_test(self, query, expected, test_logical=False):
        '''Execute a test query with an expected output'''
        actual = self.execute_query(query, test_logical)
        self.assertEquals(actual, expected)
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
    """Base fixture: parses and evaluates Myrial queries on a fake database."""

    def setUp(self):
        self.db = raco.fakedb.FakeDatabase()
        self.parser = parser.Parser()
        self.processor = interpreter.StatementProcessor(self.db)

    def execute_query(self, query, test_logical=False):
        '''Run a test query against the fake database'''
        statements = self.parser.parse(query)
        self.processor.evaluate(statements)
        # Evaluate either the logical or the physical plan.
        if test_logical:
            plan = self.processor.get_logical_plan()
        else:
            plan = self.processor.get_physical_plan()
        # Smoke-test JSON compilation of the plan; result is unused.
        json = compile_to_json(query, '', [('A', plan)])
        self.db.evaluate(plan)
        return self.db.get_temp_table('__OUTPUT0__')

    def run_test(self, query, expected, test_logical=False):
        '''Execute a test query with an expected output'''
        actual = self.execute_query(query, test_logical)
        self.assertEquals(actual, expected)
|
Add compile_to_json invocation in Myrial test fixture
|
Add compile_to_json invocation in Myrial test fixture
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
8dba7c06d1f6fd6365c7150d0934eb055e1b2fd3
|
inthe_am/taskmanager/middleware.py
|
inthe_am/taskmanager/middleware.py
|
from inthe_am.taskmanager.models import TaskStore
class AuthenticationTokenMiddleware(object):
    """Expose the user's API key as an `authentication_token` cookie."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        if hasattr(request, 'user') and request.user.is_authenticated():
            store = TaskStore.get_for_user(request.user)
            response.set_cookie('authentication_token', store.api_key.key)
        else:
            # Anonymous: blank the cookie so stale tokens don't linger.
            response.set_cookie('authentication_token', '')
        return response
|
from inthe_am.taskmanager.models import TaskStore
class AuthenticationTokenMiddleware(object):
    """Expose the user's API key as an `authentication_token` cookie."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        if hasattr(request, 'user') and request.user.is_authenticated():
            store = TaskStore.get_for_user(request.user)
            response.set_cookie('authentication_token', store.api_key.key)
        else:
            # Anonymous: remove the cookie entirely rather than blanking it.
            response.delete_cookie('authentication_token')
        return response
|
Delete cookie instead of setting to empty string.
|
Delete cookie instead of setting to empty string.
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
3f5a6dcd622d7b1c890ced67468ecebd02b1806f
|
mastertickets/db_default.py
|
mastertickets/db_default.py
|
# Created by Noah Kantrowitz on 2007-07-04.
# Copyright (c) 2007 Noah Kantrowitz. All rights reserved.
from trac.db import Table, Column
name = 'mastertickets'
# Schema version, used by the upgrade machinery.
version = 2

tables = [
    # Edge table: each row is one source->dest ticket dependency.
    Table('mastertickets', key=('source','dest'))[
        Column('source', type='integer'),
        Column('dest', type='integer'),
    ],
]
def convert_to_int(data):
    """Convert both source and dest in the mastertickets table to ints.

    `data['mastertickets'][1]` holds the list of [source, dest] rows; each
    row is replaced in place by its integer form.
    """
    rows = data['mastertickets'][1]
    # Bug fix: the old code enumerated *within* each row, overwriting the
    # first cells of a row with pair-lists instead of rewriting whole rows.
    for i, (n1, n2) in enumerate(rows):
        rows[i] = [int(n1), int(n2)]
# (source schema versions, migration callable) pairs.
migrations = [
    (xrange(1,2), convert_to_int),
]
|
# Created by Noah Kantrowitz on 2007-07-04.
# Copyright (c) 2007 Noah Kantrowitz. All rights reserved.
from trac.db import Table, Column
name = 'mastertickets'
# Schema version, used by the upgrade machinery.
version = 2

tables = [
    # Edge table: each row is one source->dest ticket dependency.
    Table('mastertickets', key=('source','dest'))[
        Column('source', type='integer'),
        Column('dest', type='integer'),
    ],
]
def convert_to_int(data):
    """Convert both source and dest in the mastertickets table to ints."""
    table_rows = data['mastertickets'][1]
    for idx, (src, dest) in enumerate(table_rows):
        table_rows[idx] = [int(src), int(dest)]
# (source schema versions, migration callable) pairs.
migrations = [
    (xrange(1,2), convert_to_int),
]
|
Fix the migration to actual work.
|
Fix the migration to actual work.
|
Python
|
bsd-3-clause
|
SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin
|
9d9825d7f08c7cc3c078f9b43b3a67019335f75d
|
nodeconductor/core/models.py
|
nodeconductor/core/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from uuidfield import UUIDField
class UuidMixin(models.Model):
    """
    Mixin to identify models by UUID.
    """
    class Meta(object):
        abstract = True

    # Auto-generated, globally unique external identifier.
    uuid = UUIDField(auto=True, unique=True)


class User(UuidMixin, AbstractUser):
    """Custom user model with contact/organization metadata."""

    alternative_name = models.CharField(_('alternative name'), max_length=40, blank=True)
    civil_number = models.CharField(_('civil number'), max_length=40, blank=True)
    phone_number = models.CharField(_('phone number'), max_length=40, blank=True)
    description = models.TextField(_('description'), blank=True)
    organization = models.CharField(_('organization'), max_length=80, blank=True)
    job_title = models.CharField(_('job title'), max_length=40, blank=True)


@python_2_unicode_compatible
class SshPublicKey(UuidMixin, models.Model):
    """
    User public key.

    Used for injection into VMs for remote access.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL, db_index=True)
    name = models.CharField(max_length=50, blank=True)
    public_key = models.TextField(max_length=2000)

    def __str__(self):
        return self.name
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import AbstractUser
from django.conf import settings
from uuidfield import UUIDField
class UuidMixin(models.Model):
    """
    Mixin to identify models by UUID.
    """
    class Meta(object):
        abstract = True

    # Auto-generated, globally unique external identifier.
    uuid = UUIDField(auto=True, unique=True)


class User(UuidMixin, AbstractUser):
    """Custom user model with contact/organization metadata."""

    alternative_name = models.CharField(_('alternative name'), max_length=60, blank=True)
    civil_number = models.CharField(_('civil number'), max_length=40, blank=True)
    phone_number = models.CharField(_('phone number'), max_length=40, blank=True)
    description = models.TextField(_('description'), blank=True)
    organization = models.CharField(_('organization'), max_length=80, blank=True)
    job_title = models.CharField(_('job title'), max_length=40, blank=True)


@python_2_unicode_compatible
class SshPublicKey(UuidMixin, models.Model):
    """
    User public key.

    Used for injection into VMs for remote access.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL, db_index=True)
    name = models.CharField(max_length=50, blank=True)
    public_key = models.TextField(max_length=2000)

    def __str__(self):
        return self.name
|
Increase size of the alternative field
|
Increase size of the alternative field
- NC-87
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
a662eded2841b87ccbccdd6dfb21315725d0a0c5
|
python/pyspark_llap/__init__.py
|
python/pyspark_llap/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark_llap.sql.session import HiveWarehouseSession

# Public API of the package.
__all__ = ['HiveWarehouseSession']
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark_llap.sql.session import HiveWarehouseSession

# These are aliases so that importing this module exposes those attributes below directly.
DATAFRAME_TO_STREAM = HiveWarehouseSession.DATAFRAME_TO_STREAM
HIVE_WAREHOUSE_CONNECTOR = HiveWarehouseSession.HIVE_WAREHOUSE_CONNECTOR
STREAM_TO_STREAM = HiveWarehouseSession.STREAM_TO_STREAM

# Public API of the package (session class plus the aliases above).
__all__ = [
    'HiveWarehouseSession',
    'DATAFRAME_TO_STREAM',
    'HIVE_WAREHOUSE_CONNECTOR',
    'STREAM_TO_STREAM',
]
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Add aliases for HIVE_WAREHOUSE_CONNECTOR, DATAFRAME_TO_STREAM and STREAM_TO_STREAM
|
Python
|
apache-2.0
|
hortonworks-spark/spark-llap,hortonworks-spark/spark-llap,hortonworks-spark/spark-llap
|
5a7291b9c305445aebe77ef020017ac9cffd35e2
|
pythonparser/test/test_utils.py
|
pythonparser/test/test_utils.py
|
# coding:utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
unicode = type("")  # `str` on Python 3, `unicode` on Python 2


class BytesOnly(bytes):
    """bytes subclass that compares equal only to bytes values."""

    def __new__(cls, s):
        if isinstance(s, unicode):
            s = s.encode()
        return bytes.__new__(BytesOnly, s)

    def __eq__(self, o):
        return isinstance(o, bytes) and bytes.__eq__(self, o)

    def __ne__(self, o):
        return not self == o


class UnicodeOnly(unicode):
    """text subclass that compares equal only to text values."""

    def __eq__(self, o):
        return isinstance(o, unicode) and unicode.__eq__(self, o)

    def __ne__(self, o):
        return not self == o


# Feature detection (does `long` exist?) instead of version detection,
# per the Python 3 porting guide.
try:
    class LongOnly(long):  # Python 2
        def __eq__(self, o):
            return isinstance(o, long) and long.__cmp__(self, o) == 0

        def __ne__(self, o):
            return not self == o
except NameError:  # Python 3
    LongOnly = int
|
# coding:utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
unicode = type("")
class BytesOnly(bytes):
def __new__(cls, s):
if isinstance(s, unicode):
s = s.encode()
return bytes.__new__(BytesOnly, s)
def __eq__(self, o):
return isinstance(o, bytes) and bytes.__eq__(self, o)
def __ne__(self, o):
return not self == o
class UnicodeOnly(unicode):
def __eq__(self, o):
return isinstance(o, unicode) and unicode.__eq__(self, o)
def __ne__(self, o):
return not self == o
try:
class LongOnly(long): # Python 2
def __eq__(self, o):
return isinstance(o, long) and long.__cmp__(self, o) == 0
def __ne__(self, o):
return not self == o
except NameError: # Python 3
LongOnly = int
|
Fix indentation error in LongOnly.__ne__()
|
Fix indentation error in LongOnly.__ne__()
Also follow Python porting best practice [__use feature detection instead of version detection__](https://docs.python.org/3/howto/pyporting.html#use-feature-detection-instead-of-version-detection).
|
Python
|
mit
|
m-labs/pythonparser
|
d6b801a6f327d2a87266cca9ada780ba2c9c9309
|
skylines/lib/helpers/__init__.py
|
skylines/lib/helpers/__init__.py
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SkyLines."""
from __future__ import absolute_import
import datetime
import simplejson as json
from urllib import urlencode
from tg import flash
from webhelpers import date, feedgenerator, html, number, misc, text
from .string import *
from .country import *
from skylines.lib.formatter.numbers import *
from skylines.lib.formatter.datetime import *
from skylines.lib.formatter.units import *
from skylines.lib.markdown import markdown
# The dict implementation of Jinja2 only works for keyword parameters,
# but not for merging to dictionaries. We export the builtin Python dict()
# function here to get around that problem for building URLs.
dict = dict
# Jinja2 doesn't seem to have min/max... strange!
min = min
max = max
def url(base_url='/', params={}):
if not isinstance(base_url, basestring) and hasattr(base_url, '__iter__'):
base_url = '/'.join(base_url)
if params:
return '?'.join((base_url, urlencode(params)))
return base_url
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SkyLines."""
from __future__ import absolute_import
import datetime
import simplejson as json
from urllib import urlencode
from flask import flash
from webhelpers import date, feedgenerator, html, number, misc, text
from .string import *
from .country import *
from skylines.lib.formatter.numbers import *
from skylines.lib.formatter.datetime import *
from skylines.lib.formatter.units import *
from skylines.lib.markdown import markdown
# The dict implementation of Jinja2 only works for keyword parameters,
# but not for merging to dictionaries. We export the builtin Python dict()
# function here to get around that problem for building URLs.
dict = dict
# Jinja2 doesn't seem to have min/max... strange!
min = min
max = max
def url(base_url='/', params={}):
if not isinstance(base_url, basestring) and hasattr(base_url, '__iter__'):
base_url = '/'.join(base_url)
if params:
return '?'.join((base_url, urlencode(params)))
return base_url
|
Use flask.flash instead of tg.flash
|
lib/helpers: Use flask.flash instead of tg.flash
|
Python
|
agpl-3.0
|
TobiasLohner/SkyLines,snip/skylines,Turbo87/skylines,skylines-project/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,RBE-Avionik/skylines,kerel-fs/skylines,RBE-Avionik/skylines,Harry-R/skylines,Harry-R/skylines,Harry-R/skylines,RBE-Avionik/skylines,skylines-project/skylines,Turbo87/skylines,snip/skylines,skylines-project/skylines,Turbo87/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,snip/skylines,RBE-Avionik/skylines,shadowoneau/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,kerel-fs/skylines,shadowoneau/skylines,kerel-fs/skylines
|
c0549cc670af0735753e08c2c375d32989f04c9c
|
shopify_python/__init__.py
|
shopify_python/__init__.py
|
# Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.1.2'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
|
# Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.2.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
|
Increase version number to 0.2.0
|
Increase version number to 0.2.0
|
Python
|
mit
|
Shopify/shopify_python
|
57c34cae582764b69bb32faa712110a46df69dde
|
chaser/__init__.py
|
chaser/__init__.py
|
__version__ = "0.1"
|
__version__ = "0.1"
import requests
import io
import tarfile
import ccr
def get_source_files(pkgname, workingdir):
"""Download the source tarball and extract it"""
r = requests.get(ccr.getpkgurl(pkgname))
tar = tarfile.open(mode='r', fileobj=io.BytesIO(r.content))
tar.extractall(workingdir)
|
Add initial function for get_source_files
|
Add initial function for get_source_files
|
Python
|
bsd-3-clause
|
rshipp/chaser,rshipp/chaser
|
b16b701f6ad80d0df27ab6ea1d9f115a6e2b9106
|
pymatgen/__init__.py
|
pymatgen/__init__.py
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.2"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.3dev"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
|
Increase minor version number + dev.
|
Increase minor version number + dev.
Former-commit-id: 44023123016583dcb692ce23c19978e6f5d90abd [formerly 01b7fa7fe0778c195d9f75d35d43618691778ef8]
Former-commit-id: a96aa4b8265bf7b15143879b0a3b98e30a9e5953
|
Python
|
mit
|
blondegeek/pymatgen,tallakahath/pymatgen,dongsenfo/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,mbkumar/pymatgen,johnson1228/pymatgen,setten/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,blondegeek/pymatgen,aykol/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,setten/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,gpetretto/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,setten/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,gmatteo/pymatgen,fraricci/pymatgen,aykol/pymatgen,gpetretto/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,xhqu1981/pymatgen,setten/pymatgen,montoyjh/pymatgen,blondegeek/pymatgen,Bismarrck/pymatgen,vorwerkc/pymatgen,vorwerkc/pymatgen,ndardenne/pymatgen,nisse3000/pymatgen,gVallverdu/pymatgen,aykol/pymatgen,ndardenne/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,matk86/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,richardtran415/pymatgen,johnson1228/pymatgen,tallakahath/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,matk86/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen,gpetretto/pymatgen,gVallverdu/pymatgen,matk86/pymatgen
|
19e84f0c528fd1c19dba709972f31343284c0a40
|
pymatgen/__init__.py
|
pymatgen/__init__.py
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, Geoffroy Hautier, Will Richards, Dan Gunter, Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jun 28, 2012"
__version__ = "2.0.0"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
|
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, Geoffroy Hautier, Will Richards, Dan Gunter, Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jun 28, 2012"
__version__ = "2.0.0"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import file_open_zip_aware as openz
|
Add an alias to file_open_zip_aware as openz.
|
Add an alias to file_open_zip_aware as openz.
Former-commit-id: 97796b7a5593858b2fc15c8009658926afa3eda0 [formerly 1ce26a0b0cbddb49047da0f8bac8214fb298c646]
Former-commit-id: 7bdb412108a247f3ebc9d3d9906f03c222178449
|
Python
|
mit
|
gVallverdu/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,aykol/pymatgen,gpetretto/pymatgen,johnson1228/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,gVallverdu/pymatgen,blondegeek/pymatgen,aykol/pymatgen,vorwerkc/pymatgen,dongsenfo/pymatgen,nisse3000/pymatgen,czhengsci/pymatgen,setten/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,blondegeek/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,vorwerkc/pymatgen,tallakahath/pymatgen,czhengsci/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,matk86/pymatgen,gpetretto/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,setten/pymatgen,tallakahath/pymatgen,Bismarrck/pymatgen,setten/pymatgen,johnson1228/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,gmatteo/pymatgen,ndardenne/pymatgen,matk86/pymatgen,nisse3000/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,setten/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,aykol/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,xhqu1981/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,johnson1228/pymatgen,nisse3000/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,richardtran415/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,fraricci/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,tallakahath/pymatgen,mbkumar/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen
|
5753ecdcb71ea3b64e0fb902cf873dfff124160d
|
skcode/utility/__init__.py
|
skcode/utility/__init__.py
|
"""
SkCode utilities library.
"""
# Auto paragraphs utility
from .paragraphs import (PARAGRAPH_NODE_NAME,
ParagraphTagOptions,
make_paragraphs)
# TODO replace cosmetic utility (maybe mixin for postrender callback instead?)
# TODO replace smiley utility (maybe mixin for postrender callback instead?)
# TODO replace links utility
# TODO extract titles utility
# TODO extract footnotes utility
# TODO extract figures utility
# TODO extract acronyms utility
|
"""
SkCode utilities library.
"""
# Auto paragraphs utilities
from .paragraphs import (PARAGRAPH_NODE_NAME,
ParagraphTagOptions,
make_paragraphs)
# TODO replace cosmetic utility (maybe mixin for postrender callback instead?)
# TODO replace smiley utility (maybe mixin for postrender callback instead?)
# TODO replace links utility
# Footnotes utilities
from .footnotes import (extract_footnotes,
render_footnotes_html,
render_footnotes_text)
# Acronyms utilities
from .acronyms import extract_acronyms
# Titles utilities
from .titles import (extract_titles,
make_titles_hierarchy,
make_auto_title_ids)
# TODO extract figures utility, plus auto ID generation
|
Update utility module friendly imports.
|
Update utility module friendly imports.
|
Python
|
agpl-3.0
|
TamiaLab/PySkCode
|
b8f67c96febd1f7bc2ce1e87f1df0a468faddb87
|
src/taskmaster/util.py
|
src/taskmaster/util.py
|
"""
taskmaster.util
~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
def import_target(target, default=None):
"""
>>> import_target('foo.bar:blah', 'get_jobs')
<function foo.bar.blah>
>>> import_target('foo.bar', 'get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo.bar:get_jobs')
<function foo.bar.get_jobs>
"""
if ':' not in target:
target += ':%s' % default
else:
raise ValueError('target must be in form of `path.to.module:function_name`')
mod_path, func_name = target.split(':', 1)
module = __import__(mod_path, {}, {}, [func_name], -1)
callback = getattr(module, func_name)
return callback
|
"""
taskmaster.util
~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
import imp
import sys
from os.path import exists
def import_target(target, default=None):
"""
>>> import_target('foo.bar:blah', 'get_jobs')
<function foo.bar.blah>
>>> import_target('foo.bar', 'get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo.bar:get_jobs')
<function foo.bar.get_jobs>
>>> import_target('foo/bar.py:get_jobs')
<function get_jobs>
"""
if ':' not in target:
target += ':%s' % default
else:
raise ValueError('target must be in form of `path.to.module:function_name`')
path, func_name = target.split(':', 1)
if exists(path):
module_name = path.rsplit('/', 1)[-1].split('.', 1)[0]
module = imp.new_module(module_name)
module.__file__ = path
try:
execfile(path, module.__dict__)
except IOError, e:
e.strerror = 'Unable to load file (%s)' % e.strerror
raise
sys.modules[module_name] = module
else:
module = __import__(path, {}, {}, [func_name], -1)
callback = getattr(module, func_name)
return callback
|
Allow targets to be specified as files
|
Allow targets to be specified as files
|
Python
|
apache-2.0
|
alex/taskmaster,dcramer/taskmaster
|
b5cb4fe7abaa9fe1a4c387148af6ee494f69bd07
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
astropy/nddata/convolution/tests/test_make_kernel.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from ....tests.compat import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
|
Fix compatibility with Numpy 1.4.1
|
Fix compatibility with Numpy 1.4.1
|
Python
|
bsd-3-clause
|
AustereCuriosity/astropy,MSeifert04/astropy,pllim/astropy,DougBurke/astropy,mhvk/astropy,larrybradley/astropy,kelle/astropy,pllim/astropy,funbaker/astropy,stargaser/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,saimn/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,AustereCuriosity/astropy,larrybradley/astropy,lpsinger/astropy,joergdietrich/astropy,funbaker/astropy,AustereCuriosity/astropy,DougBurke/astropy,joergdietrich/astropy,bsipocz/astropy,larrybradley/astropy,funbaker/astropy,DougBurke/astropy,saimn/astropy,tbabej/astropy,joergdietrich/astropy,AustereCuriosity/astropy,lpsinger/astropy,DougBurke/astropy,stargaser/astropy,StuartLittlefair/astropy,bsipocz/astropy,kelle/astropy,lpsinger/astropy,pllim/astropy,larrybradley/astropy,funbaker/astropy,tbabej/astropy,AustereCuriosity/astropy,astropy/astropy,MSeifert04/astropy,MSeifert04/astropy,kelle/astropy,mhvk/astropy,StuartLittlefair/astropy,mhvk/astropy,astropy/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,mhvk/astropy,saimn/astropy,stargaser/astropy,dhomeier/astropy,saimn/astropy,larrybradley/astropy,dhomeier/astropy,kelle/astropy,astropy/astropy,tbabej/astropy,mhvk/astropy,dhomeier/astropy,pllim/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,tbabej/astropy,saimn/astropy,astropy/astropy,tbabej/astropy,stargaser/astropy,MSeifert04/astropy,joergdietrich/astropy,bsipocz/astropy,pllim/astropy,bsipocz/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,astropy/astropy,dhomeier/astropy,kelle/astropy
|
38b236c9fb0f944b41b6300963fbf5e67d0f3fe7
|
mwstools/requesters/utils.py
|
mwstools/requesters/utils.py
|
import os
from mws.mws import DictWrapper
requesters_dir = os.path.dirname(os.path.abspath(__file__))
responses_dir = os.path.join(requesters_dir, 'responses')
def write_response(response, fname):
with open(os.path.join(responses_dir, fname), 'wb') as f:
if isinstance(response, DictWrapper):
f.write(response.original)
else:
f.write(response.content)
|
import os
from mws.mws import DictWrapper
requesters_dir = os.path.dirname(os.path.abspath(__file__))
responses_dir = os.path.join(requesters_dir, 'responses')
def write_response(response, fname):
return
with open(os.path.join(responses_dir, fname), 'wb') as f:
if isinstance(response, DictWrapper):
f.write(response.original)
else:
f.write(response.content)
|
Write response now returns None since after packaging, the code becomes unusable
|
Write response now returns None since after packaging, the code becomes unusable
|
Python
|
unlicense
|
ziplokk1/python-amazon-mws-tools
|
13ad993a0ca542d5b7e0901afe4c889c18deff5f
|
source/services/tmdb_service.py
|
source/services/tmdb_service.py
|
import os
import requests
class TmdbService:
__TMDB_API = 'https://api.themoviedb.org/3/movie/'
__IMAGE_URL = 'https://image.tmdb.org/t/p/w396/'
def __init__(self, movie_id):
self.id = movie_id
def get_artwork(self):
api_key = os.environ.get('TMDB_API_KEY')
payload = {'api_key': api_key}
response = requests.get(self.__TMDB_API + self.id + '?', data=payload)
movie_info = response.json()
artwork_url = movie_info['poster_path']
return self.__IMAGE_URL + artwork_url
|
import os
import requests
class TmdbService:
__TMDB_API = 'https://api.themoviedb.org/3/movie/'
__IMAGE_URL = 'https://image.tmdb.org/t/p/w396'
def __init__(self, movie_id):
self.id = movie_id
def get_artwork(self):
api_key = os.environ.get('TMDB_API_KEY')
payload = {'api_key': api_key}
response = requests.get(self.__TMDB_API + self.id + '?', data=payload)
movie_info = response.json()
artwork_url = movie_info['poster_path']
return self.__IMAGE_URL + artwork_url
|
Remove extra slash from url (poster url returned from api is preceded by one)
|
Remove extra slash from url (poster url returned from api is preceded by one)
|
Python
|
mit
|
jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu
|
0a3eb4b966dff69cbe582c60bf4444facb4b683d
|
tcconfig/_tc_command_helper.py
|
tcconfig/_tc_command_helper.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import, unicode_literals
import subprocrunner as spr
from ._common import find_bin_path
from ._const import TcSubCommand
from ._error import NetworkInterfaceNotFoundError
def get_tc_base_command(tc_subcommand):
if tc_subcommand not in TcSubCommand:
raise ValueError("the argument must be a TcSubCommand value")
return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value)
def run_tc_show(subcommand, device, tc_command_output):
from ._network import verify_network_interface
verify_network_interface(device, tc_command_output)
runner = spr.SubprocessRunner(
"{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)
)
if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1:
# reach here if the device does not exist at the system and netiface
# not installed.
raise NetworkInterfaceNotFoundError(target=device)
return runner.stdout
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import, unicode_literals
import subprocrunner as spr
from ._common import find_bin_path
from ._const import TcSubCommand
from ._error import NetworkInterfaceNotFoundError
def get_tc_base_command(tc_subcommand):
if not isinstance(tc_subcommand, TcSubCommand):
raise ValueError("the argument must be a TcSubCommand value")
return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value)
def run_tc_show(subcommand, device, tc_command_output):
from ._network import verify_network_interface
verify_network_interface(device, tc_command_output)
runner = spr.SubprocessRunner(
"{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)
)
if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1:
# reach here if the device does not exist at the system and netiface
# not installed.
raise NetworkInterfaceNotFoundError(target=device)
return runner.stdout
|
Change to avoid a DeprecationWarning
|
Change to avoid a DeprecationWarning
|
Python
|
mit
|
thombashi/tcconfig,thombashi/tcconfig
|
42cb96833b71365745aa2a5a741bfe5eeb506098
|
statdyn/figures/colour.py
|
statdyn/figures/colour.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <[email protected]>
#
# Distributed under terms of the MIT license.
"""Create functions to colourize figures."""
import logging
import numpy as np
from hsluv import hpluv_to_hex
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
HEX_VALUES_DARK = np.array([hpluv_to_hex((value, 85, 65)) for value in range(360)])
HEX_VALUES_LIGHT = np.array([hpluv_to_hex((value, 85, 85)) for value in range(360)])
def colour_orientation(orientations, light_colours=False):
"""Get a colour from an orientation."""
orientations = orientations % 2 * np.pi
if light_colours:
return HEX_VALUES_LIGHT[np.floor(orientations / np.pi * 180).astype(int)]
return HEX_VALUES_DARK[np.floor(orientations / np.pi * 180).astype(int)]
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <[email protected]>
#
# Distributed under terms of the MIT license.
"""Create functions to colourize figures."""
import logging
import numpy as np
from hsluv import hpluv_to_hex
from ..analysis.order import get_z_orientation
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
HEX_VALUES_DARK = np.array([hpluv_to_hex((value, 85, 65)) for value in range(360)])
HEX_VALUES_LIGHT = np.array([hpluv_to_hex((value, 85, 85)) for value in range(360)])
def clean_orientation(snapshot):
"""Convert an orientation to a sensible format."""
orientations = get_z_orientation(snapshot.particles.orientation)
nmol = max(snapshot.particles.body)+1
o_dict = {body: orient for body, orient in zip(
snapshot.particles.body[:nmol],
orientations
)}
orientation = np.array([o_dict[body] for body in snapshot.particles.body])
return orientation
def colour_orientation(orientations, light_colours=False):
"""Get a colour from an orientation."""
orientations = orientations % 2 * np.pi
if light_colours:
return HEX_VALUES_LIGHT[np.floor(orientations / np.pi * 180).astype(int)]
return HEX_VALUES_DARK[np.floor(orientations / np.pi * 180).astype(int)]
|
Revert deletion of clean_orientation function
|
Revert deletion of clean_orientation function
The clean_orientation function is needed for the active configuration
plot. I am yet to change that to using the plot function.
|
Python
|
mit
|
malramsay64/MD-Molecules-Hoomd,malramsay64/MD-Molecules-Hoomd
|
5a3a91fe075aa6d0c29cccb3b9bdfc5b40e3dba9
|
leapp/libraries/stdlib/__init__.py
|
leapp/libraries/stdlib/__init__.py
|
"""
:py:mod:`leapp.libraries.stdlib`
represents a location for functions that otherwise would be defined multiple times across leapp actors
and at the same time, they are really useful for other actors.
"""
import six
import subprocess
import os
def call(args, split=True):
"""
Call an external program, capture and automatically utf-8 decode its ouput.
Then, supress output to stderr and redirect to /dev/null.
:param args: Command to execute
:type args: list
:param split: Split the output on newlines
:type split: bool
:return: stdout output, 'utf-8' decoded, split by lines if split=True
:rtype: unicode/str or [unicode/str] if split=True
"""
r = None
with open(os.devnull, mode='w') as err:
if six.PY3:
r = subprocess.check_output(args, stderr=err, encoding='utf-8')
else:
r = subprocess.check_output(args, stderr=err).decode('utf-8')
if split:
return r.splitlines()
return r
|
"""
:py:mod:`leapp.libraries.stdlib`
represents a location for functions that otherwise would be defined multiple times across leapp actors
and at the same time, they are really useful for other actors.
"""
import six
import subprocess
import os
from leapp.libraries.stdlib import api
def call(args, split=True):
"""
Call an external program, capture and automatically utf-8 decode its ouput.
Then, supress output to stderr and redirect to /dev/null.
:param args: Command to execute
:type args: list
:param split: Split the output on newlines
:type split: bool
:return: stdout output, 'utf-8' decoded, split by lines if split=True
:rtype: unicode/str or [unicode/str] if split=True
"""
r = None
with open(os.devnull, mode='w') as err:
if six.PY3:
r = subprocess.check_output(args, stderr=err, encoding='utf-8')
else:
r = subprocess.check_output(args, stderr=err).decode('utf-8')
if split:
return r.splitlines()
return r
|
Make api directly available in stdlib
|
stdlib: Make api directly available in stdlib
|
Python
|
lgpl-2.1
|
leapp-to/prototype,leapp-to/prototype,leapp-to/prototype,leapp-to/prototype
|
931d6e4f4ce2cf0d3ea53a3e18ac050e2004741e
|
splunk_handler/__init__.py
|
splunk_handler/__init__.py
|
import datetime
import json
import logging
import socket
import traceback
from threading import Thread
from splunklib import client
_client = None
class SplunkFilter(logging.Filter):
"""
A logging filter for Splunk's debug logs on the root logger to avoid recursion
"""
def filter(self, record):
return not (record.module == 'binding' and record.levelno == logging.DEBUG)
class SplunkHandler(logging.Handler):
"""
A logging handler to send events to a Splunk Enterprise instance
"""
def __init__(self, host, port, username, password, index):
logging.Handler.__init__(self)
self.host = host
self.port = port
self.username = username
self.password = password
self.index = index
def emit(self, record):
thread = Thread(target=self._async_emit, args=(record, ))
thread.start()
def _init_client(self):
return client.connect(
host=self.host,
port=self.port,
username=self.username,
password=self.password)
def _async_emit(self, record):
global _client
if not _client:
_client = self._init_client()
try:
_client.indexes[self.index].submit(
self.format(record),
host=socket.gethostname(),
source=record.pathname,
sourcetype='json')
except Exception, e:
print "Traceback:\n" + traceback.format_exc()
print "Exception in Splunk logging handler: %s" % str(e)
|
import logging
import socket
import traceback
from threading import Thread
from splunklib import client
_client = None
class SplunkFilter(logging.Filter):
"""
A logging filter for Splunk's debug logs on the root logger to avoid recursion
"""
def filter(self, record):
return not (record.module == 'binding' and record.levelno == logging.DEBUG)
class SplunkHandler(logging.Handler):
"""
A logging handler to send events to a Splunk Enterprise instance
"""
def __init__(self, host, port, username, password, index):
logging.Handler.__init__(self)
self.host = host
self.port = port
self.username = username
self.password = password
self.index = index
def emit(self, record):
thread = Thread(target=self._async_emit, args=(record, ))
thread.start()
def _init_client(self):
return client.connect(
host=self.host,
port=self.port,
username=self.username,
password=self.password)
def _async_emit(self, record):
global _client
if not _client:
_client = self._init_client()
try:
_client.indexes[self.index].submit(
self.format(record),
host=socket.gethostname(),
source=record.pathname,
sourcetype='json')
except Exception, e:
print "Traceback:\n" + traceback.format_exc()
print "Exception in Splunk logging handler: %s" % str(e)
|
Remove a couple useless imports
|
Remove a couple useless imports
|
Python
|
mit
|
zach-taylor/splunk_handler,sullivanmatt/splunk_handler
|
10b0d6102b391f489e98fd1c2e08b766e77c87e9
|
osbrain/logging.py
|
osbrain/logging.py
|
import os
from .core import Agent
from .core import Proxy
def pyro_log():
os.environ["PYRO_LOGFILE"] = "pyro_osbrain.log"
os.environ["PYRO_LOGLEVEL"] = "DEBUG"
def log_handler(agent, message, topic):
# TODO: handle INFO, ERROR... differently?
agent.log_history.append(message)
def run_logger(name, nsaddr=None, addr=None):
"""
Ease the logger creation process.
This function will create a new logger, start the process and then run
its main loop through a proxy.
Parameters
----------
name : str
Logger name or alias.
nsaddr : SocketAddress, default is None
Name server address.
addr : SocketAddress, default is None
New logger address, if it is to be fixed.
Returns
-------
proxy
A proxy to the new logger.
"""
Agent(name, nsaddr, addr).start()
proxy = Proxy(name, nsaddr)
proxy.set_attr('log_history', [])
handlers = {
'INFO': log_handler,
'ERROR': log_handler
}
proxy.bind('SUB', 'logger_sub_socket', handlers)
proxy.run()
return proxy
|
import os
from .core import Agent
from .core import Proxy
from .core import BaseAgent
from .core import run_agent
def pyro_log():
os.environ["PYRO_LOGFILE"] = "pyro_osbrain.log"
os.environ["PYRO_LOGLEVEL"] = "DEBUG"
class Logger(BaseAgent):
def on_init(self):
self.log_history = []
handlers = {
'INFO': self.log_handler,
'ERROR': self.log_handler
}
self.bind('SUB', 'logger_sub_socket', handlers)
def log_handler(self, message, topic):
# TODO: handle INFO, ERROR... differently?
self.log_history.append(message)
def run_logger(name, nsaddr=None, addr=None, base=Logger):
    """Create and run a new logger agent, returning a proxy to it.

    Parameters
    ----------
    name : str
        Logger name or alias.
    nsaddr : SocketAddress, default is None
        Name server address.
    addr : SocketAddress, default is None
        New logger address, if it is to be fixed.

    Returns
    -------
    proxy
        A proxy to the new logger.
    """
    # Delegate the whole start/proxy dance to the generic agent runner;
    # the `base` class carries all logger-specific behavior.
    return run_agent(name, nsaddr, addr, base)
|
Allow Logger to be subclassed and modified more easily
|
Allow Logger to be subclassed and modified more easily
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
e582ef07d4b9f537e31d31c1546df870a2bd361c
|
tests/plugins/async_plugin/asyncplugin.py
|
tests/plugins/async_plugin/asyncplugin.py
|
from senpy.plugins import AnalysisPlugin
import multiprocessing
class AsyncPlugin(AnalysisPlugin):
    """Analysis plugin that computes values with a multiprocessing pool."""

    def _train(self, process_number):
        # Trivial worker used to exercise the pool: returns its own index.
        return process_number

    def _do_async(self, num_processes):
        # NOTE(review): Pool-as-context-manager and pickling the bound
        # method self._train both require Python 3; this fails on 2.7.
        with multiprocessing.Pool(processes=num_processes) as pool:
            values = pool.map(self._train, range(num_processes))
        return values

    def activate(self):
        # Precompute a value at plugin activation time.
        self.value = self._do_async(4)

    def analyse_entry(self, entry, params):
        # Attach asynchronously computed values to the entry.
        values = self._do_async(2)
        entry.async_values = values
        yield entry
|
from senpy.plugins import AnalysisPlugin
import multiprocessing
def _train(process_number):
return process_number
class AsyncPlugin(AnalysisPlugin):
    """Analysis plugin that computes values with a multiprocessing pool.

    Uses the module-level _train function (not a bound method) so the
    pool tasks can be pickled under Python 2.7 as well.
    """

    def _do_async(self, num_processes):
        """Map _train over *num_processes* workers and return the results."""
        pool = multiprocessing.Pool(processes=num_processes)
        try:
            values = pool.map(_train, range(num_processes))
        finally:
            # Always release the workers: Pool is not a context manager on
            # Python 2.7, so close/join explicitly to avoid leaking processes.
            pool.close()
            pool.join()
        return values

    def activate(self):
        # Precompute a value at plugin activation time.
        self.value = self._do_async(4)

    def analyse_entry(self, entry, params):
        # Attach asynchronously computed values to the entry.
        values = self._do_async(2)
        entry.async_values = values
        yield entry
|
Fix multiprocessing tests in python2.7
|
Fix multiprocessing tests in python2.7
Closes #28 for python 2.
Apparently, process pools are not contexts in python 2.7.
On the other hand, in py2 you cannot pickle instance methods, so
you have to implement Pool tasks as independent functions.
|
Python
|
apache-2.0
|
gsi-upm/senpy,gsi-upm/senpy,gsi-upm/senpy
|
1a534a3ac6ab1617e9d48e84ce34c0b482730e4d
|
pritunl_node/call_buffer.py
|
pritunl_node/call_buffer.py
|
from constants import *
import collections
import uuid
class CallBuffer():
    """Queue outgoing calls and route responses back to their callbacks."""

    def __init__(self):
        # Single pending "long poll" callback waiting for new calls.
        self.waiter = None
        # Bounded FIFO of calls not yet delivered to a waiter.
        self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
        # Maps call id -> callback awaiting that call's response.
        self.call_waiters = {}

    def wait_for_calls(self, callback):
        """Deliver all queued calls to *callback*, or park it as the waiter."""
        self.stop_waiter()
        calls = []
        while True:
            try:
                calls.append(self.queue.popleft())
            except IndexError:
                break
        if calls:
            callback(calls)
            return
        # Nothing queued: remember the callback until a call arrives.
        self.waiter = callback

    def cancel_waiter(self):
        # Drop the parked waiter without notifying it.
        self.waiter = None

    def stop_waiter(self):
        # Wake the parked waiter with None so it can finish its request.
        if self.waiter:
            self.waiter(None)
            self.waiter = None

    def return_call(self, id, response):
        # NOTE(review): parameter name shadows the builtin id().
        callback = self.call_waiters.pop(id, None)
        if callback:
            callback(response)

    def create_call(self, command, args, callback=None):
        """Create a call; hand it straight to a parked waiter or queue it."""
        call_id = uuid.uuid4().hex
        call = {
            'id': call_id,
            'command': command,
            'args': args,
        }
        if callback:
            self.call_waiters[call_id] = callback
        if self.waiter:
            self.waiter([call])
            self.waiter = None
        else:
            self.queue.append(call)
|
from constants import *
import collections
import uuid
class CallBuffer():
    """Buffers outgoing calls and matches responses to waiting callbacks."""

    def __init__(self):
        # Parked "long poll" callback awaiting new calls, if any.
        self.waiter = None
        # Bounded FIFO of calls not yet delivered to a waiter.
        self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
        # call_id -> callback awaiting that call's response.
        self.call_waiters = {}

    def wait_for_calls(self, callback):
        """Hand every queued call to *callback*, or park it as the waiter."""
        self.stop_waiter()
        drained = []
        while True:
            try:
                drained.append(self.queue.popleft())
            except IndexError:
                break
        if not drained:
            # Nothing pending: remember the callback for the next call.
            self.waiter = callback
            return
        callback(drained)

    def cancel_waiter(self):
        """Forget the parked waiter without notifying it."""
        self.waiter = None

    def stop_waiter(self):
        """Wake the parked waiter with None so its request can complete."""
        if not self.waiter:
            return
        self.waiter(None)
        self.waiter = None

    def return_call(self, call_id, response):
        """Deliver *response* to the callback registered for *call_id*."""
        callback = self.call_waiters.pop(call_id, None)
        if callback:
            callback(response)

    def create_call(self, command, args, callback=None):
        """Create a call, dispatch or queue it, and return its new id."""
        call_id = uuid.uuid4().hex
        call = {'id': call_id, 'command': command, 'args': args}
        if callback:
            self.call_waiters[call_id] = callback
        if self.waiter:
            # A waiter is parked: deliver immediately instead of queueing.
            self.waiter([call])
            self.waiter = None
        else:
            self.queue.append(call)
        return call_id

    def cancel_call(self, call_id):
        """Stop tracking the response callback for *call_id*, if any."""
        self.call_waiters.pop(call_id, None)
|
Add cancel call to call buffer
|
Add cancel call to call buffer
|
Python
|
agpl-3.0
|
pritunl/pritunl-node,pritunl/pritunl-node
|
716f953069b4fceebe4fec1a1ea2402e77cbb629
|
docs/src/conf.py
|
docs/src/conf.py
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
# NOTE: Python 2 only ("print" statement, execfile).
def prepare(globs, locs):
    """Bootstrap the PHP build environment, then load the second-stage
    Sphinx configuration into the caller's namespaces."""
    # RTD defaults the current working directory to where conf.py resides.
    # In our case, that means <root>/docs/src/.
    cwd = os.getcwd()
    root = abspath(join(cwd, '..', '..'))
    os.chdir(root)

    # Download the PHP binary & composer.phar if necessary
    base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
    for f in ('php', 'composer.phar'):
        # curl -z: only re-download when the remote file is newer.
        call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])

    # Make sure the PHP interpreter is executable
    os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # Call composer to download/update dependencies as necessary
    os.environ['COMPOSER_CACHE_DIR'] = './cache'
    call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
        '--no-progress'], env=os.environ)

    # Load the second-stage configuration file.
    os.chdir(cwd)
    conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
    print "Including the second configuration file (%s)..." % (conf, )
    execfile(conf, globs, locs)

prepare(globals(), locals())
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
def prepare(globs, locs):
    """Bootstrap the PHP build environment, then load the second-stage
    Sphinx configuration into the caller's namespaces."""
    # RTD defaults the current working directory to where conf.py resides.
    # In our case, that means <root>/docs/src/.
    cwd = os.getcwd()
    root = abspath(join(cwd, '..', '..'))
    os.chdir(root)

    # Download the PHP binary & composer.phar if necessary
    base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
    for f in ('php', 'composer.phar'):
        # curl -z: only re-download when the remote file is newer.
        call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])

    # Make sure the PHP interpreter is executable
    os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # Call composer to download/update dependencies as necessary
    os.environ['COMPOSER_CACHE_DIR'] = './cache'
    call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
        '--no-progress'], env=os.environ)

    # Load the second-stage configuration file.
    os.chdir(cwd)
    conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
    # Use the print() function: a Python 2 "print" statement is a
    # SyntaxError on Python 3, defeating the execfile -> exec migration.
    print("Including the second configuration file (%s)..." % (conf, ))
    exec(compile(open(conf).read(), conf, 'exec'), globs, locs)

prepare(globals(), locals())
|
Replace execfile with py3 equivalent
|
Replace execfile with py3 equivalent
|
Python
|
mit
|
Erebot/Plop
|
a02739cc7b1384e51f44d86a05af5a9845469fca
|
pygame/__init__.py
|
pygame/__init__.py
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
# map our exceptions on pygame's default
error = SDLError
|
""" XXX: fish """
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import *
from pygame import (
display, color, surface, time, event, constants, sprite,
mouse, locals, image, transform, pkgdata, font, mixer,
cursors, key, draw
)
from pygame.base import (
init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder,
register_quit
)
from pygame._error import get_error, set_error, SDLError
from pygame.mask import Mask
# map our exceptions on pygame's default
error = SDLError
|
Add Mask to toplevel pygame namespace
|
Add Mask to toplevel pygame namespace
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi
|
bd11f978535e4a64537eccd000b7eb50e6dab95f
|
pocket2pinboard/bookmarks.py
|
pocket2pinboard/bookmarks.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
LOG = logging.getLogger(__name__)
def update(pinboard_client, items):
    """Send each tagged Pocket item to Pinboard as a new bookmark.

    Items without tags are skipped entirely.
    """
    for i in items:
        if not i.tags:
            # Skip anything that isn't tagged.
            continue
        LOG.info('%s: %s' % (i.title, i.tags))
        LOG.debug('%r', i)
        pinboard_client.posts.add(
            url=i.url,
            description=i.title,
            extended=i.excerpt,
            tags=u', '.join(i.tags),
            # Preserve the original update date rather than "now".
            date=str(i.time_updated.date()),
        )
        LOG.debug('')
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

LOG = logging.getLogger(__name__)


def update(pinboard_client, items):
    """Push every tagged Pocket item to Pinboard as a bookmark.

    Untagged items are skipped entirely.
    """
    for item in items:
        if not item.tags:
            # Skip anything that isn't tagged.
            continue
        LOG.info('%s - %s: %s' % (item.time_updated.date(), item.title, item.tags))
        LOG.debug('%r', item)
        pinboard_client.posts.add(
            url=item.url,
            description=item.title,
            extended=item.excerpt,
            tags=u', '.join(item.tags),
            # Preserve the original update date rather than "now".
            date=str(item.time_updated.date()),
        )
        LOG.debug('')
|
Add date to info reported for new links
|
Add date to info reported for new links
|
Python
|
apache-2.0
|
dhellmann/pocket2pinboard
|
409182019048a5cb84499258f6f8daaffb62aeae
|
tests/test_simulation_forward.py
|
tests/test_simulation_forward.py
|
import os
import pytest
import pandas as pd
from glob import glob
import numpy as np
from gypsy import DATA_DIR
from gypsy.forward_simulation import simulate_forwards_df
TEST_FILES = glob(os.path.join(DATA_DIR, 'forward_simulation_files', '*.csv'))
TEST_FILES = [(item) for item in TEST_FILES]
CHART_FILES = glob(os.path.join(DATA_DIR, 'output', 'comparisons*.csv'))
CHART_FILES = [(item) for item in CHART_FILES]
@pytest.mark.parametrize("test_file", TEST_FILES)
def test_compare_forward_simulation(test_file):
    """Forward-simulate one input plot and compare with the stored output."""
    input_df = pd.read_csv(test_file)
    # Expected output lives next to the input, prefixed with "comparisons_".
    expected_data_path = os.path.join(
        DATA_DIR, 'output',
        'comparisons_{}'.format(os.path.basename(test_file))
    )
    plot_id = str(int(input_df.loc[0, 'PlotID']))
    result = simulate_forwards_df(input_df, simulation_choice='yes')[plot_id]
    expected = pd.read_csv(expected_data_path, index_col=0)
    assert isinstance(result, pd.DataFrame)
    assert np.allclose(
        expected.values.astype(np.float64), result.values.astype(np.float64),
        equal_nan=True
    )

    # regenerate output files
    # result.to_csv(expected_data_path)
|
import os
import pytest
import pandas as pd
from glob import glob
import numpy as np
from gypsy import DATA_DIR
from gypsy.forward_simulation import simulate_forwards_df
TEST_FILES = glob(os.path.join(DATA_DIR, 'forward_simulation_files', '*.csv'))
TEST_FILES = [(item) for item in TEST_FILES]
CHART_FILES = glob(os.path.join(DATA_DIR, 'output', 'comparisons*.csv'))
CHART_FILES = [(item) for item in CHART_FILES]
@pytest.mark.parametrize("test_file", TEST_FILES)
def test_compare_forward_simulation(test_file):
    """Forward-simulate one input plot and compare with the stored output."""
    input_df = pd.read_csv(test_file)
    # Expected output lives next to the input, prefixed with "comparisons_".
    expected_data_path = os.path.join(
        DATA_DIR, 'output',
        'comparisons_{}'.format(os.path.basename(test_file))
    )
    plot_id = str(int(input_df.loc[0, 'PlotID']))
    result = simulate_forwards_df(input_df, simulation_choice='yes')[plot_id]
    expected = pd.read_csv(expected_data_path, index_col=0)
    assert isinstance(result, pd.DataFrame)
    # np.testing.assert_allclose returns None (raising on mismatch), so it
    # must NOT be wrapped in an assert: ``assert None`` always fails.
    np.testing.assert_allclose(
        expected.values, result.values,
        rtol=0, atol=1e-4,
        equal_nan=True
    )

    # regenerate output files
    # result.to_csv(expected_data_path)
|
Revise tests to use np.testing.assert_allclose
|
Revise tests to use np.testing.assert_allclose
this is better - if na values mismatch (e,g, na in result where expected
has a value) this errors and gives a message to that effect. the
previous one just errored and it was very hard to tell why
|
Python
|
mit
|
tesera/pygypsy,tesera/pygypsy
|
0f9cb6eb32ce014cb6ae8d24aefed2347efe68d9
|
src/python/cargo/condor/host.py
|
src/python/cargo/condor/host.py
|
"""
cargo/condor/host.py
Host individual condor jobs.
@author: Bryan Silverthorn <[email protected]>
"""
import os
import sys
import cPickle as pickle
def main():
    """
    Application entry point.
    """
    # make the job identifier obvious
    process_number = int(os.environ["CONDOR_PROCESS"])
    cluster_number = int(os.environ["CONDOR_CLUSTER"])
    identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
    # Touch a marker file so the job is identifiable in its work directory.
    open(identifier_path, "w").close()

    # load and run the job
    # NOTE(review): the pickled job is read from stdin; assumes the caller
    # pipes the pickle in — confirm stdin is binary-safe on this platform.
    job = pickle.load(sys.stdin)

    job.run()

if __name__ == "__main__":
    main()
|
"""
cargo/condor/host.py
Host individual condor jobs.
@author: Bryan Silverthorn <[email protected]>
"""
import os
import sys
import cPickle as pickle
def main():
    """
    Application entry point.

    Marks the working directory with a cluster/process identifier file,
    then loads the pickled job from job.pickle and runs it.
    """
    # make the job identifier obvious
    process_number = int(os.environ["CONDOR_PROCESS"])
    cluster_number = int(os.environ["CONDOR_CLUSTER"])
    identifier_path = "JOB_IS_%i.%i" % (cluster_number, process_number)
    open(identifier_path, "w").close()

    # load and run the job; pickles are binary data, so open the file in
    # "rb" to avoid newline translation corrupting the stream
    with open("job.pickle", "rb") as job_file:
        job = pickle.load(job_file)

    job.run()

if __name__ == "__main__":
    main()
|
Load job from a job file instead of stdin.
|
Load job from a job file instead of stdin.
|
Python
|
mit
|
borg-project/cargo,borg-project/cargo
|
354fb43cc95d68b06b85e8d1fa2426ca663ef8b9
|
common/__init__.py
|
common/__init__.py
|
# Package version as a tuple; rendered to a dotted string below.
VERSION = (0, 0, 0)
__version__ = '.'.join(map(str, VERSION))

from django import template

# Make these template tag libraries available without {% load %}.
# NOTE(review): template.add_to_builtins was removed in Django 1.9 —
# confirm the Django version this package targets.
template.add_to_builtins('common.templatetags.common')
template.add_to_builtins('common.templatetags.development')
|
# Package version as a tuple; rendered to a dotted string below.
VERSION = (0, 1, 0)
__version__ = '.'.join(map(str, VERSION))

from django import template

# Make these template tag libraries available without {% load %}.
# NOTE(review): template.add_to_builtins was removed in Django 1.9 —
# confirm the Django version this package targets.
template.add_to_builtins('common.templatetags.common')
template.add_to_builtins('common.templatetags.development')

# Add db_name to options for use in model.Meta class
import django.db.models.options as options
options.DEFAULT_NAMES = options.DEFAULT_NAMES + ('db_name',)
|
Add db_name to options for use in model.Meta class
|
Add db_name to options for use in model.Meta class
|
Python
|
bsd-3-clause
|
baskoopmans/djcommon,baskoopmans/djcommon,baskoopmans/djcommon
|
02d184f94e2e5a0521e2ec06e2c10ca644ba2cef
|
python/balcaza/t2wrapper.py
|
python/balcaza/t2wrapper.py
|
from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
    """Wrap *flow* as a nested workflow whose ports are all string-typed."""

    def __init__(self, flow):
        self.flow = flow
        Workflow.__init__(self, flow.title, flow.author, flow.description)
        # Register the wrapped flow as a single nested-workflow task.
        setattr(self.task, flow.name, NestedWorkflow(flow))
        nested = getattr(self.task, flow.name)
        for port in flow.input:
            # Set type to same depth, but basetype of String
            # NOTE(review): the local "type" shadows the builtin type().
            depth = port.type.getDepth()
            if depth == 0:
                type = String
            else:
                type = ListType(String, depth)
            # Copy any annotations
            type.dict = port.type.dict
            self.input[port.name] = type
            # Wire the wrapper's input straight into the nested input.
            self.input[port.name] >> nested.input[port.name]
        for port in flow.output:
            # Set type to same depth, but basetype of String
            depth = port.type.getDepth()
            if depth == 0:
                type = String
            else:
                type = ListType(String, depth)
            # Copy any annotations
            type.dict = port.type.dict
            self.output[port.name] = type
            # Wire the nested output straight out through the wrapper.
            nested.output[port.name] >> self.output[port.name]
|
from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
    """Wrap *flow* as a nested workflow whose ports are all string-typed."""

    def __init__(self, flow):
        self.flow = flow
        Workflow.__init__(self, flow.title, flow.author, flow.description)
        # Register the wrapped flow as a single nested-workflow task.
        self.task[flow.name] = NestedWorkflow(flow)
        nested = self.task[flow.name]
        for port in flow.input:
            self.input[port.name] = self._stringified(port)
            # Wire the wrapper's input straight into the nested input.
            self.input[port.name] >> nested.input[port.name]
        for port in flow.output:
            self.output[port.name] = self._stringified(port)
            # Wire the nested output straight out through the wrapper.
            nested.output[port.name] >> self.output[port.name]

    @staticmethod
    def _stringified(port):
        # Same depth as the original port type, but with String as the
        # base type; any annotations are copied over.
        depth = port.type.getDepth()
        port_type = String if depth == 0 else ListType(String, depth)
        port_type.dict = port.type.dict
        return port_type
|
Use [] notation in wrapper module for task management
|
Use [] notation in wrapper module for task management
|
Python
|
lgpl-2.1
|
jongiddy/balcazapy,jongiddy/balcazapy,jongiddy/balcazapy
|
3048bf667ec24c93d1c60f08124d68b6d1fc458d
|
src/python/borg/defaults.py
|
src/python/borg/defaults.py
|
"""@author: Bryan Silverthorn <[email protected]>"""
import os
machine_speed = 1.0
minimum_fake_run_budget = 1800.0 # XXX
proc_poll_period = 1.0
root_log_level = os.environ.get("BORG_LOG_ROOT_LEVEL", "NOTSET")
try:
from borg_site_defaults import *
except ImportError:
pass
|
"""@author: Bryan Silverthorn <[email protected]>"""
import os
machine_speed = 1.0
proc_poll_period = 1.0
root_log_level = os.environ.get("BORG_LOG_ROOT_LEVEL", "NOTSET")
try:
from borg_site_defaults import *
except ImportError:
pass
|
Remove an ancient configuration setting.
|
Remove an ancient configuration setting.
|
Python
|
mit
|
borg-project/borg
|
640b5c45727b0c84ab77a759f8b910212762c4c6
|
rest-api/config.py
|
rest-api/config.py
|
"""Configuration parameters.
Contains things such as the database to connect to.
"""
CLOUDSQL_INSTANCE = 'pmi-drc-api-test:us-central1:pmi-rdr'
CLOUDSQL_SOCKET = '/cloudsql/' + CLOUDSQL_INSTANCE
CLOUDSQL_USER = 'api'
PYTHON_TEST_CLIENT_ID = '116540421226121250670'
ALLOWED_CLIENT_IDS = [PYTHON_TEST_CLIENT_ID]
# TODO: Move all authentication into the datastore.
ALLOWED_USERS = [
'[email protected]',
]
|
"""Configuration parameters.
Contains things such as the database to connect to.
"""
CLOUDSQL_INSTANCE = 'pmi-drc-api-test:us-central1:pmi-rdr'
CLOUDSQL_SOCKET = '/cloudsql/' + CLOUDSQL_INSTANCE
CLOUDSQL_USER = 'api'
PYTHON_TEST_CLIENT_ID = '116540421226121250670'
ALLOWED_CLIENT_IDS = [PYTHON_TEST_CLIENT_ID]
# TODO: Move all authentication into the datastore.
ALLOWED_USERS = [
'[email protected]',
'[email protected]',
]
|
Add the staging service account to the account whitelist.
|
Add the staging service account to the account whitelist.
|
Python
|
bsd-3-clause
|
all-of-us/raw-data-repository,all-of-us/raw-data-repository,all-of-us/raw-data-repository
|
525f7fff89e02e54ad2a731533e6b817424594f1
|
tomviz/python/RotationAlign.py
|
tomviz/python/RotationAlign.py
|
# Perform alignment to the estimated rotation axis
#
# Developed as part of the tomviz project (www.tomviz.com).
def transform_scalars(dataset, SHIFT=None, rotation_angle=90.0):
    """Shift the volume by SHIFT, then rotate it about the Z axis.

    Parameters
    ----------
    dataset : tomviz dataset wrapper whose scalars are read and replaced.
    SHIFT : per-axis shift sequence, or None for no shift.
    rotation_angle : rotation in degrees (defaults to 90).
    """
    from tomviz import utils
    from scipy import ndimage
    import numpy as np

    data_py = utils.get_array(dataset)  # Get data as numpy array.

    if data_py is None:  # Check if data exists
        raise RuntimeError("No data array found!")

    # ndimage.interpolation.shift requires a sequence of shifts; treat a
    # missing SHIFT as "no shift" instead of crashing on None.
    if SHIFT is None:
        SHIFT = np.zeros(len(data_py.shape), dtype=int)

    data_py_return = np.empty_like(data_py)
    ndimage.interpolation.shift(data_py, SHIFT, order=0, output=data_py_return)

    rotation_axis = 2  # This operator always assumes the rotation axis is Z
    if rotation_angle == []:  # If tilt angle not given, assign it to 90 degrees.
        rotation_angle = 90

    axis1 = (rotation_axis + 1) % 3
    axis2 = (rotation_axis + 2) % 3
    axes = (axis1, axis2)
    # Rotation changes the output extents; allocate accordingly.
    shape = utils.rotate_shape(data_py_return, rotation_angle, axes=axes)
    data_py_return2 = np.empty(shape, data_py_return.dtype, order='F')
    ndimage.interpolation.rotate(
        data_py_return, rotation_angle, output=data_py_return2, axes=axes)

    utils.set_array(dataset, data_py_return2)
|
# Perform alignment to the estimated rotation axis
#
# Developed as part of the tomviz project (www.tomviz.com).
def transform_scalars(dataset, SHIFT=None, rotation_angle=90.0):
    """Shift the volume by SHIFT, then rotate it about the Z axis."""
    from tomviz import utils
    from scipy import ndimage
    import numpy as np

    data_py = utils.get_array(dataset) # Get data as numpy array.

    if data_py is None: #Check if data exists
        raise RuntimeError("No data array found!")

    # Treat a missing SHIFT as "no shift" (ndimage needs a sequence).
    # NOTE(review): np.int is removed in NumPy >= 1.24 — confirm the
    # NumPy version pinned by this project.
    if SHIFT is None:
        SHIFT = np.zeros(len(data_py.shape), dtype=np.int)
    data_py_return = np.empty_like(data_py)
    ndimage.interpolation.shift(data_py, SHIFT, order=0, output=data_py_return)

    rotation_axis = 2 # This operator always assumes the rotation axis is Z
    if rotation_angle == []: # If tilt angle not given, assign it to 90 degrees.
        rotation_angle = 90

    axis1 = (rotation_axis + 1) % 3
    axis2 = (rotation_axis + 2) % 3
    axes = (axis1, axis2)
    # Rotation changes the output extents; allocate accordingly.
    shape = utils.rotate_shape(data_py_return, rotation_angle, axes=axes)
    data_py_return2 = np.empty(shape, data_py_return.dtype, order='F')
    ndimage.interpolation.rotate(
        data_py_return, rotation_angle, output=data_py_return2, axes=axes)

    utils.set_array(dataset, data_py_return2)
|
Fix ndimage complaining about shift being of NoneType
|
Fix ndimage complaining about shift being of NoneType
|
Python
|
bsd-3-clause
|
OpenChemistry/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,mathturtle/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
|
669b95d2092f67bcc220b5fa106064d6c3df6a63
|
rolca_core/urls.py
|
rolca_core/urls.py
|
from __future__ import absolute_import, unicode_literals

from django.conf.urls import patterns, url
from django.views.generic import TemplateView

# URL routes for the upload app.
# NOTE(review): patterns() with dotted-string view names was deprecated in
# Django 1.8 and removed in 1.10 — confirm the targeted Django version.
urlpatterns = patterns( # pylint: disable=invalid-name
    '',
    url(r'^$', 'uploader.views.upload_app', name="upload_app"),
    url(r'^potrditev$',
        TemplateView.as_view(template_name='uploader/upload_confirm.html'),
        name="upload_confirm"),
    # url(r'^$', 'uploader.views.upload', name="upload"),
    url(r'^seznam$', 'uploader.views.list_select', name="list_select"),
    url(r'^seznam/(?P<salon_id>\d+)$', 'uploader.views.list_details',
        name="list_datails"),
    url(r'^razpisi$',
        TemplateView.as_view(template_name="uploader/notices.html"),
        name="notices"),
    url(r'^razpisi/os$',
        TemplateView.as_view(template_name="uploader/notice_os.html"),
        name="notice_os"),
    url(r'^razpisi/ss$',
        TemplateView.as_view(template_name="uploader/notice_ss.html"),
        name="notice_ss"),
)
|
from __future__ import absolute_import, unicode_literals

from django.conf.urls import url
from django.views.generic import TemplateView

from . import views as core_views

# URL routes for the upload app (list/notice routes are temporarily
# disabled pending migration to view callables).
urlpatterns = [ # pylint: disable=invalid-name
    url(r'^$', core_views.upload_app, name="upload_app"),
    url(r'^potrditev$',
        TemplateView.as_view(template_name='uploader/upload_confirm.html'),
        name="upload_confirm"),
    # url(r'^seznam$', 'uploader.views.list_select', name="list_select"),
    # url(r'^seznam/(?P<salon_id>\d+)$', 'uploader.views.list_details',
    #     name="list_datails"),
    # url(r'^razpisi$',
    #     TemplateView.as_view(template_name="uploader/notices.html"),
    #     name="notices"),
    # url(r'^razpisi/os$',
    #     TemplateView.as_view(template_name="uploader/notice_os.html"),
    #     name="notice_os"),
    # url(r'^razpisi/ss$',
    #     TemplateView.as_view(template_name="uploader/notice_ss.html"),
    #     name="notice_ss"),
]
|
Rewrite urlpatterns to new format
|
Rewrite urlpatterns to new format
|
Python
|
apache-2.0
|
dblenkus/rolca,dblenkus/rolca,dblenkus/rolca
|
54046bfb8834f5fc2a93841ae56e2790ae82eecf
|
shared/api.py
|
shared/api.py
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
jobRepo = btr3baseball.JobRepository(jobTable)
dsRepo = btr3baseball.DatasourceRepository()
def main(event, context):
    """Route an incoming API event to its handler.

    Returns the handler's result, or None for an unknown method.
    """
    method = event['method']
    data = event['data'] if 'data' in event else None
    if method == 'submitJob':
        return submitJob(data, context)
    if method == 'getJob':
        return getJob(data, context)
    if method == 'listDatasources':
        return listDatasources(data, context)
    if method == 'getDatasource':
        return getDatasource(data, context)
    return None
def submitJob(event, context):
    """Create a job record, enqueue its ID on SQS and record the message ID."""
    # Put initial entry in dynamo db
    jobId = jobRepo.createJob(event)
    # Put the job ID on the SQS queue
    response = queue.send_message(MessageBody=jobId)
    # Update the DB entry with sqs message ID for traceability
    return jobRepo.updateWithMessageId(jobId, response.get('MessageId'))

def getJob(event, context):
    # Look up a single job document by its ID.
    return jobRepo.getJob(event['jobId'])

def listDatasources(event, context):
    # List all known datasources.
    return dsRepo.listDatasources()

def getDatasource(event, context):
    # Look up one datasource by its ID.
    return dsRepo.getDatasource(event['datasourceId'])
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
jobRepo = btr3baseball.JobRepository(jobTable)
dsRepo = btr3baseball.DatasourceRepository()
def main(event, context):
    """Route an incoming API event to its handler.

    Returns the handler's result, or None for an unknown method.
    """
    method = event['method']
    if 'data' in event:
        data = event['data']
    else:
        data = None
    # Debug: log the incoming payload.
    print(data)
    if method == 'submitJob':
        return submitJob(data, context)
    elif method == 'getJob':
        return getJob(data, context)
    elif method == 'listDatasources':
        return listDatasources(data, context)
    elif method == 'getDatasource':
        return getDatasource(data, context)
    else:
        return None
def submitJob(event, context):
    """Create a job record, enqueue its ID on SQS and record the message ID."""
    # Put initial entry in dynamo db
    jobId = jobRepo.createJob(event)
    # Put the job ID on the SQS queue
    response = queue.send_message(MessageBody=jobId)
    # Update the DB entry with sqs message ID for traceability
    return jobRepo.updateWithMessageId(jobId, response.get('MessageId'))

def getJob(event, context):
    # Look up a single job document by its ID.
    return jobRepo.getJob(event['jobId'])

def listDatasources(event, context):
    # List all known datasources.
    return dsRepo.listDatasources()

def getDatasource(event, context):
    # Look up one datasource by its ID.
    return dsRepo.getDatasource(event['datasourceId'])
|
Add debug print of data
|
Add debug print of data
|
Python
|
apache-2.0
|
bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball
|
94ca5bc46570175a5b9577d64163cf691c787a8a
|
virtool/handlers/files.py
|
virtool/handlers/files.py
|
import os
import virtool.file
import virtool.utils
from virtool.handlers.utils import json_response, not_found
async def find(req):
    """List ready file documents, optionally filtered by ?type=."""
    db = req.app["db"]
    query = {
        "ready": True
    }
    file_type = req.query.get("type", None)
    if file_type:
        query["type"] = file_type
    cursor = db.files.find(query, virtool.file.PROJECTION)
    found_count = await cursor.count()
    # Only the first page (15 documents) is returned.
    documents = [virtool.file.processor(d) for d in await cursor.to_list(15)]
    return json_response({
        "documents": documents,
        "found_count": found_count
    })


async def remove(req):
    """Delete a file document and its on-disk file, then notify clients."""
    file_id = req.match_info["file_id"]
    file_path = os.path.join(req.app["settings"].get("data_path"), "files", file_id)
    delete_result = await req.app["db"].files.delete_one({"_id": file_id})
    # NOTE(review): the on-disk file is removed before deleted_count is
    # checked, so it is deleted even when no document existed — confirm
    # this ordering is intentional.
    virtool.utils.rm(file_path)
    if delete_result.deleted_count == 0:
        return not_found("Document does not exist")
    await req.app["dispatcher"].dispatch("files", "remove", [file_id])
    return json_response({
        "file_id": file_id,
        "removed": True
    })
|
import os
import virtool.file
import virtool.utils
from virtool.handlers.utils import json_response, not_found
async def find(req):
    """List ready file documents, optionally filtered by ?type=."""
    db = req.app["db"]
    query = {
        "ready": True
    }
    file_type = req.query.get("type", None)
    if file_type:
        query["type"] = file_type
    cursor = db.files.find(query, virtool.file.PROJECTION)
    found_count = await cursor.count()
    # Only the first page (15 documents) is returned.
    documents = [virtool.utils.base_processor(d) for d in await cursor.to_list(15)]
    return json_response({
        "documents": documents,
        "found_count": found_count
    })


async def remove(req):
    """Delete a file document and its on-disk file, then notify clients."""
    file_id = req.match_info["file_id"]
    file_path = os.path.join(req.app["settings"].get("data_path"), "files", file_id)
    delete_result = await req.app["db"].files.delete_one({"_id": file_id})
    # NOTE(review): the on-disk file is removed before deleted_count is
    # checked, so it is deleted even when no document existed — confirm
    # this ordering is intentional.
    virtool.utils.rm(file_path)
    if delete_result.deleted_count == 0:
        return not_found("Document does not exist")
    await req.app["dispatcher"].dispatch("files", "remove", [file_id])
    return json_response({
        "file_id": file_id,
        "removed": True
    })
|
Replace call to deprecated virtool.file.processor
|
Replace call to deprecated virtool.file.processor
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.