commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
1b479a607d068b51d342a15e3544ea198a88fbbd
|
wsgi/foodcheck_proj/urls.py
|
wsgi/foodcheck_proj/urls.py
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'foodcheck.views.home', name='home'),
# url(r'^foodcheck/', include('foodcheck.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
|
from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'foodcheck_app.views.home', name='home'),
# url(r'^foodcheck/', include('foodcheck.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
|
Update with name of app
|
Update with name of app
|
Python
|
agpl-3.0
|
esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck
|
d097f773260d06b898ab70e99596a07b056a7cb3
|
ccdproc/__init__.py
|
ccdproc/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The ccdproc package is a collection of code that will be helpful in basic CCD
processing. These steps will allow reduction of basic CCD data as either a
stand-alone processing or as part of a pipeline.
"""
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
# set up namespace, unless we are in setup...
if not _ASTROPY_SETUP_:
from .core import *
from .ccddata import *
from .combiner import *
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The ccdproc package is a collection of code that will be helpful in basic CCD
processing. These steps will allow reduction of basic CCD data as either a
stand-alone processing or as part of a pipeline.
"""
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
# set up namespace, unless we are in setup...
if not _ASTROPY_SETUP_:
from .core import *
from .ccddata import *
from .combiner import *
from .image_collection import *
|
Add ImageFileCollection to ccdproc namespace
|
Add ImageFileCollection to ccdproc namespace
|
Python
|
bsd-3-clause
|
indiajoe/ccdproc,mwcraig/ccdproc,astropy/ccdproc,evertrol/ccdproc,crawfordsm/ccdproc,pulsestaysconstant/ccdproc
|
16b21e6e3ddf0e26cb1412bffbe2be4acca1deb6
|
app/readers/basereader.py
|
app/readers/basereader.py
|
from lxml import etree
from app import formatting
def get_namespace_from_top(fn, key='xmlns'):
ac, el = next(etree.iterparse(fn))
return {'xmlns': el.nsmap[key]}
def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None):
"""
Calls xmltag generator for multiple files.
"""
# Deprecate?
for fn in input_files:
return generate_xmltags(fn, tag, ignore_tags, ns)
def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags):
"""
Creates stringified xml output of elements with certain tag.
"""
for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns):
yield formatting.string_and_clear(el, ns)
def generate_xmltags(fn, tag, ignore_tags, ns=None):
"""
Base generator for percolator xml psm, peptide, protein output,
as well as for mzML, mzIdentML.
ignore_tags are the ones that are not cleared when met by parser.
"""
if ns is None:
xmlns = ''
else:
xmlns = '{%s}' % ns['xmlns']
for ac, el in etree.iterparse(fn):
if el.tag == '{0}{1}'.format(xmlns, tag):
yield el
elif el.tag in ['{0}{1}'.format(xmlns, x) for x in
ignore_tags]:
formatting.clear_el(el)
|
from lxml import etree
import itertools
from app import formatting
def get_namespace_from_top(fn, key='xmlns'):
ac, el = next(etree.iterparse(fn))
return {'xmlns': el.nsmap[key]}
def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None):
"""
Calls xmltag generator for multiple files.
"""
return itertools.chain.from_iterable([generate_xmltags(
fn, tag, ignore_tags, ns) for fn in input_files])
def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags):
"""
Creates stringified xml output of elements with certain tag.
"""
for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns):
yield formatting.string_and_clear(el, ns)
def generate_xmltags(fn, tag, ignore_tags, ns=None):
"""
Base generator for percolator xml psm, peptide, protein output,
as well as for mzML, mzIdentML.
ignore_tags are the ones that are not cleared when met by parser.
"""
if ns is None:
xmlns = ''
else:
xmlns = '{%s}' % ns['xmlns']
for ac, el in etree.iterparse(fn):
if el.tag == '{0}{1}'.format(xmlns, tag):
yield el
elif el.tag in ['{0}{1}'.format(xmlns, x) for x in
ignore_tags]:
formatting.clear_el(el)
|
Return chained iterators instead of only first of multiple iterators
|
Return chained iterators instead of only first of multiple iterators
|
Python
|
mit
|
glormph/msstitch
|
7d8f291dea725c28e4d904a3195fde46a3418925
|
parafermions/tests/test_peschel_emery.py
|
parafermions/tests/test_peschel_emery.py
|
#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.2
pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10)
N, l = 8, 1.0
pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
|
#!/usr/bin/env python
"""
Test the MPS class
"""
import unittest
import numpy as np
import parafermions as pf
class Test(unittest.TestCase):
def test_pe_degeneracy(self):
# should initialise with all zeros
N, l = 8, 0.0
pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
# check that all eigenvalues are degenerate
assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10)
N, l = 8, 1.0
pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64'))
d, v = pe.Diagonalise(k=100)
# check only the ground state eigenvalues are degenerate
assert((d[1]-d[0]) < 1e-15)
assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2)
|
Update slicing so that array sizes match
|
Update slicing so that array sizes match
|
Python
|
bsd-2-clause
|
nmoran/pf_resonances
|
5bc1731288b76978fa66acab7387a688cea76b4c
|
wallabag/wallabag_add.py
|
wallabag/wallabag_add.py
|
"""
Module for adding new entries
"""
import re
import api
import conf
def add(target_url, title=None, star=False, read=False):
conf.load()
valid_url = False
if not re.compile("(?i)https?:\\/\\/.+").match(target_url):
for protocol in "https://", "http://":
if api.is_valid_url("{0}{1}".format(protocol, target_url)):
target_url = "{0}{1}".format(protocol, target_url)
valid_url = True
break
else:
valid_url = api.is_valid_url(target_url)
if not valid_url:
print("Error: Invalid url to add.")
print()
exit(-1)
try:
request = api.api_add_entry(target_url, title, star, read)
if(request.hasError()):
print("Error: {0} - {1}".format(request.error_text,
request.error_description))
exit(-1)
else:
print("Entry successfully added")
exit(0)
except api.OAuthException as e:
print("Error: {0}".format(e.text))
print()
exit(-1)
|
"""
Module for adding new entries
"""
import re
import api
import conf
import json
def add(target_url, title=None, star=False, read=False):
conf.load()
valid_url = False
if not re.compile("(?i)https?:\\/\\/.+").match(target_url):
for protocol in "https://", "http://":
if api.is_valid_url("{0}{1}".format(protocol, target_url)):
target_url = "{0}{1}".format(protocol, target_url)
valid_url = True
break
else:
valid_url = api.is_valid_url(target_url)
if not valid_url:
print("Error: Invalid url to add.")
print()
exit(-1)
try:
request = api.api_entry_exists(target_url)
if(request.hasError()):
print("Error: {0} - {1}".format(request.error_text,
request.error_description))
exit(-1)
response = json.loads(request.response)
print(response['exists'])
if response['exists'] == True:
print("The url was already saved.")
exit(0)
except api.OAuthException as e:
print("Error: {0}".format(e.text))
print()
exit(-1)
try:
request = api.api_add_entry(target_url, title, star, read)
if(request.hasError()):
print("Error: {0} - {1}".format(request.error_text,
request.error_description))
exit(-1)
else:
print("Entry successfully added")
exit(0)
except api.OAuthException as e:
print("Error: {0}".format(e.text))
print()
exit(-1)
|
Check if an anetry already exists before adding it
|
Check if an anetry already exists before adding it
|
Python
|
mit
|
Nepochal/wallabag-cli
|
5a09c6e9545373cece95f87ed28579f05959fced
|
tests/skip_check.py
|
tests/skip_check.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and item.funcargs.get('backend') is not None:
if not isinstance(item.funcargs['backend'], iface):
pytest.skip("Backend does not support {0}".format(name))
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import pytest
def skip_check(name, iface, item):
if name in item.keywords and "backend" in item.funcargs:
if not isinstance(item.funcargs["backend"], iface):
pytest.skip("{0} backend does not support {1}".format(
item.funcargs["backend"], name
))
|
Include teh name of the backend in the error message
|
Include teh name of the backend in the error message
|
Python
|
bsd-3-clause
|
Hasimir/cryptography,skeuomorf/cryptography,skeuomorf/cryptography,dstufft/cryptography,bwhmather/cryptography,skeuomorf/cryptography,Lukasa/cryptography,bwhmather/cryptography,Hasimir/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,Lukasa/cryptography,sholsapp/cryptography,Ayrx/cryptography,dstufft/cryptography,dstufft/cryptography,bwhmather/cryptography,glyph/cryptography,bwhmather/cryptography,kimvais/cryptography,kimvais/cryptography,Ayrx/cryptography,dstufft/cryptography,glyph/cryptography,skeuomorf/cryptography,Lukasa/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography
|
8e6aebf8cb96f5ccf4a119ab213c888a4c33a0d8
|
tests/testQuotas.py
|
tests/testQuotas.py
|
import json
import os
import sys
sys.path.append('..')
from skytap.Quotas import Quotas # noqa
quotas = Quotas()
def test_quota_count():
assert len(quotas) > 0
def test_quota_id():
for quota in quotas:
assert len(quota.id) > 0
def test_quota_usage():
for quota in quotas:
assert quota.usage > 0
def test_quota_units():
for quota in quotas:
assert len(quota.units) > 0
def test_quota_limit():
for quota in quotas:
if quota.limit is not None:
assert quota.usage <= quota.limit
assert quota.pct == quota.usage / quota.limit
def test_quota_time():
for quota in quotas:
if quota.units == 'hours':
assert quota.time.seconds > 0
def test_quota_str_conversion():
for quota in quotas:
assert len(str(quota)) > 0
|
# import json
# import os
# import sys
#
# sys.path.append('..')
# from skytap.Quotas import Quotas # noqa
#
# quotas = Quotas()
#
#
# def test_quota_count():
# assert len(quotas) > 0
#
#
# def test_quota_id():
# for quota in quotas:
# assert len(quota.id) > 0
#
#
# def test_quota_usage():
# for quota in quotas:
# assert quota.usage > 0
#
#
# def test_quota_units():
# for quota in quotas:
# assert len(quota.units) > 0
#
#
# def test_quota_limit():
# for quota in quotas:
# if quota.limit is not None:
# assert quota.usage <= quota.limit
# assert quota.pct == quota.usage / quota.limit
#
#
# def test_quota_time():
# for quota in quotas:
# if quota.units == 'hours':
# assert quota.time.seconds > 0
#
#
# def test_quota_str_conversion():
# for quota in quotas:
# assert len(str(quota)) > 0
|
Remove quota testing from notestest since API change == quotas broken
|
Remove quota testing from notestest since API change == quotas broken
|
Python
|
mit
|
FulcrumIT/skytap,mapledyne/skytap
|
737fa51dc31b315e554553fc5e3b971de663d0e5
|
blog/models.py
|
blog/models.py
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Post(models.Model):
title = models.CharField(max_length=63)
slug = models.SlugField()
text = models.TextField()
pub_date = models.DateField()
|
from django.db import models
from organizer.models import Startup, Tag
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Post(models.Model):
title = models.CharField(max_length=63)
slug = models.SlugField()
text = models.TextField()
pub_date = models.DateField()
tags = models.ManyToManyField(Tag)
startups = models.ManyToManyField(Startup)
|
Define Post model related fields.
|
Ch03: Define Post model related fields. [skip ci]
https://docs.djangoproject.com/en/1.8/ref/models/fields/#manytomanyfield
Blog Posts may be about multiple Startups, just as Startups may be
written about multiple times. Posts may also be categorized by multiple
Tags, just as Tags may be used multiple times to categorize different
Posts.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
f27e1ba885fb8ba5ce33ee84d76dc562bf51db70
|
netbox/netbox/__init__.py
|
netbox/netbox/__init__.py
|
from distutils.version import StrictVersion
from django.db import connection
# NetBox v2.2 and later requires PostgreSQL 9.4 or higher
with connection.cursor() as cursor:
cursor.execute("SELECT VERSION()")
row = cursor.fetchone()
pg_version = row[0].split()[1]
if StrictVersion(pg_version) < StrictVersion('9.4.0'):
raise Exception("PostgreSQL 9.4.0 or higher is required. ({} found)".format(pg_version))
|
Check that PostgreSQL is 9.4 or higher on initialization
|
Check that PostgreSQL is 9.4 or higher on initialization
|
Python
|
apache-2.0
|
digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox
|
|
64fb250967775c690e1ae6a7c43c562f4c94438b
|
tests/test_utils.py
|
tests/test_utils.py
|
from springfield_mongo.entities import Entity as MongoEntity
from springfield_mongo import utils
from springfield import fields
from bson.objectid import ObjectId
# This dummy class just used to have an extra attribute to verify during
# using the utils
class FooEntity(MongoEntity):
foo = fields.StringField()
def test_entity_to_mongo():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'monkey'
mongo_document = utils.entity_to_mongo(m)
assert '_id' in mongo_document
assert mongo_document['_id'] == i
assert 'foo' in mongo_document
assert mongo_document['foo'] == 'monkey'
def test_entity_from_mongo():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'gorilla'
mongo_document = utils.entity_to_mongo(m)
entity = utils.entity_from_mongo(FooEntity, mongo_document)
assert '_id' not in entity
assert 'id' in entity
assert entity['id'] == i
assert 'foo' in entity
assert entity['foo'] == 'gorilla'
def test_to_and_from_equality():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'giraffe'
mongo_document = utils.entity_to_mongo(m)
entity = utils.entity_from_mongo(FooEntity, mongo_document)
assert m == entity
mongo_document2 = utils.entity_to_mongo(entity)
assert mongo_document2 == mongo_document
|
from springfield_mongo import utils
from springfield_mongo.fields import ObjectIdField
from springfield import fields
from springfield import Entity
from bson.objectid import ObjectId
# This dummy class just used to have an extra attribute to verify during
# using the utils
class FooEntity(Entity):
id = ObjectIdField()
foo = fields.StringField()
def test_entity_to_mongo():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'monkey'
mongo_document = utils.entity_to_mongo(m)
assert '_id' in mongo_document
assert mongo_document['_id'] == i
assert 'foo' in mongo_document
assert mongo_document['foo'] == 'monkey'
def test_entity_from_mongo():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'gorilla'
mongo_document = utils.entity_to_mongo(m)
entity = utils.entity_from_mongo(FooEntity, mongo_document)
assert '_id' not in entity
assert 'id' in entity
assert entity['id'] == i
assert 'foo' in entity
assert entity['foo'] == 'gorilla'
def test_to_and_from_equality():
i = ObjectId()
m = FooEntity()
m.id = i
m.foo = 'giraffe'
mongo_document = utils.entity_to_mongo(m)
entity = utils.entity_from_mongo(FooEntity, mongo_document)
assert m == entity
mongo_document2 = utils.entity_to_mongo(entity)
assert mongo_document2 == mongo_document
|
Update tests to reflect removal of springfield_mongo Entity.
|
Update tests to reflect removal of springfield_mongo Entity.
|
Python
|
mit
|
six8/springfield-mongo
|
fb08c6cfe6b6295a9aca9e579a067f34ee1c69c2
|
test/get-gh-comment-info.py
|
test/get-gh-comment-info.py
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('ghcomment', type=str) # this is for test-me-please phrases
parser.add_argument('--focus', type=str, default="")
parser.add_argument('--kernel_version', type=str, default="")
parser.add_argument('--k8s_version', type=str, default="")
parser.add_argument('--retrieve', type=str, default="focus")
args = parser.parse_args()
print(args.__dict__[args.retrieve])
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('ghcomment', type=str) # this is for test-me-please phrases
parser.add_argument('--focus', type=str, default="")
parser.add_argument('--kernel_version', type=str, default="")
parser.add_argument('--k8s_version', type=str, default="")
parser.add_argument('--retrieve', type=str, default="focus")
args = parser.parse_args()
# Update kernel_version to expected format
args.kernel_version = args.kernel_version.replace('.', '')
if args.kernel_version == "netnext":
args.kernel_version = "net-next"
print(args.__dict__[args.retrieve])
|
Format test-only's kernel_version to avoid mistakes
|
test: Format test-only's kernel_version to avoid mistakes
I often try to start test-only builds with e.g.:
test-only --kernel_version=4.19 --focus="..."
That fails because our tests expect "419". We can extend the Python
script used to parse argument to recognize that and update
kernel_version to the expected format.
Signed-off-by: Paul Chaignon <[email protected]>
|
Python
|
apache-2.0
|
cilium/cilium,tklauser/cilium,tgraf/cilium,tklauser/cilium,michi-covalent/cilium,tklauser/cilium,cilium/cilium,tgraf/cilium,cilium/cilium,michi-covalent/cilium,tgraf/cilium,tgraf/cilium,michi-covalent/cilium,michi-covalent/cilium,tgraf/cilium,cilium/cilium,tklauser/cilium,michi-covalent/cilium,tklauser/cilium,cilium/cilium,tgraf/cilium
|
4fc109c93daa3a5d39a184cd692ac7c6b19b9fab
|
simpleflow/swf/process/worker/dispatch/dynamic_dispatcher.py
|
simpleflow/swf/process/worker/dispatch/dynamic_dispatcher.py
|
# -*- coding: utf-8 -*-
import importlib
from simpleflow.activity import Activity
from .exceptions import DispatchError
class Dispatcher(object):
"""
Dispatch by name, like simpleflow.swf.process.worker.dispatch.by_module.ModuleDispatcher
but without a hierarchy.
"""
@staticmethod
def dispatch_activity(name):
"""
:param name:
:type name: str
:return:
:rtype: Activity
:raise DispatchError: if doesn't exist or not an activity
"""
module_name, activity_name = name.rsplit('.', 1)
module = importlib.import_module(module_name)
activity = getattr(module, activity_name, None)
if not activity:
raise DispatchError("unable to import '{}'".format(name))
if not isinstance(activity, Activity):
activity = Activity(activity, activity_name)
return activity
|
# -*- coding: utf-8 -*-
import importlib
from simpleflow.activity import Activity
from .exceptions import DispatchError
class Dispatcher(object):
"""
Dispatch by name, like simpleflow.swf.process.worker.dispatch.by_module.ModuleDispatcher
but without a hierarchy.
"""
@staticmethod
def dispatch_activity(name):
"""
:param name:
:type name: str
:return:
:rtype: Activity
:raise DispatchError: if doesn't exist or not an activity
"""
module_name, activity_name = name.rsplit('.', 1)
module = importlib.import_module(module_name)
activity = getattr(module, activity_name, None)
if not activity:
# We were not able to import a function at all.
raise DispatchError("unable to import '{}'".format(name))
if not isinstance(activity, Activity):
# We managed to import a function (or callable) but it's not an
# "Activity". We will transform it into an Activity now. That way
# we can accept functions that are *not* decorated with
# "@activity.with_attributes()" or equivalent. This dispatcher is
# used in the context of an activity worker, so we don't actually
# care if the task is decorated or not. We only need the decorated
# function for the decider (options to schedule, retry, fail, etc.).
activity = Activity(activity, activity_name)
return activity
|
Add comment to explain the choice in dynamic dispatcher
|
Add comment to explain the choice in dynamic dispatcher
|
Python
|
mit
|
botify-labs/simpleflow,botify-labs/simpleflow
|
85897c2bf4e4e9c89db6111894879d18fef577dd
|
app.tmpl/__init__.py
|
app.tmpl/__init__.py
|
# Main application file
#
# Copyright (c) 2015, Alexandre Hamelin <alexandre.hamelin gmail.com>
from flask import Flask
app = Flask(__name__)
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
# Main application file
#
# Copyright (c) 2015, Alexandre Hamelin <alexandre.hamelin gmail.com>
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
Use the login manager and set a default app secret key
|
Use the login manager and set a default app secret key
|
Python
|
mit
|
0xquad/flask-app-template,0xquad/flask-app-template,0xquad/flask-app-template
|
4f45e55e5b0e14cf6bf32b42a14cbdf9b3c08258
|
dbus_notify.py
|
dbus_notify.py
|
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
if is_string(icon):
# File path
icon_file = icon
else:
icon_file = ""
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
from cgi import escape
import dbus
from utils import is_string
ITEM = "org.freedesktop.Notifications"
PATH = "/org/freedesktop/Notifications"
INTERFACE = "org.freedesktop.Notifications"
APP_NAME = "mpd-hiss"
def dbus_raw_image(im):
"""Convert image for DBUS"""
raw = im.tobytes("raw", "RGBA")
alpha, bps, channels = 0, 8, 4
stride = channels * im.size[0]
return (im.size[0], im.size[1], stride, alpha, bps, channels,
dbus.ByteArray(raw))
def native_load_image(image):
return image
def notify(title, description, icon):
actions = ""
hint = {"suppress-sound": True, "urgency": 0}
time = 5000
icon_file = ""
if is_string(icon):
# File path
icon_file = icon
elif icon:
# Not all notifiers support this
# Some require "icon" and an image on disk
hint["icon_data"] = dbus_raw_image(icon)
bus = dbus.SessionBus()
notif = bus.get_object(ITEM, PATH)
notify = dbus.Interface(notif, INTERFACE)
notify.Notify(APP_NAME, 1, icon_file, title, escape(description), actions,
hint, time)
|
Make sure we do not try to convert None
|
Make sure we do not try to convert None
|
Python
|
cc0-1.0
|
hellhovnd/mpd-hiss,ahihi/mpd-hiss
|
6b4ec52a3fa6fdbb4f70f9d24904bc978341150c
|
nagare/admin/info.py
|
nagare/admin/info.py
|
#--
# Copyright (c) 2008, 2009 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
"""The ``info`` administrative command
Display informations about the framework environment
"""
import sys
from nagare.admin import util
class Info(util.Command):
"""Display informations about the framework environment"""
desc = 'Display various informations'
@staticmethod
def run(parser, options, args):
"""Display the informations
In:
- ``parser`` -- the optparse.OptParser object used to parse the configuration file
- ``options`` -- options in the command lines
- ``args`` -- arguments in the command lines
"""
# For the moment, just diplay the Python version
print sys.version
|
#--
# Copyright (c) 2008, 2009 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
"""The ``info`` administrative command
Display informations about the framework environment
"""
import sys, pkg_resources
from nagare.admin import util
class Info(util.Command):
"""Display informations about the framework environment"""
desc = 'Display various informations'
@staticmethod
def run(parser, options, args):
"""Display the informations
In:
- ``parser`` -- the optparse.OptParser object used to parse the configuration file
- ``options`` -- options in the command lines
- ``args`` -- arguments in the command lines
"""
# For the moment, just diplay the Python version
print sys.version
print
print 'Nagare version', pkg_resources.get_distribution('nagare').version
|
Print the Nagare version number
|
Print the Nagare version number
--HG--
extra : convert_revision : svn%3Afc25bd86-f976-46a1-be41-59ef0291ea8c/trunk%4076
|
Python
|
bsd-3-clause
|
nagareproject/core,nagareproject/core
|
9fffcafca0f611cfcbbf3e80435c250f43a0c68b
|
tests/dataretrival_tests.py
|
tests/dataretrival_tests.py
|
import unittest
from bluefin.dataretrieval.clients import V1Client
from bluefin.dataretrieval.exceptions import V1ClientInputException, V1ClientProcessingException
from tests.api_details import API_DETAILS, TEST_CARD
class TransactionReportingTest(unittest.TestCase):
"""
Tests for transaction reporting API calls.
"""
def test_basic(self):
"""
Test a basic successful API call.
"""
api = V1Client()
api.send_request({
'transactions_after': '2006-12-30',
'account_id': API_DETAILS['account_id'],
'authorization': 'qRdNQK0lkc7vwHP2h6mm',
})
def test_no_input(self):
api = V1Client()
self.assertRaises(V1ClientInputException, api.send_request, {})
|
import unittest
from bluefin.dataretrieval.clients import V1Client
from bluefin.dataretrieval.exceptions import V1ClientInputException, V1ClientProcessingException
from tests.api_details import API_DETAILS, TEST_CARD
class TransactionReportingTest(unittest.TestCase):
"""
Tests for transaction reporting API calls.
"""
def test_basic(self):
"""
Test a basic successful API call.
"""
api = V1Client()
result = api.send_request({
'transactions_after': '2006-12-30',
'account_id': API_DETAILS['account_id'],
'authorization': 'LIGONIER_DOT_ORG',
})
print result
def test_no_input(self):
api = V1Client()
self.assertRaises(V1ClientInputException, api.send_request, {})
|
Use the new auth keyword set by Ligonier. We're working now.
|
Use the new auth keyword set by Ligonier. We're working now.
|
Python
|
bsd-3-clause
|
duointeractive/python-bluefin
|
60a10e8fbfd40197db8226f0791c7064c80fe370
|
run.py
|
run.py
|
import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
|
import sys
import os
import argparse
import shutil
from efselab import build
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
parser.add_argument('--update', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
elif args.update:
if not os.path.exists("../efselab/"):
sys.exit("Couldn't find a local efselab checkout...")
shutil.copy("../efselab/fasthash.c", "./efselab")
shutil.copy("../efselab/lemmatize.c", "./efselab")
shutil.copy("../efselab/pysuc.c", "./efselab/suc.c")
if not os.path.exists("../efselab/swe-pipeline"):
sys.exit("Couldn't find a local swe-pipeline directory for models...")
shutil.copy("../efselab/swe-pipeline/suc.bin", "./efselab")
shutil.copy("../efselab/swe-pipeline/suc-saldo.lemmas", "./efselab")
print("Building new files...")
os.chdir("efselab")
build.main()
|
Add new update command that updates efselab dependencies.
|
Add new update command that updates efselab dependencies.
Former-commit-id: 6cfed1b9af9c0bbf34b7e58e3aa8ac3bada85aa7
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
073dd8529c95f44d7d250508dd10b8ffc8208926
|
two_factor/migrations/0003_auto_20150817_1733.py
|
two_factor/migrations/0003_auto_20150817_1733.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import two_factor.models
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0002_auto_20150110_0810'),
]
operations = [
migrations.AlterField(
model_name='phonedevice',
name='number',
field=two_factor.models.PhoneNumberField(max_length=16, verbose_name='number'),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.db import models, migrations
import phonenumbers
import two_factor.models
logger = logging.getLogger(__name__)
def migrate_phone_numbers(apps, schema_editor):
PhoneDevice = apps.get_model("two_factor", "PhoneDevice")
for device in PhoneDevice.objects.all():
try:
number = phonenumbers.parse(device.number)
if not phonenumbers.is_valid_number(number):
logger.info("User '%s' has an invalid phone number '%s'." % (device.user.username, device.number))
device.number = phonenumbers.format_number(number, phonenumbers.PhoneNumberFormat.E164)
device.save()
except phonenumbers.NumberParseException as e:
# Do not modify/delete the device, as it worked before. However this might result in issues elsewhere,
# so do log a warning.
logger.warning("User '%s' has an invalid phone number '%s': %s. Please resolve this issue, "
"as it might result in errors." % (device.user.username, device.number, e))
class Migration(migrations.Migration):
dependencies = [
('two_factor', '0002_auto_20150110_0810'),
]
operations = [
migrations.RunPython(migrate_phone_numbers, reverse_code=lambda apps, schema_editor: None),
migrations.AlterField(
model_name='phonedevice',
name='number',
field=two_factor.models.PhoneNumberField(max_length=16, verbose_name='number'),
),
]
|
Migrate phone numbers to E.164 format
|
Migrate phone numbers to E.164 format
|
Python
|
mit
|
koleror/django-two-factor-auth,Bouke/django-two-factor-auth,koleror/django-two-factor-auth,Bouke/django-two-factor-auth
|
ac664513eb1e99bc7aad9dda70a155e25fcff084
|
tests/services/shop/order/test_models_order_payment_state.py
|
tests/services/shop/order/test_models_order_payment_state.py
|
"""
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_open():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
"""
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.shop.order.models.order import PaymentState
from testfixtures.shop_order import create_order
from testfixtures.user import create_user
def test_is_open():
payment_state = PaymentState.open
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == True
assert order.is_canceled == False
assert order.is_paid == False
def test_is_canceled():
payment_state = PaymentState.canceled
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == True
assert order.is_paid == False
def test_is_paid():
payment_state = PaymentState.paid
order = create_order_with_payment_state(payment_state)
assert order.payment_state == payment_state
assert order.is_open == False
assert order.is_canceled == False
assert order.is_paid == True
# helpers
def create_order_with_payment_state(payment_state):
user = create_user(42)
party_id = 'acme-party-2016'
placed_by = user
order = create_order(party_id, placed_by)
order.payment_state = payment_state
return order
|
Fix overshadowed tests by giving test functions unique names
|
Fix overshadowed tests by giving test functions unique names
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
32687e078a50315baa88a9854efea5bb1ca65532
|
Cython/Compiler/Future.py
|
Cython/Compiler/Future.py
|
def _get_feature(name):
import __future__
# fall back to a unique fake object for earlier Python versions or Python 3
return getattr(__future__, name, object())
unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement")
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
nested_scopes = _get_feature("nested_scopes") # dummy
generators = _get_feature("generators") # dummy
generator_stop = _get_feature("generator_stop")
del _get_feature
|
def _get_feature(name):
import __future__
# fall back to a unique fake object for earlier Python versions or Python 3
return getattr(__future__, name, object())
unicode_literals = _get_feature("unicode_literals")
with_statement = _get_feature("with_statement") # dummy
division = _get_feature("division")
print_function = _get_feature("print_function")
absolute_import = _get_feature("absolute_import")
nested_scopes = _get_feature("nested_scopes") # dummy
generators = _get_feature("generators") # dummy
generator_stop = _get_feature("generator_stop")
del _get_feature
|
Mark the "with_statement" __future__ feature as no-op since it's always on.
|
Mark the "with_statement" __future__ feature as no-op since it's always on.
|
Python
|
apache-2.0
|
da-woods/cython,scoder/cython,cython/cython,scoder/cython,scoder/cython,da-woods/cython,da-woods/cython,cython/cython,cython/cython,cython/cython,da-woods/cython,scoder/cython
|
9545c2d78696d7f75299d958cf44f8cf695581ac
|
DGEclust/readCountData.py
|
DGEclust/readCountData.py
|
## Copyright (C) 2012-2013 Dimitrios V. Vavoulis
## Computational Genomics Group (http://bioinformatics.bris.ac.uk/)
## Department of Computer Science
## University of Bristol
################################################################################
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().astype('double') #df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
## Copyright (C) 2012-2013 Dimitrios V. Vavoulis
## Computational Genomics Group (http://bioinformatics.bris.ac.uk/)
## Department of Computer Science
## University of Bristol
################################################################################
import numpy as np
import pandas as pd
################################################################################
def readCountData(fname, classes = None, *args, **kargs):
df = pd.read_table(fname, *args, **kargs)
## add attributes
df.counts = df.values
df.exposures = df.sum() / df.sum().max().astype('double')
df.samples = df.columns
df.genes = df.index
## classes
if classes is None:
df.classes = np.arange(df.samples.size).astype('str')
else:
df.classes = classes
return df
################################################################################
|
Normalize by the size of the library
|
Normalize by the size of the library
|
Python
|
mit
|
dvav/dgeclust
|
6c31af53cdc16d9f9cb3b643e9d7f0fee14cbc85
|
__main__.py
|
__main__.py
|
#--coding:utf-8--
from __init__ import *
import json
import Queue
open('Chinese.bak.json', 'ab').write('[')
open('Foreigner.bak.json', 'ab').write('[')
open('Student.bak.json', 'ab').write('[')
Output = open('result.json', 'wb')
TaskQueue = Queue.Queue(maxsize = 0)
downloader = Downloader(TaskQueue)
downloader.start()
colony = Colony(Spider, InfoExtracter,
Output, TaskQueue, json.load(open('RegularExpression.json')), './Icon')
colony.Push(('thelyad', 'username', ))
colony.Push(('100000965387047', 'uid', ))
colony.SpiderInit()
try:
print "Info: Start Colony.Manage()"
colony.Manage()
finally:
colony.End()
downloader.stop()
|
#--coding:utf-8--
from __init__ import *
import json
import Queue
open('Chinese.bak.json', 'ab').write('[')
open('Foreigner.bak.json', 'ab').write('[')
open('Student.bak.json', 'ab').write('[')
Output = open('result.json', 'wb')
TaskQueue = Queue.Queue(maxsize = 0)
downloader = Downloader(TaskQueue)
downloader.start()
colony = Colony(Spider, InfoExtracter,
Output, TaskQueue, json.load(open('RegularExpression.json')), './Icon')
colony.Push(('thelyad', 'username', ))
colony.Push(('100000965387047', 'uid', ))
colony.SpiderInit()
try:
print "Info: Start Colony.Manage()"
colony.Manage()
except KeyboardInterrupt:
pass
|
Fix Bugs: Cannot Backup Scan Task Queue
|
Fix Bugs: Cannot Backup Scan Task Queue
|
Python
|
mit
|
nday-dev/FbSpider
|
63d989821040b5b57f6c1076dd5665d1651b30bb
|
dallinger/transformations.py
|
dallinger/transformations.py
|
"""
Define custom transformations.
See class Transformation in models.py for the base class Transformation. This
file stores a list of all the subclasses of Transformation made available by
default. Note that they don't necessarily tell you anything about the nature
in which two Info's relate to each other, but if used sensibly they will do so.
"""
from models import Transformation
class Replication(Transformation):
"""An instance of one info being identically copied into another."""
__mapper_args__ = {
"polymorphic_identity": "replication"
}
class Mutation(Transformation):
"""An instance of one info being tranformed into another + mutations."""
__mapper_args__ = {
"polymorphic_identity": "mutation"
}
class Compression(Transformation):
"""An instance of one info being compressed into another."""
__mapper_args__ = {
"polymorphic_identity": "compression"
}
class Response(Transformation):
"""An instance of one info being a response to another."""
__mapper_args__ = {
"polymorphic_identity": "response"
}
|
"""
Define custom transformations.
See class Transformation in models.py for the base class Transformation. This
file stores a list of all the subclasses of Transformation made available by
default. Note that they don't necessarily tell you anything about the nature
in which two Info's relate to each other, but if used sensibly they will do so.
"""
from .models import Transformation
class Replication(Transformation):
"""An instance of one info being identically copied into another."""
__mapper_args__ = {
"polymorphic_identity": "replication"
}
class Mutation(Transformation):
"""An instance of one info being tranformed into another + mutations."""
__mapper_args__ = {
"polymorphic_identity": "mutation"
}
class Compression(Transformation):
"""An instance of one info being compressed into another."""
__mapper_args__ = {
"polymorphic_identity": "compression"
}
class Response(Transformation):
"""An instance of one info being a response to another."""
__mapper_args__ = {
"polymorphic_identity": "response"
}
|
Fix relative import of models
|
Fix relative import of models
|
Python
|
mit
|
jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger
|
e19cee4b47d296967286a7f065f363f1e64e58f6
|
linter.py
|
linter.py
|
from SublimeLinter.lint import PythonLinter
class Pyflakes(PythonLinter):
cmd = 'pyflakes'
regex = r'''(?x)
^(?P<filename>[^:\n]+):(?P<line>\d+):((?P<col>\d+):)?\s
# The rest of the line is the error message.
# Within that, capture anything within single quotes as `near`.
(?P<message>[^\'\n\r]*(?P<near>\'.+?\')?.*)
'''
multiline = True
# stderr has all syntax errors, parse it via our regex
on_stderr = None
defaults = {
'selector': 'source.python'
}
|
from SublimeLinter.lint import PythonLinter
import re
class Pyflakes(PythonLinter):
cmd = 'pyflakes'
regex = r'''(?x)
^(?P<filename>[^:\n]+):(?P<line>\d+):((?P<col>\d+):)?\s
# The rest of the line is the error message.
# Within that, capture anything within single quotes as `near`.
(?P<message>[^\'\n\r]*(?P<near>\'.+?\')?.*)
'''
multiline = True
# stderr has all syntax errors, parse it via our regex
on_stderr = None
defaults = {
'selector': 'source.python'
}
def reposition_match(self, line, col, match, vv):
if 'imported but unused' in match.message:
# Consider:
# from foo import bar
# import foo.bar
# In both cases `pyflakes` reports `'foo.bar' ... unused`.
import_id = re.escape(match.near[1:-1]) # unquote
last_part = import_id.split('.')[-1]
# So we match either `bar` or `foo.bar` against the line content
text = vv.select_line(line)
pattern = r"\s({}|{})".format(last_part, import_id)
match = re.search(pattern, text)
if match:
return line, match.start(1), match.end(1)
return super().reposition_match(line, col, match, vv)
|
Improve col reporting for unused imports
|
Improve col reporting for unused imports
|
Python
|
mit
|
SublimeLinter/SublimeLinter-pyflakes
|
1f47381705e7115e6e466fb625fbb925fbd503e2
|
birdy/dependencies.py
|
birdy/dependencies.py
|
# -*- coding: utf-8 -*-
"""
This module is used to manage optional dependencies.
Example usage::
from birdy.dependencies import ipywidgets as widgets
"""
import warnings
from .exceptions import IPythonWarning
# TODO: we ignore warnings for now. They are only needed when birdy is used in a notebook,
# but we currently don't know how to handle this (see #89 and #138).
warnings.filterwarnings('ignore', category=IPythonWarning)
try:
import ipywidgets
except ImportError:
ipywidgets = None
warnings.warn('Jupyter Notebook is not supported. Please install *ipywidgets*.', IPythonWarning)
try:
import IPython
except ImportError:
IPython = None
warnings.warn('IPython is not supported. Please install *ipython*.', IPythonWarning)
try:
import ipyleaflet # noqa: F401
except ImportError:
ipyleaflet = None
warnings.warn('Ipyleaflet is not supported. Please install *ipyleaflet*.', IPythonWarning)
|
# -*- coding: utf-8 -*-
"""
This module is used to manage optional dependencies.
Example usage::
from birdy.dependencies import ipywidgets as widgets
"""
import warnings
from .exceptions import IPythonWarning
# TODO: we ignore warnings for now. They are only needed when birdy is used in a notebook,
# but we currently don't know how to handle this (see #89 and #138).
warnings.filterwarnings('ignore', category=IPythonWarning)
try:
import ipywidgets
except ImportError:
ipywidgets = None
warnings.warn('Jupyter Notebook is not supported. Please install *ipywidgets*.', IPythonWarning)
try:
import IPython
except ImportError:
IPython = None
warnings.warn('IPython is not supported. Please install *ipython*.', IPythonWarning)
try:
import ipyleaflet
except ImportError:
ipyleaflet = None
warnings.warn('Ipyleaflet is not supported. Please install *ipyleaflet*.', IPythonWarning)
|
Undo change that did not fix issue
|
Undo change that did not fix issue
|
Python
|
apache-2.0
|
bird-house/birdy
|
aac08ae7dbfa8542210664922b8857de0b185b6f
|
apps/bluebottle_utils/tests.py
|
apps/bluebottle_utils/tests.py
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
def generate_username():
return str(uuid.uuid4())[:30]
# If username is set and not unique, it will raise a clearly
# interpretable IntegrityError.
# If auto-generated, make sure it's unique.
if not username:
username = generate_username()
while User.objects.filter(username=username).exists():
username = generate_username()
user = User.objects.create_user(username=username)
return user
|
Fix bug in username uniqueness.
|
Fix bug in username uniqueness.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
d49d383c62233036d4195d71ba4fda78ff2278de
|
distarray/core/tests/test_distributed_array_protocol.py
|
distarray/core/tests/test_distributed_array_protocol.py
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.arr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_export(self):
self.assertIsInstance(self.arr, da.LocalArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.arr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.arr, '__distarray__'))
def test_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.arr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Improve basic checks of distarray export.
|
Improve basic checks of distarray export.
|
Python
|
bsd-3-clause
|
enthought/distarray,RaoUmer/distarray,RaoUmer/distarray,enthought/distarray
|
bc78bf85442b0ffb7962a1c9c4a3560a0fd1960d
|
skimage/io/_plugins/matplotlib_plugin.py
|
skimage/io/_plugins/matplotlib_plugin.py
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
Create a new figure for imshow if there is already data
|
Create a new figure for imshow if there is already data
|
Python
|
bsd-3-clause
|
keflavich/scikit-image,GaZ3ll3/scikit-image,pratapvardhan/scikit-image,jwiggins/scikit-image,oew1v07/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,paalge/scikit-image,paalge/scikit-image,michaelaye/scikit-image,warmspringwinds/scikit-image,GaZ3ll3/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,blink1073/scikit-image,Britefury/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,robintw/scikit-image,paalge/scikit-image,keflavich/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,ClinicalGraphics/scikit-image,newville/scikit-image,ClinicalGraphics/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,juliusbierk/scikit-image,juliusbierk/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,oew1v07/scikit-image,Midafi/scikit-image,vighneshbirodkar/scikit-image,emon10005/scikit-image,rjeli/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,dpshelio/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,chriscrosscutler/scikit-image,youprofit/scikit-image,emon10005/scikit-image,blink1073/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,michaelpacer/scikit-image,jwiggins/scikit-image
|
7b9b1a7bb7f9e48e466bd00b3edffc67be841b4e
|
pavement.py
|
pavement.py
|
import os.path
from paver.easy import sh, task
config = """# replace pass with values you would like to overwrite from DefaultConfig in
# default_config.py. Values you do not explicitly overwrite will be inherited
# from DefaultConfig. At the very least, you must set secret_key and
# tmdb_api_key.
from default_config import DefaultConfig
class Config(DefaultConfig):
pass
"""
@task
def setup():
"""Writes a default config to config.py"""
if not os.path.isfile('config.py'):
print('Writing default config.')
f = open('config.py', 'w')
f.write(config)
f.close()
else:
print('Config file already exists, will not overwrite.')
@task
def lint():
"""Checks code quality using flake8"""
sh("flake8 --max-line-length=120 --max-complexity=10 .")
@task
def run_tests():
"""Run unit tests"""
sh("./manage.py test")
@task
def check_source():
"""Identify any potential problems with code"""
sh("./manage.py check")
@task
def inspect():
"""Inspects project source for a variety of problems"""
lint()
check_source()
run_tests()
|
import os.path
import shutil
from paver.easy import sh, task
config = """# replace pass with values you would like to overwrite from DefaultConfig in
# default_config.py. Values you do not explicitly overwrite will be inherited
# from DefaultConfig. At the very least, you must set secret_key and
# tmdb_api_key.
from default_config import DefaultConfig
class Config(DefaultConfig):
pass
"""
@task
def apply_hooks():
"""Copies hooks from git_hooks folder into .git/hooks"""
os.chdir('git_hooks')
for item in os.listdir('.'):
if os.path.isfile(item):
print('Applying hook: ' + item)
shutil.copyfile(item, '../.git/hooks/' + item)
@task
def make_hooks_executable():
os.chdir('.git/hooks')
for item in os.listdir('.'):
if os.path.isfile(item):
sh("chmod +x " + item)
@task
def setup():
"""Writes a default config to config.py"""
if not os.path.isfile('config.py'):
print('Writing default config.')
f = open('config.py', 'w')
f.write(config)
f.close()
else:
print('Config file already exists, will not overwrite.')
@task
def lint():
"""Checks code quality using flake8"""
sh("flake8 --max-line-length=120 --max-complexity=10 .")
@task
def run_tests():
"""Run unit tests"""
sh("./manage.py test")
@task
def check_source():
"""Identify any potential problems with code"""
sh("./manage.py check")
@task
def inspect():
"""Inspects project source for a variety of problems"""
lint()
check_source()
run_tests()
|
Add some git hook related tasks to paver file
|
Add some git hook related tasks to paver file
|
Python
|
mit
|
simon-andrews/movieman2,simon-andrews/movieman2
|
1c3bffed864fab3163244486441f08fba00b1a65
|
fireplace/cards/gvg/warlock.py
|
fireplace/cards/gvg/warlock.py
|
from ..utils import *
##
# Minions
# Mistress of Pain
class GVG_018:
events = [
Damage().on(
lambda self, source, target, amount: source is self and [Heal(FRIENDLY_HERO, amount)] or []
)
]
# Fel Cannon
class GVG_020:
events = [
OWN_TURN_END.on(Hit(RANDOM(ALL_MINIONS - MECH), 2))
]
# Anima Golem
class GVG_077:
events = [
TURN_END.on(
lambda self, player: self.controller.field != [self] and [Destroy(SELF)] or []
)
]
# Floating Watcher
class GVG_100:
events = [
Damage(FRIENDLY_HERO).on(
lambda self, target, amount, source: self.controller.current_player and [Buff(SELF, "GVG_100e")] or []
)
]
##
# Spells
# Darkbomb
class GVG_015:
action = [Hit(TARGET, 3)]
# Demonheart
class GVG_019:
def action(self, target):
if target.controller == self.controller and target.race == Race.DEMON:
return [Buff(TARGET, "GVG_019e")]
else:
return [Hit(TARGET, 5)]
|
from ..utils import *
##
# Minions
# Mistress of Pain
class GVG_018:
events = [
Damage().on(
lambda self, target, amount, source: source is self and [Heal(FRIENDLY_HERO, amount)] or []
)
]
# Fel Cannon
class GVG_020:
events = [
OWN_TURN_END.on(Hit(RANDOM(ALL_MINIONS - MECH), 2))
]
# Anima Golem
class GVG_077:
events = [
TURN_END.on(
lambda self, player: self.controller.field != [self] and [Destroy(SELF)] or []
)
]
# Floating Watcher
class GVG_100:
events = [
Damage(FRIENDLY_HERO).on(
lambda self, target, amount, source: self.controller.current_player and [Buff(SELF, "GVG_100e")] or []
)
]
##
# Spells
# Darkbomb
class GVG_015:
action = [Hit(TARGET, 3)]
# Demonheart
class GVG_019:
def action(self, target):
if target.controller == self.controller and target.race == Race.DEMON:
return [Buff(TARGET, "GVG_019e")]
else:
return [Hit(TARGET, 5)]
|
Fix argument ordering in Mistress of Pain
|
Fix argument ordering in Mistress of Pain
Fixes #71
|
Python
|
agpl-3.0
|
amw2104/fireplace,smallnamespace/fireplace,liujimj/fireplace,Meerkov/fireplace,NightKev/fireplace,oftc-ftw/fireplace,butozerca/fireplace,jleclanche/fireplace,Ragowit/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,beheh/fireplace,butozerca/fireplace,Meerkov/fireplace,amw2104/fireplace,liujimj/fireplace,smallnamespace/fireplace
|
843b4c4c0ec7176f4b60fc9d39e7a033c2d4ef7d
|
utils/crypto.py
|
utils/crypto.py
|
import hashlib
import os
import string
import random
from django.conf import settings
def hasher(string):
'''Helper method to hash a string to SHA512'''
h = hashlib.sha512(settings.SECRET_KEY + string).hexdigest()
for _ in range(settings.HASH_PASSES):
h = hashlib.sha512(h).hexdigest()
return h
def get_random_string(length, set=string.ascii_letters+string.digits):
'''Gets a random string'''
return ''.join(random.choice(set) for _ in range(length))
|
import hashlib
import os
import string
import random
from django.conf import settings
def hasher(string):
'''Helper method to hash a string to SHA512'''
h = hashlib.sha512(settings.SECRET_KEY + string.encode("utf-8")).hexdigest()
for _ in range(settings.HASH_PASSES):
h = hashlib.sha512(h).hexdigest()
return h
def get_random_string(length, set=string.ascii_letters+string.digits):
'''Gets a random string'''
return ''.join(random.choice(set) for _ in range(length))
|
Make the password hashing unicode safe.
|
Make the password hashing unicode safe.
|
Python
|
bsd-3-clause
|
AeroNotix/django-timetracker,AeroNotix/django-timetracker,AeroNotix/django-timetracker
|
7403e79c9e3cccc7ea97e61915ec01c2176c0f57
|
tests/test_heroku.py
|
tests/test_heroku.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import mock
from dallinger.config import get_config
from dallinger.heroku import app_name
class TestHeroku(object):
def test_heroku_app_name(self):
id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0"
assert(len(app_name(id)) < 30)
class TestHerokuClock(object):
def test_check_db_for_missing_notifications_assembles_resources(self):
os.chdir('tests/experiment')
config = get_config()
if not config.ready:
config.load_config()
# Can't import until after config is loaded:
from dallinger.heroku.clock import check_db_for_missing_notifications
runner = 'dallinger.heroku.clock._run_notifications_check'
with mock.patch(runner) as mock_runner:
check_db_for_missing_notifications()
mock_runner.assert_called()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import mock
import pytest
import dallinger.db
from dallinger.config import get_config
from dallinger.heroku import app_name
@pytest.fixture
def setup():
db = dallinger.db.init_db(drop_all=True)
os.chdir('tests/experiment')
config = get_config()
if not config.ready:
config.load_config()
yield config
db.rollback()
db.close()
os.chdir('../..')
class TestHeroku(object):
def test_heroku_app_name(self):
id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0"
assert(len(app_name(id)) < 30)
class TestHerokuClock(object):
def test_check_db_for_missing_notifications_assembles_resources(self, setup):
# Can't import until after config is loaded:
from dallinger.heroku.clock import check_db_for_missing_notifications
with mock.patch.multiple('dallinger.heroku.clock',
_run_notifications_check=mock.DEFAULT,
MTurkConnection=mock.DEFAULT) as mocks:
mocks['MTurkConnection'].return_value = 'fake connection'
check_db_for_missing_notifications()
mocks['_run_notifications_check'].assert_called()
|
Allow test to run without MTurk/AWS credentials configured, and defend against other tests which don’t clean up database
|
Allow test to run without MTurk/AWS credentials configured, and defend against other tests which don’t clean up database
|
Python
|
mit
|
Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger
|
825c83697b3453644a6ec699d653ba0d4bd5d790
|
pymanopt/tools/autodiff/_autograd.py
|
pymanopt/tools/autodiff/_autograd.py
|
"""
Module containing functions to differentiate functions using autograd.
"""
import autograd.numpy as np
from autograd.core import grad
from ._backend import Backend
def _hessian_vector_product(fun, argnum=0):
"""Builds a function that returns the exact Hessian-vector product.
The returned function has arguments (*args, vector, **kwargs). Note,
this function will be incorporated into autograd, with name
hessian_vector_product. Once it has been this function can be
deleted."""
fun_grad = grad(fun, argnum)
def vector_dot_grad(*args, **kwargs):
args, vector = args[:-1], args[-1]
return np.tensordot(fun_grad(*args, **kwargs), vector,
axes=vector.ndim)
# Grad wrt original input.
return grad(vector_dot_grad, argnum)
class AutogradBackend(Backend):
def compute_gradient(self, objective, argument):
"""
Compute the gradient of 'objective' with respect to the first
argument and return as a function.
"""
return grad(objective)
def compute_hessian(self, objective, argument):
return _hessian_vector_product(objective)(x, g)
|
"""
Module containing functions to differentiate functions using autograd.
"""
import autograd.numpy as np
from autograd.core import grad
from ._backend import Backend
def _hessian_vector_product(fun, argnum=0):
"""Builds a function that returns the exact Hessian-vector product.
The returned function has arguments (*args, vector, **kwargs). Note,
this function will be incorporated into autograd, with name
hessian_vector_product. Once it has been this function can be
deleted."""
fun_grad = grad(fun, argnum)
def vector_dot_grad(*args, **kwargs):
args, vector = args[:-1], args[-1]
return np.tensordot(fun_grad(*args, **kwargs), vector,
axes=vector.ndim)
# Grad wrt original input.
return grad(vector_dot_grad, argnum)
class AutogradBackend(Backend):
def compute_gradient(self, objective, argument):
"""
Compute the gradient of 'objective' with respect to the first
argument and return as a function.
"""
return grad(objective)
def compute_hessian(self, objective, argument):
return _hessian_vector_product(objective)
|
Remove unnecessary arguments from autograd hessian
|
Remove unnecessary arguments from autograd hessian
|
Python
|
bsd-3-clause
|
pymanopt/pymanopt,j-towns/pymanopt,nkoep/pymanopt,tingelst/pymanopt,nkoep/pymanopt,pymanopt/pymanopt,nkoep/pymanopt
|
c1a38cb5fd2f6dd0f81515bece18a47f2b20234b
|
data_record.py
|
data_record.py
|
class DataRecord:
@classmethod
def get_store( cls ):
if hasattr( cls, 'store' ): return cls.store
cls.store = {}
return cls.store
@classmethod
def find( cls, record_id ):
return cls.get_store().get( record_id, None )
@classmethod
def save( cls, record_id, record ):
cls.get_store()[ record_id ] = record
@classmethod
def create_or_update( cls, record_id, **kwargs ):
found_record = cls.find( record_id )
if found_record is not None:
for name, value in kwargs.items():
setattr( found_record, name, value )
return found_record
return cls( record_id, **kwargs )
def __init__( self, record_id ):
self.record_id = record_id
self.__class__.save( record_id, self )
|
class DataRecord:
@classmethod
def get_store( cls ):
if hasattr( cls, 'store' ): return cls.store
cls.store = {}
return cls.store
@classmethod
def find( cls, record_id ):
return cls.get_store().get( str(record_id), None )
@classmethod
def save( cls, record_id, record ):
cls.get_store()[ str(record_id) ] = record
@classmethod
def create_or_update( cls, record_id, **kwargs ):
found_record = cls.find( str(record_id) )
if found_record is not None:
for name, value in kwargs.items():
setattr( found_record, name, value )
return found_record
return cls( str(record_id), **kwargs )
def __init__( self, record_id ):
self.record_id = str(record_id)
self.__class__.save( str(record_id), self )
|
Make all data records store record id keys as strings
|
Make all data records store record id keys as strings
|
Python
|
mit
|
fire-uta/iiix-data-parser
|
f787cf0303159fdae9b5a53eca86d0985b1096ee
|
coreplugins/editshortlinks/plugin.py
|
coreplugins/editshortlinks/plugin.py
|
from app.plugins import PluginBase, Menu, MountPoint
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.utils.translation import gettext as _
from .api import GetShortLink, EditShortLink, DeleteShortLink, HandleShortLink
class Plugin(PluginBase):
def build_jsx_components(self):
return ['SLControls.jsx']
def include_js_files(self):
return ['main.js']
def root_mount_points(self):
return [
MountPoint(r'^s(?P<view_type>[m3])/(?P<username>[^/.]+)/(?P<short_id>[A-Za-z0-9_-]+)/?$', HandleShortLink)
]
def api_mount_points(self):
return [
MountPoint('task/(?P<pk>[^/.]+)/shortlink', GetShortLink.as_view()),
MountPoint('task/(?P<pk>[^/.]+)/edit', EditShortLink.as_view()),
MountPoint('task/(?P<pk>[^/.]+)/delete', DeleteShortLink.as_view())
]
|
from app.plugins import PluginBase, Menu, MountPoint
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.utils.translation import gettext as _
from .api import GetShortLink, EditShortLink, DeleteShortLink, HandleShortLink
class Plugin(PluginBase):
def build_jsx_components(self):
return ['SLControls.jsx']
def include_js_files(self):
return ['main.js']
def root_mount_points(self):
return [
MountPoint(r'^s(?P<view_type>[m3])/(?P<username>[^/]+)/(?P<short_id>[A-Za-z0-9_-]+)/?$', HandleShortLink)
]
def api_mount_points(self):
return [
MountPoint('task/(?P<pk>[^/.]+)/shortlink', GetShortLink.as_view()),
MountPoint('task/(?P<pk>[^/.]+)/edit', EditShortLink.as_view()),
MountPoint('task/(?P<pk>[^/.]+)/delete', DeleteShortLink.as_view())
]
|
Modify regex group for username to allow periods
|
Modify regex group for username to allow periods
Fix for #1076
|
Python
|
agpl-3.0
|
OpenDroneMap/WebODM,OpenDroneMap/WebODM,OpenDroneMap/WebODM,OpenDroneMap/WebODM,OpenDroneMap/WebODM
|
5780f72ff95329295c735fff61463315ec3856d7
|
manage.py
|
manage.py
|
#!/usr/bin/env python
# This manage.py exists for the purpose of creating migrations
import sys
import django
from django.conf import settings
settings.configure(
ROOT_URLCONF='',
DATABASES={'default':
{'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}},
PAYPAL_RECEIVER_EMAIL='',
PAYPAL_IDENTITY_TOKEN='',
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'paypal.pro',
'paypal.standard',
'paypal.standard.ipn',
'paypal.standard.pdt',
] + (['south'] if django.VERSION < (1,7) else []),
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'TIMEOUT': 0,
'KEY_PREFIX': 'paypal_tests_',
}
},
MIDDLEWARE_CLASSES=[],
)
from django.core.management import execute_from_command_line
if __name__ == '__main__':
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
# This manage.py exists for the purpose of creating migrations
import sys
import django
from django.conf import settings
settings.configure(
ROOT_URLCONF='',
DATABASES={'default':
{'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}},
PAYPAL_RECEIVER_EMAIL='',
PAYPAL_IDENTITY_TOKEN='',
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'paypal.pro',
'paypal.standard',
'paypal.standard.ipn',
'paypal.standard.pdt',
],
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'TIMEOUT': 0,
'KEY_PREFIX': 'paypal_tests_',
}
},
MIDDLEWARE_CLASSES=[],
)
from django.core.management import execute_from_command_line
if __name__ == '__main__':
execute_from_command_line(sys.argv)
|
Remove stupid South thing that is messing up Heroku
|
Remove stupid South thing that is messing up Heroku
remove, I say!!
|
Python
|
mit
|
millanp/django-paypal,millanp/django-paypal
|
b363b0ffc9e4fd7790f418f84107c3b7233642f1
|
zou/app/utils/chats.py
|
zou/app/utils/chats.py
|
from slackclient import SlackClient
def send_to_slack(app_token, userid, message):
client = SlackClient(token=app_token)
client.api_call(
"chat.postMessage", channel="@%s" % userid, text=message, as_user=True
)
return True
|
from slackclient import SlackClient
def send_to_slack(app_token, userid, message):
client = SlackClient(token=app_token)
blocks = [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message,
}
},
]
client.api_call(
"chat.postMessage", channel="@%s" % userid, blocks=blocks, as_user=True
)
return True
|
Allow to format messages sent to Slack
|
Allow to format messages sent to Slack
|
Python
|
agpl-3.0
|
cgwire/zou
|
dc8b2e0a68644655d95d67b3ffd6d9122e94584a
|
zc_common/remote_resource/relations.py
|
zc_common/remote_resource/relations.py
|
from collections import OrderedDict
import json
import six
from rest_framework.relations import *
from rest_framework_json_api.relations import ResourceRelatedField
from zc_common.remote_resource.models import *
class RemoteResourceField(ResourceRelatedField):
def __init__(self, *args, **kwargs):
if 'model' not in kwargs:
kwargs['model'] = RemoteResource
if not kwargs.get('read_only', None):
# The queryset is required to be not None, but not used
# due to the overriding of the methods below.
kwargs['queryset'] = True
super(RemoteResourceField, self).__init__(*args, **kwargs)
def to_internal_value(self, data):
if isinstance(data, six.text_type):
try:
data = json.loads(data)
except ValueError:
self.fail('incorrect_type', data_type=type(data).__name__)
if not isinstance(data, dict):
self.fail('incorrect_type', data_type=type(data).__name__)
if 'type' not in data:
self.fail('missing_type')
if 'id' not in data:
self.fail('missing_id')
return RemoteResource(data['type'], data['id'])
def to_representation(self, value):
return OrderedDict([('type', value.type), ('id', str(value.id))])
|
from collections import OrderedDict
import json
import six
from rest_framework.relations import *
from rest_framework_json_api.relations import ResourceRelatedField
from zc_common.remote_resource.models import *
class RemoteResourceField(ResourceRelatedField):
def __init__(self, *args, **kwargs):
if 'model' not in kwargs:
kwargs['model'] = RemoteResource
if not kwargs.get('read_only', None):
# The queryset is required to be not None, but not used
# due to the overriding of the methods below.
kwargs['queryset'] = {}
super(RemoteResourceField, self).__init__(*args, **kwargs)
def to_internal_value(self, data):
if isinstance(data, six.text_type):
try:
data = json.loads(data)
except ValueError:
self.fail('incorrect_type', data_type=type(data).__name__)
if not isinstance(data, dict):
self.fail('incorrect_type', data_type=type(data).__name__)
if 'type' not in data:
self.fail('missing_type')
if 'id' not in data:
self.fail('missing_id')
return RemoteResource(data['type'], data['id'])
def to_representation(self, value):
return OrderedDict([('type', value.type), ('id', str(value.id))])
|
Use empty dict instead of True for remote resource queryset
|
Use empty dict instead of True for remote resource queryset
|
Python
|
mit
|
ZeroCater/zc_common,ZeroCater/zc_common
|
cd2b628ca118ffae8090004e845e399110aada21
|
disk/datadog_checks/disk/__init__.py
|
disk/datadog_checks/disk/__init__.py
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .disk import Disk
__all__ = ['Disk']
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__
from .disk import Disk
all = [
'__version__', 'Disk'
]
|
Allow Agent to properly pull version info
|
[Disk] Allow Agent to properly pull version info
|
Python
|
bsd-3-clause
|
DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core
|
06c3e03db75617b824eae088053a9fc563b936a7
|
virtool/user_permissions.py
|
virtool/user_permissions.py
|
#: A list of the permission strings used by Virtool.
PERMISSIONS = [
"cancel_job",
"create_sample",
"create_subtraction",
"manage_users",
"modify_hmm",
"modify_options",
"modify_virus",
"rebuild_index",
"remove_job",
"remove_virus"
]
|
#: A list of the permission strings used by Virtool.
PERMISSIONS = [
"cancel_job",
"create_sample",
"manage_users",
"modify_hmm",
"modify_options",
"modify_subtraction",
"modify_virus",
"rebuild_index",
"remove_job",
"remove_virus"
]
|
Change create_subtraction permission to modify_subtraction
|
Change create_subtraction permission to modify_subtraction
|
Python
|
mit
|
virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool
|
1424ce565ee8b47e6a9a3bc143589c7e7e0c3e53
|
cloudenvy/commands/envy_scp.py
|
cloudenvy/commands/envy_scp.py
|
import logging
import fabric.api
import fabric.operations
from cloudenvy.envy import Envy
class EnvySCP(object):
"""SCP Files to your ENVy"""
def __init__(self, argparser):
self._build_subparser(argparser)
def _build_subparser(self, subparsers):
subparser = subparsers.add_parser('scp', help='scp help')
subparser.set_defaults(func=self.run)
subparser.add_argument('source')
subparser.add_argument('target')
subparser.add_argument('-n', '--name', action='store', default='',
help='specify custom name for an ENVy')
return subparser
def run(self, config, args):
envy = Envy(config)
if envy.ip():
host_string = '%s@%s' % (envy.remote_user, envy.ip())
with fabric.api.settings(host_string=host_string):
fabric.operations.put(args.source, args.target)
else:
logging.error('Could not find IP to upload file to.')
|
import logging
import fabric.api
import fabric.operations
from cloudenvy.envy import Envy
class EnvySCP(object):
"""SCP Files to your ENVy"""
def __init__(self, argparser):
self._build_subparser(argparser)
def _build_subparser(self, subparsers):
subparser = subparsers.add_parser('scp', help='scp help')
subparser.set_defaults(func=self.run)
subparser.add_argument('source',
help='Local path to copy into your ENVy.')
subparser.add_argument('target',
help='Location in your ENVy to place file(s). Non-absolute '
'paths are interpreted relative to remote_user homedir.')
subparser.add_argument('-n', '--name', action='store', default='',
help='specify custom name for an ENVy')
return subparser
def run(self, config, args):
envy = Envy(config)
if envy.ip():
host_string = '%s@%s' % (envy.remote_user, envy.ip())
with fabric.api.settings(host_string=host_string):
fabric.operations.put(args.source, args.target)
else:
logging.error('Could not find IP to upload file to.')
|
Document source and target arguments of envy scp
|
Document source and target arguments of envy scp
Fix issue #67
|
Python
|
apache-2.0
|
cloudenvy/cloudenvy
|
94ad83d899f0c9372013a9bcdad25ee3c9783626
|
wallace/interval_storage.py
|
wallace/interval_storage.py
|
class IntervalStorage(object):
def __init__(self, interval_map=None):
if interval_map == None:
self.interval_map = {}
else:
if not isinstance(interval_map, dict):
raise ValueError("Interval map must be a dictionary containing entries as keys and interval tuples as values.")
self.interval_map = interval_map
def add_interval(self, entry, start, end):
self.validate_interval(start, end)
self.interval_map[entry] = (start, end)
def get_entry(self, point):
for entry, interval in self.interval_map.iteritems():
start, end = interval[0], interval[1]
if start <= point and point < end:
return entry
raise ValueError("Point '%s' is not contained in any stored interval." % point)
def validate_interval(self, start, end):
if start > end:
raise ValueError("Start must be lower than end in a valid interval.")
if start > 1 or start < 0 or end > 1 or end < 0:
raise ValueError("Intervals must be subsets of the interval [0,1].")
if self.has_intersection(start, end):
raise ValueError("Intervals cannot have an intersection with intervals that already exist in the storage object.")
def has_intersection(self, start, end):
for value in self.interval_map.itervalues():
if (value[0] <= start and start <= value[1]) or \
(value[0] <= end and end <= value[1]) or \
(start <= value[0] and value[1] <= end):
return True
return False
|
class IntervalStorage(object):
def __init__(self, interval_map=None):
if interval_map == None:
self.interval_map = {}
else:
if not isinstance(interval_map, dict):
raise ValueError("Interval map must be a dictionary containing entries as keys and interval tuples as values.")
self.interval_map = interval_map
def add_interval(self, entry, start, end):
self.validate_interval(start, end)
self.interval_map[entry] = (start, end)
def get_entry(self, point):
for entry, interval in self.interval_map.iteritems():
start, end = interval[0], interval[1]
if start <= point and point < end:
return entry
raise ValueError("Point '%s' is not contained in any stored interval." % point)
def validate_interval(self, start, end):
if start > end:
raise ValueError("Start must be lower than end in a valid interval.")
if start > 1 or start < 0 or end > 1 or end < 0:
raise ValueError("Intervals must be subsets of the interval [0,1].")
if self.has_intersection(start, end):
raise ValueError("Intervals cannot have an intersection with intervals that already exist in the storage object.")
def has_intersection(self, start, end):
for value in self.interval_map.itervalues():
if (value[0] <= start and start < value[1]) or \
(value[0] < end and end <= value[1]) or \
(start <= value[0] and value[1] < end):
return True
return False
|
Make sure that have intervals of the form [start, end).
|
Make sure that have intervals of the form [start, end).
|
Python
|
mit
|
wangjohn/wallace
|
901e6cc8bdafcd6e6d419ffd5eee4e58d266d40a
|
extensions.py
|
extensions.py
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
import subprocess
from functools import wraps
import os
extensions = {}
def extension(f):
# keep unwrapped function
unwrapped = f
@wraps(f)
def wrapper(**kwargs):
wrapper.settings = dict(kwargs)
return unwrapped
extensions[f.__name__] = wrapper
return wrapper
@extension
def coffee(filename, data):
command = ['coffee', '-c', '-s']
bare = coffee.settings.get('bare')
if bare:
command.append('-b')
process = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate(data)
if process.returncode:
return None, err
else:
_, ext = os.path.splitext(filename)
dest = filename.replace(ext, '.js')
return dest, out
@extension
def dest(filename, data):
destination = dest.settings.get('destination')
if destination:
if not os.path.exists(destination):
os.mkdir(destination)
_, tail = os.path.split(filename)
filename = os.path.join(destination, tail)
fo = open(filename, 'w')
fo.write(data)
fo.close()
|
Fix file not found error on directory
|
Fix file not found error on directory
|
Python
|
mit
|
rolurq/flask-gulp
|
9982e62981a7ec0fc7f05dcc8b5eabe11c65d2b3
|
anthology/representations.py
|
anthology/representations.py
|
"""Representation filters for API"""
from flask import make_response, current_app
from bson.json_util import dumps
def output_bson(data, code, headers=None):
"""Makes Flask response with a BSON encoded body
Copied from module `flask_restful.representations.json`
"""
settings = current_app.config.get('RESTFUL_JSON', {})
# If we're in debug mode, and the indent is not set, we set it to a
# reasonable value here. Note that this won't override any existing value
# that was set. We also set the "sort_keys" value.
if current_app.debug:
settings.setdefault('indent', 4)
settings.setdefault('sort_keys', True)
# always end the json dumps with a new line
# see https://github.com/mitsuhiko/flask/pull/1262
dumped = dumps(data, **settings) + "\n"
resp = make_response(dumped, code)
resp.headers.extend(headers or {})
return resp
|
"""Representation filters for API"""
from flask import make_response, current_app
from bson.json_util import dumps
def output_bson(data, code, headers=None):
"""Makes Flask response with a JSON encoded body.
Response items are serialized from MongoDB BSON objects to
JSON compatible format.
Modified from module `flask_restful.representations.json`
"""
settings = current_app.config.get('RESTFUL_JSON', {})
# If we're in debug mode, and the indent is not set, we set it to a
# reasonable value here. Note that this won't override any existing value
# that was set. We also set the "sort_keys" value.
if current_app.debug:
settings.setdefault('indent', 4)
settings.setdefault('sort_keys', True)
# always end the json dumps with a new line
# see https://github.com/mitsuhiko/flask/pull/1262
dumped = dumps(data, **settings) + "\n"
resp = make_response(dumped, code)
resp.headers.extend(headers or {})
return resp
|
Correct JSON/BSON terminology in docstrings
|
Correct JSON/BSON terminology in docstrings
|
Python
|
mit
|
surfmikko/anthology
|
aca3cf45ba32cdad69c232794497fc8033b63cc6
|
utils/builder.py
|
utils/builder.py
|
import sys
import os
output = '../build/Tween.js';
# os.system("java -jar yuicompressor-2.4.2.jar ../src/Tween.js -o ../build/Tween.js --charset utf-8 -v");
os.system("java -jar compiler.jar --js ../src/Tween.js --js_output_file ../build/Tween.js")
# HEADER
string = "// Tween.js - http://github.com/sole/tween.js\n"
src_file = open(output,'r')
string += src_file.read()
dep_file = open(output,'w')
dep_file.write(string)
dep_file.close()
|
import sys
import os
output = '../build/tween.js';
# os.system("java -jar yuicompressor-2.4.2.jar ../src/Tween.js -o ../build/Tween.js --charset utf-8 -v");
os.system("java -jar compiler.jar --js ../src/Tween.js --js_output_file %s" % (output))
# HEADER
with open(os.path.join('..', 'REVISION'), 'r') as handle:
revision = handle.read().rstrip()
string = "// tween.js r%s - http://github.com/sole/tween.js\n" % (revision)
src_file = open(output,'r')
string += src_file.read()
dep_file = open(output,'w')
dep_file.write(string)
dep_file.close()
|
Update packer system to include REVISION number too
|
Update packer system to include REVISION number too
|
Python
|
mit
|
CasualBot/tween.js,JITBALJINDER/tween.js,altereagle/tween.js,gopalindians/tween.js,rocbear/tween.js,Twelve-60/tween.js,wangzuo/cxx-tween,Twelve-60/tween.js,olizilla/tween.js,gopalindians/tween.js,camellhf/tween.js,camellhf/tween.js,olizilla/tween.js,rocbear/tween.js,npmcomponent/bestander-tween.js,altereagle/tween.js,Twelve-60/tween.js,olizilla/tween.js,yyx990803/tween.js,mcanthony/tween.js,yyx990803/tween.js,EskenderDev/tween.js,npmcomponent/bestander-tween.js,gopalindians/tween.js,rocbear/tween.js,camellhf/tween.js,EskenderDev/tween.js,JITBALJINDER/tween.js,mcanthony/tween.js,EskenderDev/tween.js,JITBALJINDER/tween.js,altereagle/tween.js,CasualBot/tween.js,mcanthony/tween.js,CasualBot/tween.js
|
ef67ce4372128d8f7e9689e1090ee44674c8f391
|
scripts/analytics/run_keen_events.py
|
scripts/analytics/run_keen_events.py
|
from framework.celery_tasks import app as celery_app
from scripts.analytics.base import DateAnalyticsHarness
from scripts.analytics.node_log_events import NodeLogEvents
class EventAnalyticsHarness(DateAnalyticsHarness):
@property
def analytics_classes(self):
return [NodeLogEvents]
@celery_app.task(name='scripts.run_keen_events')
def run_main(date):
EventAnalyticsHarness().main(date)
if __name__ == '__main__':
EventAnalyticsHarness().main()
|
from framework.celery_tasks import app as celery_app
from scripts.analytics.base import DateAnalyticsHarness
from scripts.analytics.node_log_events import NodeLogEvents
from scripts.analytics.user_domain_events import UserDomainEvents
class EventAnalyticsHarness(DateAnalyticsHarness):
@property
def analytics_classes(self):
return [NodeLogEvents, UserDomainEvents]
@celery_app.task(name='scripts.run_keen_events')
def run_main(date):
EventAnalyticsHarness().main(date)
if __name__ == '__main__':
EventAnalyticsHarness().main()
|
Add new user domain event collector to main keen events script
|
Add new user domain event collector to main keen events script
|
Python
|
apache-2.0
|
chrisseto/osf.io,caneruguz/osf.io,felliott/osf.io,alexschiller/osf.io,leb2dg/osf.io,chrisseto/osf.io,caseyrollins/osf.io,baylee-d/osf.io,icereval/osf.io,hmoco/osf.io,leb2dg/osf.io,chrisseto/osf.io,mluo613/osf.io,rdhyee/osf.io,acshi/osf.io,mluo613/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,felliott/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,binoculars/osf.io,leb2dg/osf.io,aaxelb/osf.io,laurenrevere/osf.io,cslzchen/osf.io,saradbowman/osf.io,chennan47/osf.io,mluo613/osf.io,brianjgeiger/osf.io,mluo613/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,adlius/osf.io,baylee-d/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,chennan47/osf.io,caneruguz/osf.io,hmoco/osf.io,laurenrevere/osf.io,felliott/osf.io,icereval/osf.io,aaxelb/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,crcresearch/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,binoculars/osf.io,hmoco/osf.io,cwisecarver/osf.io,rdhyee/osf.io,mluo613/osf.io,crcresearch/osf.io,adlius/osf.io,alexschiller/osf.io,binoculars/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,adlius/osf.io,icereval/osf.io,erinspace/osf.io,sloria/osf.io,crcresearch/osf.io,felliott/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,aaxelb/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,sloria/osf.io,acshi/osf.io,alexschiller/osf.io,chrisseto/osf.io,hmoco/osf.io,monikagrabowska/osf.io,erinspace/osf.io,cslzchen/osf.io,erinspace/osf.io,acshi/osf.io,Nesiehr/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,cslzchen/osf.io,mattclark/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,acshi/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,baylee-d/osf.io,pattisdr/osf.io,acshi/osf.io,cslzchen/osf.io,alexschiller/osf.io,mattclark/osf.io,cwisecarver/osf.io,sloria/o
sf.io,leb2dg/osf.io,adlius/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,monikagrabowska/osf.io
|
2b64dc699e222a011d5946fd53a2bda4df77d0fe
|
scripts/rename_tutorial_src_files.py
|
scripts/rename_tutorial_src_files.py
|
#%%
from pathlib import Path
from string import digits
#%%
directory = Path("./docs/tutorial/src")
output_directory = Path("./docs/tutorial/out")
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in directory.iterdir()])
for i, f in enumerate(files):
f: Path
index = str(i + 1).zfill(2)
new_name = output_directory / f"tutorial{index}.py"
print(new_name)
f.rename(new_name)
|
#%%
from pathlib import Path, PurePath
from string import digits
#%%
directory = Path("./docs/tutorial/src")
dirs = sorted([Path(f) for f in directory.iterdir()])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
new_name = output_directory / f"tutorial{index}.py"
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
|
Update tutorial src renamer to use sub-directories
|
:sparkles: Update tutorial src renamer to use sub-directories
|
Python
|
mit
|
tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi
|
1fce6a621ad4fe149988147478e15c7415295a7b
|
changes/api/serializer/models/source.py
|
changes/api/serializer/models/source.py
|
from changes.api.serializer import Serializer, register
from changes.models import Source
@register(Source)
class SourceSerializer(Serializer):
def serialize(self, instance, attrs):
if instance.patch_id:
patch = {
'id': instance.patch_id.hex,
}
else:
patch = None
return {
'id': instance.id.hex,
'patch': patch,
'revision': instance.revision,
'dateCreated': instance.date_created,
}
|
from changes.api.serializer import Serializer, register
from changes.models import Source
@register(Source)
class SourceSerializer(Serializer):
def serialize(self, instance, attrs):
if instance.patch_id:
patch = {
'id': instance.patch_id.hex,
}
else:
patch = None
return {
'id': instance.id.hex,
'patch': patch,
'revision': instance.revision,
'dateCreated': instance.date_created,
'tails_data': dict(instance.data),
}
|
Add data to Source serialization
|
Add data to Source serialization
|
Python
|
apache-2.0
|
dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes
|
50367a2d73c395a85bb7dae058f9435be6ad7c36
|
vtimshow/__init__.py
|
vtimshow/__init__.py
|
#!/usr/bin/env python3
# Module imports
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
|
#!/usr/bin/env python3
# Module imports
import logging
import os
import vitables
_defaults = dict(
AUTHOR = "Keith F Prussing",
AUTHOR_EMAIL = "[email protected]",
LICENSE = "MIT",
PLUGIN_CLASS = "VtImageViewer",
PLUGIN_NAME = "Image Viewer",
COMMENT = "Display data sets as images",
VERSION = "{VERSION!s}",
UID = "image_viewer"
)
_defaults["FOLDER"], _defaults["MODULE_NAME"] = os.path.split(
os.path.dirname(__file__)
)
_defaults["LOGGER"] = logging.getLogger(_defaults["MODULE_NAME"])
_defaults["LOGGER"].addHandler(logging.NullHandler())
__docformat__ = "restructuredtext"
__version__ = _defaults["VERSION"]
plugin_class = _defaults["PLUGIN_CLASS"]
plugin_name = _defaults["PLUGIN_NAME"]
comment = _defaults["COMMENT"]
from vtimshow.vtimageviewer import VtImageViewer
def _setup_logger(name):
"""
Add the GUI's logging window as a stream handler.
By default, the stream logger is removed during the invocation of
``vitables``. The logging window in the GUI is a stream handler for
the ``vitables`` logger _only_. This method will add the logging
window in the GUI as a stream handler for the named logger. The
method checks to see if ``vitables`` is an active application. If
it is not, nothing is done.
"""
logger = logging.getLogger(name)
app = vitables.utils.getApp()
if app is not None:
stream = logging.StreamHandler(app.gui.logger)
stream.setFormatter(
logging.Formatter(vitables.vtgui._GUI_LOG_FORMAT)
)
logger.addHandler(stream)
return
_setup_logger(_defaults["MODULE_NAME"])
|
Add method to log to console
|
Add method to log to console
Add a method to set the GUI logging window to be the stream handler for
my plug in.
|
Python
|
mit
|
kprussing/vtimshow
|
70167a8cb73673e1e904fbeb8a50b3de9d4fc1ae
|
server.py
|
server.py
|
from fickle import API
from fickle.classifier import GenericSVMClassifier
backend = GenericSVMClassifier()
app = API(__name__, backend)
if __name__ == '__main__':
host = '0.0.0.0'
port = int(os.environ.get('PORT', 5000))
debug = bool(os.environ.get('FICKLE_DEBUG'))
app.run(host = host, port = port, debug = debug)
|
from fickle import API
from fickle.classifier import GenericSVMClassifier
backend = GenericSVMClassifier()
app = API(__name__, backend)
if __name__ == '__main__':
import os
host = '0.0.0.0'
port = int(os.environ.get('PORT', 5000))
debug = bool(os.environ.get('FICKLE_DEBUG'))
app.run(host = host, port = port, debug = debug)
|
Fix run section with import statement
|
Fix run section with import statement
|
Python
|
mit
|
norbert/fickle
|
67b86cb3ddfb7c9e95ebed071ba167472276cc29
|
utils/decorators/require.py
|
utils/decorators/require.py
|
import requests
from functools import wraps
from flask import request, current_app
from utils.decorators.signature import sign
def require(resource_namespace, permissions, resource_id=None):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if request.method == 'GET':
payload = request.args
client_key = current_app.config['CLIENTS']['plutonium']['client_key']
client_id = current_app.config['CLIENTS']['plutonium']['client_id']
data = []
for permission in permissions:
data.append({
'client_namespace' : 'app',
'client_id' : payload['client_id'],
'resource_namespace' : resource_namespace,
'permission' : permission,
'resource_id' : resource_id or '*'
})
result = f(*args, **kwargs)
return result
return decorated_function
return decorator
|
import json
import requests
from functools import wraps
from flask import request, current_app
from utils.decorators.signature import sign
from utils.exceptions import HttpUnauthorized
def require(resource_namespace, permissions, resource_id=None):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if request.method == 'GET':
payload = request.args
client_key = current_app.config['CLIENTS']['plutonium']['client_key']
client_id = current_app.config['CLIENTS']['plutonium']['client_id']
apq = current_app.config['CLIENTS']['apq']
data = []
for permission in permissions:
data.append({
'client_namespace' : 'app',
'client_id' : payload['client_id'],
'resource_namespace' : resource_namespace,
'permission' : permission,
'resource_id' : resource_id or '*'
})
signature = sign(client_key, json.dumps(data))
payload = {
'data' : json.dumps(data),
'client_id': client_id,
'signature': signature
}
apq = requests.get("http://%s/has_perm" % apq['host'], params=payload)
permission = json.loads(apq.content)
granted = [granted for granted in permission if granted == 'True']
if len(permission) != len(granted):
raise HttpUnauthorized("You don't have enough permission to access this resource")
result = f(*args, **kwargs)
return result
return decorated_function
return decorator
|
Check for permission in apq
|
Check for permission in apq
|
Python
|
apache-2.0
|
PressLabs/lithium
|
e2be9eb27d6fc7cfa424cbf908347796ab595526
|
groundstation/broadcast_announcer.py
|
groundstation/broadcast_announcer.py
|
import socket
import logger
from groundstation.broadcast_socket import BroadcastSocket
import logger
log = logger.getLogger(__name__)
class BroadcastAnnouncer(BroadcastSocket):
def __init__(self, port):
super(BroadcastAnnouncer, self).__init__()
self._addr = '255.255.255.255', port
self._name = None
self.broadcast_payload = "PING None"
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
self.broadcast_payload = "PING %s" % (self._name)
def ping(self):
log.info("ping payload: %s" % (self.broadcast_payload))
transmitted = self.socket.sendto(self.broadcast_payload, self._addr)
if transmitted != len(self.broadcast_payload):
log.warning("ping wasn't successfully broadcast")
|
import socket
import logger
from sockets.broadcast_socket import BroadcastSocket
import logger
log = logger.getLogger(__name__)
class BroadcastAnnouncer(BroadcastSocket):
def __init__(self, port):
super(BroadcastAnnouncer, self).__init__()
self._addr = '255.255.255.255', port
self._name = None
self.broadcast_payload = "PING None"
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
self.broadcast_payload = "PING %s" % (self._name)
def ping(self):
log.info("ping payload: %s" % (self.broadcast_payload))
transmitted = self.socket.sendto(self.broadcast_payload, self._addr)
if transmitted != len(self.broadcast_payload):
log.warning("ping wasn't successfully broadcast")
|
Fix an import path bug masked by remaining .pyc files
|
Fix an import path bug masked by remaining .pyc files
|
Python
|
mit
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
f859964c3d8d193da92fb521f4a696a28ef9452a
|
cisco_olt_http/tests/test_operations.py
|
cisco_olt_http/tests/test_operations.py
|
import os
import pytest
from cisco_olt_http import operations
from cisco_olt_http.client import Client
@pytest.fixture
def data_dir():
return os.path.abspath(
os.path.join(os.path.dirname(__file__), 'data'))
def test_get_data():
client = Client('http://base-url')
show_equipment_op = operations.ShowEquipmentOp(client)
op_data = show_equipment_op.get_data()
assert op_data
class TestOperationResult:
def test_ok_response(self, data_dir):
class Response:
pass
response = Response()
with open(os.path.join(data_dir, 'ok_response.xml')) as of:
response.content = of.read()
operation_result = operations.OperationResult(response)
assert not operation_result.error
assert operation_result.error_str == 'OK'
|
import os
import pytest
import requests
from cisco_olt_http import operations
from cisco_olt_http.client import Client
@pytest.fixture
def data_dir():
return os.path.abspath(
os.path.join(os.path.dirname(__file__), 'data'))
def test_get_data():
client = Client('http://base-url')
show_equipment_op = operations.ShowEquipmentOp(client)
op_data = show_equipment_op.get_data()
assert op_data
class TestOperationResult:
def test_ok_response(self, data_dir, mocker):
response = mocker.Mock(autospec=requests.Response)
with open(os.path.join(data_dir, 'ok_response.xml')) as of:
response.content = of.read()
operation_result = operations.OperationResult(response)
assert not operation_result.error
assert operation_result.error_str == 'OK'
|
Use mock instead of own class
|
Use mock instead of own class
|
Python
|
mit
|
beezz/cisco-olt-http-client,Vnet-as/cisco-olt-http-client
|
580974cedceecea71e32f0cba1daf4dccb7e4736
|
2018/unif/unifier.py
|
2018/unif/unifier.py
|
# Python 3.6
class Expr:
pass
class App(Expr):
def __init__(self, fname, args=()):
self.fname = fname
self.args = args
def __str__(self):
return '{0}({1})'.format(self.fname, ','.join(map(str, self.args)))
class Var(Expr):
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
class Const(Expr):
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
|
# Python 3.6
class Expr:
pass
class App(Expr):
def __init__(self, fname, args=()):
self.fname = fname
self.args = args
def __str__(self):
return '{0}({1})'.format(self.fname, ','.join(map(str, self.args)))
class Var(Expr):
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
class Const(Expr):
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
# TODO: implement these
def parse_expr(s):
"""Parses an expression in 's' into an Expr."""
pass
# Need a bindings map to pass around for unify
|
Add more skeleton function code and TODOs
|
Add more skeleton function code and TODOs
|
Python
|
unlicense
|
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
|
37da0285ac6b08994700952e04278e1049577745
|
yanico/config.py
|
yanico/config.py
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle yanico configuration."""
import os.path
CONFIG_FILENAME = '.yanico.conf'
def user_path():
return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME)
|
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle yanico configuration."""
import os.path
CONFIG_FILENAME = '.yanico.conf'
def user_path():
"""Return user configuration filepath.
The filepath depends home directory and CONFIG_FILENAME constants.
"""
return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME)
|
Add docstring into user_path function
|
Add docstring into user_path function
Describe dependnce for constants and environments.
|
Python
|
apache-2.0
|
ma8ma/yanico
|
956d68b6e29b1e319d043945393db3825b5167d1
|
dask/compatibility.py
|
dask/compatibility.py
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and kwargs is None:
return func(args)
elif not isinstance(args, list):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
from __future__ import absolute_import, division, print_function
import sys
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.parse import quote, unquote
unicode = str
long = int
def apply(func, args, kwargs=None):
if not isinstance(args, list) and not isinstance(args, tuple) and kwargs is None:
return func(args)
elif not isinstance(args, list) and not isinstance(args, tuple):
return func(args, **kwargs)
elif kwargs:
return func(*args, **kwargs)
else:
return func(*args)
range = range
else:
import __builtin__ as builtins
from Queue import Queue, Empty
import operator
from itertools import izip_longest as zip_longest
from StringIO import StringIO
from io import BytesIO
from urllib2 import urlopen
from urlparse import urlparse
from urllib import quote, unquote
unicode = unicode
long = long
apply = apply
range = xrange
def skip(func):
return
|
Allow for tuple-based args in map also
|
Allow for tuple-based args in map also
|
Python
|
bsd-3-clause
|
blaze/dask,blaze/dask,mrocklin/dask,gameduell/dask,wiso/dask,mraspaud/dask,PhE/dask,cowlicks/dask,wiso/dask,dask/dask,PhE/dask,ContinuumIO/dask,jakirkham/dask,mraspaud/dask,jakirkham/dask,pombredanne/dask,jayhetee/dask,jayhetee/dask,ssanderson/dask,dask/dask,mikegraham/dask,clarkfitzg/dask,jcrist/dask,pombredanne/dask,vikhyat/dask,ssanderson/dask,mrocklin/dask,clarkfitzg/dask,chrisbarber/dask,jcrist/dask,ContinuumIO/dask,vikhyat/dask,cpcloud/dask
|
39e038373b0691f14605a5aec3f917b5cee40091
|
django_google_charts/charts.py
|
django_google_charts/charts.py
|
import six
import json
from django.utils.html import format_html, mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
import json
from django.utils import six
from django.utils.html import format_html, mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
Use django's bundled and customised version of six
|
Use django's bundled and customised version of six
|
Python
|
mit
|
danpalmer/django-google-charts,danpalmer/django-google-charts
|
08a3874f826d46528f049318af67b9a39922604c
|
functionaltests/api/test_versions.py
|
functionaltests/api/test_versions.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
import testtools
class VersionDiscoveryTestCase(testtools.TestCase):
def test_get_root_discovers_v1(self):
r = requests.get('http://127.0.0.1:9777')
self.assertEqual(r.status_code, 200)
body = r.json
self.assertEqual(len(body), 1)
v1 = body[0]
self.assertEqual(v1['id'], 'v1.0')
self.assertEqual(v1['status'], 'CURRENT')
self.assertEqual(v1['link']['target_name'], 'v1')
self.assertEqual(v1['link']['href'], 'http://127.0.0.1:9777/v1')
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
import testtools
class VersionDiscoveryTestCase(testtools.TestCase):
def test_get_root_discovers_v1(self):
r = requests.get('http://127.0.0.1:9777')
self.assertEqual(r.status_code, 200)
body = r.json()
self.assertEqual(len(body), 1)
v1 = body[0]
self.assertEqual(v1['id'], 'v1.0')
self.assertEqual(v1['status'], 'CURRENT')
self.assertEqual(v1['link']['target_name'], 'v1')
self.assertEqual(v1['link']['href'], 'http://127.0.0.1:9777/v1')
|
Fix minor bug in test_get_root_discovers_v1
|
Fix minor bug in test_get_root_discovers_v1
Invoke json method to get response body instead of referring it.
Change-Id: I5af060b75b14f2b9322099d8a658c070b056cee2
|
Python
|
apache-2.0
|
gilbertpilz/solum,ed-/solum,ed-/solum,julienvey/solum,gilbertpilz/solum,devdattakulkarni/test-solum,gilbertpilz/solum,stackforge/solum,devdattakulkarni/test-solum,ed-/solum,stackforge/solum,ed-/solum,openstack/solum,gilbertpilz/solum,julienvey/solum,openstack/solum
|
92ca956dc8f4229a1c427cb24843c7fe3baef405
|
tests/integration/test_parked.py
|
tests/integration/test_parked.py
|
"""Parked check integration test."""
def test_parked_query(webapp):
"""Test the parked API against our own domain."""
request = webapp.get('/api/parked/dnstwister.report')
assert request.status_code == 200
assert request.json == {
u'domain': u'dnstwister.report',
u'domain_as_hexadecimal': u'646e73747769737465722e7265706f7274',
u'fuzz_url': u'http://localhost:80/api/fuzz/646e73747769737465722e7265706f7274',
u'redirects': False,
u'redirects_to': None,
u'resolve_ip_url': u'http://localhost:80/api/ip/646e73747769737465722e7265706f7274',
u'score': 0.07,
u'score_text': u'Possibly',
u'url': u'http://localhost:80/api/parked/dnstwister.report'
}
|
"""Parked check integration test."""
def test_parked_query(webapp):
"""Test the parked API against our own domain."""
request = webapp.get('/api/parked/dnstwister.report')
assert request.status_code == 200
assert request.json == {
u'domain': u'dnstwister.report',
u'domain_as_hexadecimal': u'646e73747769737465722e7265706f7274',
u'fuzz_url': u'http://localhost:80/api/fuzz/646e73747769737465722e7265706f7274',
u'redirects': False,
u'redirects_to': None,
u'resolve_ip_url': u'http://localhost:80/api/ip/646e73747769737465722e7265706f7274',
u'score': 0.07,
u'score_text': u'Possibly',
u'url': u'http://localhost:80/api/parked/dnstwister.report'
}
def test_parked_query_on_broken_domain(webapp):
"""Test the parked API against a domain that doesn't exist."""
request = webapp.get('/api/parked/there-is-little-chance-this-domain-exists-i-hope.com')
assert request.status_code == 200
assert request.json['score'] == 0
assert request.json['redirects'] is False
assert request.json['redirects_to'] is None
assert request.json['score_text'] == 'Unlikely'
|
Test for parked check against unresolvable domain
|
Test for parked check against unresolvable domain
|
Python
|
unlicense
|
thisismyrobot/dnstwister,thisismyrobot/dnstwister,thisismyrobot/dnstwister
|
388149ead0ed3f4e5439301fa23e21050773e309
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.8.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.9.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.9.0
|
Increment version number to 0.9.0
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
8005d43146669e98d921bb36c4afd5dffb08e2e3
|
Tests/varLib/featureVars_test.py
|
Tests/varLib/featureVars_test.py
|
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
from fontTools.varLib.featureVars import (
overlayFeatureVariations)
def test_explosion(n = 10):
conds = []
for i in range(n):
end = i / n
start = end - 1.
region = [{'axis': (start, end)}]
subst = {'g%.2g'%start: 'g%.2g'%end}
conds.append((region, subst))
overlaps = overlayFeatureVariations(conds)
# XXX Currently fails for n > 2!
#assert len(overlaps) == 2 * n - 1, overlaps
return conds, overlaps
if __name__ == "__main__":
import sys
from pprint import pprint
quiet = False
args = {}
if len(sys.argv) > 1:
if sys.argv[1] == '-q':
quiet = True
del sys.argv[1]
args['n'] = int(sys.argv[1])
input, output = test_explosion(**args)
if quiet:
print(len(output))
else:
print("Input:")
pprint(input)
print()
print("Output:")
pprint(output)
|
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
from fontTools.varLib.featureVars import (
overlayFeatureVariations)
def test_explosion(n = 10):
conds = []
for i in range(n):
end = i / n
start = end - 1.
region = [{'axis': (start, end)}]
subst = {'g%.2g'%start: 'g%.2g'%end}
conds.append((region, subst))
overlaps = overlayFeatureVariations(conds)
assert len(overlaps) == 2 * n - 1, overlaps
return conds, overlaps
if __name__ == "__main__":
import sys
from pprint import pprint
quiet = False
args = {}
if len(sys.argv) > 1:
if sys.argv[1] == '-q':
quiet = True
del sys.argv[1]
args['n'] = int(sys.argv[1])
input, output = test_explosion(**args)
if quiet:
print(len(output))
else:
print("Input:")
pprint(input)
print()
print("Output:")
pprint(output)
|
Enable test now that it passes
|
[varLib.featureVars] Enable test now that it passes
|
Python
|
mit
|
googlefonts/fonttools,fonttools/fonttools
|
cf13e81d2e41608bfc8e22d9e1f669382a5bdfc6
|
indra/preassembler/make_wm_ontmap.py
|
indra/preassembler/make_wm_ontmap.py
|
import sys
from indra.sources import eidos
from indra.sources.hume.make_hume_tsv import make_file as mht
from indra.sources.sofia.make_sofia_tsv import make_file as mst
from indra.java_vm import autoclass
eidos_package = 'org.clulab.wm.eidos'
if __name__ == '__main__':
sofia_ont_path = sys.argv[1]
hume_path = 'hume_ontology_examaples.tsv'
mht(hume_path)
sofia_path = 'sofia_ontology_examples.tsv'
mst(sofia_ont_path, sofia_path)
om = autoclass(eidos_package + '.apps.OntologyMapper')
eidos = autoclass(eidos_package + '.EidosSystem')
es = eidos(autoclass('java.lang.Object')())
example_weight = 0.8
parent_weight = 0.1
topn = 10
table_str = om.mapOntologies(es, hume_path, sofia_path, example_weight,
parent_weight, topn)
|
import sys
import os
from os.path import join, dirname, abspath
from indra import preassembler
from indra.sources import eidos
from indra.sources.hume.make_hume_tsv import make_file as mht
from indra.sources.sofia.make_sofia_tsv import make_file as mst
from indra.java_vm import autoclass
eidos_package = 'org.clulab.wm.eidos'
if __name__ == '__main__':
sofia_ont_path = sys.argv[1]
hume_path = 'hume_ontology_examples.tsv'
mht(hume_path)
sofia_path = 'sofia_ontology_examples.tsv'
mst(sofia_ont_path, sofia_path)
om = autoclass(eidos_package + '.apps.OntologyMapper')
eidos = autoclass(eidos_package + '.EidosSystem')
es = eidos(autoclass('java.lang.Object')())
example_weight = 0.8
parent_weight = 0.1
topn = 10
table_str = om.mapOntologies(es, hume_path, sofia_path, example_weight,
parent_weight, topn)
with open(join(dirname(abspath(__file__)), os.pardir, 'resources',
'wm_ontomap.tsv'), 'w') as fh:
fh.write(table_str)
|
Save ontology map in script
|
Save ontology map in script
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/indra
|
40624e155ff3ec9012942744b5c09d91164d5756
|
src/neuroglancer_scripts/__init__.py
|
src/neuroglancer_scripts/__init__.py
|
# Copyright (c) 2018 Forschungszentrum Juelich GmbH
# Author: Yann Leprince <[email protected]>
#
# This software is made available under the MIT licence, see LICENCE.txt.
"""Conversion of images to the Neuroglancer pre-computed format.
.. todo:: introduction to the high-level APIs
"""
# Version used by setup.py and docs/conf.py
__version__ = "0.2.0.dev0"
|
# Copyright (c) 2018 Forschungszentrum Juelich GmbH
# Author: Yann Leprince <[email protected]>
#
# This software is made available under the MIT licence, see LICENCE.txt.
"""Conversion of images to the Neuroglancer pre-computed format.
.. todo:: introduction to the high-level APIs
"""
# Version used by setup.py and docs/conf.py (parsed with a regular expression).
#
# Release checklist (based on https://packaging.python.org/):
# 1. Ensure that tests pass for all supported Python version (Travis CI),
# ensure that the API documentation is complete (sphinx-apidoc -o docs/api/
# src/neuroglancer_scripts);
# 2. Update the release notes;
# 3. Run check-manifest;
# 4. Bump the version number in this file;
# 5. pip install -U setuptools wheel twine
# 6. python setup.py sdist bdist_wheel
# 7. twine upload --repository-url https://test.pypi.org/legacy/ dist/*
# 8. Commit the updated version number
# 9. Tag the commit (git tag -a vX.Y.Z)
# 10. Bump the version number to something that ends with .dev0 and commit
# 11. Push the master branch and the new tag to Github
# 12. twine upload dist/*
__version__ = "0.2.0"
|
Set the version to 0.2.0
|
Set the version to 0.2.0
|
Python
|
mit
|
HumanBrainProject/neuroglancer-scripts
|
4b98e89e306aa04c4b3c1df254209f59f61d4f2a
|
tt_dailyemailblast/send_backends/sync.py
|
tt_dailyemailblast/send_backends/sync.py
|
from .. import email
def sync_daily_email_blasts(blast):
for l in blast.recipients_lists.all():
l.send(blast)
def sync_recipients_list(recipients_list, blast):
for r in recipients_list.recipientss.all():
r.send(recipients_list, blast)
def sync_recipient(recipient, recipients_list, blast):
email.send_email(blast.render(recipient, recipients_list))
|
from .. import email
def sync_daily_email_blasts(blast):
for l in blast.recipient_lists.all():
l.send(blast)
def sync_recipients_list(recipients_list, blast):
for r in recipients_list.recipientss.all():
r.send(recipients_list, blast)
def sync_recipient(recipient, recipients_list, blast):
email.send_email(blast.render(recipient, recipients_list))
|
Fix there was no such thing as blast.recipients_lists
|
Fix there was no such thing as blast.recipients_lists
|
Python
|
apache-2.0
|
texastribune/tt_dailyemailblast,texastribune/tt_dailyemailblast
|
7c38eae5a07e07789713baf5ab3aaea772e76422
|
routes.py
|
routes.py
|
from flask import Flask, render_template, redirect
import psycopg2
import os
import urlparse
app = Flask(__name__)
# def connectDB(wrapped):
# def inner(*args, **kwargs):
# api_token = os.environ["API_TOKEN"]
# urlparse.uses_netloc.append("postgres")
# url = urlparse.urlparse(os.environ["DATABASE_URL"])
# conn = psycopg2.connect(
# database=url.path[1:],
# user=url.username,
# password=url.password,
# host=url.hostname,
# port=url.port
# )
# cur = conn.cursor()
# ret = wrapped(*args, **kwargs)
# return ret
# return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
# @connectDB
def participants():
return render_template('participants.html')
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
# @connectDB
def complete():
return redirect('/')
|
from flask import Flask, render_template, redirect, request
import psycopg2
from functools import wraps
import os
import urlparse
app = Flask(__name__)
def connectDB(wrapped):
@wraps(wrapped)
def inner(*args, **kwargs):
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
return wrapped(cur, *args, **kwargs)
return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
@connectDB
def participants(*args):
return args[0]
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
@connectDB
def complete(*args):
return render_template('/success.html')
|
Add decorator to connect to database
|
Add decorator to connect to database
|
Python
|
mit
|
AlexMathew/csipy-home
|
79978bd00fc4834b01f7473cc5b7b8407abec51c
|
Lib/test/test_nis.py
|
Lib/test/test_nis.py
|
import nis
verbose = 0
if __name__ == '__main__':
verbose = 1
maps = nis.maps()
try:
for nismap in maps:
if verbose:
print nismap
mapping = nis.cat(nismap)
for k, v in mapping.items():
if verbose:
print ' ', k, v
if not k:
continue
if nis.match(k, nismap) <> v:
print "NIS match failed for key `%s' in map `%s'" % (k, nismap)
else:
# just test the one key, otherwise this test could take a
# very long time
raise 'done'
except 'done':
pass
|
import nis
verbose = 0
if __name__ == '__main__':
verbose = 1
maps = nis.maps()
done = 0
for nismap in maps:
if verbose:
print nismap
mapping = nis.cat(nismap)
for k, v in mapping.items():
if verbose:
print ' ', k, v
if not k:
continue
if nis.match(k, nismap) <> v:
print "NIS match failed for key `%s' in map `%s'" % (k, nismap)
else:
# just test the one key, otherwise this test could take a
# very long time
done = 1
break
if done:
break
|
Rewrite without using try-except to break out of two loops.
|
Rewrite without using try-except to break out of two loops.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
4bd0637ad181c5ded8c6e5fa9ae79ab607b70aeb
|
geokey_dataimports/__init__.py
|
geokey_dataimports/__init__.py
|
"""Main initialisation for extension."""
VERSION = (0, 2, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
"""Main initialisation for extension."""
VERSION = (0, 3, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
Increment minor version number ahead of release.
|
Increment minor version number ahead of release.
|
Python
|
mit
|
ExCiteS/geokey-dataimports,ExCiteS/geokey-dataimports,ExCiteS/geokey-dataimports
|
b624552af638652147ca8b5e49ca109a4723dca1
|
MoMMI/Modules/development.py
|
MoMMI/Modules/development.py
|
from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
@command("reload", "reload", roles=["owner"])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=["owner"])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
|
from discord import Message
from typing import re as typing_re
from MoMMI.commands import command
from MoMMI.master import master
from MoMMI.server import MChannel
from MoMMI.role import MRoleType
@command("reload", "reload", roles=[MRoleType.OWNER])
async def reload(channel: MChannel, match: typing_re.Match, message: Message):
await master.reload_modules()
@command("modules", "modules", roles=[MRoleType.OWNER])
async def modules(channel: MChannel, match: typing_re.Match, message: Message):
msg = "```"
for module in channel.server.master.modules.values():
msg += f"{module.name}:\n"
for handler in module.handlers.values():
msg += f"* {handler.name} ({type(handler)})\n"
msg += "```"
await channel.send(msg)
|
Fix dev commands using string roles.
|
Fix dev commands using string roles.
|
Python
|
mit
|
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
|
a003a7b0d52365c5f5976c7382bc1daf2f5960ac
|
glitter_news/search_indexes.py
|
glitter_news/search_indexes.py
|
# -*- coding: utf-8 -*-
from haystack import indexes
from .models import Post
class PostIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Post
def index_queryset(self, using=None):
return self.get_model().objects.select_related().filter(
published=True
).exclude(
current_version=None
)
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from haystack import indexes
from .models import Post
class PostIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Post
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related().filter(
date__lte=timezone.now())
|
Fix the queryset for news indexing
|
Fix the queryset for news indexing
|
Python
|
bsd-2-clause
|
blancltd/glitter-news
|
bd6b969b85a1c8df7cf8d6da7b93f5c94cf8a180
|
sum-of-multiples/sum_of_multiples.py
|
sum-of-multiples/sum_of_multiples.py
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
Use more optimal method of getting multiples
|
Use more optimal method of getting multiples
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
cfa0b71f056c88f14f79f4f47a169ade9ce08096
|
serrano/resources/base.py
|
serrano/resources/base.py
|
from restlib2.resources import Resource
from avocado.models import DataContext
class BaseResource(Resource):
param_defaults = {}
def get_params(self, request):
params = request.GET.copy()
for param, default in self.param_defaults.items():
params.setdefault(param, default)
return params
def get_context(self, request):
params = self.get_params(request)
context = params.get('context')
# Explicit request to not use a context
if context != 'null':
kwargs = {
'archived': False,
}
if hasattr(request, 'user') and request.user.is_authenticated():
kwargs['user'] = request.user
else:
kwargs['session_key'] = request.session.session_key
# Assume it is a primary key and fallback to the sesssion
try:
kwargs['pk'] = int(context)
except (ValueError, TypeError):
kwargs['session'] = True
try:
return DataContext.objects.get(**kwargs)
except DataContext.DoesNotExist:
pass
return DataContext()
|
from restlib2.resources import Resource
from avocado.models import DataContext, DataView
class BaseResource(Resource):
param_defaults = {}
def get_params(self, request):
params = request.GET.copy()
for param, default in self.param_defaults.items():
params.setdefault(param, default)
return params
def get_context(self, request):
params = self.get_params(request)
context = params.get('context')
# Explicit request to not use a context
if context != 'null':
kwargs = {
'archived': False,
}
if hasattr(request, 'user') and request.user.is_authenticated():
kwargs['user'] = request.user
else:
kwargs['session_key'] = request.session.session_key
# Assume it is a primary key and fallback to the sesssion
try:
kwargs['pk'] = int(context)
except (ValueError, TypeError):
kwargs['session'] = True
try:
return DataContext.objects.get(**kwargs)
except DataContext.DoesNotExist:
pass
return DataContext()
def get_view(self, request):
params = self.get_params(request)
view = params.get('view')
# Explicit request to not use a view
if view != 'null':
kwargs = {
'archived': False,
}
if hasattr(request, 'user') and request.user.is_authenticated():
kwargs['user'] = request.user
else:
kwargs['session_key'] = request.session.session_key
# Assume it is a primary key and fallback to the sesssion
try:
kwargs['pk'] = int(view)
except (ValueError, TypeError):
kwargs['session'] = True
try:
return DataView.objects.get(**kwargs)
except DataView.DoesNotExist:
pass
return DataView()
|
Add method to get the most appropriate DataView
|
Add method to get the most appropriate DataView
|
Python
|
bsd-2-clause
|
rv816/serrano_night,chop-dbhi/serrano,rv816/serrano_night,chop-dbhi/serrano
|
c5aa1c7ee17313e3abe156c2bfa429f124a451d5
|
bc125csv/__init__.py
|
bc125csv/__init__.py
|
"""
bc125csv - Channel import and export tool for the Uniden BC125AT, UBC125XLT
and UBC126AT.
Copyright (c) 2015, fdev.nl. All rights reserved.
Released under the MIT license.
Uniden and Bearcat are registered trademarks of Uniden America Corporation.
This application and its author are not affiliated with or endorsed by Uniden
in any way.
"""
__author__ = "Folkert de Vries"
__email__ = "[email protected]"
__version__ = "1.0.0"
__date__ = "Aug 02, 2015"
# Expose main function for setup.py console_scripts
from bc125csv.handler import main
|
"""
bc125csv - Channel import and export tool for the Uniden BC125AT, UBC125XLT
and UBC126AT.
Copyright (c) 2015, fdev.nl. All rights reserved.
Released under the MIT license.
Uniden and Bearcat are registered trademarks of Uniden America Corporation.
This application and its author are not affiliated with or endorsed by Uniden
in any way.
"""
__author__ = "Folkert de Vries"
__email__ = "[email protected]"
__version__ = "1.0.0"
__date__ = "Aug 02, 2015"
# Expose main function for setup.py console_scripts
from bc125csv.handler import main
if __name__ == "__main__":
main()
|
Call main when run directly
|
Call main when run directly
|
Python
|
mit
|
fdev/bc125csv
|
d6b9be8145316f6f90e47bb3a55c861f993a375a
|
tweetyr.py
|
tweetyr.py
|
#!/usr/bin/env python
# -*- coding: UTF-8
'''
A simple twitter client that posts current weather to twitter
'''
import tweepy
import json
from urllib2 import urlopen
import os
root =os.path.dirname(os.path.abspath(__file__))
conf = json.loads(file(root+'/twitterconfig.json').read())
auth = tweepy.OAuthHandler(conf['consumerkey'], conf['consumersecret'])
auth.set_access_token(conf['accesstoken'], conf['accesssecret'])
api = tweepy.API(auth)
w = json.loads(urlopen(conf['apiurl']).read())[0]
api.update_status('%(outtemp).1f °C, %(windspeed).1f m/s vind, %(rain).1f mm nedbør' %w);
|
#!/usr/bin/env python
# -*- coding: UTF-8
'''
A simple twitter client that posts current weather to twitter
'''
import tweepy
import json
from urllib2 import urlopen
import os
root = os.path.dirname(os.path.abspath(__file__))
conf = json.loads(file(root+'/twitterconfig.json').read())
auth = tweepy.OAuthHandler(conf['consumerkey'], conf['consumersecret'])
auth.set_access_token(conf['accesstoken'], conf['accesssecret'])
api = tweepy.API(auth)
w = json.loads(urlopen(conf['apiurl']).read())[0]
api.update_status('%(outtemp).1f °C, %(windspeed).1f m/s vind, %(rain).1f mm nedbør' %w,lat=conf['lat'],long=conf['long'])
|
Add geo info to status update
|
Add geo info to status update
|
Python
|
bsd-3-clause
|
torhve/Amatyr,torhve/Amatyr,torhve/Amatyr
|
00ddeefdcdacb811f5e665a91139e165d7217f84
|
week1/poc_2048_merge_template.py
|
week1/poc_2048_merge_template.py
|
"""
Merge function for 2048 game.
"""
def merge(line):
"""
Function that merges a single row or column in 2048.
"""
l = len(line)
s1 = [0]*l
j = 0
for i in range(l):
if l[i] != 0:
s1[j] = l[i]
return []
a = [2,0,2,4]
print merge(a)
|
"""
Merge function for 2048 game.
"""
def merge(line):
"""
Function that merges a single row or column in 2048.
"""
l = len(line)
s1 = [0]*l
j = 0
for i in range(l):
if line[i] != 0:
s1[j] = line[i]
j += 1
return s1
a = [2,0,2,4]
print (merge(a))
|
Modify the correct merge 1 fct
|
Modify the correct merge 1 fct
|
Python
|
mit
|
Crescent-Saturn/Principles-of-Computing
|
dafde564f3ea18655b1e15f410df70d05b3eb8f5
|
beets/util/collections.py
|
beets/util/collections.py
|
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Custom collections classes
"""
class IdentityFallbackDict(dict):
"""A dictionary which is "transparent" (maps keys to themselves) for all
keys not in it.
"""
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return key
|
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Custom collections classes.
"""
from __future__ import division, absolute_import, print_function
class IdentityFallbackDict(dict):
"""A dictionary which is "transparent" (maps keys to themselves) for all
keys not in it.
"""
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return key
|
Add __future__ imports to a new module
|
Add __future__ imports to a new module
|
Python
|
mit
|
mosesfistos1/beetbox,ibmibmibm/beets,mosesfistos1/beetbox,MyTunesFreeMusic/privacy-policy,artemutin/beets,jackwilsdon/beets,sampsyo/beets,pkess/beets,xsteadfastx/beets,shamangeorge/beets,diego-plan9/beets,MyTunesFreeMusic/privacy-policy,jackwilsdon/beets,beetbox/beets,sampsyo/beets,beetbox/beets,madmouser1/beets,beetbox/beets,madmouser1/beets,pkess/beets,shamangeorge/beets,diego-plan9/beets,MyTunesFreeMusic/privacy-policy,xsteadfastx/beets,Kraymer/beets,MyTunesFreeMusic/privacy-policy,SusannaMaria/beets,jackwilsdon/beets,lengtche/beets,Kraymer/beets,lengtche/beets,jackwilsdon/beets,artemutin/beets,madmouser1/beets,sampsyo/beets,ibmibmibm/beets,pkess/beets,Kraymer/beets,mosesfistos1/beetbox,shamangeorge/beets,Kraymer/beets,lengtche/beets,diego-plan9/beets,ibmibmibm/beets,SusannaMaria/beets,beetbox/beets,xsteadfastx/beets,madmouser1/beets,ibmibmibm/beets,artemutin/beets,pkess/beets,xsteadfastx/beets,artemutin/beets,SusannaMaria/beets,SusannaMaria/beets,lengtche/beets,shamangeorge/beets,mosesfistos1/beetbox,sampsyo/beets,diego-plan9/beets
|
c178e9aba40f6cf775dce5badf60cff9acd9e908
|
boardinghouse/__init__.py
|
boardinghouse/__init__.py
|
"""
"""
__version__ = '0.1'
__release__ = '0.1a2'
def inject_app_defaults():
try:
import settings as app_settings
from django.conf import settings, global_settings
from django.core.exceptions import ImproperlyConfigured
except ImportError:
return
for key in dir(app_settings):
if key.isupper():
value = getattr(app_settings, key)
setattr(global_settings, key, value)
if not hasattr(settings, key):
# We can just ignore failures, as this means we are
# not set up, so global_settings will suffice.
try:
setattr(settings, key, value)
except (ImproperlyConfigured, ImportError):
pass
inject_app_defaults()
|
"""
"""
__version__ = '0.1'
__release__ = '0.1a2'
def inject_app_defaults():
"""
Automatically inject the default settings for this app.
If settings has already been configured, then we need to add
our defaults to that (if not overridden), and in all cases we
also want to inject our settings into the global_settings object,
so we can use diffsettings.
Based on:
http://passingcuriosity.com/2010/default-settings-for-django-applications/
but with improvements for importing/assignation failures.
"""
try:
import settings as app_settings
from django.conf import settings, global_settings
from django.core.exceptions import ImproperlyConfigured
except ImportError:
return
for key in dir(app_settings):
if key.isupper():
value = getattr(app_settings, key)
setattr(global_settings, key, value)
if not hasattr(settings, key):
# We can just ignore failures, as this means we are
# not set up, so global_settings will suffice.
try:
setattr(settings, key, value)
except (ImproperlyConfigured, ImportError):
pass
inject_app_defaults()
|
Document where we got settings injection from.
|
Document where we got settings injection from.
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
f8ae44cb19584a2b7d08b08dc4f32651acfe90f9
|
core/templatetags/tags.py
|
core/templatetags/tags.py
|
from core.models import Comment, Tag
from django import template
register = template.Library()
#May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template
@register.simple_tag
def recent_comments():
comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3]
output = '<ul id="recent">'
for comment in comments:
if not comment.name:
comment.name = "Anonymous"
if comment.website:
output += '<li><a href="' + comment.website + '">' + comment.name + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>'
elif comment.user:
output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>'
else:
output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>'
output += '</ul>'
return output
@register.simple_tag
def tags():
tags = Tag.objects.order_by('?')[:10]
return tags
|
from core.models import Comment, Tag
from django import template
register = template.Library()
#May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template
@register.simple_tag
def recent_comments():
comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3]
output = '<ul id="recent">'
for comment in comments:
if not comment.name:
comment.name = "Anonymous"
elif comment.user:
output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>'
else:
output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>'
output += '</ul>'
return output
@register.simple_tag
def tags():
tags = Tag.objects.order_by('?')[:10]
return tags
|
Remove website from recent comments
|
Remove website from recent comments
|
Python
|
bsd-2-clause
|
mburst/burstolio,mburst/burstolio,mburst/burstolio
|
d7ed79ec53279f0fea0881703079a1c5b82bf938
|
_settings.py
|
_settings.py
|
# Configuration settings
# ID of HPO to validate (see resources/hpo.csv)
hpo_id = 'hpo_id'
# location of files to validate, evaluate
csv_dir = 'path/to/csv_files'
# sprint number being validated against
sprint_num = 0
# Submissions and logs stored here
# For more examples and requirements see http://docs.sqlalchemy.org/en/latest/core/engines.html
conn_str = 'mssql+pymssql://localhost/pmi_sprint_1'
|
# Configuration settings
# ID of HPO to validate (see resources/hpo.csv)
hpo_id = 'hpo_id'
# location of files to validate, evaluate
csv_dir = 'path/to/csv_files'
# sprint number being validated against
sprint_num = 0
# Submissions and logs stored here
# Note: Connecting to MSSQL from *nix may require FreeTDS configuration (see https://goo.gl/qKhusY)
# For more examples and requirements see http://docs.sqlalchemy.org/en/latest/core/engines.html
conn_str = 'mssql+pymssql://localhost/pmi_sprint_1'
|
Add comment regarding freetds config
|
Add comment regarding freetds config
|
Python
|
mit
|
cumc-dbmi/pmi_sprint_reporter
|
703ff26008525bce769b137fafe51ac080a6af81
|
plyer/platforms/ios/compass.py
|
plyer/platforms/ios/compass.py
|
'''
iOS Compass
---------------------
'''
from plyer.facades import Compass
from pyobjus import autoclass
class IosCompass(Compass):
def __init__(self):
super(IosCompass, self).__init__()
self.bridge = autoclass('bridge').alloc().init()
self.bridge.motionManager.setMagnetometerUpdateInterval_(0.1)
def _enable(self):
self.bridge.startMagnetometer()
def _disable(self):
self.bridge.stopMagnetometer()
def _get_orientation(self):
return (
self.bridge.mg_x,
self.bridge.mg_y,
self.bridge.mg_z)
def instance():
return IosCompass()
|
'''
iOS Compass
-----------
'''
from plyer.facades import Compass
from pyobjus import autoclass
class IosCompass(Compass):
def __init__(self):
super(IosCompass, self).__init__()
self.bridge = autoclass('bridge').alloc().init()
self.bridge.motionManager.setMagnetometerUpdateInterval_(0.1)
self.bridge.motionManager.setDeviceMotionUpdateInterval_(0.1)
def _enable(self):
self.bridge.startMagnetometer()
self.bridge.startDeviceMotionWithReferenceFrame()
def _disable(self):
self.bridge.stopMagnetometer()
self.bridge.stopDeviceMotion()
def _get_orientation(self):
return (
self.bridge.mf_x,
self.bridge.mf_y,
self.bridge.mf_z)
def _get_field_uncalib(self):
return (
self.bridge.mg_x,
self.bridge.mg_y,
self.bridge.mg_z,
self.bridge.mg_x - self.bridge.mf_x,
self.bridge.mg_y - self.bridge.mf_y,
self.bridge.mg_z - self.bridge.mf_z)
def instance():
return IosCompass()
|
Add iOS implementation to get uncalibrated values
|
Add iOS implementation to get uncalibrated values
|
Python
|
mit
|
kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer
|
74d402684d4c949bfc69b9fa5e34f5f560339da7
|
api/caching/tasks.py
|
api/caching/tasks.py
|
import urlparse
import requests
from celery.utils.log import get_task_logger
from api.base import settings
logger = get_task_logger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
|
import urlparse
import requests
import logging
from api.base import settings
logger = logging.getLogger(__name__)
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
def ban_url(url):
timeout = 0.5 # 500ms timeout for bans
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, timeout=timeout, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed: {}'.format(
url,
response.text
))
|
Switch from celery task logger to logger
|
Switch from celery task logger to logger
|
Python
|
apache-2.0
|
doublebits/osf.io,aaxelb/osf.io,aaxelb/osf.io,Nesiehr/osf.io,wearpants/osf.io,aaxelb/osf.io,doublebits/osf.io,samchrisinger/osf.io,zamattiac/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,abought/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,chrisseto/osf.io,emetsger/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,chennan47/osf.io,cslzchen/osf.io,SSJohns/osf.io,icereval/osf.io,binoculars/osf.io,zamattiac/osf.io,erinspace/osf.io,mfraezz/osf.io,hmoco/osf.io,chrisseto/osf.io,caneruguz/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,wearpants/osf.io,cslzchen/osf.io,rdhyee/osf.io,sloria/osf.io,cwisecarver/osf.io,billyhunt/osf.io,SSJohns/osf.io,TomBaxter/osf.io,billyhunt/osf.io,RomanZWang/osf.io,erinspace/osf.io,wearpants/osf.io,amyshi188/osf.io,mluo613/osf.io,saradbowman/osf.io,billyhunt/osf.io,zamattiac/osf.io,zachjanicki/osf.io,kwierman/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,alexschiller/osf.io,TomBaxter/osf.io,doublebits/osf.io,amyshi188/osf.io,adlius/osf.io,RomanZWang/osf.io,zamattiac/osf.io,kch8qx/osf.io,caneruguz/osf.io,leb2dg/osf.io,baylee-d/osf.io,mluo613/osf.io,TomHeatwole/osf.io,mattclark/osf.io,icereval/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,kch8qx/osf.io,acshi/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,doublebits/osf.io,mfraezz/osf.io,mluo613/osf.io,abought/osf.io,chrisseto/osf.io,felliott/osf.io,DanielSBrown/osf.io,felliott/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,baylee-d/osf.io,samchrisinger/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,mattclark/osf.io,jnayak1/osf.io,mattclark/osf.io,caseyrollins/osf.io,emetsger/osf.io,zachjanicki/osf.io,acshi/osf.io,crcresearch/osf.io,erinspace/osf.io,mluke93/osf.io,jnayak1/osf.io,wearpants/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,mluke93/osf.io,acshi/osf.io,chrisseto/osf.io,mluo613/osf.io,cwisecarver/os
f.io,binoculars/osf.io,icereval/osf.io,kwierman/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,zachjanicki/osf.io,mfraezz/osf.io,chennan47/osf.io,binoculars/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,aaxelb/osf.io,adlius/osf.io,pattisdr/osf.io,cwisecarver/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,kch8qx/osf.io,sloria/osf.io,leb2dg/osf.io,SSJohns/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,kwierman/osf.io,kwierman/osf.io,sloria/osf.io,cslzchen/osf.io,adlius/osf.io,hmoco/osf.io,Johnetordoff/osf.io,mluke93/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,caseyrollins/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,abought/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,acshi/osf.io,mfraezz/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,samchrisinger/osf.io,pattisdr/osf.io,adlius/osf.io,mluke93/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,alexschiller/osf.io,jnayak1/osf.io,RomanZWang/osf.io,mluo613/osf.io,asanfilippo7/osf.io,hmoco/osf.io,abought/osf.io,jnayak1/osf.io,zachjanicki/osf.io,chennan47/osf.io,baylee-d/osf.io,SSJohns/osf.io,emetsger/osf.io,felliott/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,TomBaxter/osf.io
|
52a07b32eb499d74b1770a42ac0851be71da8288
|
polygraph/types/object_type.py
|
polygraph/types/object_type.py
|
from collections import OrderedDict
from graphql.type.definition import GraphQLObjectType
from marshmallow import Schema, SchemaOpts
class ObjectTypeOpts(SchemaOpts):
def __init__(self, meta, **kwargs):
SchemaOpts.__init__(self, meta, **kwargs)
self.name = getattr(meta, 'name', None)
self.description = getattr(meta, 'name', None)
class ObjectType(Schema):
OPTIONS_CLASS = ObjectTypeOpts
def __init__(self, only=(), exclude=(), prefix='', strict=None,
many=False, context=None, load_only=(), dump_only=(),
partial=False):
super().__init__(only, exclude, prefix, strict,
many, context, load_only, dump_only, partial)
self._name = self.opts.name or self.__class__.__name__
self._description = self.opts.description or self.__doc__
def build_definition(self):
field_map = OrderedDict()
for fieldname, field in self.fields.items():
field_map[fieldname] = field.build_definition()
return GraphQLObjectType(name=self._name, fields=field_map)
|
from collections import OrderedDict
from graphql.type.definition import GraphQLObjectType
from marshmallow import Schema, SchemaOpts
from polygraph.utils.trim_docstring import trim_docstring
class ObjectTypeOpts(SchemaOpts):
def __init__(self, meta, **kwargs):
SchemaOpts.__init__(self, meta, **kwargs)
self.name = getattr(meta, 'name', None)
self.description = getattr(meta, 'name', None)
class ObjectType(Schema):
OPTIONS_CLASS = ObjectTypeOpts
def __init__(self, only=(), exclude=(), prefix='', strict=None,
many=False, context=None, load_only=(), dump_only=(),
partial=False):
super().__init__(only, exclude, prefix, strict,
many, context, load_only, dump_only, partial)
self._name = self.opts.name or self.__class__.__name__
self._description = self.opts.description or trim_docstring(self.__doc__)
def build_definition(self):
field_map = OrderedDict()
for fieldname, field in self.fields.items():
field_map[fieldname] = field.build_definition()
return GraphQLObjectType(name=self._name,
fields=field_map,
description=self._description)
|
Modify ObjectType to derive description from docstring
|
Modify ObjectType to derive description from docstring
|
Python
|
mit
|
polygraph-python/polygraph
|
3cccb20cb9de803867084cab47f43401bf044e63
|
backend/sponsors/models.py
|
backend/sponsors/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from ordered_model.models import OrderedModel
class SponsorLevel(OrderedModel):
name = models.CharField(_("name"), max_length=20)
conference = models.ForeignKey(
"conferences.Conference",
on_delete=models.CASCADE,
verbose_name=_("conference"),
related_name="sponsor_levels",
)
class Meta(OrderedModel.Meta):
unique_together = ["name", "conference"]
class Sponsor(TimeStampedModel):
name = models.CharField(_("name"), max_length=200)
link = models.URLField(_("link"), blank=True)
image = models.ImageField(_("image"), null=True, blank=True, upload_to="sponsors")
level = models.ForeignKey(
SponsorLevel,
on_delete=models.CASCADE,
verbose_name=_("level"),
related_name="sponsors",
)
def __str__(self):
return self.name
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from ordered_model.models import OrderedModel
class SponsorLevel(OrderedModel):
name = models.CharField(_("name"), max_length=20)
conference = models.ForeignKey(
"conferences.Conference",
on_delete=models.CASCADE,
verbose_name=_("conference"),
related_name="sponsor_levels",
)
def __str__(self):
return self.name
class Meta(OrderedModel.Meta):
unique_together = ["name", "conference"]
class Sponsor(TimeStampedModel):
name = models.CharField(_("name"), max_length=200)
link = models.URLField(_("link"), blank=True)
image = models.ImageField(_("image"), null=True, blank=True, upload_to="sponsors")
level = models.ForeignKey(
SponsorLevel,
on_delete=models.CASCADE,
verbose_name=_("level"),
related_name="sponsors",
)
def __str__(self):
return self.name
|
Fix sponsor level name in the django admin
|
Fix sponsor level name in the django admin
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
9db490d5d175f108231cc87afd87a593359837e8
|
app/views.py
|
app/views.py
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
@app.route('/')
@app.route('/index')
def index():
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
Fix the disconnect after 8 hours bug.
|
Fix the disconnect after 8 hours bug.
|
Python
|
mit
|
jbwhit/hammer-pricer,jbwhit/hammer-pricer
|
d90dadca57437def08416638267f2b2db2e7fe3f
|
pytest_raises/pytest_raises.py
|
pytest_raises/pytest_raises.py
|
# -*- coding: utf-8 -*-
import sys
import pytest
class ExpectedException(Exception):
pass
class ExpectedMessage(Exception):
pass
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_call(item):
outcome = yield
raises_marker = item.get_marker('raises')
if raises_marker:
exception = raises_marker.kwargs.get('exception')
exception = exception or Exception
message = raises_marker.kwargs.get('message')
raised_exception = outcome.excinfo[1] if outcome.excinfo else None
traceback = outcome.excinfo[2] if outcome.excinfo else None
if isinstance(raised_exception, exception):
outcome.force_result(None)
if message is not None:
try:
raised_message = str(raised_exception)
if message not in raised_message:
raise ExpectedMessage('"{}" not in "{}"'.format(message, raised_message))
except(ExpectedMessage):
excinfo = sys.exc_info()
if traceback:
outcome.excinfo = excinfo[:2] + (traceback, )
else:
outcome.excinfo = excinfo
else:
try:
raise raised_exception or ExpectedException('Expected exception {}, but it did not raise'.format(exception))
except(ExpectedException):
excinfo = sys.exc_info()
if traceback:
outcome.excinfo = excinfo[:2] + (traceback, )
else:
outcome.excinfo = excinfo
|
# -*- coding: utf-8 -*-
import sys
import pytest
class ExpectedException(Exception):
pass
class ExpectedMessage(Exception):
pass
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_call(item):
outcome = yield
raises_marker = item.get_closest_marker('raises')
if raises_marker:
exception = raises_marker.kwargs.get('exception')
exception = exception or Exception
message = raises_marker.kwargs.get('message')
raised_exception = outcome.excinfo[1] if outcome.excinfo else None
traceback = outcome.excinfo[2] if outcome.excinfo else None
if isinstance(raised_exception, exception):
outcome.force_result(None)
if message is not None:
try:
raised_message = str(raised_exception)
if message not in raised_message:
raise ExpectedMessage('"{}" not in "{}"'.format(message, raised_message))
except(ExpectedMessage):
excinfo = sys.exc_info()
if traceback:
outcome.excinfo = excinfo[:2] + (traceback, )
else:
outcome.excinfo = excinfo
else:
try:
raise raised_exception or ExpectedException('Expected exception {}, but it did not raise'.format(exception))
except(ExpectedException):
excinfo = sys.exc_info()
if traceback:
outcome.excinfo = excinfo[:2] + (traceback, )
else:
outcome.excinfo = excinfo
|
Update marker call for pytest 3.6+
|
Update marker call for pytest 3.6+
|
Python
|
mit
|
Authentise/pytest-raises,Authentise/pytest-raises
|
b16c49cfd6a0ee659e4493ef959e0483e93d350a
|
os_client_config/defaults.py
|
os_client_config/defaults.py
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
_defaults = dict(
auth_type='password',
compute_api_version='2',
floating_ip_source='neutron',
identity_api_version='2',
image_api_use_tasks=False,
image_api_version='1',
network_api_version='2',
object_api_version='1',
volume_api_version='1',
)
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
_defaults = dict(
auth_type='password',
baremetal_api_version='1',
compute_api_version='2',
database_api_version='1.0',
floating_ip_source='neutron',
identity_api_version='2',
image_api_use_tasks=False,
image_api_version='1',
network_api_version='2',
object_api_version='1',
volume_api_version='1',
)
|
Add default versions for trove and ironic
|
Add default versions for trove and ironic
Change-Id: Ib7af38664cfbe75c78c70693117f1193c4beb7e6
|
Python
|
apache-2.0
|
openstack/python-openstacksdk,stackforge/python-openstacksdk,redhat-openstack/os-client-config,dtroyer/python-openstacksdk,openstack/os-client-config,dtroyer/python-openstacksdk,switch-ch/os-client-config,stackforge/python-openstacksdk,dtroyer/os-client-config,openstack/python-openstacksdk
|
c5bfe6f408163267a16b8137e5871943657fb211
|
conftest.py
|
conftest.py
|
from __future__ import absolute_import
pytest_plugins = [
'sentry.utils.pytest'
]
def pytest_configure(config):
from django.conf import settings
settings.INSTALLED_APPS += ('sentry_jira',)
|
from __future__ import absolute_import
import os
os.environ.setdefault('DB', 'sqlite')
pytest_plugins = [
'sentry.utils.pytest'
]
def pytest_configure(config):
from django.conf import settings
settings.INSTALLED_APPS += ('sentry_jira',)
|
Fix tests to run against sqlite
|
Fix tests to run against sqlite
|
Python
|
bsd-3-clause
|
getsentry/sentry-jira,thurloat/sentry-jira,thurloat/sentry-jira,getsentry/sentry-jira
|
389330f4cd1d49ab8fdcfa9554046dedbc5dcffc
|
plugins/Views/SimpleView/__init__.py
|
plugins/Views/SimpleView/__init__.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import SimpleView
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Simple View"),
"author": "Ultimaker",
"version": "1.0",
"decription": i18n_catalog.i18nc("@info:whatsthis", "Provides a simple solid mesh view."),
"api": 2
},
"view": {
"name": i18n_catalog.i18nc("@item:inmenu", "Simple")
}
}
def register(app):
return { "view": SimpleView.SimpleView() }
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import SimpleView
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Simple View"),
"author": "Ultimaker",
"version": "1.0",
"decription": i18n_catalog.i18nc("@info:whatsthis", "Provides a simple solid mesh view."),
"api": 2
},
"view": {
"name": i18n_catalog.i18nc("@item:inmenu", "Simple"),
"visible": False
}
}
def register(app):
return { "view": SimpleView.SimpleView() }
|
Hide simple view by default
|
Hide simple view by default
It is an example implementation, most actual applications would probably
use something with more features.
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
87778eec6425c9bc8ae80f6ad8a0264986d1e7c1
|
api/base/views.py
|
api/base/views.py
|
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
# TODO: Use user serializer
current_user = {
'id': user.pk,
'fullname': user.fullname,
}
else:
current_user = None
return Response({
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
|
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
# TODO: Use user serializer
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
|
Use UserSerializer to serialize current user in the root response
|
Use UserSerializer to serialize current user in the root response
|
Python
|
apache-2.0
|
HarryRybacki/osf.io,cosenal/osf.io,jeffreyliu3230/osf.io,leb2dg/osf.io,erinspace/osf.io,aaxelb/osf.io,Ghalko/osf.io,chennan47/osf.io,abought/osf.io,saradbowman/osf.io,barbour-em/osf.io,TomHeatwole/osf.io,ZobairAlijan/osf.io,dplorimer/osf,DanielSBrown/osf.io,alexschiller/osf.io,samanehsan/osf.io,reinaH/osf.io,RomanZWang/osf.io,icereval/osf.io,samanehsan/osf.io,doublebits/osf.io,cslzchen/osf.io,SSJohns/osf.io,caseyrygt/osf.io,cosenal/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,mattclark/osf.io,zamattiac/osf.io,zamattiac/osf.io,aaxelb/osf.io,bdyetton/prettychart,cwisecarver/osf.io,HalcyonChimera/osf.io,mluke93/osf.io,ZobairAlijan/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,mattclark/osf.io,reinaH/osf.io,mluo613/osf.io,njantrania/osf.io,petermalcolm/osf.io,haoyuchen1992/osf.io,jinluyuan/osf.io,DanielSBrown/osf.io,fabianvf/osf.io,pattisdr/osf.io,caseyrygt/osf.io,jinluyuan/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,sloria/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,mluo613/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,haoyuchen1992/osf.io,lyndsysimon/osf.io,kch8qx/osf.io,jmcarp/osf.io,sbt9uc/osf.io,chrisseto/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,chennan47/osf.io,samanehsan/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,baylee-d/osf.io,barbour-em/osf.io,cslzchen/osf.io,acshi/osf.io,crcresearch/osf.io,erinspace/osf.io,hmoco/osf.io,cslzchen/osf.io,jeffreyliu3230/osf.io,amyshi188/osf.io,cldershem/osf.io,njantrania/osf.io,pattisdr/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,lyndsysimon/osf.io,caneruguz/osf.io,ticklemepierce/osf.io,rdhyee/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,mluke93/osf.io,bdyetton/prettychart,petermalcolm/osf.io,abought/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,jinluyuan/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,adlius/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,cldershem/osf.io,jmcarp/osf.
io,petermalcolm/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,acshi/osf.io,doublebits/osf.io,wearpants/osf.io,danielneis/osf.io,ticklemepierce/osf.io,cldershem/osf.io,barbour-em/osf.io,sbt9uc/osf.io,reinaH/osf.io,hmoco/osf.io,mfraezz/osf.io,danielneis/osf.io,hmoco/osf.io,jolene-esposito/osf.io,cwisecarver/osf.io,emetsger/osf.io,mluo613/osf.io,doublebits/osf.io,Johnetordoff/osf.io,binoculars/osf.io,bdyetton/prettychart,emetsger/osf.io,cwisecarver/osf.io,Ghalko/osf.io,njantrania/osf.io,TomBaxter/osf.io,samchrisinger/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,billyhunt/osf.io,doublebits/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,cwisecarver/osf.io,icereval/osf.io,rdhyee/osf.io,pattisdr/osf.io,wearpants/osf.io,rdhyee/osf.io,danielneis/osf.io,kwierman/osf.io,amyshi188/osf.io,icereval/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,jmcarp/osf.io,caneruguz/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,adlius/osf.io,jnayak1/osf.io,caneruguz/osf.io,cslzchen/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,arpitar/osf.io,lyndsysimon/osf.io,kch8qx/osf.io,danielneis/osf.io,laurenrevere/osf.io,SSJohns/osf.io,jolene-esposito/osf.io,zachjanicki/osf.io,laurenrevere/osf.io,njantrania/osf.io,samchrisinger/osf.io,fabianvf/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,erinspace/osf.io,barbour-em/osf.io,felliott/osf.io,kch8qx/osf.io,chennan47/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,saradbowman/osf.io,lyndsysimon/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,cldershem/osf.io,mfraezz/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,abought/osf.io,emetsger/osf.io,zamattiac/osf.io,MerlinZhang/osf.io,fabianvf/osf.io,alexschiller/osf.io,adlius/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,binoculars/osf.io,HarryRybacki/osf.io,chrisseto/osf.io,chrisseto/osf.io,sloria/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,dplo
rimer/osf,acshi/osf.io,ckc6cz/osf.io,kch8qx/osf.io,sloria/osf.io,crcresearch/osf.io,kwierman/osf.io,kch8qx/osf.io,jinluyuan/osf.io,crcresearch/osf.io,emetsger/osf.io,Johnetordoff/osf.io,wearpants/osf.io,felliott/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,ckc6cz/osf.io,brandonPurvis/osf.io,mattclark/osf.io,asanfilippo7/osf.io,dplorimer/osf,ckc6cz/osf.io,mluke93/osf.io,arpitar/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,hmoco/osf.io,billyhunt/osf.io,TomBaxter/osf.io,billyhunt/osf.io,kwierman/osf.io,sbt9uc/osf.io,leb2dg/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,jnayak1/osf.io,caseyrollins/osf.io,bdyetton/prettychart,chrisseto/osf.io,aaxelb/osf.io,acshi/osf.io,Ghalko/osf.io,MerlinZhang/osf.io,leb2dg/osf.io,HarryRybacki/osf.io,brianjgeiger/osf.io,dplorimer/osf,KAsante95/osf.io,ckc6cz/osf.io,Nesiehr/osf.io,fabianvf/osf.io,adlius/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,GageGaskins/osf.io,kwierman/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,arpitar/osf.io,MerlinZhang/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,billyhunt/osf.io,samchrisinger/osf.io,felliott/osf.io,mluke93/osf.io,binoculars/osf.io,jolene-esposito/osf.io,brandonPurvis/osf.io,acshi/osf.io,cosenal/osf.io,doublebits/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,billyhunt/osf.io,felliott/osf.io,jmcarp/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,HarryRybacki/osf.io,abought/osf.io,reinaH/osf.io,MerlinZhang/osf.io,arpitar/osf.io,ZobairAlijan/osf.io,baylee-d/osf.io
|
d9b804f72e54ffc9cb0f1cef8ce74aef1079ef76
|
tosec/management/commands/tosecscan.py
|
tosec/management/commands/tosecscan.py
|
import os
import hashlib
from tosec.models import Rom
from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = '<folder>'
help = 'Scan a folder for TOSEC roms'
def handle(self, *args, **kwargs):
directory = args[0]
dest = os.path.join(directory, 'TOSEC')
if not os.path.exists(dest):
os.makedirs(dest)
self.stdout.write("Scanning %s" % directory)
for filename in os.listdir(directory):
abspath = os.path.join(directory, filename)
if not os.path.isfile(abspath):
continue
md5sum = hashlib.md5(open(abspath).read()).hexdigest()
rom = Rom.objects.filter(md5=md5sum)
if not rom:
continue
else:
rom = rom[0]
self.stdout.write("Found %s" % rom.name)
new_path = os.path.join(dest, rom.name)
os.rename(abspath, new_path)
|
import os
import hashlib
from tosec.models import Rom, Game
from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = '<folder>'
help = 'Scan a folder for TOSEC roms'
def handle(self, *args, **kwargs):
directory = args[0]
dest = os.path.join(directory, 'TOSEC')
if not os.path.exists(dest):
os.makedirs(dest)
self.stdout.write("Scanning %s" % directory)
filenames = os.listdir(directory)
total_files = len(filenames)
tosec_sets = {} # Store TOSEC sets with number of found roms
for index, filename in enumerate(filenames, start=1):
abspath = os.path.join(directory, filename)
if not os.path.isfile(abspath):
continue
md5sum = hashlib.md5(open(abspath).read()).hexdigest()
try:
rom = Rom.objects.get(md5=md5sum)
except Rom.DoesNotExist:
continue
set_name = rom.game.category.name
if set_name in tosec_sets:
tosec_sets[set_name] += 1
else:
tosec_sets[set_name] = 1
self.stdout.write("[{} of {}] Found {}".format(index,
total_files,
rom.name))
new_path = os.path.join(dest, rom.name)
os.rename(abspath, new_path)
for set_name in tosec_sets:
set_size = Game.objects.filter(category__name=set_name).count()
self.stdout.write("{}: imported {} of {} games".format(
set_name, tosec_sets[set_name], set_size
))
|
Print report on imported TOSEC sets
|
Print report on imported TOSEC sets
|
Python
|
agpl-3.0
|
Turupawn/website,Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website,lutris/website,Turupawn/website
|
6beefdaf46e3febbb106bd72da58ed62fc1349f0
|
scripts/graph/wavefront_obj.py
|
scripts/graph/wavefront_obj.py
|
import numpy as np
def readObj(filename):
vert = []
face = []
with open(filename, 'r') as file:
for line in file:
l = line.split()
if len(l) > 0:
if l[0] == 'v':
vert.append(l[1:])
elif l[0] == 'f':
face.append(l[1:])
return (np.array(vert, np.double), np.array(face, np.int))
|
import numpy as np
def readObj(filename):
vert = []
face = []
with open(filename, 'r') as file:
for line in file:
l = line.split()
if len(l) > 0:
if l[0] == 'v':
vert.append(l[1:])
elif l[0] == 'f':
face.append(l[1:])
return (np.array(vert, np.double), np.array(face, np.int) - 1)
|
Fix wavefront obj face index.
|
Fix wavefront obj face index.
|
Python
|
bsd-2-clause
|
gergondet/RBDyn,gergondet/RBDyn,gergondet/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn
|
dc377e246e65db8257298eb604c032313d8a113e
|
propertyfrontend/__init__.py
|
propertyfrontend/__init__.py
|
import os, logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from raven.contrib.flask import Sentry
from lrutils import dateformat
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.jinja_env.filters['dateformat'] = dateformat
if app.config.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
app.logger.debug("\nConfiguration\n%s\n" % app.config)
@app.context_processor
def asset_path_context_processor():
return {
'asset_path': '/static/build/',
'landregistry_asset_path': '/static/build/'
}
@app.context_processor
def address_processor():
from lrutils import build_address
def process_address_json(address_json):
return build_address(address_json)
return dict(formatted=process_address_json)
|
import os, logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from raven.contrib.flask import Sentry
from lrutils import dateformat
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
app.jinja_env.filters['dateformat'] = dateformat
if app.config.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
app.logger.debug("\nConfiguration\n%s\n" % app.config)
@app.context_processor
def asset_path_context_processor():
return {
'asset_path': '/static/build/',
'landregistry_asset_path': '/static/build/'
}
@app.context_processor
def address_processor():
from lrutils import build_address
def process_address_json(address_json):
return build_address(address_json)
return dict(formatted=process_address_json)
|
Add proxy fix as in lr this will run with reverse proxy
|
Add proxy fix as in lr this will run with reverse proxy
|
Python
|
mit
|
LandRegistry/property-frontend-alpha,LandRegistry/property-frontend-alpha,LandRegistry/property-frontend-alpha,LandRegistry/property-frontend-alpha
|
05db0c3dc6affdc3938d45195fb807be78ae5ff1
|
dit/math/__init__.py
|
dit/math/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Mathematical tools for dit.
"""
from __future__ import absolute_import
# Global random number generator
import numpy as np
prng = np.random.RandomState()
# Set the error level to ignore...for example: log2(0).
np.seterr(all='ignore')
del np
from .equal import close, allclose
from .sampling import sample, _sample, _samples
from .ops import get_ops, LinearOperations, LogOperations
from .fraction import approximate_fraction
from .sigmaalgebra import sigma_algebra, is_sigma_algebra, atom_set
from . import aitchison
from . import combinatorics
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Mathematical tools for dit.
"""
from __future__ import absolute_import
# Global random number generator
import numpy as np
prng = np.random.RandomState()
# Set the error level to ignore...for example: log2(0).
np.seterr(all='ignore')
del np
from .equal import close, allclose
from .sampling import sample, _sample, _samples
from .ops import get_ops, LinearOperations, LogOperations
from .fraction import approximate_fraction
from .sigmaalgebra import sigma_algebra, is_sigma_algebra, atom_set
from .perturb import perturb_pmf
from . import aitchison
from . import combinatorics
|
Make perturb_pmf available in dit.math.
|
Make perturb_pmf available in dit.math.
|
Python
|
bsd-3-clause
|
Autoplectic/dit,chebee7i/dit,chebee7i/dit,Autoplectic/dit,Autoplectic/dit,Autoplectic/dit,Autoplectic/dit,dit/dit,dit/dit,chebee7i/dit,dit/dit,dit/dit,chebee7i/dit,dit/dit
|
5a7f88a7d033a8005d09792d62827689d6d5230d
|
mox3/fixture.py
|
mox3/fixture.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from mox3 import mox
from mox3 import stubout
class MoxStubout(fixtures.Fixture):
"""Deal with code around mox and stubout as a fixture."""
def setUp(self):
super(MoxStubout, self).setUp()
self.mox = mox.Mox()
self.stubs = stubout.StubOutForTesting()
self.addCleanup(self.mox.UnsetStubs)
self.addCleanup(self.stubs.UnsetAll)
self.addCleanup(self.stubs.SmartUnsetAll)
self.addCleanup(self.mox.VerifyAll)
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from mox3 import mox
from mox3 import stubout
class MoxStubout(fixtures.Fixture):
"""Deal with code around mox and stubout as a fixture."""
def setUp(self):
super(MoxStubout, self).setUp()
self.mox = mox.Mox()
self.stubs = stubout.StubOutForTesting()
self.addCleanup(self.mox.UnsetStubs)
self.addCleanup(self.stubs.UnsetAll)
self.addCleanup(self.stubs.SmartUnsetAll)
self.addCleanup(self.mox.VerifyAll)
|
Remove vim header from source files
|
Remove vim header from source files
trivialfix
Change-Id: I6ccd551bc5cec8f5a682502b0a6e99a6d02cad3b
|
Python
|
apache-2.0
|
openstack/mox3
|
6692476cc7523516275f4512c32b0378574210bf
|
django_tenants/routers.py
|
django_tenants/routers.py
|
from django.conf import settings
class TenantSyncRouter(object):
"""
A router to control which applications will be synced,
depending if we are syncing the shared apps or the tenant apps.
"""
def allow_migrate(self, db, app_label, model_name=None, **hints):
# the imports below need to be done here else django <1.5 goes crazy
# https://code.djangoproject.com/ticket/20704
from django.db import connection
from django_tenants.utils import get_public_schema_name
# for INSTALLED_APPS we need a name
from django.apps import apps
app_name = apps.get_app_config(app_label).name
if connection.schema_name == get_public_schema_name():
if app_name not in settings.SHARED_APPS:
return False
else:
if app_name not in settings.TENANT_APPS:
return False
return None
|
from django.conf import settings
from django.apps import apps as django_apps
class TenantSyncRouter(object):
"""
A router to control which applications will be synced,
depending if we are syncing the shared apps or the tenant apps.
"""
def app_in_list(self, app_label, apps_list):
"""
Is 'app_label' present in 'apps_list'?
apps_list is either settings.SHARED_APPS or settings.TENANT_APPS, a
list of app names.
We check the presense of the app's name or the full path to the apps's
AppConfig class.
https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications
"""
appconfig = django_apps.get_app_config(app_label)
appconfig_full_name = '{}.{}'.format(
appconfig.__module__, appconfig.__class__.__name__)
return (appconfig.name in apps_list) or (appconfig_full_name in apps_list)
def allow_migrate(self, db, app_label, model_name=None, **hints):
# the imports below need to be done here else django <1.5 goes crazy
# https://code.djangoproject.com/ticket/20704
from django.db import connection
from django_tenants.utils import get_public_schema_name
if connection.schema_name == get_public_schema_name():
if not self.app_in_list(app_label, settings.SHARED_APPS):
return False
else:
if not self.app_in_list(app_label, settings.TENANT_APPS):
return False
return None
|
Fix check of an app's presence in INSTALLED_APPS
|
Fix check of an app's presence in INSTALLED_APPS
In TenantSyncRouter, the logic to check whether an app is a tenant app or shared app was too simplistic. Django 1.7 allows two ways to add an app to INSTALLED_APPS. 1) By specifying the app's name, and 2) By specifying the dotted path to the app's AppConfig's class. This commit ensures that we check for the latter case as well. https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications
|
Python
|
mit
|
sigma-geosistemas/django-tenants,tomturner/django-tenants,tomturner/django-tenants,tomturner/django-tenants,sigma-geosistemas/django-tenants
|
3749294ae008c8bda7de9ea538d3d64d5a21c8f4
|
driller/CGCSimProc.py
|
driller/CGCSimProc.py
|
import angr
import simuvex
class DrillerTransmit(simuvex.SimProcedure):
'''
CGC's transmit simprocedure which supports errors
'''
def run(self, fd, buf, count, tx_bytes):
if self.state.mode == 'fastpath':
# Special case for CFG generation
self.state.store_mem(tx_bytes, count, endness='Iend_LE')
return self.state.se.BVV(0, self.state.arch.bits)
if ABSTRACT_MEMORY in self.state.options:
data = self.state.mem_expr(buf, count)
self.state.posix.write(fd, data, count)
self.state.store_mem(tx_bytes, count, endness='Iend_LE')
else:
if self.state.satisfiable(extra_constraints=[count != 0]):
data = self.state.mem_expr(buf, count)
self.state.posix.write(fd, data, count)
self.data = data
else:
self.data = None
self.size = count
self.state.store_mem(tx_bytes, count, endness='Iend_LE', condition=tx_bytes != 0)
# TODO: transmit failure
transmit_return = self.state.se.BV("transmit_return", self.state.arch.bits)
self.state.add_constraints(transmit_return >= -1)
self.state.add_constraints(transmit_return <= 0)
return transmit_return
simprocedures = [("transmit", DrillerTransmit)]
|
import angr
import simuvex
class DrillerTransmit(simuvex.SimProcedure):
'''
CGC's transmit simprocedure which supports errors
'''
def run(self, fd, buf, count, tx_bytes):
if self.state.mode == 'fastpath':
# Special case for CFG generation
self.state.store_mem(tx_bytes, count, endness='Iend_LE')
return self.state.se.BVV(0, self.state.arch.bits)
if ABSTRACT_MEMORY in self.state.options:
data = self.state.mem_expr(buf, count)
self.state.posix.write(fd, data, count)
self.state.store_mem(tx_bytes, count, endness='Iend_LE')
else:
if self.state.satisfiable(extra_constraints=[count != 0]):
data = self.state.mem_expr(buf, count)
self.state.posix.write(fd, data, count)
self.data = data
else:
self.data = None
self.size = count
self.state.store_mem(tx_bytes, count, endness='Iend_LE', condition=tx_bytes != 0)
# TODO: transmit failure
transmit_return = self.state.se.BV("transmit_return", self.state.arch.bits)
self.state.add_constraints(transmit_return >= -1)
self.state.add_constraints(transmit_return <= 0)
return transmit_return
# disable simprocedures for CGC
simprocedures = []
|
Disable CGC simprocedures, it's unhelpful at the moment
|
Disable CGC simprocedures, it's unhelpful at the moment
|
Python
|
bsd-2-clause
|
shellphish/driller
|
cb49f6d3f0ecb367fd76afe2e98c55e48ba1128f
|
bin/gephi-mock.py
|
bin/gephi-mock.py
|
#!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from http.server import BaseHTTPRequestHandler, HTTPServer
class GephiHandler(BaseHTTPRequestHandler):
def respond(self):
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.end_headers()
self.wfile.write("{}")
def do_GET(self):
self.respond()
def do_POST(self):
self.respond()
def main():
try:
server = HTTPServer(('', 8080), GephiHandler)
print('listening on port 8080...')
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from http.server import BaseHTTPRequestHandler, HTTPServer
class GephiHandler(BaseHTTPRequestHandler):
def respond(self):
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.end_headers()
self.wfile.write("{}")
def do_GET(self):
self.respond()
def do_POST(self):
self.respond()
def main():
try:
server = HTTPServer(('', 8080), GephiHandler)
print('listening on port 8080...')
server.serve_forever()
except KeyboardInterrupt:
print('^C received, shutting down server')
server.socket.close()
if __name__ == '__main__':
main()
|
Switch to python3 specifically in gephi mock script.
|
Switch to python3 specifically in gephi mock script.
Can't change the docker image to make python==python3 right now because we need python2 on the image as it is used in the older branches that still support that version. CTR
|
Python
|
apache-2.0
|
apache/tinkerpop,apache/tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop,robertdale/tinkerpop,apache/tinkerpop,robertdale/tinkerpop,apache/tinkerpop,krlohnes/tinkerpop,krlohnes/tinkerpop,apache/tinkerpop,apache/incubator-tinkerpop,apache/tinkerpop,apache/incubator-tinkerpop,apache/incubator-tinkerpop,apache/tinkerpop,robertdale/tinkerpop,krlohnes/tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop
|
b7971d68256b23646de1ef648181da5ceacd67f8
|
scipy/constants/tests/test_codata.py
|
scipy/constants/tests/test_codata.py
|
import warnings
from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
if __name__ == "__main__":
run_module_suite()
|
import warnings
import codata
import constants
from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
def test_basic_table_parse():
c = 'speed of light in vacuum'
assert_equal(codata.value(c), constants.c)
assert_equal(codata.value(c), constants.speed_of_light)
def test_basic_lookup():
assert_equal('%d %s' % (codata.c, codata.unit('speed of light in vacuum')),
'299792458 m s^-1')
if __name__ == "__main__":
run_module_suite()
|
Add very basic tests for codata and constants.
|
ENH: Add very basic tests for codata and constants.
|
Python
|
bsd-3-clause
|
WarrenWeckesser/scipy,arokem/scipy,aeklant/scipy,vhaasteren/scipy,lhilt/scipy,jseabold/scipy,fernand/scipy,e-q/scipy,nonhermitian/scipy,ndchorley/scipy,newemailjdm/scipy,scipy/scipy,grlee77/scipy,zerothi/scipy,FRidh/scipy,Shaswat27/scipy,trankmichael/scipy,mgaitan/scipy,vigna/scipy,woodscn/scipy,andyfaff/scipy,jonycgn/scipy,arokem/scipy,mtrbean/scipy,minhlongdo/scipy,jor-/scipy,pbrod/scipy,nvoron23/scipy,efiring/scipy,scipy/scipy,zaxliu/scipy,jseabold/scipy,andim/scipy,giorgiop/scipy,sonnyhu/scipy,zaxliu/scipy,zerothi/scipy,sauliusl/scipy,chatcannon/scipy,larsmans/scipy,mingwpy/scipy,larsmans/scipy,lukauskas/scipy,njwilson23/scipy,surhudm/scipy,Shaswat27/scipy,jamestwebber/scipy,Eric89GXL/scipy,ales-erjavec/scipy,WarrenWeckesser/scipy,WillieMaddox/scipy,perimosocordiae/scipy,vanpact/scipy,gfyoung/scipy,zxsted/scipy,haudren/scipy,fredrikw/scipy,dominicelse/scipy,ogrisel/scipy,petebachant/scipy,anielsen001/scipy,raoulbq/scipy,ogrisel/scipy,pnedunuri/scipy,zerothi/scipy,dch312/scipy,rgommers/scipy,maniteja123/scipy,juliantaylor/scipy,piyush0609/scipy,mortonjt/scipy,Eric89GXL/scipy,njwilson23/scipy,sonnyhu/scipy,grlee77/scipy,Shaswat27/scipy,WarrenWeckesser/scipy,piyush0609/scipy,dch312/scipy,trankmichael/scipy,endolith/scipy,fernand/scipy,grlee77/scipy,surhudm/scipy,pyramania/scipy,e-q/scipy,WarrenWeckesser/scipy,nmayorov/scipy,befelix/scipy,chatcannon/scipy,giorgiop/scipy,vanpact/scipy,argriffing/scipy,Gillu13/scipy,kalvdans/scipy,Dapid/scipy,rmcgibbo/scipy,jakevdp/scipy,vhaasteren/scipy,josephcslater/scipy,mhogg/scipy,FRidh/scipy,Kamp9/scipy,njwilson23/scipy,mgaitan/scipy,richardotis/scipy,jakevdp/scipy,perimosocordiae/scipy,njwilson23/scipy,mdhaber/scipy,nvoron23/scipy,Gillu13/scipy,FRidh/scipy,zxsted/scipy,ales-erjavec/scipy,mikebenfield/scipy,befelix/scipy,jamestwebber/scipy,befelix/scipy,jseabold/scipy,mgaitan/scipy,gdooper/scipy,richardotis/scipy,sriki18/scipy,jseabold/scipy,zxsted/scipy,pyramania/scipy,zaxliu/scipy,mhogg/scipy,mtrbean/scipy,jjhelmus/scipy,mgait
an/scipy,aman-iitj/scipy,person142/scipy,mhogg/scipy,mortada/scipy,matthewalbani/scipy,richardotis/scipy,piyush0609/scipy,fernand/scipy,teoliphant/scipy,andim/scipy,aarchiba/scipy,nonhermitian/scipy,mingwpy/scipy,giorgiop/scipy,Stefan-Endres/scipy,vanpact/scipy,jor-/scipy,ortylp/scipy,gertingold/scipy,aeklant/scipy,aeklant/scipy,mhogg/scipy,ogrisel/scipy,juliantaylor/scipy,maniteja123/scipy,pnedunuri/scipy,efiring/scipy,lukauskas/scipy,sauliusl/scipy,vanpact/scipy,tylerjereddy/scipy,pyramania/scipy,aman-iitj/scipy,niknow/scipy,tylerjereddy/scipy,zaxliu/scipy,gef756/scipy,cpaulik/scipy,ilayn/scipy,pnedunuri/scipy,teoliphant/scipy,lukauskas/scipy,lukauskas/scipy,ortylp/scipy,pnedunuri/scipy,futurulus/scipy,larsmans/scipy,hainm/scipy,vigna/scipy,bkendzior/scipy,mortonjt/scipy,argriffing/scipy,efiring/scipy,Stefan-Endres/scipy,lhilt/scipy,trankmichael/scipy,sonnyhu/scipy,zxsted/scipy,maciejkula/scipy,pnedunuri/scipy,mhogg/scipy,arokem/scipy,richardotis/scipy,matthewalbani/scipy,sargas/scipy,aarchiba/scipy,grlee77/scipy,pbrod/scipy,anielsen001/scipy,kalvdans/scipy,arokem/scipy,gef756/scipy,lhilt/scipy,gdooper/scipy,Kamp9/scipy,sargas/scipy,apbard/scipy,vanpact/scipy,giorgiop/scipy,WillieMaddox/scipy,zxsted/scipy,juliantaylor/scipy,sriki18/scipy,futurulus/scipy,mikebenfield/scipy,vhaasteren/scipy,anielsen001/scipy,haudren/scipy,futurulus/scipy,zxsted/scipy,mdhaber/scipy,teoliphant/scipy,bkendzior/scipy,ChanderG/scipy,rmcgibbo/scipy,WillieMaddox/scipy,ilayn/scipy,jsilter/scipy,Stefan-Endres/scipy,Gillu13/scipy,jjhelmus/scipy,larsmans/scipy,Dapid/scipy,witcxc/scipy,raoulbq/scipy,jakevdp/scipy,vhaasteren/scipy,andyfaff/scipy,gdooper/scipy,mtrbean/scipy,Newman101/scipy,endolith/scipy,pbrod/scipy,anielsen001/scipy,Dapid/scipy,anntzer/scipy,aeklant/scipy,mdhaber/scipy,apbard/scipy,felipebetancur/scipy,aeklant/scipy,felipebetancur/scipy,ChanderG/scipy,josephcslater/scipy,minhlongdo/scipy,ilayn/scipy,vberaudi/scipy,Newman101/scipy,kleskjr/scipy,pschella/scipy,kalvdans/scipy,woods
cn/scipy,andyfaff/scipy,arokem/scipy,gfyoung/scipy,fernand/scipy,zaxliu/scipy,matthewalbani/scipy,matthewalbani/scipy,gef756/scipy,cpaulik/scipy,newemailjdm/scipy,rgommers/scipy,jonycgn/scipy,apbard/scipy,surhudm/scipy,matthew-brett/scipy,chatcannon/scipy,Dapid/scipy,jonycgn/scipy,Newman101/scipy,aman-iitj/scipy,jor-/scipy,FRidh/scipy,kalvdans/scipy,jakevdp/scipy,nvoron23/scipy,perimosocordiae/scipy,juliantaylor/scipy,Stefan-Endres/scipy,njwilson23/scipy,witcxc/scipy,fernand/scipy,rgommers/scipy,mortada/scipy,ogrisel/scipy,sargas/scipy,gertingold/scipy,scipy/scipy,maciejkula/scipy,person142/scipy,mingwpy/scipy,lukauskas/scipy,gdooper/scipy,tylerjereddy/scipy,WillieMaddox/scipy,matthewalbani/scipy,njwilson23/scipy,andim/scipy,jjhelmus/scipy,bkendzior/scipy,fredrikw/scipy,haudren/scipy,larsmans/scipy,apbard/scipy,vigna/scipy,mingwpy/scipy,newemailjdm/scipy,nvoron23/scipy,petebachant/scipy,sriki18/scipy,raoulbq/scipy,WarrenWeckesser/scipy,andim/scipy,jamestwebber/scipy,jamestwebber/scipy,ilayn/scipy,maniteja123/scipy,minhlongdo/scipy,sauliusl/scipy,rmcgibbo/scipy,nonhermitian/scipy,ndchorley/scipy,gfyoung/scipy,surhudm/scipy,hainm/scipy,efiring/scipy,gdooper/scipy,dominicelse/scipy,Newman101/scipy,giorgiop/scipy,pbrod/scipy,pizzathief/scipy,josephcslater/scipy,raoulbq/scipy,witcxc/scipy,anntzer/scipy,richardotis/scipy,mikebenfield/scipy,behzadnouri/scipy,mdhaber/scipy,rgommers/scipy,scipy/scipy,sonnyhu/scipy,mtrbean/scipy,jsilter/scipy,anntzer/scipy,lhilt/scipy,pnedunuri/scipy,petebachant/scipy,nvoron23/scipy,gertingold/scipy,ortylp/scipy,ndchorley/scipy,chatcannon/scipy,minhlongdo/scipy,fredrikw/scipy,futurulus/scipy,surhudm/scipy,cpaulik/scipy,endolith/scipy,pizzathief/scipy,hainm/scipy,Kamp9/scipy,vberaudi/scipy,efiring/scipy,vhaasteren/scipy,mortada/scipy,chatcannon/scipy,gertingold/scipy,jjhelmus/scipy,pschella/scipy,pschella/scipy,witcxc/scipy,WillieMaddox/scipy,jseabold/scipy,andyfaff/scipy,maciejkula/scipy,sriki18/scipy,pizzathief/scipy,apbard/scipy,josephcslat
er/scipy,ChanderG/scipy,zerothi/scipy,Srisai85/scipy,newemailjdm/scipy,aman-iitj/scipy,jakevdp/scipy,aarchiba/scipy,maniteja123/scipy,nonhermitian/scipy,chatcannon/scipy,endolith/scipy,hainm/scipy,person142/scipy,maniteja123/scipy,Kamp9/scipy,ilayn/scipy,jamestwebber/scipy,endolith/scipy,ogrisel/scipy,Stefan-Endres/scipy,Gillu13/scipy,futurulus/scipy,cpaulik/scipy,anntzer/scipy,dominicelse/scipy,vhaasteren/scipy,ChanderG/scipy,nonhermitian/scipy,bkendzior/scipy,matthew-brett/scipy,matthew-brett/scipy,grlee77/scipy,gfyoung/scipy,WarrenWeckesser/scipy,sargas/scipy,mgaitan/scipy,Srisai85/scipy,andyfaff/scipy,jonycgn/scipy,kleskjr/scipy,ales-erjavec/scipy,anielsen001/scipy,niknow/scipy,Gillu13/scipy,woodscn/scipy,woodscn/scipy,ndchorley/scipy,aarchiba/scipy,behzadnouri/scipy,gfyoung/scipy,behzadnouri/scipy,rmcgibbo/scipy,behzadnouri/scipy,Dapid/scipy,argriffing/scipy,tylerjereddy/scipy,vberaudi/scipy,gef756/scipy,haudren/scipy,maciejkula/scipy,andim/scipy,dch312/scipy,befelix/scipy,lhilt/scipy,mortada/scipy,sargas/scipy,befelix/scipy,Shaswat27/scipy,jonycgn/scipy,behzadnouri/scipy,argriffing/scipy,argriffing/scipy,Eric89GXL/scipy,hainm/scipy,anielsen001/scipy,minhlongdo/scipy,haudren/scipy,newemailjdm/scipy,niknow/scipy,fredrikw/scipy,pizzathief/scipy,rmcgibbo/scipy,e-q/scipy,ndchorley/scipy,teoliphant/scipy,Newman101/scipy,nmayorov/scipy,maniteja123/scipy,pschella/scipy,FRidh/scipy,e-q/scipy,Srisai85/scipy,endolith/scipy,piyush0609/scipy,andyfaff/scipy,maciejkula/scipy,rmcgibbo/scipy,sriki18/scipy,Srisai85/scipy,mingwpy/scipy,pizzathief/scipy,mortonjt/scipy,vanpact/scipy,vberaudi/scipy,aman-iitj/scipy,pyramania/scipy,scipy/scipy,vberaudi/scipy,dominicelse/scipy,nmayorov/scipy,petebachant/scipy,mdhaber/scipy,ales-erjavec/scipy,Srisai85/scipy,jsilter/scipy,haudren/scipy,vigna/scipy,ales-erjavec/scipy,nmayorov/scipy,kleskjr/scipy,mingwpy/scipy,Shaswat27/scipy,felipebetancur/scipy,niknow/scipy,lukauskas/scipy,gertingold/scipy,andim/scipy,ales-erjavec/scipy,niknow/scipy,per
imosocordiae/scipy,gef756/scipy,sauliusl/scipy,jjhelmus/scipy,trankmichael/scipy,gef756/scipy,hainm/scipy,mortada/scipy,raoulbq/scipy,jonycgn/scipy,vberaudi/scipy,perimosocordiae/scipy,kleskjr/scipy,efiring/scipy,Newman101/scipy,tylerjereddy/scipy,Srisai85/scipy,woodscn/scipy,behzadnouri/scipy,kalvdans/scipy,giorgiop/scipy,ortylp/scipy,minhlongdo/scipy,zerothi/scipy,nmayorov/scipy,zerothi/scipy,nvoron23/scipy,cpaulik/scipy,jsilter/scipy,mhogg/scipy,mortonjt/scipy,anntzer/scipy,person142/scipy,mdhaber/scipy,piyush0609/scipy,person142/scipy,aman-iitj/scipy,niknow/scipy,Stefan-Endres/scipy,sriki18/scipy,mortada/scipy,Eric89GXL/scipy,witcxc/scipy,bkendzior/scipy,juliantaylor/scipy,aarchiba/scipy,ChanderG/scipy,mortonjt/scipy,fredrikw/scipy,trankmichael/scipy,Gillu13/scipy,mtrbean/scipy,Kamp9/scipy,pbrod/scipy,sauliusl/scipy,larsmans/scipy,jseabold/scipy,ilayn/scipy,Eric89GXL/scipy,sauliusl/scipy,petebachant/scipy,WillieMaddox/scipy,pschella/scipy,kleskjr/scipy,anntzer/scipy,matthew-brett/scipy,felipebetancur/scipy,fredrikw/scipy,surhudm/scipy,sonnyhu/scipy,argriffing/scipy,FRidh/scipy,mikebenfield/scipy,ndchorley/scipy,dch312/scipy,mikebenfield/scipy,mortonjt/scipy,kleskjr/scipy,zaxliu/scipy,trankmichael/scipy,ortylp/scipy,Eric89GXL/scipy,newemailjdm/scipy,raoulbq/scipy,vigna/scipy,mgaitan/scipy,scipy/scipy,dch312/scipy,pbrod/scipy,sonnyhu/scipy,ortylp/scipy,felipebetancur/scipy,josephcslater/scipy,ChanderG/scipy,matthew-brett/scipy,piyush0609/scipy,woodscn/scipy,jor-/scipy,petebachant/scipy,teoliphant/scipy,cpaulik/scipy,felipebetancur/scipy,rgommers/scipy,pyramania/scipy,Kamp9/scipy,dominicelse/scipy,fernand/scipy,jsilter/scipy,mtrbean/scipy,perimosocordiae/scipy,jor-/scipy,futurulus/scipy,richardotis/scipy,Dapid/scipy,e-q/scipy,Shaswat27/scipy
|
234e50c105b7d7d1e77e1c392200668891130840
|
formish/__init__.py
|
formish/__init__.py
|
"""
Base package to import top level modules
"""
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError
from formish.widgets import *
from formish.util import form_in_request
|
"""
Base package to import top level modules
"""
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError, NoActionError
from formish.widgets import *
from formish.util import form_in_request
|
Add missing exception to package-level exports.
|
Add missing exception to package-level exports.
|
Python
|
bsd-3-clause
|
ish/formish,ish/formish,ish/formish
|
097cc817894c8bd05711f92916b184b88c108fb9
|
decision/__init__.py
|
decision/__init__.py
|
# Flask application bootstrap for the decision service.
# The configuration object is named by the SETTINGS environment variable.
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting -- only enabled when a DSN is supplied.
if 'SENTRY_DSN' in os.environ:
    sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
# In production (non-debug) make sure log output reaches stderr at INFO level.
if not app.debug:
    app.logger.addHandler(logging.StreamHandler())
    app.logger.setLevel(logging.INFO)
# Validate config: fail fast at import time if a required URL is unset.
# NOTE(review): app.config[key] raises a bare KeyError when the key is
# absent entirely, so the explicit message below is bypassed in that case.
for property_to_check in ['CASEWORK_URL', 'CHECK_URL']:
    if not app.config[property_to_check]:
        raise Exception('Missing %r configuation property.' % (property_to_check))
|
# Flask application bootstrap for the decision service.
# The configuration object is named by the SETTINGS environment variable.
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))

# Deployed behind a reverse proxy, so trust the X-Forwarded-* headers;
# this keeps url_for() and request.remote_addr pointing at the original
# client request rather than the proxy.
# NOTE(review): werkzeug.contrib.fixers was removed in Werkzeug 1.0+; the
# replacement is werkzeug.middleware.proxy_fix.ProxyFix -- confirm the pin.
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)

app.logger.debug("\nConfiguration\n%s\n" % app.config)

# Sentry exception reporting -- only enabled when a DSN is supplied.
if 'SENTRY_DSN' in os.environ:
    sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])

# In production (non-debug) make sure log output reaches stderr at INFO level.
if not app.debug:
    app.logger.addHandler(logging.StreamHandler())
    app.logger.setLevel(logging.INFO)

# Validate config: fail fast at import time if a required URL is unset.
# Use .get() so a key that is absent entirely produces this explicit error
# instead of an opaque KeyError.
for property_to_check in ['CASEWORK_URL', 'CHECK_URL']:
    if not app.config.get(property_to_check):
        raise Exception('Missing %r configuration property.' % (property_to_check,))
|
Add ProxyFix middleware, since in LR this will run behind a reverse proxy
|
Add ProxyFix middleware, since in LR this will run behind a reverse proxy
|
Python
|
mit
|
LandRegistry/decision-alpha,LandRegistry/decision-alpha
|
8866a828e029c2db6439c16d49ba636e460fbf49
|
go/apps/tests/base.py
|
go/apps/tests/base.py
|
from django.conf import settings
from vumi.tests.utils import FakeRedis
from go.base.tests.utils import VumiGoDjangoTestCase, declare_longcode_tags
from go.vumitools.tests.utils import CeleryTestMixIn
from go.vumitools.api import VumiApi
class DjangoGoApplicationTestCase(VumiGoDjangoTestCase, CeleryTestMixIn):
    """Base test case for Django Go applications.

    Wires a VumiApi backed by an in-memory FakeRedis into the Django
    settings, declares the longcode tag pool and puts Celery into test
    mode.
    """

    def setUp(self):
        super(DjangoGoApplicationTestCase, self).setUp()
        self.setup_api()
        # Bug fix: declare_longcode_tags() takes no arguments beyond
        # self -- passing self.api raised TypeError.
        self.declare_longcode_tags()
        self.setup_celery_for_tests()

    def tearDown(self):
        self.teardown_api()
        super(DjangoGoApplicationTestCase, self).tearDown()

    def setup_api(self):
        """Point VUMI_API_CONFIG at a FakeRedis and build a VumiApi."""
        self._fake_redis = FakeRedis()
        vumi_config = settings.VUMI_API_CONFIG.copy()
        vumi_config['redis_cls'] = lambda **kws: self._fake_redis
        self.patch_settings(VUMI_API_CONFIG=vumi_config)
        self.api = VumiApi(settings.VUMI_API_CONFIG)

    def teardown_api(self):
        self._fake_redis.teardown()

    def declare_longcode_tags(self):
        # Delegates to the module-level helper of the same name (the
        # method deliberately shadows the imported function here).
        declare_longcode_tags(self.api)

    def acquire_all_longcode_tags(self):
        # Exhaust the longcode pool (it is declared with four tags).
        for _i in range(4):
            self.api.acquire_tag("longcode")

    def get_api_commands_sent(self):
        consumer = self.get_cmd_consumer()
        return self.fetch_cmds(consumer)
|
from django.conf import settings
from vumi.tests.utils import FakeRedis
from go.base.tests.utils import VumiGoDjangoTestCase, declare_longcode_tags
from go.vumitools.tests.utils import CeleryTestMixIn
from go.vumitools.api import VumiApi
class DjangoGoApplicationTestCase(VumiGoDjangoTestCase, CeleryTestMixIn):
    """Shared fixture for Django Go application tests.

    Installs a VumiApi running against an in-memory FakeRedis, declares
    the longcode tag pool and configures Celery for test execution.
    """

    def setUp(self):
        super(DjangoGoApplicationTestCase, self).setUp()
        self.setup_api()
        self.declare_longcode_tags()
        self.setup_celery_for_tests()

    def tearDown(self):
        self.teardown_api()
        super(DjangoGoApplicationTestCase, self).tearDown()

    def setup_api(self):
        """Patch VUMI_API_CONFIG to use FakeRedis and create the api."""
        self._fake_redis = FakeRedis()
        config = settings.VUMI_API_CONFIG.copy()
        config['redis_cls'] = lambda **kwargs: self._fake_redis
        self.patch_settings(VUMI_API_CONFIG=config)
        self.api = VumiApi(settings.VUMI_API_CONFIG)

    def teardown_api(self):
        self._fake_redis.teardown()

    def declare_longcode_tags(self):
        # Invokes the module-level helper; the method name intentionally
        # shadows the imported function within the class namespace.
        declare_longcode_tags(self.api)

    def acquire_all_longcode_tags(self):
        # Drain the longcode pool completely (four tags are declared).
        for _ in range(4):
            self.api.acquire_tag("longcode")

    def get_api_commands_sent(self):
        cmd_consumer = self.get_cmd_consumer()
        return self.fetch_cmds(cmd_consumer)
|
Fix buglet introduced by longcode clean-up.
|
Fix buglet introduced by longcode clean-up.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.