commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
0fc2cc8aafc1cf778ecc12439becf8482cb47097
|
resolve imports
|
jharris119/set-game,jharris119/set-game,jharris119/set-game
|
setapp.py
|
setapp.py
|
import os
import cherrypy
from ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from app.setutils import Card
import webservices.solitairegame
import webservices.multiplayergame
class SetApp:
homepage = ''
@cherrypy.expose
def index(self):
try:
return open(self.homepage)
except FileNotFoundError:
raise cherrypy.HTTPRedirect('/solitaire', 302)
@staticmethod
def json_to_cards(blob):
return [Card(*[getattr(Card, key)(obj[key])
for key in ['number', 'color', 'shading', 'shape']]) for obj in blob]
if __name__ == '__main__':
base_conf = {
'/': {
'tools.staticdir.root': os.path.abspath(os.getcwd()),
'tools.sessions.on': True,
'tools.trailing_slash.on': False
},
'/game': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
'tools.response_headers.on': True,
'tools.response_headers.headers': [('Content-Type', 'application/json')]
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'public'
},
'/bower_components': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'bower_components'
}
}
mp_conf = base_conf.copy()
mp_conf.update({
'/join': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
'tools.response_headers.on': True,
'tools.response_headers.headers': [('Content-Type', 'application/json')]
}
})
cherrypy.config.update({
'server.socket_host': '0.0.0.0',
'server.socket_port': int(os.environ.get('PORT', 8080)),
})
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
cherrypy.tree.mount(webservices.solitairegame.SolitaireApp(), '/solitaire', base_conf)
cherrypy.tree.mount(webservices.multiplayergame.MultiplayerApp(), '/multiplayer', mp_conf)
cherrypy.quickstart(SetApp(), '/', base_conf) # needs to be mounted last
cherrypy.engine.start()
cherrypy.engine.block()
|
import os
import cherrypy
from ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from app.setutils import Card
import webservices.solitairegame
class SetApp:
homepage = ''
@cherrypy.expose
def index(self):
try:
return open(self.homepage)
except FileNotFoundError:
raise cherrypy.HTTPRedirect('/solitaire', 302)
@staticmethod
def json_to_cards(blob):
return [Card(*[getattr(Card, key)(obj[key])
for key in ['number', 'color', 'shading', 'shape']]) for obj in blob]
if __name__ == '__main__':
base_conf = {
'/': {
'tools.staticdir.root': os.path.abspath(os.getcwd()),
'tools.sessions.on': True,
'tools.trailing_slash.on': False
},
'/game': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
'tools.response_headers.on': True,
'tools.response_headers.headers': [('Content-Type', 'application/json')]
},
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'public'
},
'/bower_components': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'bower_components'
}
}
mp_conf = base_conf.copy()
mp_conf.update({
'/join': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
'tools.response_headers.on': True,
'tools.response_headers.headers': [('Content-Type', 'application/json')]
}
})
cherrypy.config.update({
'server.socket_host': '0.0.0.0',
'server.socket_port': int(os.environ.get('PORT', 8080)),
})
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
cherrypy.tree.mount(webservices.solitairegame.SolitaireApp(), '/solitaire', base_conf)
cherrypy.tree.mount(MultiplayerApp(), '/multiplayer', mp_conf)
cherrypy.quickstart(SetApp(), '/', base_conf) # needs to be mounted last
cherrypy.engine.start()
cherrypy.engine.block()
|
mit
|
Python
|
6b276c384141bd2f2a22523184db349a91849169
|
Determine supported http methods from the classes.
|
jaapverloop/massa
|
massa/api.py
|
massa/api.py
|
# -*- coding: utf-8 -*-
from flask import Blueprint, jsonify, g, request, url_for
from flask.views import MethodView
from .domain import EntityNotFoundError, InvalidInputError
def endpoint(f):
def wrapper(*args, **kwargs):
try:
rv = f(*args, **kwargs)
except EntityNotFoundError as e:
rv = {'message': e.message}, 404
except InvalidInputError as e:
rv = {'message': e.message, 'details': e.details}, 400
msg = [rv, 200, {}]
if isinstance(rv, tuple):
for index, value in enumerate(rv):
msg[index] = value
body, code, headers = msg
response = jsonify(body)
for key, value in headers.iteritems():
response.headers[key] = value
return response, code
return wrapper
def payload():
return request.get_json() or request.form.to_dict()
class ApiView(MethodView):
decorators = [endpoint]
class MeasurementList(ApiView):
def get(self):
service = g.sl('measurement_service')
return {'items': service.find_all()}
def post(self):
service = g.sl('measurement_service')
id = service.create(**payload())
location = url_for('api.measurement_item', id=id, _external=True)
return service.get(id), 201, {'Location': location}
class MeasurementItem(ApiView):
def get(self, id):
service = g.sl('measurement_service')
return service.get(id)
def put(self, id):
service = g.sl('measurement_service')
service.update(id, **payload())
return service.get(id), 200
def delete(self, id):
service = g.sl('measurement_service')
service.delete(id)
return '', 204
bp = Blueprint('api', __name__)
bp.add_url_rule(
'/measurements/',
view_func=MeasurementList.as_view('measurement_list'),
)
bp.add_url_rule(
'/measurements/<id>',
view_func=MeasurementItem.as_view('measurement_item'),
)
|
# -*- coding: utf-8 -*-
from flask import Blueprint, jsonify, g, request, url_for
from flask.views import MethodView
from .domain import EntityNotFoundError, InvalidInputError
def endpoint(f):
def wrapper(*args, **kwargs):
try:
rv = f(*args, **kwargs)
except EntityNotFoundError as e:
rv = {'message': e.message}, 404
except InvalidInputError as e:
rv = {'message': e.message, 'details': e.details}, 400
msg = [rv, 200, {}]
if isinstance(rv, tuple):
for index, value in enumerate(rv):
msg[index] = value
body, code, headers = msg
response = jsonify(body)
for key, value in headers.iteritems():
response.headers[key] = value
return response, code
return wrapper
def payload():
return request.get_json() or request.form.to_dict()
class ApiView(MethodView):
decorators = [endpoint]
class MeasurementList(ApiView):
def get(self):
service = g.sl('measurement_service')
return {'items': service.find_all()}
def post(self):
service = g.sl('measurement_service')
id = service.create(**payload())
location = url_for('api.measurement_item', id=id, _external=True)
return service.get(id), 201, {'Location': location}
class MeasurementItem(ApiView):
def get(self, id):
service = g.sl('measurement_service')
return service.get(id)
def put(self, id):
service = g.sl('measurement_service')
service.update(id, **payload())
return service.get(id), 200
def delete(self, id):
service = g.sl('measurement_service')
service.delete(id)
return '', 204
bp = Blueprint('api', __name__)
bp.add_url_rule(
'/measurements/',
view_func=MeasurementList.as_view('measurement_list'),
methods=['GET', 'POST']
)
bp.add_url_rule(
'/measurements/<id>',
view_func=MeasurementItem.as_view('measurement_item'),
methods=['GET', 'PUT', 'DELETE']
)
|
mit
|
Python
|
3cc3c22acbdc84124bade7c6f81af016951cb4cd
|
Fix typo in stream_spec.
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
spec/data/stream_spec.py
|
spec/data/stream_spec.py
|
import mock
from data import stream
from spec.mamba import *
with description('stream'):
with it('instantiates without errors'):
expect(calling(stream.Stream)).not_to(raise_error)
with it('subscribes without errors'):
s = stream.Stream()
expect(calling(s.subscribe, mock.Mock())).not_to(raise_error)
with it('publishes to subscribers'):
s = stream.Stream()
observer = mock.Mock()
s.subscribe(observer)
s.publish_value(1)
expect(observer.on_next.call_args).to(equal(mock.call(1)))
|
import mock
from data import stream
from spec.mamba import *
with description('stream'):
with it('instantiates without errors'):
expect(calling(stream.Stream)).not_to(raise_error)
with it('subscribes without errors'):
s = stream.Stream()
expect(calling(s.subscribe, mock.Mock())).not_to(raise_error)
with it('publishes to subscrbers'):
s = stream.Stream()
observer = mock.Mock()
s.subscribe(observer)
s.publish_value(1)
expect(observer.on_next.call_args).to(equal(mock.call(1)))
|
mit
|
Python
|
1f59ad4a5fa14a420c683cfd8713c0eb31a9acec
|
Bump version number for aio.
|
aioTV/django-rest-swagger,aioTV/django-rest-swagger,aioTV/django-rest-swagger
|
rest_framework_swagger/__init__.py
|
rest_framework_swagger/__init__.py
|
VERSION = '0.3.5-aio-v1'
DEFAULT_SWAGGER_SETTINGS = {
'exclude_namespaces': [],
'api_version': '',
'api_key': '',
'token_type': 'Token',
'enabled_methods': ['get', 'post', 'put', 'patch', 'delete'],
'is_authenticated': False,
'is_superuser': False,
'permission_denied_handler': None,
'resource_access_handler': None,
'template_path': 'rest_framework_swagger/index.html',
'doc_expansion': 'none',
'base_path': ''
}
try:
from django.conf import settings
from django.test.signals import setting_changed
def load_settings(provided_settings):
global SWAGGER_SETTINGS
SWAGGER_SETTINGS = provided_settings
for key, value in DEFAULT_SWAGGER_SETTINGS.items():
if key not in SWAGGER_SETTINGS:
SWAGGER_SETTINGS[key] = value
def reload_settings(*args, **kwargs):
setting, value = kwargs['setting'], kwargs['value']
if setting == 'SWAGGER_SETTINGS':
load_settings(value)
load_settings(getattr(settings,
'SWAGGER_SETTINGS',
DEFAULT_SWAGGER_SETTINGS))
setting_changed.connect(reload_settings)
except:
SWAGGER_SETTINGS = DEFAULT_SWAGGER_SETTINGS
|
VERSION = '0.3.2'
DEFAULT_SWAGGER_SETTINGS = {
'exclude_namespaces': [],
'api_version': '',
'api_key': '',
'token_type': 'Token',
'enabled_methods': ['get', 'post', 'put', 'patch', 'delete'],
'is_authenticated': False,
'is_superuser': False,
'permission_denied_handler': None,
'resource_access_handler': None,
'template_path': 'rest_framework_swagger/index.html',
'doc_expansion': 'none',
'base_path': ''
}
try:
from django.conf import settings
from django.test.signals import setting_changed
def load_settings(provided_settings):
global SWAGGER_SETTINGS
SWAGGER_SETTINGS = provided_settings
for key, value in DEFAULT_SWAGGER_SETTINGS.items():
if key not in SWAGGER_SETTINGS:
SWAGGER_SETTINGS[key] = value
def reload_settings(*args, **kwargs):
setting, value = kwargs['setting'], kwargs['value']
if setting == 'SWAGGER_SETTINGS':
load_settings(value)
load_settings(getattr(settings,
'SWAGGER_SETTINGS',
DEFAULT_SWAGGER_SETTINGS))
setting_changed.connect(reload_settings)
except:
SWAGGER_SETTINGS = DEFAULT_SWAGGER_SETTINGS
|
bsd-2-clause
|
Python
|
62a978256476754a7f604b2f872b7bd221930ac2
|
add test_debian_repo and test_nested_debian_repo
|
alfredodeza/merfi
|
merfi/tests/test_repocollector.py
|
merfi/tests/test_repocollector.py
|
from merfi.collector import RepoCollector, DebRepo
from os.path import join, dirname
class TestRepoCollector(object):
def setup(self):
self.repos = RepoCollector(path='/', _eager=False)
def test_simple_tree(self, deb_repotree):
repos = RepoCollector(path=deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert [r.path for r in repos] == [deb_repotree]
def test_path_is_absolute(self):
assert self.repos._abspath('/') == '/'
def test_path_is_not_absolute(self):
assert self.repos._abspath('directory').startswith('/')
def test_debian_repo(self, deb_repotree):
repos = RepoCollector(deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert repos == [DebRepo(deb_repotree)]
def test_debian_release_files(self, deb_repotree):
repos = RepoCollector(deb_repotree)
release_files = repos.debian_release_files
# The root of the deb_repotree fixture is itself a repository.
expected = [
join(deb_repotree, 'dists', 'trusty', 'Release'),
join(deb_repotree, 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
def test_nested_debian_repo(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
# Verify that we found the two repo trees.
expected = [DebRepo(join(path, 'jewel')),
DebRepo(join(path, 'luminous'))]
assert repos == expected
def test_debian_nested_release_files(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
release_files = repos.debian_release_files
expected = [
join(path, 'jewel', 'dists', 'trusty', 'Release'),
join(path, 'jewel', 'dists', 'xenial', 'Release'),
join(path, 'luminous', 'dists', 'trusty', 'Release'),
join(path, 'luminous', 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
|
from merfi.collector import RepoCollector
from os.path import join, dirname
class TestRepoCollector(object):
def setup(self):
self.repos = RepoCollector(path='/', _eager=False)
def test_simple_tree(self, deb_repotree):
repos = RepoCollector(path=deb_repotree)
# The root of the deb_repotree fixture is itself a repository.
assert [r.path for r in repos] == [deb_repotree]
def test_path_is_absolute(self):
assert self.repos._abspath('/') == '/'
def test_path_is_not_absolute(self):
assert self.repos._abspath('directory').startswith('/')
def test_debian_release_files(self, deb_repotree):
repos = RepoCollector(deb_repotree)
release_files = repos.debian_release_files
# The root of the deb_repotree fixture is itself a repository.
expected = [
join(deb_repotree, 'dists', 'trusty', 'Release'),
join(deb_repotree, 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
def test_debian_nested_release_files(self, nested_deb_repotree):
# go one level up
path = dirname(nested_deb_repotree)
repos = RepoCollector(path)
release_files = repos.debian_release_files
expected = [
join(path, 'jewel', 'dists', 'trusty', 'Release'),
join(path, 'jewel', 'dists', 'xenial', 'Release'),
join(path, 'luminous', 'dists', 'trusty', 'Release'),
join(path, 'luminous', 'dists', 'xenial', 'Release'),
]
assert set(release_files) == set(expected)
|
mit
|
Python
|
19fd0b75e07311bb3eb863d132125325e3478424
|
Fix typo in docstring
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
|
byceps/services/user_avatar/models.py
|
byceps/services/user_avatar/models.py
|
"""
byceps.services.user_avatar.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import namedtuple
from datetime import datetime
from pathlib import Path
from flask import current_app, url_for
from sqlalchemy.ext.hybrid import hybrid_property
from ...database import db, generate_uuid
from ...util.image.models import ImageType
from ...util.instances import ReprBuilder
class Avatar(db.Model):
"""An avatar image uploaded by a user."""
__tablename__ = 'user_avatars'
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=False)
_image_type = db.Column('image_type', db.Unicode(4), nullable=False)
def __init__(self, creator_id, image_type):
self.creator_id = creator_id
self.image_type = image_type
@hybrid_property
def image_type(self):
image_type_str = self._image_type
if image_type_str is not None:
return ImageType[image_type_str]
@image_type.setter
def image_type(self, image_type):
self._image_type = image_type.name if (image_type is not None) else None
@property
def filename(self):
name_without_suffix = str(self.id)
suffix = '.' + self.image_type.name
return Path(name_without_suffix).with_suffix(suffix)
@property
def path(self):
path = current_app.config['PATH_USER_AVATAR_IMAGES']
return path / self.filename
@property
def url(self):
path = 'users/avatars/{}'.format(self.filename)
return url_for('global_file', filename=path)
def __repr__(self):
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('image_type', self.image_type.name) \
.build()
class AvatarSelection(db.Model):
"""The selection of an avatar image to be used for a user."""
__tablename__ = 'user_avatar_selections'
user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
user = db.relationship('User', backref=db.backref('avatar_selection', uselist=False))
avatar_id = db.Column(db.Uuid, db.ForeignKey('user_avatars.id'), unique=True, nullable=False)
avatar = db.relationship(Avatar)
def __init__(self, user_id, avatar_id):
self.user_id = user_id
self.avatar_id = avatar_id
AvatarCreationTuple = namedtuple('AvatarCreationTuple', 'created_at, url')
|
"""
byceps.services.user_avatar.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from collections import namedtuple
from datetime import datetime
from pathlib import Path
from flask import current_app, url_for
from sqlalchemy.ext.hybrid import hybrid_property
from ...database import db, generate_uuid
from ...util.image.models import ImageType
from ...util.instances import ReprBuilder
class Avatar(db.Model):
"""A avatar image uploaded by a user."""
__tablename__ = 'user_avatars'
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
creator_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=False)
_image_type = db.Column('image_type', db.Unicode(4), nullable=False)
def __init__(self, creator_id, image_type):
self.creator_id = creator_id
self.image_type = image_type
@hybrid_property
def image_type(self):
image_type_str = self._image_type
if image_type_str is not None:
return ImageType[image_type_str]
@image_type.setter
def image_type(self, image_type):
self._image_type = image_type.name if (image_type is not None) else None
@property
def filename(self):
name_without_suffix = str(self.id)
suffix = '.' + self.image_type.name
return Path(name_without_suffix).with_suffix(suffix)
@property
def path(self):
path = current_app.config['PATH_USER_AVATAR_IMAGES']
return path / self.filename
@property
def url(self):
path = 'users/avatars/{}'.format(self.filename)
return url_for('global_file', filename=path)
def __repr__(self):
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('image_type', self.image_type.name) \
.build()
class AvatarSelection(db.Model):
"""The selection of an avatar image to be used for a user."""
__tablename__ = 'user_avatar_selections'
user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
user = db.relationship('User', backref=db.backref('avatar_selection', uselist=False))
avatar_id = db.Column(db.Uuid, db.ForeignKey('user_avatars.id'), unique=True, nullable=False)
avatar = db.relationship(Avatar)
def __init__(self, user_id, avatar_id):
self.user_id = user_id
self.avatar_id = avatar_id
AvatarCreationTuple = namedtuple('AvatarCreationTuple', 'created_at, url')
|
bsd-3-clause
|
Python
|
ef03541b2b25ab9cf34deec554a19a32dad7fbec
|
Add new line to end of init file for Meta Writer application
|
percival-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data
|
tools/python/odin_data/meta_writer/__init__.py
|
tools/python/odin_data/meta_writer/__init__.py
|
from pkg_resources import require
require('pygelf==0.3.1')
require("h5py==2.8.0")
require('pyzmq==16.0.2')
|
from pkg_resources import require
require('pygelf==0.3.1')
require("h5py==2.8.0")
require('pyzmq==16.0.2')
|
apache-2.0
|
Python
|
514074dee639b30fb56ec664804bdd3f533befda
|
Apply `cacheonceproperty` on props of Tree & Chunk.
|
drowse314-dev-ymat/xmlpumpkin
|
xmlpumpkin/tree.py
|
xmlpumpkin/tree.py
|
# encoding: utf-8
from lxml import etree
from .utils import cacheonceproperty
XML_ENCODING = 'utf-8'
class Tree(object):
"""Tree accessor for CaboCha xml."""
def __init__(self, cabocha_xml):
self._element = etree.fromstring(
cabocha_xml.encode(XML_ENCODING),
)
@cacheonceproperty
def chunks(self):
chunk_elems = self._element.findall('.//chunk')
chunks = tuple([Chunk(elem, self) for elem in chunk_elems])
return chunks
@cacheonceproperty
def root(self):
for chunk in self.chunks:
if chunk.link_to_id == -1:
return chunk
return None
def chunk_by_id(self, chunk_id):
for chunk in self.chunks:
if chunk.id == chunk_id:
return chunk
return None
class Chunk(object):
"""CaboCha chunk object representation."""
def __init__(self, element, parent):
self._element = element
self._parent = parent
def __eq__(self, other):
return self._element == other._element
@cacheonceproperty
def id(self):
return int(self._element.attrib['id'])
@cacheonceproperty
def link_to_id(self):
return int(self._element.attrib['link'])
@cacheonceproperty
def linked_from_ids(self):
return tuple([chunk.id for chunk in self.linked])
@cacheonceproperty
def func_id(self):
return int(self._element.attrib['func'])
@cacheonceproperty
def dep(self):
return self._parent.chunk_by_id(self.link_to_id)
@cacheonceproperty
def linked(self):
to_id = self.id
return [
chunk for chunk
in self._parent.chunks
if chunk.link_to_id == to_id
]
@cacheonceproperty
def surface(self):
tokens = self._tokens()
texts = [t.text for t in tokens]
return u''.join(texts)
@cacheonceproperty
def func_surface(self):
tid = self.func_id
tokens = self._tokens()
for tok in tokens:
if int(tok.attrib['id']) == tid:
return tok.text
def _tokens(self):
return self._element.findall('.//tok')
|
# encoding: utf-8
from lxml import etree
XML_ENCODING = 'utf-8'
class Tree(object):
"""Tree accessor for CaboCha xml."""
def __init__(self, cabocha_xml):
self._element = etree.fromstring(
cabocha_xml.encode(XML_ENCODING),
)
@property
def chunks(self):
chunk_elems = self._element.findall('.//chunk')
chunks = tuple([Chunk(elem, self) for elem in chunk_elems])
return chunks
@property
def root(self):
for chunk in self.chunks:
if chunk.link_to_id == -1:
return chunk
return None
def chunk_by_id(self, chunk_id):
for chunk in self.chunks:
if chunk.id == chunk_id:
return chunk
return None
class Chunk(object):
"""CaboCha chunk object representation."""
def __init__(self, element, parent):
self._element = element
self._parent = parent
def __eq__(self, other):
return self._element == other._element
@property
def id(self):
return int(self._element.attrib['id'])
@property
def link_to_id(self):
return int(self._element.attrib['link'])
@property
def linked_from_ids(self):
return tuple([chunk.id for chunk in self.linked])
@property
def func_id(self):
return int(self._element.attrib['func'])
@property
def dep(self):
return self._parent.chunk_by_id(self.link_to_id)
@property
def linked(self):
to_id = self.id
return [
chunk for chunk
in self._parent.chunks
if chunk.link_to_id == to_id
]
@property
def surface(self):
tokens = self._tokens()
texts = [t.text for t in tokens]
return u''.join(texts)
@property
def func_surface(self):
tid = self.func_id
tokens = self._tokens()
for tok in tokens:
if int(tok.attrib['id']) == tid:
return tok.text
def _tokens(self):
return self._element.findall('.//tok')
|
mit
|
Python
|
7aae3f244f15d31e4d5a0c844df5cbbb5a594e84
|
update mongostring
|
oschumac/python-uart-pi-xbridge,oschumac/python-uart-pi-xbridge,oschumac/python-uart-pi-xbridge
|
mongo.py
|
mongo.py
|
import os
import sys
import pymongo
from bson import BSON
from bson import json_util
MONGODB_URI_LOCAL = 'mongodb://aps:[email protected]:27017/aps'
def getlast3():
try:
client = pymongo.MongoClient(MONGODB_URI_LOCAL)
except:
print('Error: Unable to Connect')
connection = None
db = client['aps']
cursor = db.entries.find({'type':'cal'}).sort('date', -1).limit(3)
for doc in cursor:
print (doc)
client.close()
if __name__ == '__main__':
getlast3()
|
import os
import sys
import pymongo
from bson import BSON
from bson import json_util
MONGODB_URI_REMOTE = 'mongodb://Lars_2009:[email protected]:21060/larscgmtest'
MONGODB_URI_LOCAL = 'mongodb://aps:[email protected]:27017/aps'
def getlast3():
try:
client = pymongo.MongoClient(MONGODB_URI_LOCAL)
except:
print('Error: Unable to Connect')
connection = None
db = client['aps']
cursor = db.entries.find({'type':'cal'}).sort('date', -1).limit(3)
for doc in cursor:
print (doc)
client.close()
if __name__ == '__main__':
getlast3()
|
mit
|
Python
|
de3b4775b7dbcecc9c42e18c59b35485f83ca74a
|
Update max-chunks-to-make-sorted-i.py
|
kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015
|
Python/max-chunks-to-make-sorted-i.py
|
Python/max-chunks-to-make-sorted-i.py
|
# Time: O(n)
# Space: O(1)
# Given an array arr that is a permutation of [0, 1, ..., arr.length - 1],
# we split the array into some number of "chunks" (partitions), and individually sort each chunk.
# After concatenating them, the result equals the sorted array.
#
# What is the most number of chunks we could have made?
#
# Example 1:
#
# Input: arr = [4,3,2,1,0]
# Output: 1
# Explanation:
# Splitting into two or more chunks will not return the required result.
# For example, splitting into [4, 3], [2, 1, 0] will result in [3, 4, 0, 1, 2], which isn't sorted.
#
# Example 2:
#
# Input: arr = [1,0,2,3,4]
# Output: 4
# Explanation:
# We can split into two chunks, such as [1, 0], [2, 3, 4].
# However, splitting into [1, 0], [2], [3], [4] is the highest number of chunks possible.
#
# Note:
# - arr will have length in range [1, 10].
# - arr[i] will be a permutation of [0, 1, ..., arr.length - 1].
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
result, max_i = 0, 0
for i, v in enumerate(arr):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
|
# Time: O(n)
# Space: O(1)
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
result, max_i = 0, 0
for i, v in enumerate(arr):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
|
mit
|
Python
|
32fccb04bac6be7e79f6b05b727e5e847fef498c
|
Update misc.py
|
CounterClops/Counter-Cogs
|
misc/misc.py
|
misc/misc.py
|
import discord
from discord.ext import commands
import random
import time
class misc:
"""My custom cog that does stuff"""
def __init__(self, bot):
self.bot = bot
self.bank = Bank(bot, "data/economy/bank.json")
def role_colour():
#Rand between 0 - 256
a = random.randrange(0,256)
b = random.randrange(0,256)
c = random.randrange(0,256)
if a != 0 or b != 0 or c != 0:
choice = random.randrange(1,4)
if choice === 1:
a = 0
if choice === 2:
b = 0
if choice === 3:
c = 0
return a, b, c
def change_colour(r, g, b):
picked_role = bot.role("400618311861272577")
bot.edit_role(role=picked_role, colour=bot.colour(r, g, b))
def colour_loop():
while true:
change_colour(role_colour())
time.sleep(5)
colour_loop()
def setup(bot):
bot.add_cog(Counter(bot))
|
mit
|
Python
|
|
4b9948e665c78df468917b0906afc288244fa303
|
add doc back in.
|
twaugh/osbs-client,jpopelka/osbs-client,jpopelka/osbs-client,pombredanne/osbs-client,TomasTomecek/osbs,bfontecc007/osbs-client,vrutkovs/osbs-client,twaugh/osbs-client,TomasTomecek/osbs,vrutkovs/osbs-client,pbabinca/osbs,pombredanne/osbs-client,projectatomic/osbs-client,bfontecc007/osbs-client,pbabinca/osbs,projectatomic/osbs-client,DBuildService/osbs-client,DBuildService/osbs-client
|
osbs/exceptions.py
|
osbs/exceptions.py
|
"""
Exceptions raised by OSBS
"""
class OsbsException(Exception):
pass
class OsbsResponseException(OsbsException):
""" OpenShift didn't respond with OK (200) status """
def __init__ (self, message, status_code, *args, **kwargs):
super (OsbsResponseException, self).__init__ (message, *args, **kwargs)
self.status_code = status_code
class OsbsNetworkException(OsbsException):
def __init__ (self, url, message, status_code, *args, **kwargs):
super (OsbsNetworkException, self).__init__ (message, *args, **kwargs)
self.url = url
self.status_code = status_code
|
"""
Exceptions raised by OSBS
"""
class OsbsException(Exception):
pass
class OsbsResponseException(OsbsException):
def __init__ (self, message, status_code, *args, **kwargs):
super (OsbsResponseException, self).__init__ (message, *args, **kwargs)
self.status_code = status_code
class OsbsNetworkException(OsbsException):
def __init__ (self, url, message, status_code, *args, **kwargs):
super (OsbsNetworkException, self).__init__ (message, *args, **kwargs)
self.url = url
self.status_code = status_code
|
bsd-3-clause
|
Python
|
bbf8886a2cbf4fa371f0a67157fdd3df3dfa47dd
|
Fix broken MLflow DB README link in CLI docs (#2377)
|
mlflow/mlflow,mlflow/mlflow,mlflow/mlflow,mlflow/mlflow,mlflow/mlflow,mlflow/mlflow,mlflow/mlflow
|
mlflow/db.py
|
mlflow/db.py
|
import click
import mlflow.store.db.utils
@click.group("db")
def commands():
"""
Commands for managing an MLflow tracking database.
"""
pass
@commands.command()
@click.argument("url")
def upgrade(url):
"""
Upgrade the schema of an MLflow tracking database to the latest supported version.
**IMPORTANT**: Schema migrations can be slow and are not guaranteed to be transactional -
**always take a backup of your database before running migrations**. The migrations README,
which is located at
https://github.com/mlflow/mlflow/blob/master/mlflow/store/db_migrations/README.md, describes
large migrations and includes information about how to estimate their performance and
recover from failures.
"""
if mlflow.store.db.utils._is_initialized_before_mlflow_1(url):
mlflow.store.db.utils._upgrade_db_initialized_before_mlflow_1(url)
mlflow.store.db.utils._upgrade_db(url)
|
import click
import mlflow.store.db.utils
@click.group("db")
def commands():
"""
Commands for managing an MLflow tracking database.
"""
pass
@commands.command()
@click.argument("url")
def upgrade(url):
"""
Upgrade the schema of an MLflow tracking database to the latest supported version.
**IMPORTANT**: Schema migrations can be slow and are not guaranteed to be transactional -
**always take a backup of your database before running migrations**. The migrations README,
which is located at
https://github.com/mlflow/mlflow/blob/master/mlflow/store/db_migrations/README, describes
large migrations and includes information about how to estimate their performance and
recover from failures.
"""
if mlflow.store.db.utils._is_initialized_before_mlflow_1(url):
mlflow.store.db.utils._upgrade_db_initialized_before_mlflow_1(url)
mlflow.store.db.utils._upgrade_db(url)
|
apache-2.0
|
Python
|
d5f979236089e7cb3de90b03303e1c3af967331c
|
add UW-Madison, minor formatting
|
materialsproject/MPContribs,materialsproject/MPContribs,materialsproject/MPContribs,materialsproject/MPContribs
|
uw_si2/rest/rester.py
|
uw_si2/rest/rester.py
|
from __future__ import division, unicode_literals
import six, bson, os
from bson.json_util import dumps, loads
from mpcontribs.rest.rester import MPContribsRester
from mpcontribs.io.core.utils import get_short_object_id
from mpcontribs.io.archieml.mpfile import MPFile
from pandas import Series
class UWSI2Rester(MPContribsRester):
"""UW/SI2-specific convenience functions to interact with MPContribs REST interface"""
z = loads(open(os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'z.json'
), 'r').read())
def get_uwsi2_contributions(self):
"""
- [<host(pretty-formula)>] <mp_cat_id-linked-to-materials-details-page> <cid-linked-to-contribution-details-page>
|- <solute> <D0-value> <Q-value> <toggle-in-graph>
|- ...
- ...
"""
labels = ["Solute element name", "Solute D0 [cm^2/s]", "Solute Q [eV]"]
data = []
for doc in self.query_contributions(
criteria={'project': {'$in': ['LBNL', 'UW-Madison']}},
projection={'_id': 1, 'mp_cat_id': 1, 'content': 1}
):
mpfile = MPFile.from_contribution(doc)
mp_id = mpfile.ids[0]
table = mpfile.tdata[mp_id]['data_supporting'][labels]
table.columns = ['El.', 'D0 [cm2/s]', 'Q [eV]']
anums = [self.z[el] for el in table['El.']]
table.insert(0, 'Z', Series(anums, index=table.index))
table.sort_values('Z', inplace=True)
table.reset_index(drop=True, inplace=True)
hdata = mpfile.hdata[mp_id]
data.append({
'mp_id': mp_id, 'cid': doc['_id'],
'short_cid': get_short_object_id(doc['_id']),
'formula': hdata['formula'],
'table': table
})
return data
|
from __future__ import division, unicode_literals
import six, bson, os
from bson.json_util import dumps, loads
from mpcontribs.rest.rester import MPContribsRester
from mpcontribs.io.core.utils import get_short_object_id
from mpcontribs.io.archieml.mpfile import MPFile
from pandas import Series
class UWSI2Rester(MPContribsRester):
"""UW/SI2-specific convenience functions to interact with MPContribs REST interface"""
z = loads(open(os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'z.json'
), 'r').read())
def get_uwsi2_contributions(self):
"""
- [<host(pretty-formula)>] <mp_cat_id-linked-to-materials-details-page> <cid-linked-to-contribution-details-page>
|- <solute> <D0-value> <Q-value> <toggle-in-graph>
|- ...
- ...
"""
labels = ["Solute element name", "Solute D0 [cm^2/s]", "Solute Q [eV]"]
data = []
for doc in self.query_contributions(
criteria={'project': 'LBNL'},
projection={'_id': 1, 'mp_cat_id': 1, 'content': 1}
):
mpfile = MPFile.from_contribution(doc)
mp_id = mpfile.ids[0]
table = mpfile.tdata[mp_id]['data_supporting'][labels]
table.columns = ['El.', 'D0 [cm^2/s]', 'Q [eV]']
anums = [self.z[el] for el in table['El.']]
table.insert(0, 'Z', Series(anums, index=table.index))
table.sort_values('Z', inplace=True)
table.reset_index(drop=True, inplace=True)
hdata = mpfile.hdata[mp_id]
data.append({
'mp_id': mp_id, 'cid': doc['_id'],
'short_cid': get_short_object_id(doc['_id']),
'formula': hdata['formula'],
'table': table
})
return data
|
mit
|
Python
|
af54f9666b15cd68e5404b60f495f6d51c1470b1
|
Fix upload_manual_flac command to add its arguments
|
karamanolev/WhatManager2,karamanolev/WhatManager2,karamanolev/WhatManager2,karamanolev/WhatManager2
|
WhatManager2/management/commands/upload_manual_flac.py
|
WhatManager2/management/commands/upload_manual_flac.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
import requests
import time
from django.core.management.base import BaseCommand
from WhatManager2.utils import wm_unicode
from home.models import get_what_client
from what_transcode.tasks import TranscodeSingleJob
def _add_to_wm_transcode(what_id):
print 'Adding {0} to wm'.format(what_id)
post_data = {
'what_id': what_id,
}
response = requests.post('https://karamanolev.com/wm/transcode/request', data=post_data,
auth=('', ''))
response_json = response.json()
if response_json['message'] != 'Request added.':
raise Exception('Cannot add {0} to wm: {1}'.format(what_id, response_json['message']))
def add_to_wm_transcode(what_id):
for i in range(2):
try:
_add_to_wm_transcode(what_id)
return
except Exception:
print 'Error adding to wm, trying again in 2 sec...'
time.sleep(3)
_add_to_wm_transcode(what_id)
def report_progress(msg):
print msg
class Command(BaseCommand):
help = 'Help you create a torrent and add it to WM'
def add_arguments(self, parser):
parser.add_argument('source_dir', required=True, help='Source directory for the torrent.')
def handle(self, *args, **options):
source_dir = wm_unicode(options['source_dir'])
if not source_dir:
print u'Pass only the source directory.'
return 1
if source_dir.endswith('/'):
source_dir = source_dir[:-1]
what = get_what_client(lambda: None)
job = TranscodeSingleJob(what, None, report_progress, None, None, source_dir)
job.create_torrent()
raw_input('Please upload the torrent and press enter...')
job.move_torrent_to_dest()
add_to_wm_transcode(job.new_torrent['torrent']['id'])
|
#!/usr/bin/env python
from __future__ import unicode_literals
import time
from django.core.management.base import BaseCommand
import requests
from WhatManager2.utils import wm_unicode
from home.models import get_what_client
from what_transcode.tasks import TranscodeSingleJob
def _add_to_wm_transcode(what_id):
print 'Adding {0} to wm'.format(what_id)
post_data = {
'what_id': what_id,
}
response = requests.post('https://karamanolev.com/wm/transcode/request', data=post_data,
auth=('', ''))
response_json = response.json()
if response_json['message'] != 'Request added.':
raise Exception('Cannot add {0} to wm: {1}'.format(what_id, response_json['message']))
def add_to_wm_transcode(what_id):
for i in range(2):
try:
_add_to_wm_transcode(what_id)
return
except Exception:
print 'Error adding to wm, trying again in 2 sec...'
time.sleep(3)
_add_to_wm_transcode(what_id)
def report_progress(msg):
print msg
class Command(BaseCommand):
help = 'Help you create a torrent and add it to WM'
def handle(self, *args, **options):
if len(args) != 1:
print u'Pass only the source directory.'
return 1
source_dir = wm_unicode(args[0])
if source_dir.endswith('/'):
source_dir = source_dir[:-1]
what = get_what_client(lambda: None)
job = TranscodeSingleJob(what, None, report_progress, None, None, source_dir)
job.create_torrent()
raw_input('Please upload the torrent and press enter...')
job.move_torrent_to_dest()
add_to_wm_transcode(job.new_torrent['torrent']['id'])
|
mit
|
Python
|
fdeb06bdf33a55413f1f8f8cd780c84438ad2277
|
add missing import
|
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
|
src/zeit/content/cp/browser/blocks/av.py
|
src/zeit/content/cp/browser/blocks/av.py
|
# Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
from zeit.content.cp.i18n import MessageFactory as _
import zeit.content.cp.interfaces
import zope.app.pagetemplate
import zope.formlib.form
class EditProperties(zope.formlib.form.SubPageEditForm):
template = zope.app.pagetemplate.ViewPageTemplateFile(
'av.edit-properties.pt')
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IAVBlock).omit('media_type')
close = False
@property
def form(self):
return super(EditProperties, self).template
@zope.formlib.form.action(_('Apply'))
def handle_edit_action(self, action, data):
self.close = True
# XXX: dear zope.formlib, are you serious?!
return super(EditProperties, self).handle_edit_action.success(data)
|
# Copyright (c) 2009 gocept gmbh & co. kg
# See also LICENSE.txt
import zeit.content.cp.interfaces
import zope.app.pagetemplate
import zope.formlib.form
class EditProperties(zope.formlib.form.SubPageEditForm):
template = zope.app.pagetemplate.ViewPageTemplateFile(
'av.edit-properties.pt')
form_fields = zope.formlib.form.Fields(
zeit.content.cp.interfaces.IAVBlock).omit('media_type')
close = False
@property
def form(self):
return super(EditProperties, self).template
@zope.formlib.form.action(_('Apply'))
def handle_edit_action(self, action, data):
self.close = True
# XXX: dear zope.formlib, are you serious?!
return super(EditProperties, self).handle_edit_action.success(data)
|
bsd-3-clause
|
Python
|
db1f0556f72eb84e4273ff8925494de81bf21898
|
rename paths / meta not needed
|
nathbo/GO_DILab,nathbo/GO_DILab
|
src/learn/dev_ben/generate_training_data.py
|
src/learn/dev_ben/generate_training_data.py
|
import os
import sgf
from time import strftime
from os.path import dirname, abspath
from src.play.model.Board import Board
size = 9
EMPTY_val = 0 # 0.45
BLACK_val = 1 # -1.35
WHITE_val = -1 # 1.05
data_dir = os.path.join(dirname(dirname(dirname(dirname(abspath(__file__))))), 'data')
sgf_files = [
os.path.join(data_dir, 'game_57083.sgf'),
os.path.join(data_dir, 'game_100672.sgf'),
]
training_data_dir = os.path.join(data_dir, 'training_data')
if not os.path.exists(training_data_dir): # create the folder if it does not exist yet
os.makedirs(training_data_dir)
training_data_file = open(
os.path.join(training_data_dir, str(len(sgf_files)) + '_games_' + strftime('%d-%m-%Y_%H-%M-%S') + '.csv'), 'w')
for path in sgf_files:
sgf_file = open(path, 'r')
training_data_file.write(os.path.basename(path) + '\n')
collection = sgf.parse(sgf_file.read())
game_tree = collection.children[0]
moves = game_tree.nodes[1:]
# meta = game_tree.nodes[0].properties
# see SGF properties here: www.red-bean.com/sgf/properties.html
board = Board([[EMPTY_val] * size] * size)
training_data_file.write(board.matrix2csv() + '\n')
for move in moves:
keys = move.properties.keys()
if 'B' not in keys and 'W' not in keys: # don't know how to deal with special stuff yet
continue
# can't rely on the order in keys(), apparently must extract it like this
player_color = 'B' if 'B' in move.properties.keys() else 'W'
sgf_move = move.properties[player_color][0]
if len(sgf_move) is 2: # otherwise its a pass
loc = ord(sgf_move[1]) - ord('a'), ord(sgf_move[0]) - ord('a')
player_val = BLACK_val if player_color == 'B' else WHITE_val
opponent_val = WHITE_val if player_color == 'B' else BLACK_val
board.place_stone_and_capture_if_applicable(loc, player_val, opponent_val, EMPTY_val)
training_data_file.write(board.matrix2csv() + '\n')
training_data_file.close()
|
import os
import sgf
from time import strftime
from os.path import dirname, abspath
from src.play.model.Board import Board
size = 9
EMPTY_val = 0 # 0.45
BLACK_val = 1 # -1.35
WHITE_val = -1 # 1.05
data_dir = os.path.join(dirname(dirname(dirname(dirname(abspath(__file__))))), 'data')
paths = [
os.path.join(data_dir, 'game_57083.sgf'),
os.path.join(data_dir, 'game_100672.sgf'),
]
training_data_dir = os.path.join(data_dir, 'training_data')
if not os.path.exists(training_data_dir): # create the folder if it does not exist yet
os.makedirs(training_data_dir)
training_data_file = open(
os.path.join(training_data_dir, str(len(paths)) + '_games_' + strftime('%d-%m-%Y_%H-%M-%S') + '.csv'), 'w')
for path in paths:
sgf_file = open(path, 'r')
training_data_file.write(os.path.basename(path) + '\n')
collection = sgf.parse(sgf_file.read())
game_tree = collection.children[0]
meta = game_tree.nodes[0].properties
moves = game_tree.nodes[1:]
# see SGF properties here: www.red-bean.com/sgf/properties.html
board = Board([[EMPTY_val] * size] * size)
training_data_file.write(board.matrix2csv() + '\n')
for move in moves:
keys = move.properties.keys()
if 'B' not in keys and 'W' not in keys: # don't know how to deal with special stuff yet
continue
# can't rely on the order in keys(), apparently must extract it like this
player_color = 'B' if 'B' in move.properties.keys() else 'W'
sgf_move = move.properties[player_color][0]
if len(sgf_move) is 2: # otherwise its a pass
loc = ord(sgf_move[1]) - ord('a'), ord(sgf_move[0]) - ord('a')
player_val = BLACK_val if player_color == 'B' else WHITE_val
opponent_val = WHITE_val if player_color == 'B' else BLACK_val
board.place_stone_and_capture_if_applicable(loc, player_val, opponent_val, EMPTY_val)
training_data_file.write(board.matrix2csv() + '\n')
training_data_file.close()
|
mit
|
Python
|
327fcfd4c6b0ad10b25c286f271c577afd741099
|
set width for login details to 50 chars.
|
barry-scott/scm-workbench,barry-scott/git-workbench,barry-scott/scm-workbench,barry-scott/git-workbench,barry-scott/scm-workbench
|
Source/Hg/wb_hg_credential_dialogs.py
|
Source/Hg/wb_hg_credential_dialogs.py
|
'''
====================================================================
Copyright (c) 2016 Barry A Scott. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_hg_credential_dialogs.py
'''
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
import wb_dialog_bases
class WbHgGetLoginDialog(wb_dialog_bases.WbDialog):
def __init__( self, parent, url, realm ):
super().__init__( parent )
self.setWindowTitle( T_('Mercurial Credentials') )
self.username = QtWidgets.QLineEdit( '' )
self.password = QtWidgets.QLineEdit()
self.password.setEchoMode( self.password.Password )
self.username.textChanged.connect( self.nameTextChanged )
self.password.textChanged.connect( self.nameTextChanged )
em = self.fontMetrics().width( 'M' )
self.addRow( T_('URL'), url )
self.addRow( T_('Realm'), realm )
self.addRow( T_('Username'), self.username, min_width=50*em )
self.addRow( T_('Password'), self.password )
self.addButtons()
def completeInit( self ):
# set focus
self.username.setFocus()
def nameTextChanged( self, text ):
self.ok_button.setEnabled( self.getUsername() != '' and self.getPassword() != '' )
def getUsername( self ):
return self.username.text().strip()
def getPassword( self ):
return self.password.text().strip()
|
'''
====================================================================
Copyright (c) 2016 Barry A Scott. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_hg_credential_dialogs.py
'''
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
import wb_dialog_bases
class WbHgGetLoginDialog(wb_dialog_bases.WbDialog):
def __init__( self, parent, url, realm ):
super().__init__( parent )
self.setWindowTitle( T_('Mercurial Credentials') )
self.username = QtWidgets.QLineEdit( '' )
self.password = QtWidgets.QLineEdit()
self.password.setEchoMode( self.password.Password )
self.username.textChanged.connect( self.nameTextChanged )
self.password.textChanged.connect( self.nameTextChanged )
em = self.fontMetrics().width( 'M' )
self.username.setMinimumWidth( 50*em )
self.addRow( T_('URL'), url )
self.addRow( T_('Realm'), realm )
self.addRow( T_('Username'), self.username )
self.addRow( T_('Password'), self.password )
self.addButtons()
def completeInit( self ):
# set focus
self.username.setFocus()
def nameTextChanged( self, text ):
self.ok_button.setEnabled( self.getUsername() != '' and self.getPassword() != '' )
def getUsername( self ):
return self.username.text().strip()
def getPassword( self ):
return self.password.text().strip()
|
apache-2.0
|
Python
|
fe7d5ec956f0277d0689dec57d9e145fcd19f79f
|
Modify svm
|
Evensgn/MNIST-learning
|
mnist_svm.py
|
mnist_svm.py
|
import numpy as np
import matplotlib.pyplot as plt
GRAY_SCALE_RANGE = 255
import pickle
data_filename = 'data_deskewed.pkl'
print('Loading data from file \'' + data_filename + '\' ...')
with open(data_filename, 'rb') as f:
train_labels = pickle.load(f)
train_images = pickle.load(f)
test_labels = pickle.load(f)
test_images = pickle.load(f)
num_pixel = pickle.load(f)
print('Data loading complete.')
train_images = np.array(train_images)
train_images.resize(train_images.size // num_pixel, num_pixel)
test_images = np.array(test_images)
test_images.resize(test_images.size // num_pixel, num_pixel)
test_labels = np.array(test_labels)
train_labels = np.array(train_labels)
## normalization
train_images = train_images / GRAY_SCALE_RANGE
test_images = test_images / GRAY_SCALE_RANGE
from sklearn import svm, metrics
# clf = svm.SVC(gamma = 0.001)
clf = svm.SVC(kernel = 'linear')
clf.fit(train_images[:1000], train_labels[:1000])
prediction = clf.predict(test_images)
print("Classification report for classifier %s:\n%s\n"
% (clf, metrics.classification_report(test_labels, prediction)))
print("Confusion matrix:\n%s" % metrics.confusion_matrix(test_labels, prediction))
|
import numpy as np
import matplotlib.pyplot as plt
GRAY_SCALE_RANGE = 255
import pickle
data_filename = 'data_deskewed.pkl'
print('Loading data from file \'' + data_filename + '\' ...')
with open(data_filename, 'rb') as f:
train_labels = pickle.load(f)
train_images = pickle.load(f)
test_labels = pickle.load(f)
test_images = pickle.load(f)
num_pixel = pickle.load(f)
print('Data loading complete.')
train_images = np.array(train_images)
train_images.resize(train_images.size // num_pixel, num_pixel)
test_images = np.array(test_images)
test_images.resize(test_images.size // num_pixel, num_pixel)
test_labels = np.array(test_labels)
train_labels = np.array(train_labels)
## normalization
train_images = train_images / GRAY_SCALE_RANGE
test_images = test_images / GRAY_SCALE_RANGE
from sklearn import svm, metrics
clf = svm.SVC(gamma = 0.001)
clf.fit(train_images, train_labels)
prediction = clf.predict(test_images)
print("Classification report for classifier %s:\n%s\n"
% (clf, metrics.classification_report(test_labels, prediction)))
print("Confusion matrix:\n%s" % metrics.confusion_matrix(test_labels, prediction))
|
mit
|
Python
|
ec7411f409f07bd04778c9baf509adb10f446f10
|
allow cross origin requests
|
socialc0de/germany-says-welcome-backend
|
mock/mock.py
|
mock/mock.py
|
import cherrypy
class MockController:
def poi(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("poi.json") as poifile:
return poifile.read()
def faq(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("faq.json") as faqfile:
return faqfile.read()
def phrasebook(self, location):
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
with open("phrasebook.json") as phrasebookfile:
return phrasebookfile.read()
def setup_routes():
d = cherrypy.dispatch.RoutesDispatcher()
d.connect('mock', '/:action/:location', controller=MockController())
dispatcher = d
return dispatcher
conf = {
'/': {
'request.dispatch': setup_routes()
}
}
if __name__ == '__main__':
app = cherrypy.tree.mount(None, config=conf)
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.quickstart(app)
|
import cherrypy
class MockController:
def poi(self, location):
with open("poi.json") as poifile:
return poifile.read()
def faq(self, location):
with open("faq.json") as faqfile:
return faqfile.read()
def phrasebook(self, location):
with open("phrasebook.json") as phrasebookfile:
return phrasebookfile.read()
def setup_routes():
d = cherrypy.dispatch.RoutesDispatcher()
d.connect('mock', '/:action/:location', controller=MockController())
dispatcher = d
return dispatcher
conf = {
'/': {
'request.dispatch': setup_routes()
}
}
if __name__ == '__main__':
app = cherrypy.tree.mount(None, config=conf)
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.quickstart(app)
|
agpl-3.0
|
Python
|
f5cd2c396bcc9ef2775503b4f86aa9bb7d6c8d93
|
Sort operator extra links (#24992)
|
cfei18/incubator-airflow,nathanielvarona/airflow,cfei18/incubator-airflow,nathanielvarona/airflow,cfei18/incubator-airflow,apache/airflow,apache/airflow,nathanielvarona/airflow,apache/airflow,apache/airflow,apache/airflow,cfei18/incubator-airflow,nathanielvarona/airflow,nathanielvarona/airflow,cfei18/incubator-airflow,nathanielvarona/airflow,cfei18/incubator-airflow,apache/airflow
|
airflow/api_connexion/endpoints/extra_link_endpoint.py
|
airflow/api_connexion/endpoints/extra_link_endpoint.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm.session import Session
from airflow import DAG
from airflow.api_connexion import security
from airflow.api_connexion.exceptions import NotFound
from airflow.api_connexion.types import APIResponse
from airflow.exceptions import TaskNotFound
from airflow.models.dagbag import DagBag
from airflow.security import permissions
from airflow.utils.airflow_flask_app import get_airflow_app
from airflow.utils.session import NEW_SESSION, provide_session
@security.requires_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
)
@provide_session
def get_extra_links(
*,
dag_id: str,
dag_run_id: str,
task_id: str,
session: Session = NEW_SESSION,
) -> APIResponse:
"""Get extra links for task instance"""
from airflow.models.taskinstance import TaskInstance
dagbag: DagBag = get_airflow_app().dag_bag
dag: DAG = dagbag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found", detail=f'DAG with ID = "{dag_id}" not found')
try:
task = dag.get_task(task_id)
except TaskNotFound:
raise NotFound("Task not found", detail=f'Task with ID = "{task_id}" not found')
ti = (
session.query(TaskInstance)
.filter(
TaskInstance.dag_id == dag_id,
TaskInstance.run_id == dag_run_id,
TaskInstance.task_id == task_id,
)
.one_or_none()
)
if not ti:
raise NotFound("DAG Run not found", detail=f'DAG Run with ID = "{dag_run_id}" not found')
all_extra_link_pairs = (
(link_name, task.get_extra_links(ti, link_name)) for link_name in task.extra_links
)
all_extra_links = {
link_name: link_url if link_url else None for link_name, link_url in sorted(all_extra_link_pairs)
}
return all_extra_links
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm.session import Session
from airflow import DAG
from airflow.api_connexion import security
from airflow.api_connexion.exceptions import NotFound
from airflow.api_connexion.types import APIResponse
from airflow.exceptions import TaskNotFound
from airflow.models.dagbag import DagBag
from airflow.security import permissions
from airflow.utils.airflow_flask_app import get_airflow_app
from airflow.utils.session import NEW_SESSION, provide_session
@security.requires_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
)
@provide_session
def get_extra_links(
*,
dag_id: str,
dag_run_id: str,
task_id: str,
session: Session = NEW_SESSION,
) -> APIResponse:
"""Get extra links for task instance"""
from airflow.models.taskinstance import TaskInstance
dagbag: DagBag = get_airflow_app().dag_bag
dag: DAG = dagbag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found", detail=f'DAG with ID = "{dag_id}" not found')
try:
task = dag.get_task(task_id)
except TaskNotFound:
raise NotFound("Task not found", detail=f'Task with ID = "{task_id}" not found')
ti = (
session.query(TaskInstance)
.filter(
TaskInstance.dag_id == dag_id,
TaskInstance.run_id == dag_run_id,
TaskInstance.task_id == task_id,
)
.one_or_none()
)
if not ti:
raise NotFound("DAG Run not found", detail=f'DAG Run with ID = "{dag_run_id}" not found')
all_extra_link_pairs = (
(link_name, task.get_extra_links(ti, link_name)) for link_name in task.extra_links
)
all_extra_links = {
link_name: link_url if link_url else None for link_name, link_url in all_extra_link_pairs
}
return all_extra_links
|
apache-2.0
|
Python
|
7f248f252b0a846e39c60d66485f796576b2179e
|
fix doctest
|
erikedin/aoc2016
|
aoc2016/day9.py
|
aoc2016/day9.py
|
import re
def parse(lines):
return ''.join([x.strip() for x in lines])
class Marker(object):
def __init__(self, chars, repeats):
self.chars = chars
self.repeats = repeats
@classmethod
def parse(clazz, text):
"""
>>> m, rest = Marker.parse('(10x2)abc')
>>> m.chars
10
>>> m.repeats
2
>>> rest
'abc'
"""
pattern = r"\((\d+)x(\d+)\)"
m = re.match(pattern, text)
if not m:
return None, text
return Marker(int(m.group(1)), int(m.group(2))), text[len(m.group(0)):]
def take(s, n):
return s[:n], s[n:]
def decompress(compressed):
"""
>>> decompress('ADVENT')
'ADVENT'
>>> decompress('A(1x5)BC')
'ABBBBBC'
>>> decompress('(3x3)XYZ')
'XYZXYZXYZ'
"""
result = []
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result.append(c)
else:
s, compressed = take(compressed, m.chars)
result.append(s * m.repeats)
return ''.join(result)
def decompressed_length2(compressed):
"""
>>> decompressed_length2('ADVENT')
6
>>> decompressed_length2('X(8x2)(3x3)ABCY')
20
"""
result = 0
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result += 1
else:
s, compressed = take(compressed, m.chars)
d = decompressed_length2(s)
result += d * m.repeats
return result
def step1(input):
return len(decompress(input))
def step2(input):
return decompressed_length2(input)
|
import re
def parse(lines):
return ''.join([x.strip() for x in lines])
class Marker(object):
def __init__(self, chars, repeats):
self.chars = chars
self.repeats = repeats
@classmethod
def parse(clazz, text):
"""
>>> m, rest = Marker.parse('(10x2)abc')
>>> m.chars
10
>>> m.repeats
2
>>> rest
'abc'
"""
pattern = r"\((\d+)x(\d+)\)"
m = re.match(pattern, text)
if not m:
return None, text
return Marker(int(m.group(1)), int(m.group(2))), text[len(m.group(0)):]
def take(s, n):
return s[:n], s[n:]
def decompress(compressed):
"""
>>> decompress('ADVENT')
'ADVENT'
>>> decompress('A(1x5)BC')
'ABBBBBC'
>>> decompress('(3x3)XYZ')
'XYZXYZXYZ'
"""
result = []
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result.append(c)
else:
s, compressed = take(compressed, m.chars)
result.append(s * m.repeats)
return ''.join(result)
def decompressed_length2(compressed):
"""
>>> decompress2('ADVENT')
'ADVENT'
>>> decompress2('X(8x2)(3x3)ABCY')
'XABCABCABCABCABCABCY'
"""
result = 0
while compressed:
m, compressed = Marker.parse(compressed)
if m is None:
c, compressed = take(compressed, 1)
result += 1
else:
s, compressed = take(compressed, m.chars)
d = decompressed_length2(s)
result += d * m.repeats
return result
def step1(input):
return len(decompress(input))
def step2(input):
return decompressed_length2(input)
|
mit
|
Python
|
eaae2a1e88572e224621e242be1d15e92065f15e
|
Use new extension setup() API
|
ZenithDK/mopidy-primare,mopidy/mopidy-nad
|
mopidy_nad/__init__.py
|
mopidy_nad/__init__.py
|
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def setup(self, registry):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def register_gstreamer_elements(self):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
apache-2.0
|
Python
|
daf577f1e4bab13f9d5f2e3fdad8765dbab70dfe
|
refactor settings
|
openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms
|
openstax/settings/dev.py
|
openstax/settings/dev.py
|
from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# BASE_URL required for notification emails
BASE_URL = 'http://localhost:8000'
try:
from .local import *
except ImportError:
pass
##################################
# OVERRIDE ACCOUNTS SETTINGS #
##################################
# use default loging and logout urls,
# Needed for selenium test.
ACC_APP_LOGIN_URL = None
ACC_APP_LOGOUT_URL = None
ACC_APP_PROFILE_URL = None
|
from .base import *
DEBUG = True
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# BASE_URL required for notification emails
BASE_URL = 'http://localhost:8000'
try:
from .local import *
except ImportError:
pass
##################################
# ACCOUNTS SETTINGS #
##################################
# Use default login, logout and profile urls
ACC_APP_LOGIN_URL = None
ACC_APP_LOGOUT_URL = None
ACC_APP_PROFILE_URL = None
ACCOUNTS_LOGIN_URL = 'https://accounts-qa.openstax.org/login?'
AUTHORIZATION_URL = 'https://accounts-qa.openstax.org/oauth/authorize'
ACCESS_TOKEN_URL = 'https://accounts-qa.openstax.org/oauth/token'
USER_QUERY = 'https://accounts-qa.openstax.org/api/user?'
SOCIAL_AUTH_OPENSTAX_KEY = '0a3c6b8c21091873805181b4b2a42cdbabeec6f6871332b817f59fac37033537'
SOCIAL_AUTH_OPENSTAX_SECRET = '40035a7f2a7948b33ffce370af3918d692b958a6cc195e8b57b1fbe621a88157'
|
agpl-3.0
|
Python
|
945c93fa91cb7b3b14f002e37e2a8bd2ee915fdd
|
Clean the mako cache between runs, because it breaks theme switching
|
TyberiusPrime/nikola,yamila-moreno/nikola,techdragon/nikola,getnikola/nikola,xuhdev/nikola,s2hc-johan/nikola,masayuko/nikola,okin/nikola,atiro/nikola,atiro/nikola,Proteus-tech/nikola,yamila-moreno/nikola,immanetize/nikola,berezovskyi/nikola,Proteus-tech/nikola,gwax/nikola,schettino72/nikola,x1101/nikola,pluser/nikola,lucacerone/nikola,yamila-moreno/nikola,knowsuchagency/nikola,xuhdev/nikola,pluser/nikola,schettino72/nikola,okin/nikola,immanetize/nikola,berezovskyi/nikola,andredias/nikola,knowsuchagency/nikola,kotnik/nikola,masayuko/nikola,xuhdev/nikola,xuhdev/nikola,wcmckee/nikola,masayuko/nikola,kotnik/nikola,damianavila/nikola,atiro/nikola,okin/nikola,servalproject/nikola,getnikola/nikola,Proteus-tech/nikola,jjconti/nikola,immanetize/nikola,lucacerone/nikola,pluser/nikola,techdragon/nikola,x1101/nikola,andredias/nikola,lucacerone/nikola,TyberiusPrime/nikola,s2hc-johan/nikola,jjconti/nikola,servalproject/nikola,TyberiusPrime/nikola,s2hc-johan/nikola,wcmckee/nikola,andredias/nikola,wcmckee/nikola,damianavila/nikola,JohnTroony/nikola,x1101/nikola,getnikola/nikola,damianavila/nikola,servalproject/nikola,jjconti/nikola,Proteus-tech/nikola,gwax/nikola,knowsuchagency/nikola,techdragon/nikola,okin/nikola,berezovskyi/nikola,getnikola/nikola,kotnik/nikola,JohnTroony/nikola,gwax/nikola,JohnTroony/nikola,schettino72/nikola
|
nikola/mako_templates.py
|
nikola/mako_templates.py
|
########################################
# Mako template handlers
########################################
import os
import shutil
from mako import util, lexer
from mako.lookup import TemplateLookup
lookup = None
cache = {}
def get_deps(filename):
text = util.read_file(filename)
lex = lexer.Lexer(text=text, filename=filename)
lex.parse()
deps = []
for n in lex.template.nodes:
if getattr(n, 'keyword', None) == "inherit":
deps.append(n.attributes['file'])
# TODO: include tags are not handled
return deps
def get_template_lookup(directories):
    """Build a mako ``TemplateLookup`` over *directories*.

    The on-disk module cache is removed first so stale compiled
    templates from a previous run (e.g. a different theme) are never
    reused.
    """
    cache_dir = os.path.join('cache', '.mako.tmp')
    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)
    lookup_kwargs = {
        'directories': directories,
        'module_directory': cache_dir,
        'output_encoding': 'utf-8',
    }
    return TemplateLookup(**lookup_kwargs)
def render_template(template_name, output_name, context, global_context):
    """Render *template_name* with *context* merged over *global_context*.

    If *output_name* is not None the rendered text is also written to
    that path (parent directories are created on demand).  The rendered
    unicode string is returned either way.
    """
    template = lookup.get_template(template_name)
    context.update(global_context)
    data = template.render_unicode(**context)
    if output_name is not None:
        try:
            os.makedirs(os.path.dirname(output_name))
        except OSError:
            # Directory already exists (or cannot be created); open()
            # below will raise if the path is truly unusable.  The
            # original bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt, which is never intended here.
            pass
        with open(output_name, 'w+') as output:
            output.write(data)
    return data
def template_deps(template_name):
    """Return the files *template_name* depends on (itself included).

    Results are memoised in the module-level ``cache`` dict: the
    dependency graph cannot change within a single run.
    """
    cached = cache.get(template_name)
    if cached is None:
        template = lookup.get_template(template_name)
        deps = [template.filename]
        for parent in get_deps(template.filename):
            deps.extend(template_deps(parent))
        cached = tuple(deps)
        cache[template_name] = cached
    return list(cached)
|
########################################
# Mako template handlers
########################################
import os
import shutil
from mako import util, lexer
from mako.lookup import TemplateLookup
lookup = None
cache = {}
def get_deps(filename):
text = util.read_file(filename)
lex = lexer.Lexer(text=text, filename=filename)
lex.parse()
deps = []
for n in lex.template.nodes:
if getattr(n, 'keyword', None) == "inherit":
deps.append(n.attributes['file'])
# TODO: include tags are not handled
return deps
def get_template_lookup(directories):
print "Directories:", directories
cache_dir = os.path.join('cache', '.mako.tmp')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
return TemplateLookup(
directories=directories,
module_directory=cache_dir,
output_encoding='utf-8',
)
def render_template(template_name, output_name, context, global_context):
template = lookup.get_template(template_name)
print template.filename
context.update(global_context)
data = template.render_unicode(**context)
if output_name is not None:
try:
os.makedirs(os.path.dirname(output_name))
except:
pass
with open(output_name, 'w+') as output:
output.write(data)
return data
def template_deps(template_name):
# We can cache here because depedencies should
# not change between runs
if cache.get(template_name, None) is None:
template = lookup.get_template(template_name)
dep_filenames = get_deps(template.filename)
deps = [template.filename]
for fname in dep_filenames:
deps += template_deps(fname)
cache[template_name] = tuple(deps)
return list(cache[template_name])
|
mit
|
Python
|
4912027d6cb0f27c736e46498231595f50a36cd3
|
add cv element
|
oesteban/mriqc,oesteban/mriqc,poldracklab/mriqc,poldracklab/mriqc,oesteban/mriqc,poldracklab/mriqc,poldracklab/mriqc,oesteban/mriqc
|
mriqc/classifier/cv.py
|
mriqc/classifier/cv.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: oesteban
# @Date: 2015-11-19 16:44:27
# @Last Modified by: oesteban
# @Last Modified time: 2016-05-12 17:46:31
"""
MRIQC Cross-validation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import os.path as op
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
import pandas as pd
from sklearn import svm
from sklearn.cross_validation import LeaveOneLabelOut
def main():
    """Entry point: fit an SVM on IQMs with site labels for leave-one-site-out CV."""
    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)
    g_input = parser.add_argument_group('Inputs')
    g_input.add_argument('-X', '--in-training', action='store',
                         required=True)
    g_input.add_argument('-y', '--in-training-labels', action='store',
                         required=True)
    # g_outputs = parser.add_argument_group('Outputs')
    opts = parser.parse_args()

    with open(opts.in_training, 'r') as fileX:
        X_df = pd.read_csv(fileX).sort_values(by=['subject_id'])
    with open(opts.in_training_labels, 'r') as fileY:
        y_df = pd.read_csv(fileY).sort_values(by=['subject_id'])

    # Remove columns that are not IQMs.
    # BUG FIX: numpy ndarray exposes .tolist(), not .to_list() -- the
    # original raised AttributeError on this line.
    columns = X_df.columns.ravel().tolist()
    columns.remove('subject_id')
    columns.remove('session_id')
    columns.remove('run_id')

    # Remove failed cases from Y, append new columns to X.
    y_df = y_df[y_df['subject_id'].isin(X_df.subject_id)]
    sites = list(y_df.site.values)
    X_df['rate'] = y_df.rate.values

    # Convert all samples to tuples.
    X = [tuple(x) for x in X_df[columns].values]

    # BUG FIX: the original passed the undefined name ``labels``
    # (NameError); the site labels built above are the intended
    # leave-one-label-out groups.
    lolo = LeaveOneLabelOut(sites)  # noqa: F841 -- CV split not yet wired in

    clf = svm.SVC()
    clf.fit(X, list(y_df.rate.values))


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: oesteban
# @Date: 2015-11-19 16:44:27
# @Last Modified by: oesteban
# @Last Modified time: 2016-05-12 17:46:31
"""
MRIQC Cross-validation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import os.path as op
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
import pandas as pd
from sklearn import svm
def main():
"""Entry point"""
parser = ArgumentParser(description='MRI Quality Control',
formatter_class=RawTextHelpFormatter)
g_input = parser.add_argument_group('Inputs')
g_input.add_argument('-X', '--in-training', action='store',
required=True)
g_input.add_argument('-y', '--in-training-labels', action='store',
required=True)
# g_outputs = parser.add_argument_group('Outputs')
opts = parser.parse_args()
with open(opts.in_training, 'r') as fileX:
X_df = pd.read_csv(fileX).sort_values(by=['subject_id'])
with open(opts.in_training_labels, 'r') as fileY:
y_df = pd.read_csv(fileY).sort_values(by=['subject_id'])
# Remove columns that are not IQMs
columns = X_df.columns.ravel().to_list()
columns.remove('subject_id')
columns.remove('session_id')
columns.remove('run_id')
# Remove failed cases from Y, append new columns to X
y_df = y_df[y_df['subject_id'].isin(X_df.subject_id)]
X_df['site'] = y_df.site.values
X_df['rate'] = y_df.rate.values
# Convert all samples to tuples
X = [tuple(x) for x in X_df[columns].values]
clf = svm.SVC()
clf.fit(X, list(y_df.rate.values))
if __name__ == '__main__':
main()
|
apache-2.0
|
Python
|
2aeda5c12710e197282f015f7e4b8519f1d8bcc5
|
Update tests.py
|
jtokaz/checkio-mission-compare-functions,jtokaz/checkio-mission-compare-functions,jtokaz/checkio-mission-compare-functions
|
verification/tests.py
|
verification/tests.py
|
"""
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"answer": lambda:0
}
]
}
|
"""
TESTS is a dict with all you tests.
Keys for this will be categories' names.
Each test is dict with
"input" -- input data for user function
"answer" -- your right answer
"explanation" -- not necessary key, it's using for additional info in animation.
"""
TESTS = {
"Basics": [
{
"input": "$5.34",
"two": "$5.34"
}
]
}
|
mit
|
Python
|
0626c8db3f2287d78c467c194e01cf004f0c7e78
|
Convert simple-mapped results back to Series.
|
jorisvandenbossche/pandas,zfrenchee/pandas,datapythonista/pandas,kdebrab/pandas,louispotok/pandas,jmmease/pandas,MJuddBooth/pandas,toobaz/pandas,amolkahat/pandas,Winand/pandas,pandas-dev/pandas,linebp/pandas,harisbal/pandas,nmartensen/pandas,winklerand/pandas,DGrady/pandas,rs2/pandas,Winand/pandas,zfrenchee/pandas,kdebrab/pandas,pratapvardhan/pandas,MJuddBooth/pandas,linebp/pandas,jmmease/pandas,GuessWhoSamFoo/pandas,jreback/pandas,TomAugspurger/pandas,pandas-dev/pandas,jorisvandenbossche/pandas,pandas-dev/pandas,amolkahat/pandas,kdebrab/pandas,toobaz/pandas,DGrady/pandas,louispotok/pandas,GuessWhoSamFoo/pandas,jorisvandenbossche/pandas,jorisvandenbossche/pandas,gfyoung/pandas,zfrenchee/pandas,dsm054/pandas,zfrenchee/pandas,harisbal/pandas,pratapvardhan/pandas,zfrenchee/pandas,jmmease/pandas,Winand/pandas,pratapvardhan/pandas,jreback/pandas,nmartensen/pandas,DGrady/pandas,DGrady/pandas,dsm054/pandas,linebp/pandas,pandas-dev/pandas,nmartensen/pandas,Winand/pandas,cbertinato/pandas,DGrady/pandas,nmartensen/pandas,MJuddBooth/pandas,harisbal/pandas,dsm054/pandas,jreback/pandas,gfyoung/pandas,Winand/pandas,jreback/pandas,gfyoung/pandas,datapythonista/pandas,harisbal/pandas,winklerand/pandas,amolkahat/pandas,gfyoung/pandas,winklerand/pandas,harisbal/pandas,rs2/pandas,TomAugspurger/pandas,louispotok/pandas,cbertinato/pandas,toobaz/pandas,GuessWhoSamFoo/pandas,winklerand/pandas,TomAugspurger/pandas,pratapvardhan/pandas,nmartensen/pandas,cbertinato/pandas,cython-testbed/pandas,datapythonista/pandas,jreback/pandas,linebp/pandas,kdebrab/pandas,GuessWhoSamFoo/pandas,pratapvardhan/pandas,TomAugspurger/pandas,gfyoung/pandas,MJuddBooth/pandas,cbertinato/pandas,cython-testbed/pandas,toobaz/pandas,dsm054/pandas,dsm054/pandas,winklerand/pandas,winklerand/pandas,linebp/pandas,amolkahat/pandas,DGrady/pandas,nmartensen/pandas,jmmease/pandas,amolkahat/pandas,louispotok/pandas,datapythonista/pandas,cython-testbed/pandas,jmmease/pandas,toobaz/pandas,louispotok/pandas,GuessWhoSamFoo/pandas,
linebp/pandas,rs2/pandas,cython-testbed/pandas,cython-testbed/pandas,kdebrab/pandas,cbertinato/pandas,rs2/pandas,jmmease/pandas,Winand/pandas,MJuddBooth/pandas
|
pandas/util/map.py
|
pandas/util/map.py
|
import numpy as np
from pandas import _tseries as lib
from pandas import notnull, Series
from functools import wraps
class repeat(object):
    """Wrap a single object so that any index lookup returns it.

    Used to broadcast scalar arguments alongside real sequences.
    """

    def __init__(self, obj):
        self.obj = obj

    def __getitem__(self, i):
        # Every position maps to the one wrapped value.
        return self.obj
class azip(object):
    """Column-wise zip: ``azip(a, b, ...)[i]`` yields ``[a[i], b[i], ...]``.

    Scalar arguments are broadcast by wrapping them in ``repeat`` so
    they act as an endless column of that value.
    """

    def __init__(self, *args):
        self.cols = [repeat(a) if np.isscalar(a) else a for a in args]

    def __getitem__(self, i):
        return [col[i] for col in self.cols]
def map_iter_args(arr, f, otherargs, n_otherargs, required, n_results):
    '''
    Substitute for np.vectorize with pandas-friendly dtype inference

    Parameters
    ----------
    arr : ndarray
    f : function

    Returns
    -------
    mapped : ndarray
    '''
    n = len(arr)
    out = np.empty((n, n_results), dtype=object)
    for i, val in enumerate(arr):
        extra = otherargs[i]
        if notnull(val) and all(notnull(extra[r]) for r in required):
            out[i] = f(val, *extra)
        else:
            # Any missing required input produces a full row of NaNs.
            out[i] = [np.nan] * n_results
    return [lib.maybe_convert_objects(col, try_float=0) for col in out.T]
def auto_map(arr, f, otherargs, n_results=1, required='all'):
    """Map *f* over *arr* with extra per-element arguments.

    When every extra argument is a scalar, the C-level fast path in
    ``lib.map_infer`` is used; otherwise arguments are zipped column-wise
    and fed through ``map_iter_args``.  Results come back as Series
    aligned on ``arr.index``.
    """
    if all(np.isscalar(a) for a in otherargs):
        mapped = lib.map_infer(arr, lambda v: f(v, *otherargs))
        return Series(mapped, index=arr.index, copy=False)
    n_otherargs = len(otherargs)
    if required == 'all':
        required = list(range(n_otherargs))
    cols = map_iter_args(arr, f, azip(*otherargs), n_otherargs,
                         required, n_results)
    series_cols = [Series(col, index=arr.index, copy=False) for col in cols]
    return series_cols[0] if n_results == 1 else series_cols
def mapwrap(f, n_results_default=1, required='all'):
    """Turn a plain function into an auto-mapping, Series-aware wrapper."""
    @wraps(f)
    def wrapped(arr, *otherargs, n_results=None):
        return auto_map(arr, f, otherargs,
                        n_results or n_results_default, required)
    return wrapped
|
import numpy as np
from pandas import _tseries as lib
from pandas import notnull, Series
from functools import wraps
class repeat(object):
def __init__(self, obj):
self.obj = obj
def __getitem__(self, i):
return self.obj
class azip(object):
def __init__(self, *args):
self.cols = []
for a in args:
if np.isscalar(a):
self.cols.append(repeat(a))
else:
self.cols.append(a)
def __getitem__(self, i):
return [col[i] for col in self.cols]
def map_iter_args(arr, f, otherargs, n_otherargs, required, n_results):
'''
Substitute for np.vectorize with pandas-friendly dtype inference
Parameters
----------
arr : ndarray
f : function
Returns
-------
mapped : ndarray
'''
n = len(arr)
result = np.empty((n, n_results), dtype=object)
for i, val in enumerate(arr):
args = otherargs[i]
if notnull(val) and all(notnull(args[r]) for r in required):
result[i] = f(val, *args)
else:
result[i] = [np.nan] * n_results
return [lib.maybe_convert_objects(col, try_float=0) for col in result.T]
def auto_map(arr, f, otherargs, n_results=1, required='all'):
if all(np.isscalar(a) for a in otherargs):
return lib.map_infer(arr, lambda v: f(v, *otherargs))
n_otherargs = len(otherargs)
if required == 'all':
required = list(range(n_otherargs))
res = map_iter_args(arr, f, azip(*otherargs), n_otherargs, required, n_results)
res = [Series(col, index=arr.index, copy=False) for col in res]
if n_results == 1:
return res[0]
return res
def mapwrap(f, n_results_default=1, required='all'):
@wraps(f)
def wrapped(arr, otherargs=(), n_results=None):
n_results = n_results or n_results_default
return auto_map(arr, f, otherargs, n_results, required)
return wrapped
|
bsd-3-clause
|
Python
|
42f74f304d0ac404f17d6489033b6140816cb194
|
Implement Stonesplinter Trogg, Burly Rockjaw Trogg, Ship's Cannon
|
Ragowit/fireplace,NightKev/fireplace,jleclanche/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,amw2104/fireplace,beheh/fireplace,Meerkov/fireplace,amw2104/fireplace,oftc-ftw/fireplace,butozerca/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,butozerca/fireplace,Ragowit/fireplace,liujimj/fireplace
|
fireplace/cards/gvg/neutral_common.py
|
fireplace/cards/gvg/neutral_common.py
|
from ..utils import *
##
# Minions
# Stonesplinter Trogg
class GVG_067:
    def CARD_PLAYED(self, player, card):
        # Fires on every card played; only enemy spells grant the buff.
        if player is self.controller:
            return
        if card.type == CardType.SPELL:
            self.buff("GVG_067a")
class GVG_067a:
    # Buff applied by GVG_067: +1 Attack per triggering enemy spell.
    Atk = 1
# Burly Rockjaw Trogg
class GVG_068:
    def CARD_PLAYED(self, player, card):
        # Buff only when an opponent casts a spell.
        enemy_spell = (player is not self.controller
                       and card.type == CardType.SPELL)
        if enemy_spell:
            self.buff("GVG_068a")
class GVG_068a:
    # Buff applied by GVG_068: +2 Attack per triggering enemy spell.
    Atk = 2
# Ship's Cannon
class GVG_075:
    def OWN_MINION_SUMMONED(self, minion):
        # When a friendly Pirate is summoned, deal 2 damage to a random
        # enemy character.
        if minion.race != Race.PIRATE:
            return
        targets = self.controller.getTargets(TARGET_ENEMY_CHARACTERS)
        self.hit(random.choice(targets), 2)
# Explosive Sheep
class GVG_076:
    def deathrattle(self):
        # Deal 2 damage to every minion on the board when this dies.
        board = self.game.board
        for victim in board:
            self.hit(victim, 2)
# Clockwork Gnome
class GVG_082:
    # Deathrattle delegates to the shared helper from ..utils (star
    # import); presumably adds a random Spare Part to hand -- confirm
    # against the helper's definition.
    deathrattle = giveSparePart
# Micro Machine
class GVG_103:
    def TURN_BEGIN(self, player):
        # That card ID is not a mistake
        # NOTE(review): unlike the other handlers in this file, buff()
        # is given an explicit target argument (self) here -- confirm
        # this two-argument form against the buff() API.
        self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
    # +1 Attack buff; reused by GVG_103 at the start of every turn.
    Atk = 1
|
from ..utils import *
##
# Minions
# Explosive Sheep
class GVG_076:
def deathrattle(self):
for target in self.game.board:
self.hit(target, 2)
# Clockwork Gnome
class GVG_082:
deathrattle = giveSparePart
# Micro Machine
class GVG_103:
def TURN_BEGIN(self, player):
# That card ID is not a mistake
self.buff(self, "GVG_076a")
# Pistons
class GVG_076a:
Atk = 1
|
agpl-3.0
|
Python
|
0bd2fffcab47c79999e5bf20b881a69193855bd9
|
Fix install script
|
SpamapS/dstat-plugins
|
dstat_plugins/__init__.py
|
dstat_plugins/__init__.py
|
import glob
import shutil
import sys
import os
import os.path
import pkg_resources as pr
def install():
    """Copy the bundled ``dstat_*`` plugin files into ``sys.argv[1]``.

    The destination directory is created if needed; the script exits
    with status 1 when it cannot be created.
    """
    destdir = sys.argv[1]
    try:
        os.makedirs(destdir)
    except OSError:
        if not os.path.isdir(destdir):
            msg = ("{} could not be created and does not "
                   "exist.\n").format(destdir)
            sys.stderr.write(msg)
            sys.exit(1)
    datadir = pr.resource_filename('dstat_plugins', 'plugins')
    pattern = os.path.join(datadir, 'dstat_*')
    for plugin in glob.glob(pattern):
        shutil.copy(plugin, destdir)
|
import shutil
import sys
import pkg_resources as pr
def install():
destdir = sys.argv[1]
datadir = pr.resource_filename(__name__, 'plugins/dstat_mysql5_innodb.py')
shutil.copytree(datadir, destdir)
|
apache-2.0
|
Python
|
2cc505d3a3c54f3ce1e91941a905c6a298a46d05
|
Fix classifiers.
|
ralphbean/narcissus,ralphbean/narcissus,ralphbean/narcissus
|
narcissus.hub/setup.py
|
narcissus.hub/setup.py
|
# This file is part of Narcissus
# Copyright (C) 2011-2013 Ralph Bean
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
import sys
# The long description shown on PyPI comes straight from the README.
with open('README.rst') as f:
    long_description = f.read().strip()

setup(
    name='narcissus.hub',
    version='0.9.0.1',
    description='Hub components for Narcissus, realtime log visualization',
    long_description=long_description,
    license="AGPLv3+",
    author='Ralph Bean',
    author_email='[email protected]',
    url='http://narcissus.ws/',
    install_requires=[
        "moksha.hub",
        "pygeoip",
        "geojson",
    ],
    packages=find_packages(exclude=['ez_setup']),
    include_package_data=True,
    namespace_packages=['narcissus'],
    classifiers=[
        "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
        "Topic :: Scientific/Engineering :: Visualization",
        "Topic :: System :: Logging",
        "Topic :: System :: Monitoring",
        "Intended Audience :: System Administrators",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
    ],
    entry_points={
        'moksha.stream': (
            ## Enable this to *test* narcissus. It produces random ips.
            #'random_lol = narcissus.hub.producers:RandomIPProducer',
            # We used to keep these in an rrd database. That was too heavy.
            #'series_pro = narcissus.hub.consumers:TimeSeriesProducer',
        ),
        'moksha.consumer': (
            'raw_ip = narcissus.hub.consumers:RawIPConsumer',
            'httpdlight = narcissus.hub.consumers:HttpLightConsumer',
            'latlon2geo = narcissus.hub.consumers:LatLon2GeoJsonConsumer',
            # We used to keep these in an rrd database. That was too heavy.
            #'series_con = narcissus.hub.consumers:TimeSeriesConsumer',
        ),
    },
)
|
# This file is part of Narcissus
# Copyright (C) 2011-2013 Ralph Bean
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from setuptools import setup, find_packages
import sys
f = open('README.rst')
long_description = f.read().strip()
f.close()
setup(
name='narcissus.hub',
version='0.9.0.1',
description='Hub components for Narcissus, realtime log visualization',
long_description=long_description,
license="AGPLv3+",
author='Ralph Bean',
author_email='[email protected]',
url='http://narcissus.ws/',
install_requires=[
"moksha.hub",
"pygeoip",
"geojson",
],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
namespace_packages=['narcissus'],
classifiers=[
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Topic :: Scientific/Engineering :: Visualization"
"Topic :: System :: Logging"
"Topic :: System :: Monitoring",
"Intended Audience :: System Administrators",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
entry_points={
'moksha.stream' : (
## Enable this to *test* narcissus. It produces random ips.
#'random_lol = narcissus.hub.producers:RandomIPProducer',
# We used to keep these in an rrd database. That was too heavy.
#'series_pro = narcissus.hub.consumers:TimeSeriesProducer',
),
'moksha.consumer': (
'raw_ip = narcissus.hub.consumers:RawIPConsumer',
'httpdlight = narcissus.hub.consumers:HttpLightConsumer',
'latlon2geo = narcissus.hub.consumers:LatLon2GeoJsonConsumer',
# We used to keep these in an rrd database. That was too heavy.
#'series_con = narcissus.hub.consumers:TimeSeriesConsumer',
),
},
)
|
agpl-3.0
|
Python
|
624599bc0172e9166536abfc6be254b5117ac64c
|
Add error handling in plugin installation process
|
eayunstack/fuel-web,zhaochao/fuel-web,Fiware/ops.Fuel-main-dev,stackforge/fuel-web,huntxu/fuel-web,huntxu/fuel-web,Fiware/ops.Fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,ddepaoli3/fuel-main-dev,teselkin/fuel-main,prmtl/fuel-web,zhaochao/fuel-main,SmartInfrastructures/fuel-web-dev,dancn/fuel-main-dev,teselkin/fuel-main,prmtl/fuel-web,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,prmtl/fuel-web,zhaochao/fuel-web,zhaochao/fuel-main,AnselZhangGit/fuel-main,teselkin/fuel-main,SmartInfrastructures/fuel-main-dev,SmartInfrastructures/fuel-web-dev,Fiware/ops.Fuel-main-dev,eayunstack/fuel-main,zhaochao/fuel-main,zhaochao/fuel-main,teselkin/fuel-main,SergK/fuel-main,nebril/fuel-web,zhaochao/fuel-main,SergK/fuel-main,ddepaoli3/fuel-main-dev,stackforge/fuel-main,huntxu/fuel-main,stackforge/fuel-web,AnselZhangGit/fuel-main,huntxu/fuel-web,AnselZhangGit/fuel-main,Fiware/ops.Fuel-main-dev,eayunstack/fuel-web,koder-ua/nailgun-fcert,dancn/fuel-main-dev,koder-ua/nailgun-fcert,dancn/fuel-main-dev,SmartInfrastructures/fuel-main-dev,koder-ua/nailgun-fcert,nebril/fuel-web,zhaochao/fuel-web,eayunstack/fuel-main,nebril/fuel-web,eayunstack/fuel-web,dancn/fuel-main-dev,eayunstack/fuel-main,SergK/fuel-main,zhaochao/fuel-web,SmartInfrastructures/fuel-main-dev,prmtl/fuel-web,huntxu/fuel-main,stackforge/fuel-main,stackforge/fuel-web,AnselZhangGit/fuel-main,zhaochao/fuel-web,ddepaoli3/fuel-main-dev,stackforge/fuel-main,huntxu/fuel-main,huntxu/fuel-web,prmtl/fuel-web,SmartInfrastructures/fuel-main-dev,eayunstack/fuel-web,nebril/fuel-web,eayunstack/fuel-web,koder-ua/nailgun-fcert,SmartInfrastructures/fuel-web-dev,huntxu/fuel-web
|
nailgun/nailgun/plugin/process.py
|
nailgun/nailgun/plugin/process.py
|
# -*- coding: utf-8 -*-
import traceback
import time
from multiprocessing import Queue, Process
from sqlalchemy import update
from nailgun.api.models import Task
from nailgun.task.helpers import TaskHelper
from nailgun.logger import logger
from nailgun.db import make_session
import nailgun.plugin.manager
PLUGIN_PROCESSING_QUEUE = None
def get_queue():
    """Return the module-wide plugin task queue, creating it lazily."""
    global PLUGIN_PROCESSING_QUEUE
    if PLUGIN_PROCESSING_QUEUE is None:
        PLUGIN_PROCESSING_QUEUE = Queue()
    return PLUGIN_PROCESSING_QUEUE
class PluginProcessor(Process):
    """Worker process that consumes task UUIDs from the shared plugin
    queue, dispatches them to the PluginManager, and marks the Task row
    as failed when processing raises.
    """

    def __init__(self):
        Process.__init__(self)
        # One dedicated DB session per worker process.
        self.db = make_session()
        self.plugin_manager = nailgun.plugin.manager.PluginManager(self.db)
        self.queue = get_queue()

    def run(self):
        # Runs until the process itself is terminated.
        while True:
            task_uuid = None
            try:
                task_uuid = self.queue.get()
                self.plugin_manager.process(task_uuid)
            except Exception as exc:
                # Only flag the task if we learned its UUID before failing.
                if task_uuid:
                    self.set_error(task_uuid, exc)
                logger.error(traceback.format_exc())
                # Brief back-off so a persistent failure cannot spin the
                # loop.  NOTE(review): source indentation was ambiguous;
                # confirm the sleep belongs inside the except block.
                time.sleep(2)

    def set_error(self, task_uuid, msg):
        # Bulk-update the matching Task row.  NOTE(review): no explicit
        # commit here -- confirm the session's commit behaviour.
        self.db.query(Task).filter_by(uuid=task_uuid).update({
            'status': 'error',
            'progress': 100,
            'msg': str(msg)})
|
# -*- coding: utf-8 -*-
import traceback
import time
from multiprocessing import Queue, Process
from nailgun.task.helpers import TaskHelper
from nailgun.logger import logger
from nailgun.db import make_session
import nailgun.plugin.manager
PLUGIN_PROCESSING_QUEUE = None
def get_queue():
global PLUGIN_PROCESSING_QUEUE
if not PLUGIN_PROCESSING_QUEUE:
PLUGIN_PROCESSING_QUEUE = Queue()
return PLUGIN_PROCESSING_QUEUE
class PluginProcessor(Process):
def __init__(self):
Process.__init__(self)
self.db = make_session()
self.plugin_manager = nailgun.plugin.manager.PluginManager(self.db)
self.queue = get_queue()
def run(self):
while True:
try:
task_uuid = self.queue.get()
self.plugin_manager.process(task_uuid)
except Exception as exc:
# TaskHelper.set_error(task_uuid, exc)
logger.error(traceback.format_exc())
time.sleep(2)
|
apache-2.0
|
Python
|
00556c84e23dd86eb4ca08ba4c6238425a3eba7e
|
Create Preparation model
|
iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api
|
project_fish/whats_fresh/models.py
|
project_fish/whats_fresh/models.py
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
    """
    The Image model holds an image and related data.
    The Created and Modified time fields are created automatically by
    Django when the object is created or modified, and can not be altered.
    This model uses Django's built-ins for holding the image location and
    data in the database, as well as for keeping created and modified
    timestamps.
    """
    # Uploads are sharded into date-based directories (year/month/day).
    image = models.ImageField(upload_to='%Y/%m/%d')
    caption = models.TextField()
    # Set once at INSERT time, never updated afterwards.
    created = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save().
    modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
    """
    The Vendor model holds the information for a vendor, including the
    geographic location as a pair of latitudinal/longitudinal coordinates,
    a street address, and an optional text description of their location
    (in case the address/coordinates are of, say, a dock instead of a shop).
    """
    # Placeholder: fields not implemented yet.
    pass
class Product(models.Model):
    """
    The Product model holds the information for a product, including the
    origin, season, market price, and availability.
    In addition, it holds a foreign key to the image and story related to the
    product.
    """
    name = models.TextField()
    variety = models.TextField()
    # Alternate/colloquial name for the product.
    alt_name = models.TextField()
    description = models.TextField()
    origin = models.TextField()
    season = models.TextField()
    # NULL means availability unknown (tri-state boolean).
    available = models.NullBooleanField()
    market_price = models.TextField()
    link = models.URLField()
    # NOTE(review): Django appends "_id" to ForeignKey column names, so
    # these fields produce columns named "image_id_id"/"story_id_id" --
    # consider renaming the fields to "image"/"story".
    image_id = models.ForeignKey('Image')
    story_id = models.ForeignKey('Story')
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
class Story(models.Model):
    # Placeholder model referenced by Product.story_id; fields TBD.
    pass
class Preparation(models.Model):
    """
    The Preparation model contains possible preparations of product, to be
    associated many-to-many with product (a product can have one or more
    preparations, preparations apply to many products). Preparations may be
    things like 'frozen', 'dried', 'fresh', 'live', etc, to be defined by
    Sea Grant data input.
    """
    name = models.TextField()
    description = models.TextField()
    # Free-form extra notes about the preparation.
    additional_info = models.TextField()
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='%Y/%m/%d')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
class Product(models.Model):
"""
The Product model holds the information for a product, including the
origin, season, market price, and availability.
In addition, it holds a foreign key to the image and story related to the
product.
"""
name = models.TextField()
variety = models.TextField()
alt_name = models.TextField()
description = models.TextField()
origin = models.TextField()
season = models.TextField()
available = models.NullBooleanField()
market_price = models.TextField()
link = models.URLField()
image_id = models.ForeignKey('Image')
story_id = models.ForeignKey('Story')
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Story(models.Model):
pass
class Preparation(models.Model):
"""
The Preparation model contains possible preparations of product, to be
associated many-to-many with product (a product can have one or more
preparations, preparations apply to many products). Preparations may be
things like 'frozen', 'dried', 'fresh', 'live', etc, to be defined by
Sea Grant data input.
"""
pass
|
apache-2.0
|
Python
|
c0259abdd1b34cd195e3f1ffcb7fb5479d76a0fe
|
bump version to 1.0.0
|
sibson/vncdotool
|
vncdotool/__init__.py
|
vncdotool/__init__.py
|
__version__ = "1.0.0"
|
__version__ = "1.0.0dev"
|
mit
|
Python
|
7d1463fc732cdc6aef3299c6d2bbe916418e6d6e
|
Add full_name field to API
|
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
|
hkisaml/api.py
|
hkisaml/api.py
|
from django.contrib.auth.models import User
from rest_framework import permissions, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
    """Serializes Django auth Users, adding the profile department name
    and a computed full name when available.
    """

    def to_representation(self, obj):
        ret = super(UserSerializer, self).to_representation(obj)
        # Optional extras: only present when the user has a profile /
        # both name parts set.
        if hasattr(obj, 'profile'):
            ret['department_name'] = obj.profile.department_name
        if obj.first_name and obj.last_name:
            ret['full_name'] = '%s %s' % (obj.first_name, obj.last_name)
        return ret

    class Meta:
        fields = [
            'last_login', 'username', 'email', 'date_joined',
            'first_name', 'last_name'
        ]
        model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
               mixins.RetrieveModelMixin):
    """Retrieve a single user.

    Superusers may look up any user by username; everyone else only
    sees themselves (the queryset is filtered to the requesting user).
    """

    def get_queryset(self):
        user = self.request.user
        if user.is_superuser:
            return self.queryset
        else:
            return self.queryset.filter(id=user.id)

    def get_object(self):
        username = self.kwargs.get('username', None)
        if username:
            qs = self.get_queryset()
            obj = generics.get_object_or_404(qs, username=username)
        else:
            # No username in the URL: return the authenticated caller.
            obj = self.request.user
        return obj

    permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
    queryset = User.objects.all()
    serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
from django.contrib.auth.models import User
from rest_framework import permissions, routers, serializers, generics, mixins
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope
class UserSerializer(serializers.ModelSerializer):
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
if hasattr(obj, 'profile'):
ret['department_name'] = obj.profile.department_name
return ret
class Meta:
fields = [
'last_login', 'username', 'email', 'date_joined',
'first_name', 'last_name'
]
model = User
# ViewSets define the view behavior.
class UserView(generics.RetrieveAPIView,
mixins.RetrieveModelMixin):
def get_queryset(self):
user = self.request.user
if user.is_superuser:
return self.queryset
else:
return self.queryset.filter(id=user.id)
def get_object(self):
username = self.kwargs.get('username', None)
if username:
qs = self.get_queryset()
obj = generics.get_object_or_404(qs, username=username)
else:
obj = self.request.user
return obj
permission_classes = [permissions.IsAuthenticated, TokenHasReadWriteScope]
queryset = User.objects.all()
serializer_class = UserSerializer
#router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
|
mit
|
Python
|
7fb0e28ad6ef1190e61fc38bfb19744739b2e096
|
Remove unused deps from admin view
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
scoring_engine/web/views/admin.py
|
scoring_engine/web/views/admin.py
|
from flask import Blueprint, redirect, render_template, url_for
from flask_login import current_user, login_required
from operator import itemgetter
from scoring_engine.models.user import User
from scoring_engine.models.team import Team
mod = Blueprint('admin', __name__)
@mod.route('/admin')
@mod.route('/admin/status')
@login_required
def status():
    """Admin status page; white-team members only."""
    if not current_user.is_white_team:
        return redirect(url_for('auth.unauthorized'))
    return render_template('admin/status.html')
@mod.route('/admin/manage')
@login_required
def manage():
    """User/team management page; white-team members only."""
    if not current_user.is_white_team:
        return redirect(url_for('auth.unauthorized'))
    users = User.query.with_entities(User.id, User.username).all()
    teams = Team.query.with_entities(Team.id, Team.name).all()
    # Present users in a stable order (by id).
    ordered_users = sorted(users, key=itemgetter(0))
    return render_template('admin/manage.html', users=ordered_users, teams=teams)
@mod.route('/admin/stats')
@login_required
def stats():
if current_user.is_white_team:
return render_template('admin/stats.html')
else:
return redirect(url_for('auth.unauthorized'))
|
# Fix: the original flask import ended with a dangling comma
# ("... url_for,"), which is a SyntaxError in an unparenthesized import.
from flask import Blueprint, flash, redirect, render_template, request, url_for
from flask_login import current_user, login_required
from operator import itemgetter
from scoring_engine.models.user import User
from scoring_engine.models.team import Team

# Blueprint grouping the white-team-only admin pages.
mod = Blueprint('admin', __name__)


@mod.route('/admin')
@mod.route('/admin/status')
@login_required
def status():
    """Render the admin status dashboard; white team only."""
    if current_user.is_white_team:
        return render_template('admin/status.html')
    else:
        return redirect(url_for('auth.unauthorized'))


@mod.route('/admin/manage')
@login_required
def manage():
    """Render the user/team management page; white team only."""
    if current_user.is_white_team:
        # Only the id/username (and id/name) columns are needed by the template.
        users = User.query.with_entities(User.id, User.username).all()
        teams = Team.query.with_entities(Team.id, Team.name).all()
        return render_template('admin/manage.html', users=sorted(users, key=itemgetter(0)), teams=teams)
    else:
        return redirect(url_for('auth.unauthorized'))


@mod.route('/admin/stats')
@login_required
def stats():
    """Render the statistics page; white team only."""
    if current_user.is_white_team:
        return render_template('admin/stats.html')
    else:
        return redirect(url_for('auth.unauthorized'))
|
mit
|
Python
|
a900501804a5a07ed9cea77d5d5348be5e100d67
|
Use Acapela TTS if available
|
chili-epfl/pyrobots,chili-epfl/pyrobots-nao
|
src/robots/actions/speech.py
|
src/robots/actions/speech.py
|
# coding=utf-8
import logging; logger = logging.getLogger("robot." + __name__)
logger.setLevel(logging.DEBUG)
from robots.action import *
@action
def say(robot, msg, callback = None, feedback =None):
    """ Says loudly the message.
    Several TTS systems are tested:
    - first, try the Acapela TTS (through the acapela-ros Genom module)
    - then the ROS 'sound_play' node
    - eventually, the Genom 'textos' module
    :param msg: a text to say.
    :param callback: optional completion callback; when set, requests are
        issued without waiting for completion.
    :param feedback: optional feedback handler forwarded to the ROS action.
    """
    def execute(robot):
        # Fallback path: ROS sound_play if available, else the Genom
        # 'textos' module, else give up with a warning.
        logger.info("Robot says: " + msg)
        if robot.hasROS():
            import roslib; roslib.load_manifest('sound_play')
            import rospy, os, sys
            from sound_play.libsoundplay import SoundClient
            soundhandle = SoundClient()
            soundhandle.say(msg)
            return (True, None)
        elif robot.hasmodule("textos"):
            return robot.execute([
                genom_request(
                    "textos",
                    "Say",
                    [msg],
                    wait_for_completion = False if callback else True,
                    callback = callback)])
        else:
            logger.warning("No ROS, no textos module: can not do speech synthesis.")
            return (True, None)
    # Preferred path: Acapela TTS, used only when its ROS action node is up.
    if robot.hasROS():
        import rosnode
        nodes = rosnode.get_node_names()
        if "/acapela" in nodes:
            import actionlib
            from acapela.msg import SayGoal, SayAction
            # use Acapela TTS
            client = actionlib.SimpleActionClient('/acapela/Say', SayAction)
            ok = client.wait_for_server()
            if not ok:
                print("Could not connect to the Acapela ROS action server! Aborting action")
                return
            # Creates a goal to send to the action server.
            goal = SayGoal()
            goal.message = msg
            return [ros_request(client,
                goal,
                wait_for_completion = False if callback else True,
                callback = callback,
                feedback=feedback
                )] # Return a non-blocking action. Useful to be able to cancel it later!
    return [python_request(execute)]
|
# coding=utf-8
import logging; logger = logging.getLogger("robot." + __name__)
logger.setLevel(logging.DEBUG)
from robots.action import *
@action
def say(robot, msg, callback = None):
    """ Says loudly the message.
    Speech synthesis relies on the ROS wrapper around Festival.
    :param msg: a text to say.
    :param callback: optional completion callback for the Genom 'textos'
        path; when set, the request does not wait for completion.
        (Fix: `callback` was referenced in the textos branch without being
        defined anywhere, raising NameError at runtime; it is now an
        optional parameter with a backward-compatible default.)
    """
    def execute(robot):
        logger.info("Robot says: " + msg)
        if robot.hasROS():
            import roslib; roslib.load_manifest('sound_play')
            import rospy, os, sys
            from sound_play.libsoundplay import SoundClient
            soundhandle = SoundClient()
            soundhandle.say(msg)
            return (True, None)
        elif robot.hasmodule("textos"):
            return robot.execute([
                genom_request(
                    "textos",
                    "Say",
                    [msg],
                    wait_for_completion = False if callback else True,
                    callback = callback)])
        else:
            logger.warning("No ROS, no textos module: can not do speech synthesis.")
            return (True, None)
    return [python_request(execute)]
|
isc
|
Python
|
a2982804011e808bd8bf8d9781d9b7bb20328ddc
|
remove import test line
|
baudren/NoteOrganiser,egolus/NoteOrganiser,egolus/NoteOrganiser,baudren/NoteOrganiser
|
noteorganiser/tests/test_utils.py
|
noteorganiser/tests/test_utils.py
|
"""tests for utilities"""
import os
import shutil
import datetime
from PySide import QtGui
from PySide import QtCore
#utils to test
from ..utils import fuzzySearch
from .custom_fixtures import parent
def test_fuzzySearch():
    """fuzzySearch should match prefixes and space-separated sub-patterns."""
    ### these should return True
    #starts with the searchstring
    assert fuzzySearch('g', 'git got gut')
    #starts with the (longer) searchstring
    assert fuzzySearch('git', 'git got gut')
    #searchstring not at the start
    assert fuzzySearch('got', 'git got gut')
    #multiple substrings (separated by a space) found somewhere in the string
    assert fuzzySearch('gi go', 'git got gut')
    #empty string
    assert fuzzySearch('', 'git got gut')
    #strange whitespace
    assert fuzzySearch('gi go', 'git got gut')
    assert fuzzySearch('gi go', 'git got gut')
    ### these should return False
    #searchstring not found
    assert not fuzzySearch('bot', 'git got gut')
    #searchstring not found
    assert not fuzzySearch('gran', 'this is a great neat thing')
|
"""tests for utilities"""
import os
import shutil
import datetime
from PySide import QtGui
from PySide import QtCore
import test
#utils to test
from ..utils import fuzzySearch
from .custom_fixtures import parent
def test_fuzzySearch():
    """fuzzySearch should match prefixes and space-separated sub-patterns."""
    ### these should return True
    #starts with the searchstring
    assert fuzzySearch('g', 'git got gut')
    #starts with the (longer) searchstring
    assert fuzzySearch('git', 'git got gut')
    #searchstring not at the start
    assert fuzzySearch('got', 'git got gut')
    #multiple substrings (separated by a space) found somewhere in the string
    assert fuzzySearch('gi go', 'git got gut')
    #empty string
    assert fuzzySearch('', 'git got gut')
    #strange whitespace
    assert fuzzySearch('gi go', 'git got gut')
    assert fuzzySearch('gi go', 'git got gut')
    ### these should return False
    #searchstring not found
    assert not fuzzySearch('bot', 'git got gut')
    #searchstring not found
    assert not fuzzySearch('gran', 'this is a great neat thing')
|
mit
|
Python
|
6e42e355d6ae60f115c9027ff6fcb17814b346c2
|
use mah special charm helpers
|
chuckbutler/docker-charm,chuckbutler/docker-charm
|
hooks/setup.py
|
hooks/setup.py
|
import subprocess
def pre_install():
    """
    Do any setup required before the install hook.
    """
    install_charmhelpers()


def install_charmhelpers():
    """
    Install the charmhelpers library, if not present.
    """
    try:
        import charmhelpers  # noqa
    except ImportError:
        # Bootstrap pip via apt, then install charmhelpers from the git fork.
        subprocess.check_call(['apt-get', 'install', '-y', 'python-pip'])
        subprocess.check_call(['pip', 'install', '-e', 'git+https://github.com/whitmo/charmhelpers.git#egg=charmhelpers'])
|
import subprocess
def pre_install():
    """
    Do any setup required before the install hook.
    """
    install_charmhelpers()


def install_charmhelpers():
    """
    Install the charmhelpers library, if not present.
    """
    try:
        import charmhelpers  # noqa
    except ImportError:
        # Bootstrap pip via apt, then install charmhelpers from PyPI.
        subprocess.check_call(['apt-get', 'install', '-y', 'python-pip'])
        subprocess.check_call(['pip', 'install', 'charmhelpers'])
|
bsd-3-clause
|
Python
|
a02624cdbacd666d4e0cdba6230e2ee67837f874
|
add AsText to __all__ list
|
geoalchemy/geoalchemy2
|
geoalchemy2/functions.py
|
geoalchemy2/functions.py
|
from sqlalchemy.sql import functions

from . import types

# Public API of this module; every GenericFunction subclass defined below
# is listed so that `from ... import *` re-exports it.
__all__ = [
    'GenericFunction',
    'GeometryType',
    'AsText',
    'Buffer'
]


class GenericFunction(functions.GenericFunction):
    """Base class for the PostGIS function wrappers below; an optional
    `expr` keyword argument is prepended to the positional arguments."""

    def __init__(self, *args, **kwargs):
        expr = kwargs.pop('expr', None)
        if expr is not None:
            args = (expr,) + args
        functions.GenericFunction.__init__(self, *args, **kwargs)

# Functions are classified as in the PostGIS doc.
# <http://www.postgis.org/documentation/manual-svn/reference.html>

#
# Geometry Accessors
#

class GeometryType(GenericFunction):
    name = 'ST_GeometryType'

#
# Geometry Outputs
#

class AsText(GenericFunction):
    name = 'ST_AsText'

#
# Geometry Processing
#

class Buffer(GenericFunction):
    name = 'ST_Buffer'
    type = types.Geometry
|
from sqlalchemy.sql import functions

from . import types

# Public API of this module; every GenericFunction subclass defined below
# must be listed so `from ... import *` re-exports it.
# Fix: 'AsText' is defined in this module but was missing from __all__,
# so star-imports silently omitted it.
__all__ = [
    'GenericFunction', 'GeometryType', 'AsText', 'Buffer'
]


class GenericFunction(functions.GenericFunction):
    """Base class for the PostGIS function wrappers below; an optional
    `expr` keyword argument is prepended to the positional arguments."""

    def __init__(self, *args, **kwargs):
        expr = kwargs.pop('expr', None)
        if expr is not None:
            args = (expr,) + args
        functions.GenericFunction.__init__(self, *args, **kwargs)

# Functions are classified as in the PostGIS doc.
# <http://www.postgis.org/documentation/manual-svn/reference.html>

#
# Geometry Accessors
#

class GeometryType(GenericFunction):
    name = 'ST_GeometryType'

#
# Geometry Outputs
#

class AsText(GenericFunction):
    name = 'ST_AsText'

#
# Geometry Processing
#

class Buffer(GenericFunction):
    name = 'ST_Buffer'
    type = types.Geometry
mit
|
Python
|
9fb1c2781582e52c6618b61d4a8a60c3363ee711
|
bump controller API to v1.1
|
Joshua-Anderson/controller-sdk-go,Joshua-Anderson/controller-sdk-go
|
api/__init__.py
|
api/__init__.py
|
"""
The **api** Django app presents a RESTful web API for interacting with the **deis** system.
"""
__version__ = '1.1.0'
|
"""
The **api** Django app presents a RESTful web API for interacting with the **deis** system.
"""
__version__ = '1.0.0'
|
apache-2.0
|
Python
|
ab3f331246e844812fd91b51908a0d0972a9793f
|
improve run_bin (#885)
|
google/graphicsfuzz,google/graphicsfuzz,google/graphicsfuzz,google/graphicsfuzz,google/graphicsfuzz,google/graphicsfuzz,google/graphicsfuzz
|
gfauto/gfauto/run_bin.py
|
gfauto/gfauto/run_bin.py
|
# -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs a binary from the given binary name and settings file."""
import argparse
import subprocess
import sys
from pathlib import Path
from typing import List
from gfauto import binaries_util, settings_util
from gfauto.gflogging import log
def main() -> int:
    """Parse CLI arguments, resolve the named binary via the binary
    manager, and execute it.

    Returns the child process's exit code.
    """
    parser = argparse.ArgumentParser(
        description="Runs a binary given the binary name and settings.json file. "
        "Use -- to separate args to run_bin and your binary. "
    )

    parser.add_argument(
        "--settings",
        help="Path to the settings JSON file for this instance.",
        default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
    )

    parser.add_argument(
        "binary_name",
        help="The name of the binary to run. E.g. spirv-opt, glslangValidator",
        type=str,
    )

    parser.add_argument(
        "arguments",
        metavar="arguments",
        type=str,
        nargs="*",
        help="The arguments to pass to the binary",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    # Args.
    settings_path: Path = Path(parsed_args.settings)
    binary_name: str = parsed_args.binary_name
    arguments: List[str] = parsed_args.arguments

    try:
        settings = settings_util.read_or_create(settings_path)
    except settings_util.NoSettingsFile:
        # First run: the settings file was just created; inform the user
        # and read it again.
        log(f"Settings file {str(settings_path)} was created for you; using this.")
        settings = settings_util.read_or_create(settings_path)

    binary_manager = binaries_util.get_default_binary_manager(settings=settings)

    cmd = [str(binary_manager.get_binary_path_by_name(binary_name).path)]
    cmd.extend(arguments)
    # check=False: propagate the child's exit status instead of raising.
    return subprocess.run(cmd, check=False).returncode


if __name__ == "__main__":
    sys.exit(main())
|
# -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs a binary from the given binary name and settings file."""
import argparse
import subprocess
import sys
from pathlib import Path
from typing import List
from gfauto import binaries_util, settings_util
def main() -> int:
    """Parse CLI arguments, resolve the named binary via the binary
    manager, and execute it.

    Returns the child process's exit code.
    """
    parser = argparse.ArgumentParser(
        description="Runs a binary given the binary name and settings.json file."
    )

    parser.add_argument(
        "--settings",
        help="Path to the settings JSON file for this instance.",
        default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
    )

    parser.add_argument(
        "binary_name",
        help="The name of the binary to run. E.g. spirv-opt, glslangValidator",
        type=str,
    )

    parser.add_argument(
        "arguments",
        metavar="arguments",
        type=str,
        nargs="*",
        help="The arguments to pass to the binary",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    # Args.
    settings_path: Path = Path(parsed_args.settings)
    binary_name: str = parsed_args.binary_name
    arguments: List[str] = parsed_args.arguments

    settings = settings_util.read_or_create(settings_path)

    binary_manager = binaries_util.get_default_binary_manager(settings=settings)

    cmd = [str(binary_manager.get_binary_path_by_name(binary_name).path)]
    cmd.extend(arguments)
    # check=False: propagate the child's exit status instead of raising.
    return subprocess.run(cmd, check=False).returncode


if __name__ == "__main__":
    sys.exit(main())
|
apache-2.0
|
Python
|
15f22d7c0ac9ddce6cb14cb0cbb35c4d630605d2
|
Remove period so input corresponds to output.
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
api/ud_helper.py
|
api/ud_helper.py
|
import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
    """Wrapper around a UDPipe model that parses raw text to CoNLL-U."""

    # Language code -> on-disk UDPipe model path.
    MODELS = {
        "swe": "data/swedish-ud-2.0-170801.udpipe",
    }

    def __init__(self, language):
        """Load the UDPipe model for *language*; raise ParserException on failure."""
        model_path = self.MODELS.get(language, None)
        if not model_path:
            raise ParserException("Cannot find model for language '%s'" % language)

        model = Model.load(model_path)
        if not model:
            raise ParserException("Cannot load model from file '%s'\n" % model_path)
        self.model = model

    def parse(self, text):
        """Tokenize and parse *text*, returning the CoNLL-U string.

        A trailing period is temporarily appended (it improves detection on
        very short sentences) and its output line stripped again afterwards.
        """
        text = text.strip()

        # Adding a period improves detection on especially short sentences.
        # Fix: guard against empty/whitespace-only input, where indexing
        # [-1] raised IndexError; the second strip() was also redundant
        # since `text` is already stripped.
        period_added = False
        if text and re.match(r"\w", text[-1], flags=re.UNICODE):
            text += "."
            period_added = True

        pipeline = Pipeline(
            self.model,
            "tokenize",
            Pipeline.DEFAULT,
            Pipeline.DEFAULT,
            "conllu"
        )
        error = ProcessingError()

        processed = pipeline.process(text, error)
        if error.occurred():
            raise ParserException(error.message)

        # Remove the period to make sure input corresponds to output
        if period_added:
            processed = "\n".join(processed.rstrip().split("\n")[:-1]) + "\n\n"

        return processed
class ParserException(Exception):
    """Raised for model-loading or pipeline-processing failures."""
|
import re
from ufal.udpipe import Model, Pipeline, ProcessingError
class Parser:
    """Wrapper around a UDPipe model that parses raw text to CoNLL-U."""

    # Language code -> on-disk UDPipe model path.
    MODELS = {
        "swe": "data/swedish-ud-2.0-170801.udpipe",
    }

    def __init__(self, language):
        # Load the UDPipe model for `language`; raise ParserException on failure.
        model_path = self.MODELS.get(language, None)
        if not model_path:
            raise ParserException("Cannot find model for language '%s'" % language)

        model = Model.load(model_path)
        if not model:
            raise ParserException("Cannot load model from file '%s'\n" % model_path)
        self.model = model

    def parse(self, text):
        """Tokenize and parse *text*, returning the CoNLL-U string."""
        text = text.strip()
        # A trailing period improves detection on very short sentences.
        # NOTE(review): text.strip()[-1] raises IndexError on empty or
        # whitespace-only input -- consider guarding; this strip() is also
        # redundant (text was stripped above).
        last_character = text.strip()[-1]
        if re.match(r"\w", last_character, flags=re.UNICODE):
            text += "."

        pipeline = Pipeline(
            self.model,
            "tokenize",
            Pipeline.DEFAULT,
            Pipeline.DEFAULT,
            "conllu"
        )
        error = ProcessingError()

        processed = pipeline.process(text, error)
        if error.occurred():
            raise ParserException(error.message)

        return processed


class ParserException(Exception):
    # Raised for model-loading or pipeline-processing failures.
    pass
|
mit
|
Python
|
dffcfa42fbf4f200a22b739a0cd24f36317b054c
|
Fix so that /api/login/ follow the specified api documentation.
|
tobbez/lys-reader
|
api/userview.py
|
api/userview.py
|
from flask import abort, request, jsonify, make_response, session
from datetime import datetime, timedelta
from api import app
from api.user import *
# Fix: @app.route was applied *before* the csrf/auth decorators, so Flask
# registered the unwrapped functions and the wrappers never ran; @app.route
# must be the outermost (topmost) decorator. Also fixes message typos
# ('paramter' -> 'parameter', 'Sucess' -> 'Success').
@app.route('/api/signup/', methods = ['POST'])
@require_csrf_token
def api_user_signup():
    """Register a new user; respond with a CSRF token plus status code/message."""
    generate_csrf_token(session)
    status = {}
    httpcode = 200
    if 'email' in request.json and 'password' in request.json:
        if register_user(request.json['email'], request.json['password']):
            status['code'] = 0
            status['message'] = 'Success'
        else:
            status['code'] = 1
            status['message'] = 'Could not register user, maybe user already exists?'
    else:
        status['code'] = 2
        status['message'] = 'Missing parameter(s)'
        httpcode = 400
    return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)


@app.route('/api/login/', methods = ['POST'])
@require_csrf_token
def api_user_login():
    """Authenticate a user and mark the session as logged in."""
    generate_csrf_token(session)
    status = {}
    httpcode = 200
    if 'email' in request.json and 'password' in request.json:
        id = check_user_credentials(request.json['email'], request.json['password'])
        if id is not None:
            session['id'] = id
            session['loggedin'] = True
            status['code'] = 0
            status['message'] = 'Success'
        else:
            status['code'] = 4
            status['message'] = 'Email and password combination did not match'
    else:
        status['code'] = 2
        status['message'] = 'Missing parameter(s)'
        httpcode = 400
    return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)


@app.route('/api/logout/', methods = ['POST'])
@require_csrf_token
@require_authentication
def api_user_logout():
    """Destroy the current session."""
    session.destroy()
    response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged out successfully'}), 200)
    return response


@app.route('/api/')
def api_root():
    """Hand out a CSRF token for subsequent API calls."""
    generate_csrf_token(session)
    status = {'code': 0, 'message': 'Success'}
    response = make_response(jsonify({'csrf_token': session['csrf'], 'status': status}), 200)
    return response
|
from flask import abort, request, jsonify, make_response, session
from datetime import datetime, timedelta
from api import app
from api.user import *
# NOTE(review): @app.route is applied before @require_csrf_token in these
# views, so Flask registers the unwrapped function and the CSRF check never
# runs -- @app.route should be the outermost decorator.
@require_csrf_token
@app.route('/api/signup/', methods = ['POST'])
def api_user_signup():
    """Register a new user; respond with a CSRF token plus status code/message."""
    generate_csrf_token(session)
    status = {}
    httpcode = 200
    if 'email' in request.json and 'password' in request.json:
        if register_user(request.json['email'], request.json['password']):
            status['code'] = 0
            status['message'] = 'Success'
        else:
            status['code'] = 1
            status['message'] = 'Could not register user, maybe user already exists?'
    else:
        status['code'] = 2
        status['message'] = 'Missing paramter(s)'
        httpcode = 400
    return make_response(jsonify({ 'csrf_token': session['csrf'], 'status': status }), httpcode)


@require_csrf_token
@app.route('/api/login/', methods = ['POST'])
def api_user_login():
    """Authenticate a user; note this response shape ('status'/'message'
    strings) differs from the code/message format used by the other views."""
    if 'email' in request.json and 'password' in request.json:
        id = check_user_credentials(request.json['email'], request.json['password'])
        if id is not None:
            session = app.open_session(request)
            session['id'] = id
            session['loggedin'] = True
            response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged in successfully'}), 200)
            app.save_session(session, response)
        else:
            response = make_response(jsonify({ 'status': 'FAIL', 'message': 'Email and password combination did not match'}), 200)
        return response
    return make_response(jsonify({ 'status': 'BAD REQUEST', 'message': 'Missing parameters'}), 400)


@require_csrf_token
@require_authentication
@app.route('/api/logout/', methods = ['POST'])
def api_user_logout():
    """Destroy the current session."""
    session.destroy()
    response = make_response(jsonify({ 'status': 'OK', 'message': 'User logged out successfully'}), 200)
    return response


@app.route('/api/')
def api_root():
    """Hand out a CSRF token for subsequent API calls."""
    generate_csrf_token(session)
    # NOTE(review): 'Sucess' is a typo in the client-visible message.
    status = {'code': 0, 'message': 'Sucess'}
    response = make_response(jsonify({'csrf_token': session['csrf'], 'status': status}), 200)
    return response
|
isc
|
Python
|
bfb4ba8cb863d80cdd558ebad25f630fef5dc190
|
Stop to use the __future__ module.
|
openstack/oslo.middleware
|
oslo_middleware/debug.py
|
oslo_middleware/debug.py
|
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Debug middleware"""
import sys
import webob.dec
from oslo_middleware import base
class Debug(base.ConfigurableMiddleware):
    """Helper class that returns debug information.

    Can be inserted into any WSGI application chain to get information about
    the request and response.
    """

    @webob.dec.wsgify
    def __call__(self, req):
        # Dump the WSGI environ, forward the request downstream, then dump
        # the response headers; the body is echoed lazily while iterated.
        print(("*" * 40) + " REQUEST ENVIRON")
        for key, value in req.environ.items():
            print(key, "=", value)
        print()
        resp = req.get_response(self.application)

        print(("*" * 40) + " RESPONSE HEADERS")
        for (key, value) in resp.headers.items():
            print(key, "=", value)
        print()

        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """Prints the contents of a wrapper string iterator when iterated."""
        print(("*" * 40) + " BODY")
        for part in app_iter:
            # Pass each chunk through unchanged while echoing it to stdout.
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()
|
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Debug middleware"""
from __future__ import print_function
import sys
import webob.dec
from oslo_middleware import base
class Debug(base.ConfigurableMiddleware):
    """Helper class that returns debug information.

    Can be inserted into any WSGI application chain to get information about
    the request and response.
    """

    @webob.dec.wsgify
    def __call__(self, req):
        # Dump the WSGI environ, forward the request downstream, then dump
        # the response headers; the body is echoed lazily while iterated.
        print(("*" * 40) + " REQUEST ENVIRON")
        for key, value in req.environ.items():
            print(key, "=", value)
        print()
        resp = req.get_response(self.application)

        print(("*" * 40) + " RESPONSE HEADERS")
        for (key, value) in resp.headers.items():
            print(key, "=", value)
        print()

        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """Prints the contents of a wrapper string iterator when iterated."""
        print(("*" * 40) + " BODY")
        for part in app_iter:
            # Pass each chunk through unchanged while echoing it to stdout.
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()
|
apache-2.0
|
Python
|
f5de027e14e50ff5085ac1765bdfd2ee7646cabb
|
Adjust extras sublime plugin to follow new changes
|
facelessuser/PyMdown,facelessuser/PyMdown,facelessuser/PyMdown
|
extras/sublime_mdown.py
|
extras/sublime_mdown.py
|
import sublime
import sublime_plugin
from os.path import basename, dirname
import subprocess
def parse_file_name(file_name):
    """Split *file_name* into a ``(title, basepath)`` pair.

    A ``None`` file name maps to the placeholder title ``"Untitled"`` with
    no base path.
    """
    if file_name is None:
        return "Untitled", None
    return basename(file_name), dirname(file_name)
class MdownPreviewCommand(sublime_plugin.TextCommand):
    """Sublime Text command that pipes the current buffer through mdown."""

    def run(self, edit):
        title, basepath = parse_file_name(self.view.file_name())
        self.convert(title, basepath)

    def convert(self, title, basepath):
        # Path to the mdown binary comes from the plugin settings file.
        binary = sublime.load_settings("mdown.sublime-settings").get("binary", None)
        if binary is not None:
            # -T sets the document title; -b the base path for relative
            # resources. NOTE(review): the meaning of -s/-p is presumed
            # (stream input / preview) -- confirm against the mdown CLI.
            cmd = [binary, "-T", title, "-s", "-p"]
            if basepath is not None:
                cmd += ["-b", basepath]
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Stream the buffer line by line to the converter's stdin.
            for line in self.view.lines(sublime.Region(0, self.view.size())):
                p.stdin.write((self.view.substr(line) + '\n').encode('utf-8'))
            print(p.communicate()[0].decode("utf-8"))
|
import sublime
import sublime_plugin
from os.path import basename, dirname
import subprocess
def parse_file_name(file_name):
    """Return a ``(title, basepath)`` pair for a view's file name.

    ``None`` yields the placeholder ``("Untitled", None)``.
    """
    has_file = file_name is not None
    title = basename(file_name) if has_file else "Untitled"
    basepath = dirname(file_name) if has_file else None
    return title, basepath
class MdownPreviewCommand(sublime_plugin.TextCommand):
    """Sublime Text command that pipes the current buffer through mdown."""

    def run(self, edit):
        title, basepath = parse_file_name(self.view.file_name())
        self.convert(title, basepath)

    def convert(self, title, basepath):
        # Path to the mdown binary comes from the plugin settings file.
        binary = sublime.load_settings("mdown.sublime-settings").get("binary", None)
        if binary is not None:
            # -t sets the document title; -b the base path for relative
            # resources. NOTE(review): the meaning of -s/-p is presumed
            # (stream input / preview) -- confirm against the mdown CLI.
            cmd = [binary, "-t", title, "-s", "-p"]
            if basepath is not None:
                cmd += ["-b", basepath]
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Stream the buffer line by line to the converter's stdin.
            for line in self.view.lines(sublime.Region(0, self.view.size())):
                p.stdin.write((self.view.substr(line) + '\n').encode('utf-8'))
            print(p.communicate()[0].decode("utf-8"))
|
mit
|
Python
|
2e8a2d2ac8b90a0806bea90c25d9b06ce8cc3a96
|
check roi for each layer
|
carlomt/dicom_tools,carlomt/dicom_tools
|
dicom_tools/myroi2roi.py
|
dicom_tools/myroi2roi.py
|
import numpy as np
from skimage.measure import grid_points_in_poly
from dicom_tools.roiFileHandler import roiFileHandler
def myroi2roi(myrois, shape, verbose=False):
    """Convert a list of per-slice polygon ROIs into a boolean mask array.

    myrois: per-slice entries, each either None (slice untouched, stays
        False) or a mapping with a 'points' polygon accepted by
        skimage.measure.grid_points_in_poly.
    shape: shape of the output mask; its first axis must correspond to the
        entries of myrois.

    Fixes: the length-mismatch message referenced an undefined name `rois`
    (NameError), and the loop rebound its `layer` variable instead of
    writing into `outroi`, so the result was always all-False.
    """
    if verbose:
        print("myroi2roi: called \n")
    outroi = np.full(shape, False, dtype=bool)
    if len(myrois) != len(outroi):
        print("error: len rois = ", len(myrois), " but len dicom=", len(outroi))
    # zip truncates to the shorter of the two, matching the old iteration.
    for index, (myroi, layer) in enumerate(zip(myrois, outroi)):
        if myroi is not None:
            # Assign through the array, not the loop variable, so the
            # rasterized polygon actually lands in outroi.
            outroi[index] = grid_points_in_poly(layer.shape, myroi['points'])
    if verbose:
        print("myroi2roi: returning \n")
    return outroi
|
import numpy as np
from skimage.measure import grid_points_in_poly
from dicom_tools.roiFileHandler import roiFileHandler
def myroi2roi(myrois, shape, verbose=False):
    """Convert a list of per-slice polygon ROIs into a boolean mask array."""
    if verbose:
        print("myroi2roi: called \n")
    outroi = np.full(shape,False,dtype=bool)
    # NOTE(review): `rois` is undefined -- this print raises NameError
    # whenever the lengths differ; it should be len(myrois).
    if len(myrois) != len(outroi):
        print("error: len rois = ",len(rois)," but len dicom=",len(outroi))
    # NOTE(review): rebinding `layer` does not write into outroi, so the
    # result is always all-False (use layer[:] = ... or index outroi);
    # a None entry in myrois would also crash grid_points_in_poly here.
    for myroi, layer in zip(myrois,outroi):
        layer = grid_points_in_poly(layer.shape, myroi['points'])
    if verbose:
        print("myroi2roi: returning \n")
    return outroi
|
mit
|
Python
|
ba5edd102ddd53f2e95da8b673bf14bdd72dc012
|
Add quotes around user-provided values
|
google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed,google/pigweed
|
pw_cli/py/pw_cli/argument_types.py
|
pw_cli/py/pw_cli/argument_types.py
|
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Defines argument types for use with argparse."""
import argparse
import logging
from pathlib import Path
def directory(arg: str) -> Path:
    """argparse type: accept an existing directory, returning its resolved
    (absolute) Path; raise ArgumentTypeError otherwise."""
    path = Path(arg)
    if path.is_dir():
        return path.resolve()
    raise argparse.ArgumentTypeError(f'"{path}" is not a directory')


def log_level(arg: str) -> int:
    """argparse type: map a case-insensitive level name (e.g. 'debug') to
    its numeric logging level; raise ArgumentTypeError for unknown names."""
    try:
        return getattr(logging, arg.upper())
    except AttributeError:
        # `from None` suppresses the unhelpful chained AttributeError
        # traceback; the ArgumentTypeError is the whole story here.
        raise argparse.ArgumentTypeError(
            f'"{arg.upper()}" is not a valid log level') from None
|
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Defines argument types for use with argparse."""
import argparse
import logging
from pathlib import Path
def directory(arg: str) -> Path:
    """Interpret *arg* as an existing directory and return its resolved path."""
    candidate = Path(arg)
    if not candidate.is_dir():
        raise argparse.ArgumentTypeError(f'{candidate} is not a directory')
    return candidate.resolve()


def log_level(arg: str) -> int:
    """Map a case-insensitive level name to its numeric logging level."""
    name = arg.upper()
    try:
        return getattr(logging, name)
    except AttributeError:
        raise argparse.ArgumentTypeError(
            f'{name} is not a valid log level')
|
apache-2.0
|
Python
|
4283aaf601482ee2512c642101f587ffe3515ef9
|
raise if user doesn't exist in forgotten password form
|
Psycojoker/voltairine,Psycojoker/voltairine,Psycojoker/voltairine
|
authentification/forms.py
|
authentification/forms.py
|
from django import forms
from django.contrib.auth.models import User
class ForgottenPasswordForm(forms.Form):
    """Password-reset request form; rejects usernames with no matching user."""
    username = forms.CharField(label="Identifiant")
    email = forms.EmailField(label="Votre adresse e-mail")

    def clean_username(self):
        # Field-level validation: the username must belong to an existing user.
        username = self.cleaned_data['username']
        if not User.objects.filter(username=username).exists():
            raise forms.ValidationError("Cet utilisateur n'existe pas")
        return username
|
from django import forms
class ForgottenPasswordForm(forms.Form):
    """Password-reset request form (username + e-mail address)."""
    username = forms.CharField(label="Identifiant")
    email = forms.EmailField(label="Votre adresse e-mail")
|
agpl-3.0
|
Python
|
f55c0bd8db7850668582bb7b47da4d0acafabc46
|
Optimize imports
|
gwhigs/digital-manifesto,gwhigs/digital-manifesto,gwhigs/digital-manifesto,gwhigs/digital-manifesto
|
digitalmanifesto/urls.py
|
digitalmanifesto/urls.py
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from . import views
# URL routing table: admin UIs, static template pages, app includes, the
# Let's Encrypt HTTP-01 challenge endpoint, and allauth account management.
urlpatterns = [
    # Admin
    url(r'^jet/', include('jet.urls', 'jet')),  # Django JET URLS
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.IndexView.as_view(), name='index'),

    # Simple template views
    url(r'^about/$', TemplateView.as_view(template_name='about.html'), name='about'),
    url(r'^contact/$', TemplateView.as_view(template_name='contact.html'), name='contact'),
    url(r'^news/$', TemplateView.as_view(template_name='news.html'), name='news'),
    url(r'^projects-we-like/$', TemplateView.as_view(template_name='projects_we_like.html'), name='projects'),
    url(r'^resources/$', TemplateView.as_view(template_name='resources.html'), name='resources'),
    url(r'^twitterbot/$', TemplateView.as_view(template_name='twitterbot.html'), name='twitterbot'),

    url(r'^manifestos/', include('manifestos.urls', namespace='manifestos')),
    url(r'^annotations/', include('annotations.urls', namespace='annotations')),

    # Let's Encrypt challenge
    url(r'^\.well-known/acme-challenge/(?P<key>.*)/', views.acme_challenge),

    # allauth
    url(r'^accounts/', include('allauth.urls')),
]
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from . import views
urlpatterns = [
# Admin
url(r'^jet/', include('jet.urls', 'jet')), # Django JET URLS
url(r'^admin/', include(admin.site.urls)),
url(r'^$', views.IndexView.as_view(), name='index'),
# Simple template views
url(r'^about/$', TemplateView.as_view(template_name='about.html'), name='about'),
url(r'^contact/$', TemplateView.as_view(template_name='contact.html'), name='contact'),
url(r'^news/$', TemplateView.as_view(template_name='news.html'), name='news'),
url(r'^projects-we-like/$', TemplateView.as_view(template_name='projects_we_like.html'), name='projects'),
url(r'^resources/$', TemplateView.as_view(template_name='resources.html'), name='resources'),
url(r'^twitterbot/$', TemplateView.as_view(template_name='twitterbot.html'), name='twitterbot'),
url(r'^manifestos/', include('manifestos.urls', namespace='manifestos')),
url(r'^annotations/', include('annotations.urls', namespace='annotations')),
# Let's Encrypt challenge
url(r'^\.well-known/acme-challenge/(?P<key>.*)/', views.acme_challenge),
# allauth
url(r'^accounts/', include('allauth.urls')),
]
|
mit
|
Python
|
a5f3ad5700aa766fec99a184bae1d732d0754491
|
Support of HACluster added
|
viswesn/charm-murano
|
src/reactive/murano_handlers.py
|
src/reactive/murano_handlers.py
|
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import charms_openstack.charm as charm
import charms.reactive as reactive
import charmhelpers.core.hookenv as hookenv
# This charm's library contains all of the handler code associated with
# sdn_charm
import charm.openstack.murano as murano # noqa
# Register the stock reactive handlers supplied by charms.openstack for the
# common lifecycle states; only Murano-specific behaviour is implemented below.
charm.use_defaults(
    'charm.installed',
    'amqp.connected',
    'shared-db.connected',
    'identity-service.connected',
    'identity-service.available', # enables SSL support
    'config.changed',
    'update-status')
# States that must all be present before the service can be configured.
COMPLETE_INTERFACE_STATES = [
    'shared-db.available',
    'identity-service.available',
    'amqp.available',
]
@reactive.when(*COMPLETE_INTERFACE_STATES)
def render_config(*args):
    """Render the configuration for charm when all the interfaces are
    available.
    """
    with charm.provide_charm_instance() as charm_class:
        charm_class.render_with_interfaces(args)
        charm_class.assess_status()
    # novarc is rendered outside the charm-instance context — presumably it
    # needs the raw interface objects; TODO confirm.
    murano.render_novarc_config(args)
    reactive.set_state('config.rendered')
# db_sync checks if sync has been done so rerunning is a noop
@reactive.when('config.rendered')
def init_db():
    """Run database migrations once the configuration has been rendered."""
    with charm.provide_charm_instance() as charm_class:
        charm_class.db_sync()
@reactive.when_not('io-murano.imported')
@reactive.when(*COMPLETE_INTERFACE_STATES)
@reactive.when('config.rendered')
def import_io_murano(*args):
    """Import the core io.murano package exactly once, after configuration."""
    murano.import_io_murano()
    reactive.set_state('io-murano.imported')
@reactive.when('ha.connected')
def cluster_connected(hacluster):
    """Configure HA resources when the hacluster relation is established."""
    murano.configure_ha_resources(hacluster)
    murano.assess_status()
@reactive.hook('upgrade-charm')
def upgrade_charm():
    """Reinstall the charm payload on upgrade."""
    murano.install()
|
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import charms_openstack.charm as charm
import charms.reactive as reactive
import charmhelpers.core.hookenv as hookenv
# This charm's library contains all of the handler code associated with
# sdn_charm
import charm.openstack.murano as murano # noqa
charm.use_defaults(
'charm.installed',
'amqp.connected',
'shared-db.connected',
'identity-service.connected',
'identity-service.available', # enables SSL support
'config.changed',
'update-status')
COMPLETE_INTERFACE_STATES = [
'shared-db.available',
'identity-service.available',
'amqp.available',
]
@reactive.when(*COMPLETE_INTERFACE_STATES)
def render_config(*args):
"""Render the configuration for charm when all the interfaces are
available.
"""
with charm.provide_charm_instance() as charm_class:
charm_class.render_with_interfaces(args)
charm_class.assess_status()
murano.render_novarc_config(args)
reactive.set_state('config.rendered')
# db_sync checks if sync has been done so rerunning is a noop
@reactive.when('config.rendered')
def init_db():
with charm.provide_charm_instance() as charm_class:
charm_class.db_sync()
@reactive.when_not('io-murano.imported')
@reactive.when(*COMPLETE_INTERFACE_STATES)
@reactive.when('config.rendered')
def import_io_murano(*args):
murano.import_io_murano()
reactive.set_state('io-murano.imported')
|
apache-2.0
|
Python
|
f5d948c159a4d398a1347220a4fcd4315c725b04
|
Fix issue handling Image as a paint source
|
mfnch/pyrtist,mfnch/pyrtist,mfnch/pyrtist,mfnch/pyrtist,mfnch/pyrtist,mfnch/pyrtist
|
pyrtist/pyrtist/lib2d/primitive.py
|
pyrtist/pyrtist/lib2d/primitive.py
|
__all__ = ('Primitive',)
from .core_types import Point
from .style import Stroke, Fill, StrokeStyle, Style
from .pattern import Pattern
from .path import Path
from .base import Taker, combination
from .cmd_stream import CmdStream, Cmd
from .window import Window
from .bbox import BBox
class Primitive(Taker):
    """Base class for drawable primitives.

    A Primitive accumulates a Style from the arguments passed to it and
    provides the path-building hook used to render its own shape.
    """
    def __init__(self, *args):
        super(Primitive, self).__init__()
        # Style collected from Pattern/StrokeStyle/Style arguments.
        self.style = Style()
        self.take(*args)
    def build_path(self):
        """Return the path commands describing this primitive.

        The base class draws nothing; subclasses override this.
        """
        return []
# Feeding a style-like object to a Primitive stores it on the primitive.
@combination(Pattern, Primitive)
@combination(StrokeStyle, Primitive)
@combination(Style, Primitive)
def style_at_primitive(style, primitive):
    primitive.style.take(style)
# A Primitive contributes its path commands to a Path.
@combination(Primitive, Path)
def primitive_at_path(primitive, path):
    path.cmd_stream.take(*primitive.build_path())
# Rendering into a CmdStream emits the primitive's path plus its style.
@combination(Primitive, CmdStream)
def primitive_at_cmd_stream(primitive, cmd_stream):
    cmd_stream.take(Path(primitive), primitive.style)
# Drawing a Primitive in a Window goes through a CmdStream.
@combination(Primitive, Window)
def primitive_at_window(primitive, window):
    window.take(CmdStream(primitive))
# Stroke and Fill operate on the primitive's Path.
@combination(Primitive, Stroke)
def primitive_at_stroke(primitive, stroke):
    stroke.take(Path(primitive))
@combination(Primitive, Fill)
def primitive_at_fill(primitive, fill):
    fill.take(Path(primitive))
# The bounding box of a Primitive is that of a Window containing it.
@combination(Primitive, BBox)
def primitive_at_bbox(primitive, bbox):
    bbox.take(Window(primitive))
|
__all__ = ('Primitive',)
from .core_types import Point
from .style import Color, Stroke, Fill, StrokeStyle, Style
from .path import Path
from .base import Taker, combination
from .cmd_stream import CmdStream, Cmd
from .window import Window
from .bbox import BBox
class Primitive(Taker):
def __init__(self, *args):
super(Primitive, self).__init__()
self.style = Style()
self.take(*args)
def build_path(self):
return []
@combination(Color, Primitive)
@combination(StrokeStyle, Primitive)
@combination(Style, Primitive)
def style_at_primitive(style, primitive):
primitive.style.take(style)
@combination(Primitive, Path)
def primitive_at_path(primitive, path):
path.cmd_stream.take(*primitive.build_path())
@combination(Primitive, CmdStream)
def primitive_at_cmd_stream(primitive, cmd_stream):
cmd_stream.take(Path(primitive), primitive.style)
@combination(Primitive, Window)
def primitive_at_window(primitive, window):
window.take(CmdStream(primitive))
@combination(Primitive, Stroke)
def primitive_at_stroke(primitive, stroke):
stroke.take(Path(primitive))
@combination(Primitive, Fill)
def primitive_at_fill(primitive, fill):
fill.take(Path(primitive))
@combination(Primitive, BBox)
def primitive_at_bbox(primitive, bbox):
bbox.take(Window(primitive))
|
lgpl-2.1
|
Python
|
9783844b1597598fad833794b4b291fce49438d4
|
Send alerts as one mail
|
nikdoof/test-auth
|
app/hr/tasks.py
|
app/hr/tasks.py
|
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from hr.utils import blacklist_values
from django.contrib.auth.models import User
from django.core.mail import send_mail
@task(ignore_result=True)
def blacklist_check():
    """Scan all active users that belong to at least one group against the
    blacklist, and send a single summary e-mail covering every suspect user.
    """
    log = blacklist_check.get_logger()
    users = User.objects.filter(is_active=True)
    alerts = 0  # number of suspect users found
    msg = ""    # accumulated e-mail body
    for u in users:
        if u.groups.count() > 0:
            # Has groups
            val = blacklist_values(u)
            if len(val) > 0:
                alerts += 1
                # Report possible issue
                log.warning("Suspect User: %s, %s entries found: %s" % (u.username, len(val), val))
                blstr = ""
                for i in val:
                    blstr = "%s%s - %s - %s\n" % (blstr, i.get_type_display(), i.value, i.reason)
                # Separator between user sections (also precedes the first one).
                msg += "\n\n-----\n\n"
                msg += "Suspect User found: %s\nGroups: %s\nBlacklist Items:\n\n%s" % (u.username, ", ".join(u.groups.all().values_list('name', flat=True)), blstr)
    # Only send mail when at least one suspect user was found.
    if alerts:
        send_mail('Automated blacklist checker alerts', msg, '[email protected]', ['[email protected]'])
|
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.decorators import task
from hr.utils import blacklist_values
from django.contrib.auth.models import User
from django.core.mail import send_mail
@task(ignore_result=True)
def blacklist_check():
log = blacklist_check.get_logger()
users = User.objects.filter(is_active=True)
for u in users:
if u.groups.count() > 0:
# Has groups
val = blacklist_values(u)
if len(val) > 0:
# Report possible issue
log.warning("Suspect User: %s, %s entries found: %s" % (u.username, len(val), val))
blstr = ""
for i in val:
blstr = "%s%s - %s - %s\n" % (blstr, i.get_type_display(), i.value, i.reason)
msg = "Suspect User found: %s\nGroups: %s\nBlacklist Items:\n\n%s" % (u.username, ", ".join(u.groups.all().values_list('name', flat=True)), blstr)
send_mail('Automated blacklist checker alert - %s' % u.username, msg, '[email protected]', ['[email protected]'])
|
bsd-3-clause
|
Python
|
0dfd0ec2beb069d56d7b81911bb468199565672a
|
remove print
|
ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt
|
python/ccxtpro/base/fast_client.py
|
python/ccxtpro/base/fast_client.py
|
"""A faster version of aiohttp's websocket client that uses select and other optimizations"""
import asyncio
import collections
from ccxt import NetworkError
from ccxtpro.base.aiohttp_client import AiohttpClient
class FastClient(AiohttpClient):
    """WebSocket client that feeds frames straight off aiohttp's parser.

    aiohttp normally routes every frame through its internal DataQueue and
    ``receive()`` coroutine; this class monkey-patches the reader so frames
    land on a plain deque and are dispatched via ``call_soon``, skipping
    that machinery.
    """
    # asyncio transport of the underlying connection; saved so reset() can
    # abort the socket immediately.
    transport = None
    def __init__(self, url, on_message_callback, on_error_callback, on_close_callback, config={}):
        super(FastClient, self).__init__(url, on_message_callback, on_error_callback, on_close_callback, config)
        # instead of using the deque in aiohttp we implement our own for speed
        # https://github.com/aio-libs/aiohttp/blob/1d296d549050aa335ef542421b8b7dad788246d5/aiohttp/streams.py#L534
        self.stack = collections.deque()
    def receive_loop(self):
        """Hook the aiohttp reader so messages bypass its internal queue.

        Returns an awaitable no-op so the superclass can await it like the
        usual receive loop (or None when the connection is already closed).
        """
        def handler():
            # Dispatch one queued message, then reschedule itself.
            if not self.stack:
                return
            message = self.stack.popleft()
            self.handle_message(message)
            self.asyncio_loop.call_soon(handler)
        def feed_data(message, size):
            # Called by the websocket reader for every parsed frame; only
            # schedule the drain when the stack transitions from empty.
            if not self.stack:
                self.asyncio_loop.call_soon(handler)
            self.stack.append(message)
        def feed_eof():
            # Remote closed the stream: surface as abnormal closure (1006).
            self.on_error(NetworkError(1006))
        def wrapper(func):
            def parse_frame(buf):
                # Drain any backlog before parsing more frames so the
                # stack never grows unboundedly.
                while len(self.stack) > 1:
                    self.handle_message(self.stack.popleft())
                return func(buf)
            return parse_frame
        connection = self.connection._conn
        if connection.closed:
            # connection got terminated after the connection was made and before the receive loop ran
            self.on_close(1006)
            return
        self.transport = connection.transport
        # NOTE(review): reaches into aiohttp private attributes
        # (_conn, _payload_parser) — tied to a specific aiohttp version.
        ws_reader = connection.protocol._payload_parser
        ws_reader.parse_frame = wrapper(ws_reader.parse_frame)
        ws_reader.queue.feed_data = feed_data
        ws_reader.queue.feed_eof = feed_eof
        # return a future so super class won't complain
        return asyncio.sleep(0)
    def reset(self, error):
        """Drop any queued messages and hard-abort the transport."""
        super(FastClient, self).reset(error)
        self.stack.clear()
        if self.transport:
            self.transport.abort()
|
"""A faster version of aiohttp's websocket client that uses select and other optimizations"""
import asyncio
import collections
from ccxt import NetworkError
from ccxtpro.base.aiohttp_client import AiohttpClient
class FastClient(AiohttpClient):
transport = None
def __init__(self, url, on_message_callback, on_error_callback, on_close_callback, config={}):
super(FastClient, self).__init__(url, on_message_callback, on_error_callback, on_close_callback, config)
# instead of using the deque in aiohttp we implement our own for speed
# https://github.com/aio-libs/aiohttp/blob/1d296d549050aa335ef542421b8b7dad788246d5/aiohttp/streams.py#L534
self.stack = collections.deque()
def receive_loop(self):
def handler():
if not self.stack:
return
message = self.stack.popleft()
self.handle_message(message)
self.asyncio_loop.call_soon(handler)
def feed_data(message, size):
if not self.stack:
self.asyncio_loop.call_soon(handler)
self.stack.append(message)
def feed_eof():
self.on_error(NetworkError(1006))
def wrapper(func):
def parse_frame(buf):
while len(self.stack) > 1:
self.handle_message(self.stack.popleft())
return func(buf)
return parse_frame
connection = self.connection._conn
if connection.closed:
# connection got terminated after the connection was made and before the receive loop ran
self.on_close(1006)
return
self.transport = connection.transport
ws_reader = connection.protocol._payload_parser
ws_reader.parse_frame = wrapper(ws_reader.parse_frame)
ws_reader.queue.feed_data = feed_data
ws_reader.queue.feed_eof = feed_eof
# return a future so super class won't complain
return asyncio.sleep(0)
def reset(self, error):
super(FastClient, self).reset(error)
self.stack.clear()
if self.transport:
self.transport.abort()
def resolve(self, result, message_hash=None):
super(FastClient, self).resolve(result, message_hash)
print('resolved', message_hash)
|
mit
|
Python
|
d51adea3d19578da9165202696d80c44949c43f6
|
remove debug level logging from i2tun.py
|
str4d/i2p-tools,str4d/i2p-tools,str4d/i2p-tools,majestrate/i2p-tools,str4d/i2p-tools,majestrate/i2p-tools,majestrate/i2p-tools,majestrate/i2p-tools,majestrate/i2p-tools
|
i2tun/i2tun.py
|
i2tun/i2tun.py
|
#!/usr/bin/env python3.4
from i2p.i2cp import client as i2cp
import pytun
import threading
import logging
import struct
import select
class IPV4Handler(i2cp.I2CPHandler):
    """Bridges a local TUN interface to a remote I2P destination.

    IPv4 packets read from the TUN device are forwarded to the remote
    destination as I2P datagrams, and datagrams received from that
    destination are written back to the interface.
    """
    def __init__(self, remote_dest, our_addr, their_addr, mtu):
        # Remote peer we tunnel to/from.
        self._them = remote_dest
        # Local TUN device configured with our/their tunnel endpoints.
        self._iface = pytun.TunTapDevice()
        self._iface.addr = our_addr
        self._iface.dstaddr = their_addr
        self._iface.mtu = mtu
        self._iface.up()
    def session_made(self, con):
        """Called when the I2CP session is up; starts the forwarding thread."""
        print ('we are {}'.format(con.dest.base32()))
        self.con = con
        threading.Thread(target=self.mainloop, args=(con,)).start()
    def mainloop(self, con):
        """Forever pump packets from the TUN device into I2P datagrams."""
        while True:
            print ('read')
            buff = self._iface.read(self._iface.mtu)
            print ('send')
            self.con.send_dsa_dgram(self._them, buff)
    def got_dgram(self, dest, data, srcport, dstport):
        """Write a datagram received from the tunnelled peer to the interface."""
        # NOTE(review): assumes self._them holds the peer's base32 string —
        # confirm against the i2cp client API.
        if dest.base32() == self._them:
            self._iface.write(data)
def main():
    """Parse command-line arguments, build the handler and open the I2CP session."""
    import argparse
    ap = argparse.ArgumentParser()
    ap.add_argument('--remote', required=True, type=str)
    ap.add_argument('--our-addr', required=True, type=str)
    ap.add_argument('--their-addr', required=True, type=str)
    ap.add_argument('--mtu', default=3600 ,type=int)
    ap.add_argument('--i2cp-host', default='127.0.0.1', type=str)
    ap.add_argument('--i2cp-port', default=7654, type=int)
    args = ap.parse_args()
    handler = IPV4Handler(args.remote, args.our_addr, args.their_addr, args.mtu)
    con = i2cp.Connection(handler, i2cp_host=args.i2cp_host, i2cp_port=args.i2cp_port)
    con.open()
    # Blocks for the lifetime of the tunnel.
    con.start()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3.4
from i2p.i2cp import client as i2cp
import pytun
import threading
import logging
import struct
import select
class IPV4Handler(i2cp.I2CPHandler):
def __init__(self, remote_dest, our_addr, their_addr, mtu):
self._them = remote_dest
self._iface = pytun.TunTapDevice()
self._iface.addr = our_addr
self._iface.dstaddr = their_addr
self._iface.mtu = mtu
self._iface.up()
def session_made(self, con):
print ('we are {}'.format(con.dest.base32()))
self.con = con
threading.Thread(target=self.mainloop, args=(con,)).start()
def mainloop(self, con):
while True:
print ('read')
buff = self._iface.read(self._iface.mtu)
print ('send')
self.con.send_dgram(self._them, buff)
def got_dgram(self, dest, data, srcport, dstport):
if dest.base32() == self._them:
self._iface.write(data)
def main():
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
ap = argparse.ArgumentParser()
ap.add_argument('--remote', required=True, type=str)
ap.add_argument('--our-addr', required=True, type=str)
ap.add_argument('--their-addr', required=True, type=str)
ap.add_argument('--mtu', default=3600 ,type=int)
ap.add_argument('--i2cp-host', default='127.0.0.1', type=str)
ap.add_argument('--i2cp-port', default=7654, type=int)
args = ap.parse_args()
handler = IPV4Handler(args.remote, args.our_addr, args.their_addr, args.mtu)
con = i2cp.Connection(handler, i2cp_host=args.i2cp_host, i2cp_port=args.i2cp_port)
con.open()
con.start()
if __name__ == '__main__':
main()
|
mit
|
Python
|
abe4f0577baef3dbbceb06fc6d569d2bec69257e
|
Fix internal import
|
tensorflow/probability,tensorflow/probability
|
tensorflow_probability/python/internal/backend/jax/rewrite.py
|
tensorflow_probability/python/internal/backend/jax/rewrite.py
|
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for NP->JAX."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
# Dependency imports
from absl import app
def main(argv):
  """Rewrite a numpy-backend source file for JAX and print it to stdout.

  Args:
    argv: Command-line arguments; ``argv[1]`` is the path of the file to
      rewrite.
  """
  # Use a context manager so the handle is closed deterministically
  # (the original `open(...).read()` leaked it until GC).
  with open(argv[1]) as f:
    contents = f.read()
  # Retarget the numpy backend package onto the jax backend package.
  contents = contents.replace(
      "tensorflow_probability.python.internal.backend.numpy",
      "tensorflow_probability.python.internal.backend.jax")
  contents = contents.replace(
      "from tensorflow_probability.python.internal.backend import numpy",
      "from tensorflow_probability.python.internal.backend import jax")
  # Use JAX's accelerated scipy where available.
  contents = contents.replace("scipy.linalg", "jax.scipy.linalg")
  contents = contents.replace("scipy.special", "jax.scipy.special")
  # Flip the mode flag and enable 64-bit math in JAX.
  contents = contents.replace(
      "MODE_JAX = False",
      "MODE_JAX = True\n"
      "from jax.config import config; config.update('jax_enable_x64', True)")
  # `np` becomes jax.numpy; keep original numpy importable as `onp` for the
  # few APIs jax.numpy does not provide.
  contents = contents.replace("\nimport numpy as np",
                              "\nimport numpy as onp\nimport jax.numpy as np")
  contents = contents.replace("np.bool", "onp.bool")
  contents = contents.replace("np.dtype", "onp.dtype")
  contents = contents.replace("np.generic", "onp.generic")
  contents = contents.replace("np.broadcast", "onp.broadcast")
  contents = contents.replace("JAX_MODE = False", "JAX_MODE = True")
  print(contents)
if __name__ == "__main__":
app.run(main)
|
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for NP->JAX."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
from absl import app
def main(argv):
contents = open(argv[1]).read()
contents = contents.replace(
"tensorflow_probability.python.internal.backend.numpy",
"tensorflow_probability.python.internal.backend.jax")
contents = contents.replace(
"from tensorflow_probability.python.internal.backend import numpy",
"from tensorflow_probability.python.internal.backend import jax")
contents = contents.replace("scipy.linalg", "jax.scipy.linalg")
contents = contents.replace("scipy.special", "jax.scipy.special")
contents = contents.replace(
"MODE_JAX = False",
"MODE_JAX = True\n"
"from jax.config import config; config.update('jax_enable_x64', True)")
contents = contents.replace("\nimport numpy as np",
"\nimport numpy as onp\nimport jax.numpy as np")
contents = contents.replace("np.bool", "onp.bool")
contents = contents.replace("np.dtype", "onp.dtype")
contents = contents.replace("np.generic", "onp.generic")
contents = contents.replace("np.broadcast", "onp.broadcast")
contents = contents.replace("JAX_MODE = False", "JAX_MODE = True")
print(contents)
if __name__ == "__main__":
app.run(main)
|
apache-2.0
|
Python
|
695e171d1eca459075ad03adf0712f5b7427cac4
|
Add get_or_404() to __all__
|
dirn/Flask-Simon,dirn/Flask-Simon
|
flask_simon/__init__.py
|
flask_simon/__init__.py
|
from flask import abort
from pymongo import uri_parser
import simon.connection
__all__ = ('Simon', 'get_or_404')
class Simon(object):
    """Flask extension that connects Simon models to the app's MongoDB.

    Connection settings are taken from ``MONGO_URI`` when present,
    otherwise a local database named after the app is used.
    """
    def __init__(self, app=None):
        # Support both direct init and the deferred init_app() pattern.
        if app is not None:
            self.init_app(app)
    def init_app(self, app):
        """Read the app's Mongo settings and open the Simon connection.

        Raises:
            ValueError: if ``MONGO_URI`` lacks a database name.
        """
        if 'simon' not in app.extensions:
            app.extensions['simon'] = {}
        if 'MONGO_URI' in app.config:
            parsed = uri_parser.parse_uri(app.config['MONGO_URI'])
            if not parsed.get('database'):
                raise ValueError('MONGO_URI does not contain a database name.')
            # Cache the parsed URI components back onto the app config so
            # other code can read them individually.
            app.config['MONGO_DBNAME'] = parsed['database']
            app.config['MONGO_USERNAME'] = parsed['username']
            app.config['MONGO_PASSWORD'] = parsed['password']
            app.config['REPLICA_SET'] = parsed['options'].get('replica_set')
            host = app.config['MONGO_URI']
            name = app.config['MONGO_DBNAME']
            username = app.config['MONGO_USERNAME']
            password = app.config['MONGO_PASSWORD']
            replica_set = app.config['REPLICA_SET']
            simon.connection.connect(host_or_uri=host, name=name,
                                     username=username, password=password,
                                     replica_set=replica_set)
        else:
            # No URI configured: default to a local database named after the app.
            host = app.config['HOST'] = 'localhost'
            name = app.config['MONGO_DBNAME'] = app.name
            simon.connection.connect(host=host, name=name)
def get_or_404(model, *qs, **fields):
    """Return the single document matching the query, or abort with HTTP 404.

    Both "no document" and "more than one document" are treated as not found.
    """
    not_found = (model.NoDocumentFound, model.MultipleDocumentsFound)
    try:
        document = model.get(*qs, **fields)
    except not_found:
        abort(404)
    else:
        return document
|
__all__ = ('Simon',)
import simon.connection
from flask import abort
from pymongo import uri_parser
class Simon(object):
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
if 'simon' not in app.extensions:
app.extensions['simon'] = {}
if 'MONGO_URI' in app.config:
parsed = uri_parser.parse_uri(app.config['MONGO_URI'])
if not parsed.get('database'):
raise ValueError('MONGO_URI does not contain a database name.')
app.config['MONGO_DBNAME'] = parsed['database']
app.config['MONGO_USERNAME'] = parsed['username']
app.config['MONGO_PASSWORD'] = parsed['password']
app.config['REPLICA_SET'] = parsed['options'].get('replica_set')
host = app.config['MONGO_URI']
name = app.config['MONGO_DBNAME']
username = app.config['MONGO_USERNAME']
password = app.config['MONGO_PASSWORD']
replica_set = app.config['REPLICA_SET']
simon.connection.connect(host_or_uri=host, name=name,
username=username, password=password,
replica_set=replica_set)
else:
host = app.config['HOST'] = 'localhost'
name = app.config['MONGO_DBNAME'] = app.name
simon.connection.connect(host=host, name=name)
def get_or_404(model, *qs, **fields):
try:
return model.get(*qs, **fields)
except (model.NoDocumentFound, model.MultipleDocumentsFound):
abort(404)
|
bsd-3-clause
|
Python
|
24ee61ecf5767d10b2fb92acc5d0217ffbfb3834
|
Update get_branches.py
|
YufeiZhang/nothingButUseless
|
Group8/get_branches.py
|
Group8/get_branches.py
|
ny branches we have
#print(branches) # this shows all branches in a list
#print(branches_posi)
from pyfbsdk import *
import math
'''
This file is to read all branches of both target and source skeleton
This should be using motion-builder
I used People.FBX as a testcase
'''
def get_banch(parents, children, index, branches):
    """Depth-first walk recording every root-to-leaf chain of joint names.

    *parents* is the chain accumulated so far, *children* the node being
    visited, *index* the depth of that node, and *branches* the output list
    that completed chains are appended to.  Returns the extended chain.
    """
    parents.append(children.Name)
    kids = children.Children
    if not kids:
        # Reached a leaf: the accumulated chain is one complete branch.
        branches.append(parents)
    elif len(kids) == 1:
        # Single child: keep extending the same chain.
        parents = get_banch(parents, kids[0], index + 1, branches)
    else:
        # Fork: each child continues from a copy of the chain up to here.
        for kid in kids:
            get_banch(parents[:index + 1], kid, index + 1, branches)
    return parents
def get_branches(root):
    """Collect every root-to-leaf branch of the skeleton rooted at *root*.

    Returns a list of branches, each a list of joint names starting with
    the root's own name.
    """
    branches = []
    for child in root.Children:
        # Each top-level child starts a fresh chain seeded with the root name.
        get_banch([root.Name], child, 1, branches)
    return branches
def get_branches_posi(branches, file_name):
    """Resolve each joint name in *branches* to its world translation.

    For every branch a parallel list of FBVector3d positions is built, and
    each joint's coordinates are also dumped to *file_name* (one joint per
    line, with a separator line between branches).

    Bug fix: the original reused a single FBVector3d for every joint, so
    all appended entries aliased one vector that ended up holding only the
    last joint's position.  A fresh vector is now created per joint, and
    the dump file is closed via a context manager.
    """
    branches_posi = []
    with open(file_name, "w") as out:
        for b in branches:
            bran_posi = []
            for name in b:
                n = FBFindModelByLabelName(name)
                # One vector per joint so stored positions remain distinct.
                node = FBVector3d()
                n.GetVector(node, FBModelTransformationType.kModelTranslation)
                bran_posi.append(node)
                out.write(repr(node[0]) + " ")
                out.write(repr(node[1]) + " ")
                out.write(repr(node[2]) + '\n')
            out.write('------------------------------------\n')
            branches_posi.append(bran_posi)
    return branches_posi
# Chose the node that has the highest betweeness
#root = FBFindModelByLabelName('PMD_Kristoff__summer_')
root = FBFindModelByLabelName('Bip01')
branches = get_branches(root)
branches_posi = get_branches_posi(branches, "1.txt")
print(len(branches)) # this tells you how many branches we have
#print(branches) # this shows all branches in a list
root2 = FBFindModelByLabelName('PMD_Kristoff__summer_')
branches2 = get_branches(root2)
branches_posi2 = get_branches_posi(branches2, "2.txt")
print(len(branches2))
|
from pyfbsdk import *
import math
'''
This file is to read all branches of both target and source skeleton
This should be using motion-builder
I used People.FBX as a testcase
'''
def get_banch(parents, children, index, branches):
parents.append(children.Name)
# if there is no children, append this branch to branches
if len(children.Children) == 0:
branches.append(parents)
# if there is a children, then go to the child
elif len(children.Children) == 1:
parents = get_banch(parents, children.Children[0], index+1, branches)
# if there are several leaves, then search each leaf
else:
for i in range(len(children.Children)):
new = []
new = get_banch(parents[:index+1], children.Children[i], index+1, branches)
return parents
def get_branches(root):
branches = []
if len(root.Children) > 0:
# you need to check len(root.Children)
for i in range(len(root.Children)): # this is to stop the loop
branch = []
branch.append(root.Name) # skeleton[0] -> root
# initialize the node and get its children
parents = branch[:len(branch)]
children = root.Children[i]
# start the loop to find all leaves
# the initial index may be wrong, you'd better check it.
branch = get_banch(parents, children, 1, branches)
#print()
#print("\n\n\n\n")
return branches
def get_branches_posi(branches):
branches_posi = []
node = FBVector3d()
for b in branches:
bran_posi = []
for name in b:
n = FBFindModelByLabelName(name)
n.GetVector(node, FBModelTransformationType.kModelTranslation)
bran_posi.append(node)
branches_posi.append(bran_posi)
return branches_posi
# Chose the node that has the highest betweeness
root = FBFindModelByLabelName('Bip01')
branches = get_branches(root)
branches_posi = get_branches_posi(branches)
#print(len(branches)) # this tells you how many branches we have
#print(branches) # this shows all branches in a list
#print(branches_posi)
|
mit
|
Python
|
c9df16f35af2cf51a4612eb76fab59819a32df64
|
Handle TypeError in is_float
|
ewdurbin/sentry,BuildingLink/sentry,boneyao/sentry,JTCunning/sentry,TedaLIEz/sentry,gencer/sentry,zenefits/sentry,looker/sentry,looker/sentry,BuildingLink/sentry,drcapulet/sentry,jean/sentry,argonemyth/sentry,nicholasserra/sentry,kevinastone/sentry,korealerts1/sentry,daevaorn/sentry,mvaled/sentry,mvaled/sentry,vperron/sentry,zenefits/sentry,gencer/sentry,JamesMura/sentry,songyi199111/sentry,camilonova/sentry,fotinakis/sentry,beeftornado/sentry,fotinakis/sentry,1tush/sentry,llonchj/sentry,mvaled/sentry,boneyao/sentry,mvaled/sentry,fuziontech/sentry,drcapulet/sentry,songyi199111/sentry,imankulov/sentry,JTCunning/sentry,TedaLIEz/sentry,wujuguang/sentry,wujuguang/sentry,hongliang5623/sentry,zenefits/sentry,ngonzalvez/sentry,vperron/sentry,fotinakis/sentry,alexm92/sentry,wujuguang/sentry,hongliang5623/sentry,gencer/sentry,argonemyth/sentry,Kryz/sentry,ifduyue/sentry,ifduyue/sentry,beeftornado/sentry,daevaorn/sentry,BuildingLink/sentry,boneyao/sentry,alexm92/sentry,looker/sentry,songyi199111/sentry,Natim/sentry,vperron/sentry,felixbuenemann/sentry,BayanGroup/sentry,imankulov/sentry,fotinakis/sentry,looker/sentry,gg7/sentry,mvaled/sentry,ewdurbin/sentry,jean/sentry,llonchj/sentry,jokey2k/sentry,pauloschilling/sentry,JackDanger/sentry,felixbuenemann/sentry,1tush/sentry,ewdurbin/sentry,hongliang5623/sentry,kevinastone/sentry,mitsuhiko/sentry,argonemyth/sentry,gencer/sentry,wong2/sentry,JamesMura/sentry,ifduyue/sentry,drcapulet/sentry,Natim/sentry,BuildingLink/sentry,gg7/sentry,BuildingLink/sentry,looker/sentry,JackDanger/sentry,llonchj/sentry,korealerts1/sentry,jokey2k/sentry,TedaLIEz/sentry,fuziontech/sentry,zenefits/sentry,beeftornado/sentry,camilonova/sentry,jean/sentry,kevinastone/sentry,JackDanger/sentry,ifduyue/sentry,felixbuenemann/sentry,daevaorn/sentry,kevinlondon/sentry,pauloschilling/sentry,JamesMura/sentry,Kryz/sentry,fuziontech/sentry,wong2/sentry,1tush/sentry,jokey2k/sentry,imankulov/sentry,mitsuhiko/sentry,JTCunning/sentry,Kryz/sentry,Natim/sentry,ngonzalvez/s
entry,JamesMura/sentry,BayanGroup/sentry,alexm92/sentry,ifduyue/sentry,mvaled/sentry,nicholasserra/sentry,jean/sentry,JamesMura/sentry,daevaorn/sentry,gencer/sentry,korealerts1/sentry,kevinlondon/sentry,gg7/sentry,wong2/sentry,pauloschilling/sentry,zenefits/sentry,camilonova/sentry,kevinlondon/sentry,BayanGroup/sentry,nicholasserra/sentry,jean/sentry,ngonzalvez/sentry
|
src/sentry/utils/__init__.py
|
src/sentry/utils/__init__.py
|
"""
sentry.utils
~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.utils.encoding import force_unicode
import six
def to_unicode(value):
    """Best-effort coercion of *value* to a unicode string.

    Never raises: values that cannot be decoded are replaced with a
    placeholder, falling back to the repr of the value's type when even
    decoding attempts raise unexpected exceptions.
    """
    try:
        value = six.text_type(force_unicode(value))
    except (UnicodeEncodeError, UnicodeDecodeError):
        value = '(Error decoding value)'
    except Exception: # in some cases we get a different exception
        try:
            value = str(repr(type(value)))
        except Exception:
            value = '(Error decoding value)'
    return value
def is_float(var):
    """Return True when ``float(var)`` succeeds, False otherwise.

    Non-numeric strings raise ValueError and non-convertible objects
    (e.g. ``None`` or lists) raise TypeError; both mean "not a float".
    """
    try:
        float(var)
        return True
    except (TypeError, ValueError):
        return False
|
"""
sentry.utils
~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.utils.encoding import force_unicode
import six
def to_unicode(value):
try:
value = six.text_type(force_unicode(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = '(Error decoding value)'
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = '(Error decoding value)'
return value
def is_float(var):
    """Return True when ``var`` can be converted with ``float()``.

    Catches TypeError in addition to ValueError so non-string, non-numeric
    inputs (e.g. ``None`` or a list) report False instead of raising.
    """
    try:
        float(var)
    except (TypeError, ValueError):
        return False
    return True
|
bsd-3-clause
|
Python
|
7fb89e4dbe2cbed4ef37e13073d4fa3f2a650049
|
Check for missing part thumbnails when the server first runs
|
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
|
InvenTree/part/apps.py
|
InvenTree/part/apps.py
|
from __future__ import unicode_literals
import os
from django.db.utils import OperationalError, ProgrammingError
from django.apps import AppConfig
from django.conf import settings
class PartConfig(AppConfig):
    """AppConfig for the 'part' app; regenerates missing image thumbnails at startup."""

    name = 'part'

    def ready(self):
        """
        This function is called whenever the Part app is loaded.
        """
        self.generate_part_thumbnails()

    def generate_part_thumbnails(self):
        """Create a thumbnail file for any Part image that is missing one."""
        # Imported here: models are not loaded yet at module-import time.
        from .models import Part

        print("Checking Part image thumbnails")

        try:
            for part in Part.objects.all():
                if part.image:
                    url = part.image.thumbnail.name
                    #if url.startswith('/'):
                    #    url = url[1:]
                    loc = os.path.join(settings.MEDIA_ROOT, url)

                    if not os.path.exists(loc):
                        print("InvenTree: Generating thumbnail for Part '{p}'".format(p=part.name))
                        part.image.render_variations(replace=False)
        except (OperationalError, ProgrammingError):
            # DB tables may not exist yet (e.g. before migrations have run).
            print("Could not generate Part thumbnails")
|
from __future__ import unicode_literals
from django.apps import AppConfig
class PartConfig(AppConfig):
    """Django AppConfig for the 'part' application."""

    # Dotted app label registered with Django's app registry.
    name = 'part'
|
mit
|
Python
|
7f5f10132334c1f6685497d3fff48c2c65617845
|
Remove broken URL (#3623)
|
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree
|
InvenTree/part/urls.py
|
InvenTree/part/urls.py
|
"""URL lookup for Part app. Provides URL endpoints for:
- Display / Create / Edit / Delete PartCategory
- Display / Create / Edit / Delete Part
- Create / Edit / Delete PartAttachment
- Display / Create / Edit / Delete SupplierPart
"""
from django.urls import include, re_path
from . import views
# Routes scoped under a single part: /part/<pk>/...
part_detail_urls = [
    re_path(r'^bom-download/?', views.BomDownload.as_view(), name='bom-download'),
    re_path(r'^pricing/', views.PartPricing.as_view(), name='part-pricing'),
    re_path(r'^bom-upload/?', views.BomUpload.as_view(), name='upload-bom'),
    re_path(r'^qr_code/?', views.PartQRCode.as_view(), name='part-qr'),
    # Normal thumbnail with form
    re_path(r'^thumb-select/?', views.PartImageSelect.as_view(), name='part-image-select'),
    # Any other URLs go to the part detail page
    re_path(r'^.*$', views.PartDetail.as_view(), name='part-detail'),
]

# Routes scoped under /part/category/
category_urls = [
    # Category detail views
    re_path(r'(?P<pk>\d+)/', views.CategoryDetail.as_view(), name='category-detail'),
]

# URL list for part web interface
part_urls = [
    # Upload a part
    re_path(r'^import/', views.PartImport.as_view(), name='part-import'),
    re_path(r'^import-api/', views.PartImportAjax.as_view(), name='api-part-import'),
    # Download a BOM upload template
    re_path(r'^bom_template/?', views.BomUploadTemplate.as_view(), name='bom-upload-template'),
    # Individual part using pk
    re_path(r'^(?P<pk>\d+)/', include(part_detail_urls)),
    # Part category
    re_path(r'^category/', include(category_urls)),
    # Individual part using IPN as slug
    re_path(r'^(?P<slug>[-\w]+)/', views.PartDetailFromIPN.as_view(), name='part-detail-from-ipn'),
    # Top level part list (display top level parts and categories)
    re_path(r'^.*$', views.PartIndex.as_view(), name='part-index'),
]
|
"""URL lookup for Part app. Provides URL endpoints for:
- Display / Create / Edit / Delete PartCategory
- Display / Create / Edit / Delete Part
- Create / Edit / Delete PartAttachment
- Display / Create / Edit / Delete SupplierPart
"""
from django.urls import include, re_path
from . import views
# Routes scoped under a single part: /part/<pk>/...
part_detail_urls = [
    re_path(r'^bom-download/?', views.BomDownload.as_view(), name='bom-download'),
    re_path(r'^pricing/', views.PartPricing.as_view(), name='part-pricing'),
    re_path(r'^bom-upload/?', views.BomUpload.as_view(), name='upload-bom'),
    re_path(r'^qr_code/?', views.PartQRCode.as_view(), name='part-qr'),
    # Normal thumbnail with form
    re_path(r'^thumb-select/?', views.PartImageSelect.as_view(), name='part-image-select'),
    # Any other URLs go to the part detail page
    re_path(r'^.*$', views.PartDetail.as_view(), name='part-detail'),
]

category_urls = [
    # Top level subcategory display
    # NOTE(review): this route renders 'part/subcategory.html' -- verify that
    # template actually exists before relying on this URL.
    re_path(r'^subcategory/', views.PartIndex.as_view(template_name='part/subcategory.html'), name='category-index-subcategory'),
    # Category detail views
    re_path(r'(?P<pk>\d+)/', views.CategoryDetail.as_view(), name='category-detail'),
]

# URL list for part web interface
part_urls = [
    # Upload a part
    re_path(r'^import/', views.PartImport.as_view(), name='part-import'),
    re_path(r'^import-api/', views.PartImportAjax.as_view(), name='api-part-import'),
    # Download a BOM upload template
    re_path(r'^bom_template/?', views.BomUploadTemplate.as_view(), name='bom-upload-template'),
    # Individual part using pk
    re_path(r'^(?P<pk>\d+)/', include(part_detail_urls)),
    # Part category
    re_path(r'^category/', include(category_urls)),
    # Individual part using IPN as slug
    re_path(r'^(?P<slug>[-\w]+)/', views.PartDetailFromIPN.as_view(), name='part-detail-from-ipn'),
    # Top level part list (display top level parts and categories)
    re_path(r'^.*$', views.PartIndex.as_view(), name='part-index'),
]
|
mit
|
Python
|
d0e31fdb5ec99e91f7b5f7da5b81fc7a391689df
|
Update django_facebook/admin.py
|
danosaure/Django-facebook,danosaure/Django-facebook,danosaure/Django-facebook
|
django_facebook/admin.py
|
django_facebook/admin.py
|
from django.contrib import admin
from django.conf import settings
from django.core.urlresolvers import reverse
from django_facebook import admin_actions
from django_facebook import models
class FacebookUserAdmin(admin.ModelAdmin):
    """Admin list view for FacebookUser rows."""
    list_display = ('user_id', 'name', 'facebook_id',)
    search_fields = ('name',)
class FacebookLikeAdmin(admin.ModelAdmin):
    """Admin list view for FacebookLike rows."""
    list_display = ('user_id', 'name', 'category', 'facebook_id',)
    search_fields = ('name',)
    # NOTE(review): 'filter_fields' is not a ModelAdmin option -- was
    # 'list_filter' intended? Confirm before relying on this filter.
    filter_fields = ('category',)
class FacebookProfileAdmin(admin.ModelAdmin):
    """Admin for FacebookProfile: inline avatar plus a link to the auth user."""
    list_display = ('image_', 'user_', 'facebook_name', 'facebook_id',)
    raw_id_fields = ('user',)
    search_fields = ('facebook_name', 'facebook_id',)

    def image_(self, instance):
        # Small square avatar; empty background URL when the profile
        # has no image, so rows without a picture still render.
        return """<span style="
            background-image: url({0});
            background-size: cover;
            width: 21px;
            height: 21px;
            display: inline-block;
            outline: 1px solid #DDD;
            position: absolute;
            margin-top: -3px;
            "></span>""".format(
            instance.image.url if (instance and instance.image) else ''
        )
    # allow_tags: legacy pre-Django-1.9 way to mark the HTML as safe.
    image_.allow_tags = True

    def user_(self, instance):
        # Link to the change page of the related auth user.
        admin_url = reverse('admin:auth_user_change', args=[instance.user.pk])
        return '<a href="{0}">{1}</a>'.format(
            admin_url,
            instance.user
        )
    user_.allow_tags = True
def facebook_profile(open_graph_share):
    """Render a linked Facebook avatar and id for the share's user (HTML)."""
    fb_id = open_graph_share.user.get_profile().facebook_id
    profile_url = 'http://www.facebook.com/%s/' % fb_id
    markup = '<p><a href="%s"><img src="http://graph.facebook.com/%s/picture/?type=large" width="100px" style="float:left"/>%s</a><br/></p>' % (profile_url, fb_id, fb_id)
    return markup
facebook_profile.allow_tags = True
facebook_profile.short_description = 'Profile'
class OpenGraphShareAdmin(admin.ModelAdmin):
    """Admin for OpenGraphShare with retry actions for failed shares."""
    raw_id_fields = ['user']
    list_display = ['user', 'action_domain', facebook_profile,
                    'completed_at', 'error_message']
    # Bulk actions to re-attempt failed open-graph shares.
    actions = [admin_actions.retry_open_graph_share,
               admin_actions.retry_open_graph_share_for_user]
# Only register the profile admin when this app supplies the profile model.
if settings.AUTH_PROFILE_MODULE == 'django_facebook.FacebookProfile':
    admin.site.register(models.FacebookProfile, FacebookProfileAdmin)
admin.site.register(models.FacebookUser, FacebookUserAdmin)
admin.site.register(models.FacebookLike, FacebookLikeAdmin)
admin.site.register(models.OpenGraphShare, OpenGraphShareAdmin)
|
from django.contrib import admin
from django.conf import settings
from django.core.urlresolvers import reverse
from django_facebook import admin_actions
from django_facebook import models
class FacebookUserAdmin(admin.ModelAdmin):
    """Admin list view for FacebookUser rows."""
    list_display = ('user_id', 'name', 'facebook_id',)
    search_fields = ('name',)
class FacebookLikeAdmin(admin.ModelAdmin):
    """Admin list view for FacebookLike rows."""
    list_display = ('user_id', 'name', 'category', 'facebook_id',)
    search_fields = ('name',)
    # NOTE(review): 'filter_fields' is not a ModelAdmin option -- was
    # 'list_filter' intended? Confirm before relying on this filter.
    filter_fields = ('category',)
class FacebookProfileAdmin(admin.ModelAdmin):
    """Admin for FacebookProfile: inline avatar plus a link to the auth user."""
    list_display = ('image_', 'user_', 'facebook_name', 'facebook_id',)
    raw_id_fields = ('user',)
    search_fields = ('facebook_name', 'facebook_id',)

    def image_(self, instance):
        # Fix: guard against profiles without an image. Accessing
        # instance.image.url on an empty FieldFile raises ValueError and
        # would break the whole changelist page; fall back to an empty URL.
        return """<span style="
            background-image: url({0});
            background-size: cover;
            width: 21px;
            height: 21px;
            display: inline-block;
            outline: 1px solid #DDD;
            position: absolute;
            margin-top: -3px;
            "></span>""".format(
            instance.image.url if (instance and instance.image) else ''
        )
    # allow_tags: legacy pre-Django-1.9 way to mark the HTML as safe.
    image_.allow_tags = True

    def user_(self, instance):
        # Link to the change page of the related auth user.
        admin_url = reverse('admin:auth_user_change', args=[instance.user.pk])
        return '<a href="{0}">{1}</a>'.format(
            admin_url,
            instance.user
        )
    user_.allow_tags = True
def facebook_profile(open_graph_share):
    """Render a linked Facebook avatar and id for the share's user (HTML)."""
    fb_id = open_graph_share.user.get_profile().facebook_id
    profile_url = 'http://www.facebook.com/%s/' % fb_id
    markup = '<p><a href="%s"><img src="http://graph.facebook.com/%s/picture/?type=large" width="100px" style="float:left"/>%s</a><br/></p>' % (profile_url, fb_id, fb_id)
    return markup
facebook_profile.allow_tags = True
facebook_profile.short_description = 'Profile'
class OpenGraphShareAdmin(admin.ModelAdmin):
    """Admin for OpenGraphShare with retry actions for failed shares."""
    raw_id_fields = ['user']
    list_display = ['user', 'action_domain', facebook_profile,
                    'completed_at', 'error_message']
    # Bulk actions to re-attempt failed open-graph shares.
    actions = [admin_actions.retry_open_graph_share,
               admin_actions.retry_open_graph_share_for_user]
# Only register the profile admin when this app supplies the profile model.
if settings.AUTH_PROFILE_MODULE == 'django_facebook.FacebookProfile':
    admin.site.register(models.FacebookProfile, FacebookProfileAdmin)
admin.site.register(models.FacebookUser, FacebookUserAdmin)
admin.site.register(models.FacebookLike, FacebookLikeAdmin)
admin.site.register(models.OpenGraphShare, OpenGraphShareAdmin)
|
bsd-3-clause
|
Python
|
dbf736ba66fe6b530bfe3d9d503caa2e24ee8f01
|
Make /config more CORS-y
|
matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse
|
synapse/rest/media/v1/config_resource.py
|
synapse/rest/media/v1/config_resource.py
|
# -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from synapse.http.server import respond_with_json, wrap_json_request_handler, set_cors_headers
class MediaConfigResource(Resource):
    """Media /config endpoint: serves the server's media limits as JSON."""
    isLeaf = True

    def __init__(self, hs):
        Resource.__init__(self)
        config = hs.get_config()
        self.clock = hs.get_clock()
        self.auth = hs.get_auth()
        # Static response body: limits advertised to clients.
        self.limits_dict = {
            "m.upload.size": config.max_upload_size,
        }

    def render_GET(self, request):
        # Twisted synchronous entry point; work happens asynchronously.
        self._async_render_GET(request)
        return NOT_DONE_YET

    @wrap_json_request_handler
    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        set_cors_headers(request)
        # Requires an authenticated user; failure is turned into a JSON
        # error response by wrap_json_request_handler.
        yield self.auth.get_user_by_req(request)
        respond_with_json(request, 200, self.limits_dict)

    def render_OPTIONS(self, request):
        # CORS preflight support.
        set_cors_headers(request)
        respond_with_json(request, 200, {}, send_cors=True)
        return NOT_DONE_YET
|
# -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET

from synapse.http.server import (
    respond_with_json,
    set_cors_headers,
    wrap_json_request_handler,
)
class MediaConfigResource(Resource):
    """Media /config endpoint: serves the server's media limits as JSON."""
    isLeaf = True

    def __init__(self, hs):
        Resource.__init__(self)
        config = hs.get_config()
        self.clock = hs.get_clock()
        self.auth = hs.get_auth()
        # Static response body: limits advertised to clients.
        self.limits_dict = {
            "m.upload.size": config.max_upload_size,
        }

    def render_GET(self, request):
        # Twisted synchronous entry point; work happens asynchronously.
        self._async_render_GET(request)
        return NOT_DONE_YET

    @wrap_json_request_handler
    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        # Fix: set CORS headers on the GET response as well -- answering the
        # OPTIONS preflight alone is not enough for browsers on other
        # origins to read the actual GET response.
        set_cors_headers(request)
        yield self.auth.get_user_by_req(request)
        respond_with_json(request, 200, self.limits_dict)

    def render_OPTIONS(self, request):
        # CORS preflight support.
        set_cors_headers(request)
        respond_with_json(request, 200, {}, send_cors=True)
        return NOT_DONE_YET
|
apache-2.0
|
Python
|
54e1cb0048ffd0024feae4e5dc0c1e047ca55328
|
remove debug print
|
openaps/openaps,openaps/openaps
|
openaps/devices/device.py
|
openaps/devices/device.py
|
import json
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
    """Configurable bound to a device's separate 'extra' ini file."""
    prefix = 'device'
    pass
class Device (Configurable):
    """A named device bound to a vendor module, with optional 'extra' ini config."""
    vendor = None
    required = ['name', 'vendor']
    optional = [ ]
    prefix = 'device'
    # NOTE(review): class-level mutable list -- shared across all Device
    # instances; confirm that is intended before relying on per-device uses.
    _uses = [ ]
    def __init__ (self, name, vendor):
        self.name = name
        self.vendor = vendor
        self.fields = dict(vendor=vendor.__name__)
        self.extra = ExtraConfig(name)
    def read (self, args=None, config=None):
        """Populate fields from parsed CLI args and/or an ini config object."""
        if args:
            self.name = args.name
            if getattr(args, 'extra', None):
                # Vendor config goes into the separate 'extra' ini file.
                self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
                self.vendor.set_config(args, self.extra)
            else:
                self.vendor.set_config(args, self)
        if config:
            # self.vendor.read_config(config)
            self.fields.update(dict(config.items(self.section_name( ))))
            extra_ini = self.fields.get('extra', None)
            if extra_ini:
                # Pull the extra ini's matching section in as well.
                extra = config.Read(name=extra_ini)
                self.extra.fields.update(dict(extra.items(self.section_name( ))))
    def get (self, k, *args):
        # Fall back to the extra config when the key is absent here.
        return self.fields.get(k, self.extra.get(k, *args))
    def format_url (self):
        """Render 'vendor://name' plus the vendor's own device description."""
        parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
        parts.append(self.vendor.display_device(self))
        return ''.join(parts)
    def register_uses (self, uses):
        # Record each usage once, preserving first-seen order.
        for u in uses.usages:
            if u not in self._uses:
                self._uses.append(u)
    def store (self, config):
        """Persist this device (and its extra ini, if any) into config."""
        extra_ini = self.fields.get('extra', None)
        if extra_ini:
            extra = config.Read(name=extra_ini)
            self.extra.store(extra)
            extra.save( )
        super(Device, self).store(config)
    @classmethod
    def FromConfig (klass, vendors, config):
        """Build a Device for every matching section found in config."""
        devices = [ ]
        for candidate in config.sections( ):
            if candidate.startswith(klass.prefix):
                # Section names look like: device "name" -- the quoted tail
                # parses as a JSON string.
                name = json.loads(candidate.split(' ').pop( ))
                vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
                device = klass(name, vendor)
                device.read(config=config)
                devices.append(device)
        return devices
|
import json
from openaps.configurable import Configurable
class ExtraConfig (Configurable):
    """Configurable bound to a device's separate 'extra' ini file."""
    prefix = 'device'
    pass
class Device (Configurable):
    """A named device bound to a vendor module, with optional 'extra' ini config."""
    vendor = None
    required = ['name', 'vendor']
    optional = [ ]
    prefix = 'device'
    # NOTE(review): class-level mutable list -- shared across all Device
    # instances; confirm that is intended before relying on per-device uses.
    _uses = [ ]
    def __init__ (self, name, vendor):
        self.name = name
        self.vendor = vendor
        self.fields = dict(vendor=vendor.__name__)
        self.extra = ExtraConfig(name)
    def read (self, args=None, config=None):
        """Populate fields from parsed CLI args and/or an ini config object."""
        if args:
            self.name = args.name
            # Fix: removed leftover Python-2 debug statement
            # (print "args", args) -- noisy, and a syntax error on Python 3.
            if getattr(args, 'extra', None):
                # Vendor config goes into the separate 'extra' ini file.
                self.fields['extra'] = args.extra.format(name=self.name, **self.fields)
                self.vendor.set_config(args, self.extra)
            else:
                self.vendor.set_config(args, self)
        if config:
            # self.vendor.read_config(config)
            self.fields.update(dict(config.items(self.section_name( ))))
            extra_ini = self.fields.get('extra', None)
            if extra_ini:
                # Pull the extra ini's matching section in as well.
                extra = config.Read(name=extra_ini)
                self.extra.fields.update(dict(extra.items(self.section_name( ))))
    def get (self, k, *args):
        # Fall back to the extra config when the key is absent here.
        return self.fields.get(k, self.extra.get(k, *args))
    def format_url (self):
        """Render 'vendor://name' plus the vendor's own device description."""
        parts = ['{0:s}://{1:s}'.format(self.vendor.__name__.split('.').pop( ), self.name), ]
        parts.append(self.vendor.display_device(self))
        return ''.join(parts)
    def register_uses (self, uses):
        # Record each usage once, preserving first-seen order.
        for u in uses.usages:
            if u not in self._uses:
                self._uses.append(u)
    def store (self, config):
        """Persist this device (and its extra ini, if any) into config."""
        extra_ini = self.fields.get('extra', None)
        if extra_ini:
            extra = config.Read(name=extra_ini)
            self.extra.store(extra)
            extra.save( )
        super(Device, self).store(config)
    @classmethod
    def FromConfig (klass, vendors, config):
        """Build a Device for every matching section found in config."""
        devices = [ ]
        for candidate in config.sections( ):
            if candidate.startswith(klass.prefix):
                # Section names look like: device "name" -- the quoted tail
                # parses as a JSON string.
                name = json.loads(candidate.split(' ').pop( ))
                vendor = vendors.lookup(config.get(candidate, 'vendor').split('.').pop( ), config)
                device = klass(name, vendor)
                device.read(config=config)
                devices.append(device)
        return devices
|
mit
|
Python
|
294e8b120d507237f1129338c476939b20604f26
|
Save release test metrics under a single column (#30215)
|
ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray,ray-project/ray
|
release/ray_release/reporter/db.py
|
release/ray_release/reporter/db.py
|
import time
import json
import boto3
from botocore.config import Config
from ray_release.reporter.reporter import Reporter
from ray_release.result import Result
from ray_release.config import Test
from ray_release.logger import logger
class DBReporter(Reporter):
    """Reporter that persists release-test results via Kinesis Firehose."""
    def __init__(self):
        # Firehose delivery stream lives in us-west-2.
        self.firehose = boto3.client("firehose", config=Config(region_name="us-west-2"))
    def report_result(self, test: Test, result: Result):
        """Serialize the result and put it on the 'ray-ci-results' stream."""
        logger.info("Persisting result to the databricks delta lake...")
        # Fields are defensively defaulted ('or ...') so missing values never
        # break JSON serialization downstream.
        result_json = {
            "_table": "release_test_result",
            "report_timestamp_ms": int(time.time() * 1000),
            "status": result.status or "",
            "results": result.results or {},
            "name": test.get("name", ""),
            "group": test.get("group", ""),
            "team": test.get("team", ""),
            "frequency": test.get("frequency", ""),
            "cluster_url": result.cluster_url or "",
            "wheel_url": result.wheels_url or "",
            "buildkite_url": result.buildkite_url or "",
            "runtime": result.runtime or -1.0,
            "stable": result.stable,
            "return_code": result.return_code,
            "smoke_test": result.smoke_test,
            "prometheus_metrics": result.prometheus_metrics or {},
        }
        logger.debug(f"Result json: {json.dumps(result_json)}")
        try:
            self.firehose.put_record(
                DeliveryStreamName="ray-ci-results",
                Record={"Data": json.dumps(result_json)},
            )
        except Exception:
            # Reporting must never fail the release run; log and continue.
            logger.exception("Failed to persist result to the databricks delta lake")
        else:
            logger.info("Result has been persisted to the databricks delta lake")
|
import time
import json
import boto3
from botocore.config import Config
from ray_release.reporter.reporter import Reporter
from ray_release.result import Result
from ray_release.config import Test
from ray_release.logger import logger
class DBReporter(Reporter):
    """Reporter that persists release-test results via Kinesis Firehose."""
    def __init__(self):
        # Firehose delivery stream lives in us-west-2.
        self.firehose = boto3.client("firehose", config=Config(region_name="us-west-2"))
    def report_result(self, test: Test, result: Result):
        """Serialize the result and put it on the 'ray-ci-results' stream."""
        logger.info("Persisting result to the databricks delta lake...")
        # Fields are defensively defaulted ('or ...') so missing values never
        # break JSON serialization downstream.
        result_json = {
            "_table": "release_test_result",
            "report_timestamp_ms": int(time.time() * 1000),
            "status": result.status or "",
            "results": result.results or {},
            "name": test.get("name", ""),
            "group": test.get("group", ""),
            "team": test.get("team", ""),
            "frequency": test.get("frequency", ""),
            "cluster_url": result.cluster_url or "",
            "wheel_url": result.wheels_url or "",
            "buildkite_url": result.buildkite_url or "",
            "runtime": result.runtime or -1.0,
            "stable": result.stable,
            "return_code": result.return_code,
            "smoke_test": result.smoke_test,
        }
        # Fix: guard against prometheus_metrics being None -- dict.update(None)
        # raises TypeError, while every other field above is 'or'-guarded.
        result_json.update(result.prometheus_metrics or {})
        logger.debug(f"Result json: {json.dumps(result_json)}")
        try:
            self.firehose.put_record(
                DeliveryStreamName="ray-ci-results",
                Record={"Data": json.dumps(result_json)},
            )
        except Exception:
            # Reporting must never fail the release run; log and continue.
            logger.exception("Failed to persist result to the databricks delta lake")
        else:
            logger.info("Result has been persisted to the databricks delta lake")
|
apache-2.0
|
Python
|
0a94b8a4756e9b46211567c430560a314c554a1d
|
add help for org command
|
RhubarbSin/arin-whois-rws
|
parse.py
|
parse.py
|
import argparse
class Parser(argparse.ArgumentParser):
    """Argument parser for the ARIN whois command line client."""

    def populate(self):
        """Attach the global options and the 'org'/'orgs' sub-commands."""
        self.add_argument('--output', choices=('xml', 'text', 'html'),
                          default='text')
        commands = self.add_subparsers(title='Commands', metavar='',
                                       dest='call')
        for attach in (self._add_org, self._add_orgs):
            attach(commands)

    def _add_org(self, subparsers):
        # 'org HANDLE' -- look up a single organization by handle.
        org = subparsers.add_parser('org', help='HANDLE')
        org.add_argument('handle', metavar='HANDLE', help='Org handle')
        self.org = org

    def _add_orgs(self, subparsers):
        # 'orgs' -- search organizations by any combination of fields.
        orgs = subparsers.add_parser('orgs', help='--handle HANDLE --name NAME --dba DBA')
        for flag, desc in (('--handle', 'Org handle'),
                           ('--name', 'Org name'),
                           ('--dba', 'Org DBA')):
            orgs.add_argument(flag, help=desc)
        self.org = orgs

    def run(self):
        """Parse sys.argv and return the populated namespace."""
        return self.parse_args()
|
import argparse
class Parser(argparse.ArgumentParser):
    """Argument parser for the ARIN whois command line client."""

    def populate(self):
        """Attach the global options and the 'org'/'orgs' sub-commands."""
        self.add_argument('--output', choices=('xml', 'text', 'html'),
                          default='text')
        subparsers = self.add_subparsers(title='Commands', metavar='',
                                         dest='call')
        self._add_org(subparsers)
        self._add_orgs(subparsers)

    def _add_org(self, subparsers):
        # Fix: give the 'org' sub-command a help string so it documents its
        # usage in the command list, consistent with 'orgs' below.
        self.org = subparsers.add_parser('org', help='HANDLE')
        self.org.add_argument('handle', metavar='HANDLE', help='Org handle')

    def _add_orgs(self, subparsers):
        self.org = subparsers.add_parser('orgs', help='--handle HANDLE --name NAME --dba DBA')
        self.org.add_argument('--handle', help='Org handle')
        self.org.add_argument('--name', help='Org name')
        self.org.add_argument('--dba', help='Org DBA')

    def run(self):
        """Parse sys.argv and return the populated namespace."""
        return self.parse_args()
|
mit
|
Python
|
65fb9244df69646721c8273afae22fe6248976f0
|
optimise common.py
|
Tocknicsu/nctuoj,Tocknicsu/nctuoj,Tocknicsu/nctuoj
|
backend/service/common.py
|
backend/service/common.py
|
from service.base import BaseService
import config
### need to add rs
class CommonService(BaseService):
    """Shared lookup tables for execute types and verdict strings.

    The getters contain ``yield`` and are therefore generators; the caller
    is expected to drive them (coroutine style) -- TODO confirm the
    framework (e.g. tornado gen) that consumes these.
    """
    def __init__(self, db, rs):
        super().__init__(db, rs)
        # Publish a process-wide singleton handle.
        CommonService.inst = self
    def get_execute_type(self):
        # Maps execute-type id -> full row.
        res ={ x['id']: x for x in (yield self.db.execute("SELECT * FROM execute_types order by id"))}
        return res
    def get_verdict_type(self):
        # Maps verdict id -> verdict-string row.
        res = { x['id']: x for x in (yield self.db.execute("SELECT * FROM map_verdict_string order by id"))}
        return res
|
from service.base import BaseService
import config
### need to add rs
class CommonService(BaseService):
    """Shared lookup tables for execute types and verdict strings.

    The getters contain ``yield`` and are therefore generators; the caller
    is expected to drive them (coroutine style) -- TODO confirm the
    framework (e.g. tornado gen) that consumes these.
    """
    def __init__(self, db, rs):
        super().__init__(db, rs)
        # Publish a process-wide singleton handle.
        CommonService.inst = self
    def get_execute_type(self):
        # Maps execute-type id -> full row; dict comprehension replaces the
        # manual accumulate loop (same behavior, clearer intent).
        res = (yield self.db.execute("SELECT * FROM execute_types order by id")).fetchall()
        return {x['id']: x for x in res}
    def get_verdict_type(self):
        # Maps verdict id -> verdict-string row.
        res = (yield self.db.execute("SELECT * FROM map_verdict_string order by id")).fetchall()
        return {x['id']: x for x in res}
|
mit
|
Python
|
b9c076865f4e0ff9b4ab007472cbab735ccf01ab
|
Bump version to 3.1.2
|
opensciencegrid/osg-configure,matyasselmeci/osg-configure,opensciencegrid/osg-configure,matyasselmeci/osg-configure
|
osg_configure/version.py
|
osg_configure/version.py
|
# Package release version (bumped per release).
__version__ = "3.1.2"
|
# Package release version (bumped per release).
__version__ = "3.1.1"
|
apache-2.0
|
Python
|
214f4094b6b5c2f4a43ff96567a7bbe87ba63d28
|
Update bob.py
|
malvikasharan/software_writing_skills_potsdam
|
Python_sessions/session-2/practice_codes/bob.py
|
Python_sessions/session-2/practice_codes/bob.py
|
# Simple scripted chatbot: greets the user, asks three questions, exits.
# NOTE(review): Python 2 style 'print' statements; also note that Python 2
# input() eval()s whatever the user types -- raw_input() would be safer.
# Confirm the target interpreter before changing.
hello = "Hi Human, I am B.O.B. "
question1 = "What is your name? "
response1 = "Thats a lovely name! "
input(hello+question1)
print response1
answer_type = "Please answer in 'yes' or 'no'. "
question2 = "Can I help you? "
response2 = "I am a computer, not a human. "
input(question2+answer_type)
print response2
question3 = "Did you like that information? "
goodbye = "Great. Goodbye! "
input(question3+answer_type)
print goodbye
|
hello = "Hi Human, I am B.O.B. "
question1 = "What is your name? "
response1 = "Thats a lovely name! "
input(hello+question1)
print response1
answer_type = "Please answer in 'yes' of 'no'. "
question2 = "Can I help you? "
response2 = "I am a computer, not a human. "
input(question2+answer_type)
print response2
question3 = "Did you like that information? "
goodbye = "Great. Goodbye! "
input(question3+answer_type)
print goodbye
|
isc
|
Python
|
f44c7670ee06d0ff3976c11b921cc3f288b0259b
|
add TestMPEventLoopRunner.test_ProgressMonitor
|
alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl
|
tests/EventReader/test_MPEventLoopRunner.py
|
tests/EventReader/test_MPEventLoopRunner.py
|
from AlphaTwirl.EventReader import MPEventLoopRunner
import unittest
import os
##____________________________________________________________________________||
class MockReader(object):
    """Test double for an event reader: just stores whatever it is handed."""
    def __init__(self):
        self._results = None
    def setResults(self, results):
        self._results = results
    def results(self):
        return self._results
##____________________________________________________________________________||
class MockEventLoop(object):
    """Event-loop double: stamps every reader with a sentinel result."""

    def __init__(self, readers):
        self.readers = readers

    def __call__(self, progressReporter):
        # The reporter is accepted to match the real call signature but
        # deliberately ignored by this double.
        for rdr in self.readers:
            rdr._results = 3456
        return self.readers
##____________________________________________________________________________||
class MockEventLoopForProgressReporterTest(object):
    """Event-loop double that also records the reporter it was called with."""

    def __init__(self, readers):
        self.readers = readers

    def __call__(self, progressReporter):
        # Store the reporter alongside the sentinel so tests can verify
        # that the runner actually passed one in.
        for rdr in self.readers:
            rdr._results = [3456, progressReporter]
        return self.readers
##____________________________________________________________________________||
class MockProgressReporter(object):
    """No-op stand-in for a progress reporter."""
    def report(self, event, component): pass
##____________________________________________________________________________||
class MockProgressMonitor(object):
    """No-op progress monitor whose reporters are MockProgressReporter."""
    def createReporter(self): return MockProgressReporter()
    def addWorker(self, worker): pass
    def monitor(self): pass
    def last(self): pass
##____________________________________________________________________________||
class TestMPEventLoopRunner(unittest.TestCase):
    """Exercise MPEventLoopRunner against mock event loops and readers."""

    def test_begin_end(self):
        # The runner must tolerate an empty begin/end cycle.
        runner = MPEventLoopRunner()
        runner.begin()
        runner.end()

    def test_run(self):
        runner = MPEventLoopRunner()
        runner.begin()
        reader1 = MockReader()
        reader2 = MockReader()
        eventLoop = MockEventLoop([reader1, reader2])
        runner.run(eventLoop)
        # Nothing is set until end() collects the results.
        self.assertIsNone(reader1._results)
        self.assertIsNone(reader2._results)
        runner.end()
        self.assertEqual(3456, reader1._results)
        self.assertEqual(3456, reader2._results)

    def test_ProgressMonitor(self):
        progressMonitor = MockProgressMonitor()
        runner = MPEventLoopRunner(nprocesses = 3, progressMonitor = progressMonitor)
        runner.begin()
        reader1 = MockReader()
        reader2 = MockReader()
        eventLoop = MockEventLoopForProgressReporterTest([reader1, reader2])
        runner.run(eventLoop)
        # Nothing is set until end() collects the results.
        self.assertIsNone(reader1._results)
        self.assertIsNone(reader2._results)
        runner.end()
        self.assertEqual(3456, reader1._results[0])
        self.assertEqual(3456, reader2._results[0])
        # assert that the EventLoop received a ProgressReporter
        self.assertIsInstance(reader1._results[1], MockProgressReporter)
        self.assertIsInstance(reader2._results[1], MockProgressReporter)
##____________________________________________________________________________||
|
from AlphaTwirl.EventReader import MPEventLoopRunner
import unittest
##____________________________________________________________________________||
class MockReader(object):
    """Test double for an event reader: just stores whatever it is handed."""
    def __init__(self):
        self._results = None
    def setResults(self, results):
        self._results = results
    def results(self):
        return self._results
##____________________________________________________________________________||
class MockEventLoop(object):
    """Event-loop double: stamps every reader with a sentinel result."""

    def __init__(self, readers):
        self.readers = readers

    def __call__(self, progressReporter):
        # The reporter is accepted to match the real call signature but
        # deliberately ignored by this double.
        for rdr in self.readers:
            rdr._results = 3456
        return self.readers
##____________________________________________________________________________||
class TestMPEventLoopRunner(unittest.TestCase):
    """Exercise MPEventLoopRunner against mock event loops and readers."""

    def test_begin_end(self):
        # The runner must tolerate an empty begin/end cycle.
        runner = MPEventLoopRunner()
        runner.begin()
        runner.end()

    def test_run(self):
        runner = MPEventLoopRunner()
        runner.begin()
        reader1 = MockReader()
        reader2 = MockReader()
        eventLoop = MockEventLoop([reader1, reader2])
        runner.run(eventLoop)
        # Nothing is set until end() collects the results.
        self.assertIsNone(reader1._results)
        self.assertIsNone(reader2._results)
        runner.end()
        self.assertEqual(3456, reader1._results)
        self.assertEqual(3456, reader2._results)
##____________________________________________________________________________||
|
bsd-3-clause
|
Python
|
e6e0d96790d71caccb3f00487bfeeddccdc78139
|
Fix variable and return value
|
legco-watch/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,comsaint/legco-watch,legco-watch/legco-watch
|
app/raw/tasks.py
|
app/raw/tasks.py
|
from __future__ import absolute_import
from celery import shared_task
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import log, signals
from scrapy.utils.project import get_project_settings
import os
from raw.scraper.spiders.legco_library import LibraryAgendaSpider
from raw.scraper.spiders.members import LibraryMemberSpider
@shared_task
def run_scraper():
    """Celery task: crawl the LegCo library agendas into a JSON-lines feed.

    Returns the full path of the feed file that Scrapy wrote.
    """
    output_name = 'foo.jl'
    spider = LibraryAgendaSpider()
    settings = get_project_settings()
    output_path = os.path.join(settings.get('DATA_DIR_BASE'), 'scrapes', output_name)
    settings.overrides['FEED_URI'] = output_path
    crawler = Crawler(settings)
    # Stop the reactor when the spider finishes so the task can return.
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)
    crawler.start()
    log.start(loglevel=log.INFO, logstdout=True)
    # Blocks until reactor.stop() fires on spider close.
    reactor.run()
    return output_path
|
from __future__ import absolute_import
from celery import shared_task
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import log, signals
from scrapy.utils.project import get_project_settings
import os
from raw.scraper.spiders.legco_library import LibraryAgendaSpider
from raw.scraper.spiders.members import LibraryMemberSpider
@shared_task
def run_scraper():
    """Celery task: crawl the LegCo library agendas into a JSON-lines feed.

    Returns the full path of the feed file that Scrapy wrote.
    """
    output_name = 'foo.jl'
    spider = LibraryAgendaSpider()
    settings = get_project_settings()
    output_path = os.path.join(settings.get('DATA_DIR_BASE'), 'scrapes', output_name)
    settings.overrides['FEED_URI'] = output_path
    crawler = Crawler(settings)
    # Stop the reactor when the spider finishes so the task can return.
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)
    crawler.start()
    log.start(loglevel=log.INFO, logstdout=True)
    # Blocks until reactor.stop() fires on spider close.
    reactor.run()
    # Fix: return the full path that FEED_URI actually used (not just the
    # bare file name), so callers get a usable location.
    return output_path
|
mit
|
Python
|
6f06728b7ce48084f7ae5d00f96aec086509640f
|
Accelerate the presubmit check.
|
wi-ed/wi
|
presubmit.py
|
presubmit.py
|
#!/usr/bin/env python
# Copyright 2014 Marc-Antoine Ruel. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import os
import subprocess
import sys
import time
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def call(cmd, reldir):
    """Start *cmd* in ROOT_DIR/reldir, capturing stdout+stderr; returns the Popen."""
    return subprocess.Popen(
        cmd, cwd=os.path.join(ROOT_DIR, reldir),
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def errcheck(reldir):
    """Run the errcheck tool on reldir, installing it on demand the first time."""
    cmd = ['errcheck']
    try:
        return call(cmd, reldir)
    except OSError:
        # Tool not on PATH: fetch it with 'go get' and retry once.
        print('Warning: installing github.com/kisielk/errcheck')
        out = drain(call(['go', 'get', '-u', 'github.com/kisielk/errcheck'], '.'))
        if out:
            print out
        return call(cmd, reldir)
def drain(proc):
    """Wait for *proc*; return its combined output only on failure, else None."""
    out = proc.communicate()[0]
    if proc.returncode:
        return out
def main():
    """Run go tests, build and errcheck in parallel; return 0 when all pass."""
    start = time.time()
    # All children run concurrently; output is only printed on failure.
    procs = [
        call(['go', 'test'], '.'),
        call(['go', 'test'], 'wi-plugin'),
        call(['go', 'build'], 'wi-plugin-sample'),
        #call(['go', 'test'], 'wi-plugin-sample'),
        errcheck('.'),
    ]
    failed = False
    out = drain(procs.pop(0))
    if out:
        failed = True
        print out
    for p in procs:
        out = drain(p)
        if out:
            failed = True
            print out
    end = time.time()
    if failed:
        print('Presubmit checks failed in %1.3fs!' % (end-start))
        return 1
    print('Presubmit checks succeeded in %1.3fs!' % (end-start))
    return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright 2014 Marc-Antoine Ruel. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import os
import subprocess
import sys
import time
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def call(cmd, reldir):
return subprocess.Popen(
cmd, cwd=os.path.join(ROOT_DIR, reldir),
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def errcheck(reldir):
cmd = ['errcheck']
try:
return call(cmd, reldir)
except OSError:
print('Warning: installing github.com/kisielk/errcheck')
out = drain(call(['go', 'get', '-u', 'github.com/kisielk/errcheck'], '.'))
if out:
print out
return call(cmd, reldir)
def drain(proc):
out = proc.communicate()[0]
if proc.returncode:
return out
def main():
start = time.time()
# Builds all the prerequisite first, this accelerates the following calls.
# TODO(maruel): This *installs* stuff, which is definitely not going to be
# appreciated by folks, so it's likely better to remove this call.
out = drain(call(['go', 'test', '-i'], '.'))
if out:
print out
return 1
procs = [
call(['go', 'test'], '.'),
call(['go', 'test'], 'wi-plugin'),
call(['go', 'build'], 'wi-plugin-sample'),
#call(['go', 'test'], 'wi-plugin-sample'),
errcheck('.'),
]
failed = False
out = drain(procs.pop(0))
if out:
failed = True
print out
for p in procs:
out = drain(p)
if out:
failed = True
print out
end = time.time()
if failed:
print('Presubmit checks failed in %1.3fs!' % (end-start))
return 1
print('Presubmit checks succeeded in %1.3fs!' % (end-start))
return 0
if __name__ == '__main__':
sys.exit(main())
|
apache-2.0
|
Python
|
600e68fc3e4b708090f5c3349d002ea9c3d2fbf8
|
improve examples group
|
rsalmei/clearly
|
tests/examples/user_code/publisher_group.py
|
tests/examples/user_code/publisher_group.py
|
import time
from celery import chord, group
from .worker import function_aggregate, function_test
chord(
group(function_test.s(0, value=i) for i in range(1000)),
function_aggregate.s(from_chord=True)
)()
time.sleep(5)
|
import time
from celery import chord, group
from .tasks import *
chord(
group(function_value.s(0, value=i) for i in range(1000)),
function_any.s(from_chord=True)
)()
time.sleep(5)
|
mit
|
Python
|
902cbd511f2f42948991713cdf0a98c4473c66c0
|
add tqdm to hagrid setup.py
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
packages/hagrid/setup.py
|
packages/hagrid/setup.py
|
# stdlib
import platform
# third party
from setuptools import find_packages
from setuptools import setup
__version__ = "0.2.89"
DATA_FILES = {
"img": ["hagrid/img/*.png"],
}
packages = [
"ascii_magic",
"click",
"cryptography>=37.0.2",
"gitpython",
"jinja2",
"names",
"packaging>=21.3",
"paramiko",
"pyOpenSSL>=22.0.0",
"requests",
"rich",
"setuptools",
"virtualenv-api",
"virtualenv",
"PyYAML",
"tqdm",
]
if platform.system().lower() != "windows":
packages.extend(["ansible", "ansible-core"])
# Pillow binary wheels for Apple Silicon on Python 3.8 don't seem to work well
# try using Python 3.9+ for HAGrid on Apple Silicon
setup(
name="hagrid",
description="Happy Automation for Grid",
long_description="HAGrid is the swiss army knife of OpenMined's PySyft and PyGrid.",
long_description_content_type="text/plain",
version=__version__,
author="Andrew Trask <[email protected]>",
packages=find_packages(),
package_data=DATA_FILES,
install_requires=packages,
include_package_data=True,
entry_points={"console_scripts": ["hagrid = hagrid.cli:cli"]},
)
|
# stdlib
import platform
# third party
from setuptools import find_packages
from setuptools import setup
__version__ = "0.2.89"
DATA_FILES = {
"img": ["hagrid/img/*.png"],
}
packages = [
"ascii_magic",
"click",
"cryptography>=37.0.2",
"gitpython",
"jinja2",
"names",
"packaging>=21.3",
"paramiko",
"pyOpenSSL>=22.0.0",
"requests",
"rich",
"setuptools",
"virtualenv-api",
"virtualenv",
"PyYAML",
]
if platform.system().lower() != "windows":
packages.extend(["ansible", "ansible-core"])
# Pillow binary wheels for Apple Silicon on Python 3.8 don't seem to work well
# try using Python 3.9+ for HAGrid on Apple Silicon
setup(
name="hagrid",
description="Happy Automation for Grid",
long_description="HAGrid is the swiss army knife of OpenMined's PySyft and PyGrid.",
long_description_content_type="text/plain",
version=__version__,
author="Andrew Trask <[email protected]>",
packages=find_packages(),
package_data=DATA_FILES,
install_requires=packages,
include_package_data=True,
entry_points={"console_scripts": ["hagrid = hagrid.cli:cli"]},
)
|
apache-2.0
|
Python
|
10ba0ea095e4765a2d60751371f7dca8e36e2d18
|
Fix infinite loop in grit headers clobbering script.
|
mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,jaruba/chromium.src,Just-D/chromium-1,Just-D/chromium-1,chuan9/chromium-crosswalk,keishi/chromium,anirudhSK/chromium,Just-D/chromium-1,ltilve/chromium,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,patrickm/chromium.src,rogerwang/chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,rogerwang/chromium,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,M4sse/chromium.src,timopulkkinen/BubbleFish,jaruba/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,bright-sparks/chromium-spacewalk,ltilve/chromium,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,robclark/chromium,bright-sparks/chromium-spacewalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,jaruba/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,robclark/chromium,fujunwei/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,patrickm/chromium.src,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,dus
hu1203/chromium.src,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,dushu1203/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,keishi/chromium,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,markYoungH/chromium.src,rogerwang/chromium,M4sse/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,robclark/chromium,anirudhSK/chromium,keishi/chromium,zcbenz/cefode-chromium,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,Jonekee/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,nacl-webkit/chrome_deps,ltilve/chromium,keishi/chromium,hujiajie/pa-chromium,markYoungH/chromium.src,ChromiumWebApps/chromium,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,patrickm/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,littlstar/chromium.src,cro
sswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,keishi/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,rogerwang/chromium,ChromiumWebApps/chromium,Chilledheart/chromium,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,keishi/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,anirudhSK/chromium,patrickm/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,dednal/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,keishi/chromium,Fireblend/chromium-crosswalk,keishi/chromium,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,robclark/chromium,dushu1203/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,keishi/chromium,jaruba/chromium.src,littlstar/chromium.src,ltilve/chromium,zcbenz/cefode-chromium,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,ondra-novak/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,rogerwang/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,mohamed--abdel
-maksoud/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,M4sse/chromium.src,Chilledheart/chromium,robclark/chromium,Just-D/chromium-1,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,keishi/chromium,Jonekee/chromium.src,patrickm/chromium.src,robclark/chromium,Jonekee/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,keishi/chromium,markYoungH/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,dednal/chromium.src,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,dednal/chromium.src,Chilledheart/chromium,dednal/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,hujiajie/pa-chromium,ChromiumWebApps/chromium,robclark/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,robclark/chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,robclark/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,markYoungH/chromium.src,
littlstar/chromium.src,timopulkkinen/BubbleFish,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,rogerwang/chromium,patrickm/chromium.src,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,rogerwang/chromium,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,fujunwei/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,robclark/chromium,junmin-zhu/chromium-rivertrail,dednal/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,hujiajie/pa-chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,rogerwang/chromium,mogoweb/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish
|
build/win/clobber_generated_headers.py
|
build/win/clobber_generated_headers.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script helps workaround IncrediBuild problem on Windows.
# See http://crbug.com/17706.
import os
import sys
_SRC_PATH = os.path.join(os.path.dirname(__file__), '..', '..')
sys.path.append(os.path.join(_SRC_PATH, 'tools', 'grit'))
import grit.grd_reader
# We need to apply the workaround only on Windows.
if os.name != 'nt':
sys.exit(0)
def total_split(path):
components = []
while path:
head, tail = os.path.split(path)
if not tail:
break
components.append(tail)
path = head
return list(reversed(components))
for path in sys.argv[1:]:
path = os.path.join('src', path)
path_components = total_split(path)
root = grit.grd_reader.Parse(path)
output_files = [node.GetOutputFilename() for node in root.GetOutputFiles()]
output_headers = [file for file in output_files if file.endswith('.h')]
for build_type in ('Debug', 'Release'):
build_path = os.path.join(_SRC_PATH, 'chrome', build_type)
# We guess target file output based on path of the grd file (the first
# path component after 'src').
intermediate_path = os.path.join(build_path, 'obj',
'global_intermediate', path_components[1])
for header in output_headers:
full_path = os.path.join(intermediate_path, header)
try:
os.remove(full_path)
print 'Clobbered ' + full_path
except OSError:
print 'Could not remove ' + full_path + '. Continuing.'
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script helps workaround IncrediBuild problem on Windows.
# See http://crbug.com/17706.
import os
import sys
_SRC_PATH = os.path.join(os.path.dirname(__file__), '..', '..')
sys.path.append(os.path.join(_SRC_PATH, 'tools', 'grit'))
import grit.grd_reader
# We need to apply the workaround only on Windows.
if os.name != 'nt':
sys.exit(0)
def total_split(path):
components = []
while path:
head, tail = os.path.split(path)
components.append(tail)
path = head
return list(reversed(components))
for path in sys.argv[1:]:
path = os.path.join('src', path)
path_components = total_split(path)
root = grit.grd_reader.Parse(path)
output_files = [node.GetOutputFilename() for node in root.GetOutputFiles()]
output_headers = [file for file in output_files if file.endswith('.h')]
for build_type in ('Debug', 'Release'):
build_path = os.path.join(_SRC_PATH, 'chrome', build_type)
# We guess target file output based on path of the grd file (the first
# path component after 'src').
intermediate_path = os.path.join(build_path, 'obj',
'global_intermediate', path_components[1])
for header in output_headers:
full_path = os.path.join(intermediate_path, header)
try:
os.remove(full_path)
print 'Clobbered ' + full_path
except OSError:
print 'Could not remove ' + full_path + '. Continuing.'
|
bsd-3-clause
|
Python
|
e71870736959efcde2188bdcbd89838b67ca8582
|
Add AbstractSanitizer/AbstractValidator class to import path
|
thombashi/pathvalidate
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from .__version__ import __author__, __copyright__, __email__, __license__, __version__
from ._base import AbstractSanitizer, AbstractValidator
from ._common import (
Platform,
ascii_symbols,
normalize_platform,
replace_ansi_escape,
replace_unprintable_char,
unprintable_ascii_chars,
validate_null_string,
validate_pathtype,
)
from ._filename import FileNameSanitizer, is_valid_filename, sanitize_filename, validate_filename
from ._filepath import (
FilePathSanitizer,
is_valid_filepath,
sanitize_file_path,
sanitize_filepath,
validate_file_path,
validate_filepath,
)
from ._ltsv import sanitize_ltsv_label, validate_ltsv_label
from ._symbol import replace_symbol, validate_symbol
from .error import (
ErrorReason,
InvalidCharError,
InvalidLengthError,
InvalidReservedNameError,
NullNameError,
ReservedNameError,
ValidationError,
ValidReservedNameError,
)
|
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from .__version__ import __author__, __copyright__, __email__, __license__, __version__
from ._common import (
Platform,
ascii_symbols,
normalize_platform,
replace_ansi_escape,
replace_unprintable_char,
unprintable_ascii_chars,
validate_null_string,
validate_pathtype,
)
from ._filename import FileNameSanitizer, is_valid_filename, sanitize_filename, validate_filename
from ._filepath import (
FilePathSanitizer,
is_valid_filepath,
sanitize_file_path,
sanitize_filepath,
validate_file_path,
validate_filepath,
)
from ._ltsv import sanitize_ltsv_label, validate_ltsv_label
from ._symbol import replace_symbol, validate_symbol
from .error import (
ErrorReason,
InvalidCharError,
InvalidLengthError,
InvalidReservedNameError,
NullNameError,
ReservedNameError,
ValidationError,
ValidReservedNameError,
)
|
mit
|
Python
|
395617afca4d242de12e2a75a3ae7d2a258f75a7
|
use template string
|
andela-sjames/paystack-python
|
paystackapi/constants.py
|
paystackapi/constants.py
|
"""Script used to define constants used across codebase."""
PAYSTACK_SECRET_KEY = 'sk_test_0a246ef179dc841f42d20959bebdd790f69605d8'
HEADERS = {'Authorization': 'Bearer {}'}
API_URL = 'https://api.paystack.co/'
|
"""Script used to define constants used across codebase."""
PAYSTACK_SECRET_KEY = 'sk_test_0a246ef179dc841f42d20959bebdd790f69605d8'
HEADERS = {'Authorization': 'Bearer ' + PAYSTACK_SECRET_KEY}
API_URL = 'https://api.paystack.co/'
|
mit
|
Python
|
39beb9cbb3d0158dab58787cbe95651c8ec66db9
|
Bump up minor version.
|
pgexperts/patroni,zalando/patroni,pgexperts/patroni,zalando/patroni
|
patroni/version.py
|
patroni/version.py
|
__version__ = '0.76'
|
__version__ = '0.75'
|
mit
|
Python
|
d0568b2c132ebe2cdf1f656ee96442a0888257cd
|
add NSecurity class
|
MBALearnsToCode/CorpFin,MBALearnsToCode/FinSymPy,MBALearnsToCode/FinSymPy,MBALearnsToCode/CorpFin
|
CorpFin/Security.py
|
CorpFin/Security.py
|
from HelpyFuncs.SymPy import sympy_theanify
class Security:
def __init__(self, label='', bs_val=0., val=0.):
self.label = label
self.bs_val_expr = bs_val
self.bs_val = sympy_theanify(bs_val)
self.val_expr = val
self.val = sympy_theanify(val)
def __call__(self, **kwargs):
if self.label:
s = ' "%s"' % self.label
else:
s = ''
return 'Security' + s + ': BS Val = %.3g, Val = %.3g' % (self.bs_val(**kwargs), self.val(**kwargs))
DOLLAR = Security(label='$', bs_val=1., val=1.)
class NSecurity:
def __init__(self, n=1, security=DOLLAR):
self.n = n
self.security = security
|
from HelpyFuncs.SymPy import sympy_theanify
class Security:
def __init__(self, label='', bs_val=0., val=0.):
self.label = label
self.bs_val_expr = bs_val
self.bs_val = sympy_theanify(bs_val)
self.val_expr = val
self.val = sympy_theanify(val)
def __call__(self, **kwargs):
if self.label:
s = ' "%s"' % self.label
else:
s = ''
return 'Security' + s + ': BS Val = %.3g, Val = %.3g' % (self.bs_val(**kwargs), self.val(**kwargs))
DOLLAR = Security(label='$', bs_val=1., val=1.)
|
mit
|
Python
|
99818f02ebc46debe349a6c1b6bba70be6e04968
|
Update error message for no plugins
|
oew1v07/scikit-image,robintw/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,oew1v07/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,ajaybhat/scikit-image,chriscrosscutler/scikit-image,newville/scikit-image,blink1073/scikit-image,Britefury/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,GaZ3ll3/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,michaelpacer/scikit-image,emon10005/scikit-image,youprofit/scikit-image,ClinicalGraphics/scikit-image,ofgulban/scikit-image,newville/scikit-image,Britefury/scikit-image,robintw/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,paalge/scikit-image,emon10005/scikit-image,GaZ3ll3/scikit-image,michaelaye/scikit-image,Hiyorimi/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,bsipocz/scikit-image,WarrenWeckesser/scikits-image,Midafi/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,rjeli/scikit-image,pratapvardhan/scikit-image,youprofit/scikit-image,rjeli/scikit-image,warmspringwinds/scikit-image,michaelpacer/scikit-image
|
skimage/io/_plugins/null_plugin.py
|
skimage/io/_plugins/null_plugin.py
|
__all__ = ['imshow', 'imread', 'imsave', '_app_show']
import warnings
message = '''\
No plugin has been loaded. Please refer to the docstring for ``skimage.io``
for a list of available plugins. You may specify a plugin explicitly as
an argument to ``imread``, e.g. ``imread("image.jpg", plugin='pil')``.
'''
def imshow(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imread(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imsave(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
_app_show = imshow
|
__all__ = ['imshow', 'imread', 'imsave', '_app_show']
import warnings
message = '''\
No plugin has been loaded. Please refer to
skimage.io.plugins()
for a list of available plugins.'''
def imshow(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imread(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
def imsave(*args, **kwargs):
warnings.warn(RuntimeWarning(message))
_app_show = imshow
|
bsd-3-clause
|
Python
|
c9170cb4c0d63a6dc75f0fa7ca76faa688a1678a
|
Make tags optional
|
techtonik/blog.pinaxproject.com,pinax/blog.pinaxproject.com,pinax/blog.pinaxproject.com,pinax/blog.pinaxproject.com,techtonik/blog.pinaxproject.com
|
ppb/forms.py
|
ppb/forms.py
|
from pinax.blog.forms import FIELDS, AdminPostForm
from pinax.blog.models import Post
from taggit.forms import TagField
FIELDS.append("tags")
class AdminPostTagsForm(AdminPostForm):
tags = TagField(required=False)
class Meta:
model = Post
fields = FIELDS
|
from pinax.blog.forms import FIELDS, AdminPostForm
from pinax.blog.models import Post
from taggit.forms import TagField
FIELDS.append("tags")
class AdminPostTagsForm(AdminPostForm):
tags = TagField()
class Meta:
model = Post
fields = FIELDS
|
mit
|
Python
|
9ee9ba34e447e99c868fcb43d40ce905cebf5fb9
|
Add list and define functions.
|
maxdeviant/noah
|
noah/noah.py
|
noah/noah.py
|
import json
class Noah(object):
def __init__(self, dictionary_file):
self.dictionary = json.load(dictionary_file)
def list(self):
return '\n'.join([entry['word'] for entry in self.dictionary])
def define(self, word):
entry = next((x for x in self.dictionary if x['word'] == word), None)
if not entry is None:
return '%s (%s)' % (entry['word'], entry['part_of_speech'])
def main():
with open('../dictionaries/english.json') as dictionary:
n = Noah(dictionary)
print n.list()
print n.define('aardvark')
if __name__ == '__main__':
main()
|
import json
class Noah(object):
pass
|
mit
|
Python
|
0dd2bd0a8d2b041672afdf66666df63e2dd1a044
|
Add author friends url.
|
CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project
|
rest/urls.py
|
rest/urls.py
|
# Author: Braedy Kuzma
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends')
]
|
# Author: Braedy Kuzma
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments')
]
|
apache-2.0
|
Python
|
6fce2e52715f1a77edb19eca8b1133875fff3d34
|
Set HearingViewSet read Only
|
vikoivun/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi
|
kk/views/hearing.py
|
kk/views/hearing.py
|
import django_filters
from rest_framework import viewsets
from rest_framework import serializers
from rest_framework import filters
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from kk.models import Hearing
from .image import ImageFieldSerializer, ImageSerializer
class HearingFilter(django_filters.FilterSet):
next_closing = django_filters.DateTimeFilter(name='close_at', lookup_type='gt')
class Meta:
model = Hearing
fields = ['next_closing', ]
# Serializer for labels. Get label names instead of IDs.
class LabelSerializer(serializers.RelatedField):
def to_representation(self, value):
return value.label
class HearingSerializer(serializers.ModelSerializer):
labels = LabelSerializer(many=True, read_only=True)
images = ImageFieldSerializer(many=True, read_only=True)
class Meta:
model = Hearing
fields = ['abstract', 'heading', 'borough', 'n_comments', 'labels', 'close_at', 'created_at',
'latitude', 'longitude', 'servicemap_url', 'images']
class HearingViewSet(viewsets.ReadOnlyModelViewSet):
"""
API endpoint for hearings.
"""
queryset = Hearing.objects.all()
serializer_class = HearingSerializer
filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter)
#ordering_fields = ('created_at',)
#ordering = ('-created_at',)
#filter_class = HearingFilter
def get_queryset(self):
next_closing = self.request.query_params.get('next_closing', None)
if next_closing is not None:
return self.queryset.filter(close_at__gt=next_closing).order_by('close_at')[:1]
return self.queryset.order_by('-created_at')
@detail_route(methods=['get'])
def images(self, request, pk=None):
hearing = self.get_object()
images = hearing.images.all()
page = self.paginate_queryset(images)
if page is not None:
serializer = ImageSerializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = ImageSerializer(images, many=True)
return Response(serializer.data)
# temporary for query debug purpose
def _list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
print(queryset.query)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
|
import django_filters
from rest_framework import viewsets
from rest_framework import serializers
from rest_framework import filters
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from kk.models import Hearing
from .image import ImageFieldSerializer, ImageSerializer
class HearingFilter(django_filters.FilterSet):
next_closing = django_filters.DateTimeFilter(name='close_at', lookup_type='gt')
class Meta:
model = Hearing
fields = ['next_closing', ]
# Serializer for labels. Get label names instead of IDs.
class LabelSerializer(serializers.RelatedField):
def to_representation(self, value):
return value.label
class HearingSerializer(serializers.ModelSerializer):
labels = LabelSerializer(many=True, read_only=True)
images = ImageFieldSerializer(many=True, read_only=True)
class Meta:
model = Hearing
fields = ['abstract', 'heading', 'borough', 'n_comments', 'labels', 'close_at', 'created_at',
'latitude', 'longitude', 'servicemap_url', 'images']
class HearingViewSet(viewsets.ModelViewSet):
"""
API endpoint for hearings.
"""
queryset = Hearing.objects.all()
serializer_class = HearingSerializer
filter_backends = (filters.DjangoFilterBackend, filters.OrderingFilter)
#ordering_fields = ('created_at',)
#ordering = ('-created_at',)
#filter_class = HearingFilter
def get_queryset(self):
next_closing = self.request.query_params.get('next_closing', None)
if next_closing is not None:
return self.queryset.filter(close_at__gt=next_closing).order_by('close_at')[:1]
return self.queryset.order_by('-created_at')
@detail_route(methods=['get'])
def images(self, request, pk=None):
hearing = self.get_object()
images = hearing.images.all()
page = self.paginate_queryset(images)
if page is not None:
serializer = ImageSerializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = ImageSerializer(images, many=True)
return Response(serializer.data)
# temporary for query debug purpose
def _list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
print(queryset.query)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
|
mit
|
Python
|
829ddcdf0ceff4f43cf871b7438170d4e4971a70
|
Fix cyclomatic complexity problem in exception handling
|
Administrate/surveymonkey
|
surveymonkey/exceptions.py
|
surveymonkey/exceptions.py
|
# -*- coding: utf-8 -*-
class SurveyMonkeyException(Exception):
def __init__(self, response):
data = response.json()
super(SurveyMonkeyException, self).__init__(data["error"]["message"])
self.status_code = response.status_code
self.error_code = data["error"]["id"]
class SurveyMonkeyBadRequest(SurveyMonkeyException):
pass
class SurveyMonkeyAuthorizationError(SurveyMonkeyException):
pass
class SurveyMonkeyPermissionError(SurveyMonkeyException):
pass
class SurveyMonkeyResourceNotFound(SurveyMonkeyException):
pass
class SurveyMonkeyResourceConflict(SurveyMonkeyException):
pass
class SurveyMonkeyRequestEntityTooLarge(SurveyMonkeyException):
pass
class SurveyMonkeyInternalServerError(SurveyMonkeyException):
pass
class SurveyMonkeyUserSoftDeleted(SurveyMonkeyException):
pass
class SurveyMonkeyUserDeleted(SurveyMonkeyException):
pass
def response_raises(response):
    """Raise a typed SurveyMonkeyException subclass for error responses.

    A 200 response returns None; anything else raises. 404 is special-
    cased because the API uses error id "1052" to flag a soft-deleted
    user. Status codes with no specific mapping (e.g. 402, 405, 502, or
    an unexpected 3xx) previously crashed with TypeError/NameError
    because ``exception`` ended up None or unbound; they now raise the
    generic SurveyMonkeyException base instead.
    """
    def _not_found(response):
        # API error id "1052" on a 404 means the user is soft-deleted.
        if response.json()["error"]["id"] == "1052":
            return SurveyMonkeyUserSoftDeleted
        return SurveyMonkeyResourceNotFound

    def _client_error(code):
        # Returns None for unmapped 4xx codes; handled by the fallback below.
        return {
            400: SurveyMonkeyBadRequest,
            401: SurveyMonkeyAuthorizationError,
            403: SurveyMonkeyPermissionError,
            409: SurveyMonkeyResourceConflict,
            413: SurveyMonkeyRequestEntityTooLarge,
            410: SurveyMonkeyUserDeleted
        }.get(code)

    def _server_error(code):
        # Returns None for unmapped 5xx codes; handled by the fallback below.
        return {
            500: SurveyMonkeyInternalServerError,
            503: SurveyMonkeyInternalServerError
        }.get(code)

    code = response.status_code
    if code == 200:
        return
    elif code == 404:
        exception = _not_found(response)
    elif 400 <= code <= 499:
        exception = _client_error(code)
    elif 500 <= code <= 599:
        exception = _server_error(code)
    else:
        exception = None
    if exception is None:
        # No specific mapping: raise the base exception rather than crash.
        exception = SurveyMonkeyException
    raise exception(response)
|
# -*- coding: utf-8 -*-
class SurveyMonkeyException(Exception):
def __init__(self, response):
data = response.json()
super(SurveyMonkeyException, self).__init__(data["error"]["message"])
self.status_code = response.status_code
self.error_code = data["error"]["id"]
class SurveyMonkeyBadRequest(SurveyMonkeyException):
pass
class SurveyMonkeyAuthorizationError(SurveyMonkeyException):
pass
class SurveyMonkeyPermissionError(SurveyMonkeyException):
pass
class SurveyMonkeyResourceNotFound(SurveyMonkeyException):
pass
class SurveyMonkeyResourceConflict(SurveyMonkeyException):
pass
class SurveyMonkeyRequestEntityTooLarge(SurveyMonkeyException):
pass
class SurveyMonkeyInternalServerError(SurveyMonkeyException):
pass
class SurveyMonkeyUserSoftDeleted(SurveyMonkeyException):
pass
class SurveyMonkeyUserDeleted(SurveyMonkeyException):
pass
def response_raises(response):
    """Raise a typed SurveyMonkeyException subclass for error responses.

    200 returns None; mapped status codes raise their specific subclass.
    404 is special-cased: API error id "1052" signals a soft-deleted user.

    NOTE(review): unmapped status codes (e.g. 402, 502, any 3xx) fall
    through all branches and return None silently -- confirm whether a
    generic exception should be raised instead.
    """
    if response.status_code == 200:
        return
    elif response.status_code == 400:
        raise SurveyMonkeyBadRequest(response)
    elif response.status_code == 401:
        raise SurveyMonkeyAuthorizationError(response)
    elif response.status_code == 403:
        raise SurveyMonkeyPermissionError(response)
    elif response.status_code == 404:
        # Error id "1052" on a 404 means the user is soft-deleted.
        if response.json()["error"]["id"] == "1052":
            raise SurveyMonkeyUserSoftDeleted(response)
        else:
            raise SurveyMonkeyResourceNotFound(response)
    elif response.status_code == 409:
        raise SurveyMonkeyResourceConflict(response)
    elif response.status_code == 413:
        raise SurveyMonkeyRequestEntityTooLarge(response)
    elif response.status_code in [500, 503]:
        raise SurveyMonkeyInternalServerError(response)
    elif response.status_code == 410:
        raise SurveyMonkeyUserDeleted(response)
|
mit
|
Python
|
954c06d2715090e15dbe9a76dffb0eeabda06a48
|
make flake8 happy
|
INCF/pybids
|
bids/grabbids/__init__.py
|
bids/grabbids/__init__.py
|
from .bids_layout import BIDSLayout
__all__ = ["BIDSLayout"]
|
__all__ = ["bids_layout"]
|
mit
|
Python
|
93eb1fb058629f25f919a9c5f3647702c2767b22
|
test parsing nested rules and toplevel imports
|
nivertech/peru,scalp42/peru,scalp42/peru,enzochiau/peru,olson-sean-k/peru,oconnor663/peru,ierceg/peru,oconnor663/peru,buildinspace/peru,ierceg/peru,enzochiau/peru,nivertech/peru,olson-sean-k/peru,buildinspace/peru
|
peru/test/test_parser.py
|
peru/test/test_parser.py
|
from textwrap import dedent
import unittest
from peru.parser import parse_string
from peru.remote_module import RemoteModule
from peru.rule import Rule
class ParserTest(unittest.TestCase):
    """Unit tests for peru.parser.parse_string covering empty input,
    rules, modules, nested rules, and top-level imports."""
    def test_parse_empty_file(self):
        """An empty file yields an empty scope and no imports."""
        scope, local_module = parse_string("")
        self.assertDictEqual(scope, {})
        self.assertDictEqual(local_module.imports, {})
    def test_parse_rule(self):
        """A toplevel rule is parsed into a named Rule in the scope."""
        input = dedent("""\
            rule foo:
                build: echo hi
                export: out/
            """)
        scope, local_module = parse_string(input)
        self.assertIn("foo", scope)
        rule = scope["foo"]
        self.assertIsInstance(rule, Rule)
        self.assertEqual(rule.name, "foo")
        self.assertEqual(rule.build_command, "echo hi")
        self.assertEqual(rule.export, "out/")
    def test_parse_module(self):
        """A git module gets its imports and plugin fields captured."""
        input = dedent("""\
            git module foo:
                url: http://www.example.com/
                rev: abcdefg
                imports:
                    wham: bam/
                    thank: you/maam
            """)
        scope, local_module = parse_string(input)
        self.assertIn("foo", scope)
        module = scope["foo"]
        self.assertIsInstance(module, RemoteModule)
        self.assertEqual(module.name, "foo")
        self.assertDictEqual(module.imports,
                             {"wham": "bam/",
                              "thank": "you/maam"})
        self.assertDictEqual(module.plugin_fields,
                             {"url": "http://www.example.com/",
                              "rev": "abcdefg"})
    def test_parse_nested_rule(self):
        """A rule nested inside a module is exposed as '<module>.<rule>'."""
        input = dedent("""\
            git module bar:
                rule baz:
            """)
        scope, local_module = parse_string(input)
        self.assertIn("bar", scope)
        module = scope["bar"]
        self.assertIsInstance(module, RemoteModule)
        self.assertIn("bar.baz", scope)
        rule = scope["bar.baz"]
        self.assertIsInstance(rule, Rule)
    def test_parse_toplevel_imports(self):
        """Toplevel imports land on the local module, not the scope."""
        input = dedent("""\
            imports:
                foo: bar/
            """)
        scope, local_module = parse_string(input)
        self.assertDictEqual(scope, {})
        self.assertDictEqual(local_module.imports, {"foo": "bar/"})
|
from textwrap import dedent
import unittest
from peru.parser import parse_string
from peru.remote_module import RemoteModule
from peru.rule import Rule
class ParserTest(unittest.TestCase):
def test_parse_empty_file(self):
scope, local_module = parse_string("")
self.assertDictEqual(scope, {})
self.assertDictEqual(local_module.imports, {})
def test_parse_rule(self):
input = dedent("""\
rule foo:
build: echo hi
export: out/
""")
scope, local_module = parse_string(input)
self.assertIn("foo", scope)
rule = scope["foo"]
self.assertIsInstance(rule, Rule)
self.assertEqual(rule.name, "foo")
self.assertEqual(rule.build_command, "echo hi")
self.assertEqual(rule.export, "out/")
def test_parse_module(self):
input = dedent("""\
git module foo:
url: http://www.example.com/
rev: abcdefg
imports:
wham: bam/
thank: you/maam
""")
scope, local_module = parse_string(input)
self.assertIn("foo", scope)
module = scope["foo"]
self.assertIsInstance(module, RemoteModule)
self.assertEqual(module.name, "foo")
self.assertDictEqual(module.imports,
{"wham": "bam/",
"thank": "you/maam"})
self.assertDictEqual(module.plugin_fields,
{"url": "http://www.example.com/",
"rev": "abcdefg"})
|
mit
|
Python
|
8bb77e1cf4c5ec284641a178a106300db2f5575d
|
Use UTC
|
ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints
|
petitions/views.py
|
petitions/views.py
|
from django.shortcuts import render, get_object_or_404, render, redirect
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from django.db.models import F
from datetime import datetime
from petitions.models import Petition
from profile.models import Profile
def petition(request, petition_id):
    """Render the detail page for a single petition (404 if missing)."""
    petition = get_object_or_404(Petition, pk=petition_id)
    # NOTE(review): `author` is fetched but never used below -- confirm
    # whether it should be added to the template context or removed.
    author = Profile.objects.get(petitions_created=petition)
    user = request.user
    curr_user_signed = user.partner_set.filter(petitions_signed=petition).exists()
    users_signed = Profile.objects.filter(petitions_signed=petition)
    data_object = {
        'petition': petition,
        'current_user': user,
        'curr_user_signed': curr_user_signed,
        'users_signed': users_signed
    }
    # NOTE(review): the template name is built from the petition id
    # ('petition/<id>') rather than a fixed template path -- verify intent.
    return render(request, 'petition/'+str(petition_id), data_object)
@login_required
@require_POST
def petition_sign(request, petition_id):
    """Record the logged-in user's signature on a petition (POST only)."""
    petition = get_object_or_404(Petition, pk=petition_id)
    user = request.user
    user.profile.petitions_signed.add(petition)
    user.save()
    # NOTE(review): .update() is a QuerySet method, not a model-instance
    # method -- calling it on a single Petition looks like it would raise
    # AttributeError. Confirm and fix separately if so.
    petition.update(signatures=F('signatures')+1)
    petition.update(last_signed=datetime.utcnow())
    petition.save()
    return redirect('petition/sign/' + str(petition_id))
# HELPER FUNCTIONS #
# SORTING
def most_recent():
return Petition.objects.all() \
.filter(expires__gt=datetime.utcnow()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-created_at')
def most_signatures():
return Petition.objects.all() \
.filter(expires__gt=datetime.utcnow()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-signatures')
def last_signed():
    """Open, unanswered, published petitions, most recently signed first."""
    # Fix: the original used `expires_gt`, which Django rejects as an
    # unknown field; the lookup separator is double underscore (`__gt`),
    # matching the sibling helpers most_recent()/most_signatures().
    return Petition.objects.all() \
        .filter(expires__gt=datetime.utcnow()) \
        .exclude(has_response=True) \
        .filter(published=True) \
        .order_by('-last_signed')
|
from django.shortcuts import render, get_object_or_404, render, redirect
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from django.db.models import F
from datetime import datetime
from petitions.models import Petition
from profile.models import Profile
def petition(request, petition_id):
petition = get_object_or_404(Petition, pk=petition_id)
author = Profile.objects.get(petitions_created=petition)
user = request.user
curr_user_signed = user.partner_set.filter(petitions_signed=petition).exists()
users_signed = Profile.objects.filter(petitions_signed=petition)
data_object = {
'petition': petition,
'current_user': user,
'curr_user_signed': curr_user_signed,
'users_signed': users_signed
}
return render(request, '', data_object)
@login_required
@require_POST
def petition_sign(request, petition_id):
petition = get_object_or_404(Petition, pk=petition_id)
user = request.user
user.profile.petitions_signed.add(petition)
user.save()
petition.update(signatures=F('signatures')+1)
petition.update(last_signed=datetime.now())
petition.save()
return redirect('petition/' + str(petition_id))
# HELPER FUNCTIONS #
# SORTING
def most_recent():
return Petition.objects.all() \
.filter(expires__gt=datetime.now()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-created_at')
def most_signatures():
return Petition.objects.all() \
.filter(expires__gt=datetime.now()) \
.exclude(has_response=True) \
.filter(published=True) \
.order_by('-signatures')
def last_signed():
    """Open, unanswered, published petitions, most recently signed first."""
    # Fix: the original used `expires_gt`, which Django rejects as an
    # unknown field; the lookup separator is double underscore (`__gt`),
    # matching the sibling helpers most_recent()/most_signatures().
    return Petition.objects.all() \
        .filter(expires__gt=datetime.now()) \
        .exclude(has_response=True) \
        .filter(published=True) \
        .order_by('-last_signed')
|
apache-2.0
|
Python
|
03b17837ed2c88692f1b99ec5b9b477f86fdddb6
|
Update version to 2.2b4-dev
|
CatoTH/OpenSlides,tsiegleauq/OpenSlides,boehlke/OpenSlides,normanjaeckel/OpenSlides,ostcar/OpenSlides,jwinzer/OpenSlides,jwinzer/OpenSlides,ostcar/OpenSlides,emanuelschuetze/OpenSlides,normanjaeckel/OpenSlides,emanuelschuetze/OpenSlides,CatoTH/OpenSlides,OpenSlides/OpenSlides,boehlke/OpenSlides,jwinzer/OpenSlides,jwinzer/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides,FinnStutzenstein/OpenSlides,CatoTH/OpenSlides,emanuelschuetze/OpenSlides,boehlke/OpenSlides,FinnStutzenstein/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,normanjaeckel/OpenSlides,emanuelschuetze/OpenSlides,FinnStutzenstein/OpenSlides,FinnStutzenstein/OpenSlides,boehlke/OpenSlides,OpenSlides/OpenSlides,normanjaeckel/OpenSlides,tsiegleauq/OpenSlides
|
openslides/__init__.py
|
openslides/__init__.py
|
__author__ = 'OpenSlides Team <[email protected]>'
__description__ = 'Presentation and assembly system'
__version__ = '2.2b4-dev'
__license__ = 'MIT'
__url__ = 'https://openslides.org'
args = None
|
__author__ = 'OpenSlides Team <[email protected]>'
__description__ = 'Presentation and assembly system'
__version__ = '2.2b3'
__license__ = 'MIT'
__url__ = 'https://openslides.org'
args = None
|
mit
|
Python
|
356fdc5d69dadbddeb7cd064593ab31b7993a0bc
|
Use shared helper code for palevoccbot.
|
abusesa/abusehelper
|
abusehelper/contrib/abusech/palevoccbot.py
|
abusehelper/contrib/abusech/palevoccbot.py
|
"""
abuse.ch Palevo C&C feed RSS bot.
Maintainer: Lari Huttunen <[email protected]>
"""
from abusehelper.core import bot
from . import is_ip, split_description, AbuseCHFeedBot
class PalevoCcBot(AbuseCHFeedBot):
    """RSS bot for the abuse.ch Palevo C&C tracker feed.

    Feed entries are turned into events by the AbuseCHFeedBot base class
    via the parse_title/parse_description hooks below.
    """
    feeds = bot.ListParam(default=["https://palevotracker.abuse.ch/?rssfeed"])
    # If treat_as_dns_source is set, the feed ip is dropped.
    treat_as_dns_source = bot.BoolParam()
    def parse_title(self, title):
        """Yield (key, value) pairs parsed from an entry title.

        The first whitespace-separated token becomes "ip" or "host"
        depending on is_ip(); any remainder becomes "source time".
        """
        pieces = title.split(None, 1)
        host = pieces[0]  # assumes a non-empty title -- empty would IndexError
        if is_ip(host):
            yield "ip", host
        else:
            yield "host", host
        if len(pieces) > 1:
            yield "source time", pieces[1]
    def parse_description(self, description):
        """Yield (key, value) pairs from the entry description.

        Keeps the tracker status, any real SBL listing (as "sbl id"),
        and the C&C IP address unless treat_as_dns_source is set.
        """
        for key, value in split_description(description):
            if key == "status":
                yield key, value
            elif key == "sbl" and value.lower() != "not listed":
                yield key + " id", value
            elif key == "ip address" and not self.treat_as_dns_source:
                yield "ip", value
|
"""
abuse.ch Palevo C&C feed RSS bot.
Maintainer: Lari Huttunen <[email protected]>
"""
from abusehelper.core import bot, events
from abusehelper.contrib.rssbot.rssbot import RSSBot
from . import is_ip
class PalevoCcBot(RSSBot):
feeds = bot.ListParam(default=["https://palevotracker.abuse.ch/?rssfeed"])
# If treat_as_dns_source is set, the feed ip is dropped.
treat_as_dns_source = bot.BoolParam()
def create_event(self, **keys):
event = events.Event()
# handle link data
link = keys.get("link", None)
if link:
event.add("description url", link)
# handle title data
title = keys.get("title", None)
if title:
host, date = title.split()
if is_ip(host):
event.add("ip", host)
else:
event.add("host", host)
event.add("source time", date)
# handle description data
description = keys.get("description", None)
if description:
for part in description.split(","):
pair = part.split(":", 1)
if len(pair) < 2:
continue
key = pair[0].strip()
value = pair[1].strip()
if not key or not value:
continue
if key == "Status":
event.add(key.lower(), value)
elif key == "SBL" and value != "Not listed":
key = key.lower() + " id"
event.add(key, value)
elif key == "IP address":
if not self.treat_as_dns_source:
event.add("ip", value)
event.add("feed", "abuse.ch")
event.add("malware", "Palevo")
event.add("type", "c&c")
return event
if __name__ == "__main__":
PalevoCcBot.from_command_line().execute()
|
mit
|
Python
|
490230242d51d23650406085a7af92dfbb14c16d
|
Use shop ID from order
|
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
byceps/blueprints/shop/orders/views.py
|
byceps/blueprints/shop/orders/views.py
|
"""
byceps.blueprints.shop.orders.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, g
from ....services.party import service as party_service
from ....services.shop.order import service as order_service
from ....services.shop.shop import service as shop_service
from ....services.snippet.transfer.models import Scope
from ....util.framework.blueprint import create_blueprint
from ....util.framework.templating import templated
from ...authentication.decorators import login_required
from ...snippet.templating import render_snippet_as_partial
blueprint = create_blueprint('shop_orders', __name__)
@blueprint.route('')
@login_required
@templated
def index():
"""List orders placed by the current user for the current party."""
current_user = g.current_user
party = party_service.find_party(g.party_id)
shop = shop_service.find_shop_for_party(party.id)
if shop:
orders = order_service.get_orders_placed_by_user_for_shop(
current_user.id, shop.id)
else:
orders = []
return {
'party_title': party.title,
'orders': orders,
}
@blueprint.route('/<uuid:order_id>')
@login_required
@templated
def view(order_id):
    """Show a single order (if it belongs to the current user and party)."""
    current_user = g.current_user
    order = order_service.find_order_with_details(order_id)
    if order is None:
        abort(404)
    if order.placed_by_id != current_user.id:
        # Order was not placed by the current user.
        # 404 rather than 403 -- presumably to avoid leaking order
        # existence to other users; confirm before changing.
        abort(404)
    shop = shop_service.get_shop(order.shop_id)
    if shop.party_id != g.party_id:
        # Order does not belong to the current party.
        abort(404)
    template_context = {
        'order': order,
    }
    if order.is_open:
        # Only open orders show payment instructions.
        template_context['payment_instructions'] \
            = _get_payment_instructions(order)
    return template_context
def _get_payment_instructions(order):
    """Render the 'payment_instructions' snippet scoped to the order's
    shop, with the order number available in the snippet context."""
    scope = Scope('shop', str(order.shop_id))
    context = {'order_number': order.order_number}
    return render_snippet_as_partial('payment_instructions', scope=scope,
                                     context=context)
|
"""
byceps.blueprints.shop.orders.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, g
from ....services.party import service as party_service
from ....services.shop.order import service as order_service
from ....services.shop.shop import service as shop_service
from ....services.snippet.transfer.models import Scope
from ....util.framework.blueprint import create_blueprint
from ....util.framework.templating import templated
from ...authentication.decorators import login_required
from ...snippet.templating import render_snippet_as_partial
blueprint = create_blueprint('shop_orders', __name__)
@blueprint.route('')
@login_required
@templated
def index():
"""List orders placed by the current user for the current party."""
current_user = g.current_user
party = party_service.find_party(g.party_id)
shop = shop_service.find_shop_for_party(party.id)
if shop:
orders = order_service.get_orders_placed_by_user_for_shop(
current_user.id, shop.id)
else:
orders = []
return {
'party_title': party.title,
'orders': orders,
}
@blueprint.route('/<uuid:order_id>')
@login_required
@templated
def view(order_id):
"""Show a single order (if it belongs to the current user and party)."""
current_user = g.current_user
order = order_service.find_order_with_details(order_id)
if order is None:
abort(404)
if order.placed_by_id != current_user.id:
# Order was not placed by the current user.
abort(404)
shop = shop_service.get_shop(order.shop_id)
if shop.party_id != g.party_id:
# Order does not belong to the current party.
abort(404)
template_context = {
'order': order,
}
if order.is_open:
template_context['payment_instructions'] \
= _get_payment_instructions(shop.id, order.order_number)
return template_context
def _get_payment_instructions(shop_id, order_number):
scope = Scope('shop', str(shop_id))
context = {'order_number': order_number}
return render_snippet_as_partial('payment_instructions', scope=scope,
context=context)
|
bsd-3-clause
|
Python
|
652711e9a4884a31be74df6ae791e47dcd401871
|
remove deprecated test suite declarations
|
ddico/account-financial-tools,ddico/account-financial-tools
|
account_partner_required/tests/__init__.py
|
account_partner_required/tests/__init__.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account partner required module for OpenERP
# Copyright (C) 2014 Acsone (http://acsone.eu).
# @author Stéphane Bidoul <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_account_partner_required
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account partner required module for OpenERP
# Copyright (C) 2014 Acsone (http://acsone.eu).
# @author Stéphane Bidoul <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_account_partner_required
fast_suite = [
]
checks = [
test_account_partner_required,
]
|
agpl-3.0
|
Python
|
55983401814bc0e7158d213885ebdfdbc7e02e9b
|
Add dependency on the requests module and refactor
|
loarabia/DeployUtil
|
DeployUtil/authentication.py
|
DeployUtil/authentication.py
|
import requests
import json
#TODO: give an indicator of success
#TODO: handle errors a bit better.
def do_pair(ip, pin, **_args):
    """Pair with the device at `ip` using `pin` and persist the session
    cookies to 'deployUtil.cookies' (JSON) for later requests.

    Extra keyword arguments are accepted and ignored so the function can
    be called with a full argument-dict expansion.
    """
    # IF YOU DON'T DO THIS OVER HTTPS YOU WILL GET 308s to goto HTTPS
    # But we cannot verify our HTTPS cert yet because we cannot get it off
    # of all devices.
    # If the tooling gets smarter about what its talking to, then we can
    # make an educated choice.
    scheme = 'https://'
    port = ''
    api = '/api/authorize/pair?pin={pin}&persistent=0'
    request_url = scheme + ip + port + api.format_map({'pin':pin})
    with requests.Session() as session:
        # SECURITY: verify=False disables TLS certificate checking (see
        # comment above) and allows man-in-the-middle interception.
        response = session.post(request_url, verify=False)
    cookie_filename = 'deployUtil.cookies'
    cookies = requests.utils.dict_from_cookiejar(response.cookies)
    with open(cookie_filename,'w') as cookie_file:
        json.dump(cookies, cookie_file)
|
import urllib.request
import http.cookiejar
import DeployUtil.toolsession as session
#TODO: give an indicator of success
#TODO: handle errors a bit better.
def do_pair(ip, pin, **_args):
# IF YOU DON'T DO THIS OVER HTTPS YOU WILL GET 308s to goto HTTPS
scheme = 'https://'
port = ''
api = '/api/authorize/pair?pin={pin}&persistent=0'
verb = 'POST'
request_url = scheme + ip + port + api.format_map({'pin':pin})
https_handler = session.create_toolsess_httpsHandler()
request = urllib.request.Request(url=request_url, method=verb)
cookies = urllib.request.HTTPCookieProcessor(http.cookiejar.MozillaCookieJar("deployUtil.cookies"))
opener = urllib.request.build_opener(https_handler, cookies)
resp = opener.open(request)
cookies.cookiejar.save(ignore_discard=True)
|
mit
|
Python
|
40957fe0b273f92a28e0b5f27cc4a46ba5e1f2b8
|
Add coverage pragma
|
bnoi/scikit-tracker,bnoi/scikit-tracker,bnoi/scikit-tracker
|
sktracker/trajectories/__init__.py
|
sktracker/trajectories/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import logging
log = logging.getLogger(__name__)
from .trajectories import Trajectories
try: # pragma: no cover
from . import draw
__all__ = ['Trajectories', 'draw']
except ImportError: # pragma: no cover
log.warning('''Matplotlib can't be imported,'''
'''drawing module won't be available ''')
__all__ = ['Trajectories']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import logging
log = logging.getLogger(__name__)
from .trajectories import Trajectories
try:
from . import draw
__all__ = ['Trajectories', 'draw']
except ImportError:
log.warning('''Matplotlib can't be imported,'''
'''drawing module won't be available ''')
__all__ = ['Trajectories']
|
bsd-3-clause
|
Python
|
92ebaf9e1507acf1fc10f4448fc00db43508f23b
|
Allow alternate string key lookup for enums
|
tkwon/dj-stripe,dj-stripe/dj-stripe,jleclanche/dj-stripe,pydanny/dj-stripe,jleclanche/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe,pydanny/dj-stripe,tkwon/dj-stripe,jameshiew/dj-stripe,jameshiew/dj-stripe,kavdev/dj-stripe
|
djstripe/enums.py
|
djstripe/enums.py
|
from enum import Enum as _Enum
from django.utils.decorators import classproperty
class Enum(_Enum):
@classproperty
def choices(cls):
return tuple((cls.keys.get(k, k), v.value) for k, v in cls.__members__.items())
@classproperty
def keys(cls):
# Returns a mapping of key overrides.
# This allows using syntactically-incorrect values as keys,
# such as keywords ("pass") or spaces ("Diners Club").
# This cannot be an attribute, otherwise it would show up as a choice.
return {}
class CardTokenizationMethod(Enum):
apple_pay = "Apple Pay"
android_pay = "Android Pay"
|
from enum import Enum as _Enum
from django.utils.decorators import classproperty
class Enum(_Enum):
@classproperty
def choices(cls):
return tuple((k, v.value) for k, v in cls.__members__.items())
class CardTokenizationMethod(Enum):
apple_pay = "Apple Pay"
android_pay = "Android Pay"
|
mit
|
Python
|
ec5cc5d30b50e12c2e11f6206c220b4f2731e352
|
implement class
|
plepe/pgmapcss,plepe/pgmapcss
|
pgmapcss/misc/pgcache.py
|
pgmapcss/misc/pgcache.py
|
class PGCache:
    """Cache rows (pickled Python objects plus optional id/geometry) in
    the _pgmapcss_PGCache table via PL/Python's global `plpy` handle.

    Instances register themselves in the module-global PGCaches registry
    so they can later be looked up by id via get_PGCache().

    Fixes vs. original: bare `except:` clauses (which also swallow
    KeyboardInterrupt/SystemExit) narrowed to the exceptions actually
    expected, and an unused accumulator removed from cursor().
    """

    def __init__(self, id, read_id=False, read_geo=False):
        global PGCaches
        try:
            PGCaches
        except NameError:
            # First instantiation in this interpreter: create the registry.
            PGCaches = {}
        PGCaches[id] = self
        self.id = id
        self.read_id = read_id
        self.read_geo = read_geo
        # Numeric id used to partition rows of the shared cache table.
        # NOTE(review): derived from the registry size, so re-registering
        # an existing id yields a new cache_id -- confirm this is intended.
        self.cache_id = len(PGCaches)

    def add(self, data, id=None, geo=None):
        """Insert `data` (pickled) with optional id/geo columns; id/geo
        default to data['id']/data['geo'] when read_id/read_geo are set."""
        import pickle
        try:
            self.plan_add
        except AttributeError:
            # Prepare the INSERT plan lazily, once per instance.
            self.plan_add = plpy.prepare('insert into _pgmapcss_PGCache values (\'' + str(self.cache_id).replace("'", "''") + '\', $1, $2, $3)', [ 'bytea', 'text', 'geometry' ])
        if id is None and self.read_id and 'id' in data:
            id = data['id']
        if geo is None and self.read_geo and 'geo' in data:
            geo = data['geo']
        plpy.execute(self.plan_add, [ pickle.dumps(data), id, geo ])

    def get(self, id=None):
        """Yield unpickled cached objects: all rows when id is None,
        otherwise rows matching the given id or list of ids."""
        import pickle
        if id is None:
            try:
                self.plan_get
            except AttributeError:
                self.plan_get = plpy.prepare('select * from _pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''"), [])
            cursor = plpy.cursor(self.plan_get, [])
        else:
            try:
                self.plan_get_id
            except AttributeError:
                self.plan_get_id = plpy.prepare('select * from _pgmapcss_PGCache where id=ANY($1) and cache_id=' + str(self.cache_id).replace("'", "''"), ['text[]'])
            if type(id) == str:
                id = [ id ]
            cursor = plpy.cursor(self.plan_get_id, [id])
        for r in cursor:
            yield pickle.loads(r['data'])

    def prepare(self, query, param_type=[]):
        """Prepare a query; the '{table}' placeholder expands to a
        subselect over this cache's rows."""
        return plpy.prepare(query.replace('{table}', '(select data, id, geo from _pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''") + ') t'), param_type)

    def execute(self, plan, param=[]):
        """Run a prepared plan, unpickling any 'data' column; returns a list."""
        import pickle
        ret = []
        for r in plpy.execute(plan, param):
            if 'data' in r:
                r['data'] = pickle.loads(r['data'])
            ret.append(r)
        return ret

    def cursor(self, plan, param=[]):
        """Like execute(), but yields rows lazily."""
        import pickle
        for r in plpy.cursor(plan, param):
            if 'data' in r:
                r['data'] = pickle.loads(r['data'])
            yield r
def get_PGCache(id, read_id=False, read_geo=False):
    """Return the PGCache registered under `id`.

    Raises KeyError when no cache with that id exists. The read_id /
    read_geo arguments are accepted for signature compatibility with
    PGCache() but are currently unused -- this does NOT create a cache.

    Fix vs. original: the bare `except:` (which also swallows
    KeyboardInterrupt/SystemExit) is narrowed to NameError, the only
    exception the registry probe can raise.
    """
    global PGCaches
    try:
        PGCaches
    except NameError:
        PGCaches = {}
    return PGCaches[id]
|
class PGCache:
def __init__(self, id, read_id=False, read_geo=False):
global PGCaches
try:
PGCaches
except:
PGCaches = {}
PGCaches[id] = self
def add(self, data, id=None, geo=None):
pass
def get(self, id=None):
pass
def query(self, qry):
pass
def get_PGCache(id, read_id=False, read_geo=False):
global PGCaches
try:
PGCaches
except:
PGCaches = {}
return PGCaches[id]
|
agpl-3.0
|
Python
|
6094b147dccc4abf3ef23d5e54b1e23a955d6ecb
|
remove prints
|
Krozark/django-slider,Krozark/django-slider,Krozark/django-slider
|
slider/templatetags/slider_tags.py
|
slider/templatetags/slider_tags.py
|
# -*- coding: utf-8 -*-
from django import template
from slider.models import SliderImage
register = template.Library()
@register.assignment_tag
def get_slider_images(limit=False, randomize=True, slider=1):
    """Template tag: return visible images for the given slider.

    limit: maximum number of images to return (False = no limit).
    randomize: shuffle the result via order_by('?') when true.
    slider: slider to filter on (defaults to 1).
    """
    qs = SliderImage.objects.filter(is_visible=True,slider=slider)
    if randomize:
        qs = qs.order_by('?')
    if limit:
        qs = qs[0:limit]
    return qs
|
# -*- coding: utf-8 -*-
from django import template
from slider.models import SliderImage
register = template.Library()
@register.assignment_tag
def get_slider_images(limit=False, randomize=True, slider=1):
qs = SliderImage.objects.filter(is_visible=True,slider=slider)
print randomize
if randomize:
qs = qs.order_by('?')
if limit:
qs = qs[0:limit]
return qs
|
bsd-2-clause
|
Python
|
2378a64ab1e106c8f6f455a9023d350eaf627767
|
add manual
|
Impactstory/oadoi,Impactstory/sherlockoa,Impactstory/oadoi,Impactstory/sherlockoa,Impactstory/oadoi
|
oa_manual.py
|
oa_manual.py
|
from collections import defaultdict
from time import time
from util import elapsed
# things to set here:
# license, free_metadata_url, free_pdf_url
# free_fulltext_url is set automatically from free_metadata_url and free_pdf_url
def get_overrides_dict():
    """Return manually curated open-access overrides keyed by DOI.

    Each value is a dict of fields (license, free_metadata_url,
    free_pdf_url, evidence) to force for that DOI; free_fulltext_url is
    derived elsewhere from free_metadata_url / free_pdf_url.
    """
    manual_overrides = [
        # cindy wu example
        ("10.1038/nature21360", "free_pdf_url", "https://arxiv.org/pdf/1703.01424.pdf"),
        # example from twitter
        ("10.1021/acs.jproteome.5b00852", "free_pdf_url", "http://pubs.acs.org/doi/pdfplus/10.1021/acs.jproteome.5b00852"),
        # have the unpaywall example go straight to the PDF, not the metadata page
        ("10.1098/rspa.1998.0160", "free_pdf_url", "https://arxiv.org/pdf/quant-ph/9706064.pdf"),
        # missed, not in BASE, from Maha Bali in email
        ("10.1080/13562517.2014.867620", "free_pdf_url", "http://dar.aucegypt.edu/bitstream/handle/10526/4363/Final%20Maha%20Bali%20TiHE-PoD-Empowering_Sept30-13.pdf"),
        # otherwise links to figshare match that only has data, not the article
        ("10.1126/science.aaf3777", "free_pdf_url", None),
        ("10.1126/science.aaf3777", "free_metadata_url", None),
        # otherwise links to a metadata page without the PDF (copy on request only)
        ("10.1126/science.aad2622", "free_pdf_url", "https://lra.le.ac.uk/bitstream/2381/38048/6/Waters%20et%20al%20draft_post%20review_v2_clean%20copy.pdf"),
        # otherwise led to an authorization error at researchonline.mq.edu.au
        ("10.1111/j.1461-0248.2008.01185.x", "free_pdf_url", None),
        # override old-style webpage
        ("10.1210/jc.2016-2141", "free_pdf_url", "https://academic.oup.com/jcem/article-lookup/doi/10.1210/jc.2016-2141"),
        ("10.1210/jc.2016-2141", "evidence", "hybrid manual"),
        # not indexing this location yet, from @rickypo
        ("10.1207/s15327957pspr0203_4", "free_pdf_url", "http://www2.psych.ubc.ca/~schaller/528Readings/Kerr1998.pdf"),
    ]
    override_dict = defaultdict(dict)
    for doi, field, value in manual_overrides:
        override_dict[doi][field] = value
    return override_dict
|
from collections import defaultdict
from time import time
from util import elapsed
# things to set here:
# license, free_metadata_url, free_pdf_url
# free_fulltext_url is set automatically from free_metadata_url and free_pdf_url
def get_overrides_dict():
    """Return hand-curated open-access overrides, keyed by DOI.

    Each value is a dict of fields to force for that DOI (license,
    free_metadata_url, free_pdf_url; free_fulltext_url is derived
    elsewhere from the latter two).  A field set to None deliberately
    suppresses a bad automatic match.
    """
    manual_overrides = {
        # cindy wu example
        "10.1038/nature21360": {
            "free_pdf_url": "https://arxiv.org/pdf/1703.01424.pdf"},
        # example from twitter
        "10.1021/acs.jproteome.5b00852": {
            "free_pdf_url": "http://pubs.acs.org/doi/pdfplus/10.1021/acs.jproteome.5b00852"},
        # have the unpaywall example go straight to the PDF, not the metadata page
        "10.1098/rspa.1998.0160": {
            "free_pdf_url": "https://arxiv.org/pdf/quant-ph/9706064.pdf"},
        # missed, not in BASE, from Maha Bali in email
        "10.1080/13562517.2014.867620": {
            "free_pdf_url": "http://dar.aucegypt.edu/bitstream/handle/10526/4363/Final%20Maha%20Bali%20TiHE-PoD-Empowering_Sept30-13.pdf"},
        # otherwise links to figshare match that only has data, not the article
        "10.1126/science.aaf3777": {
            "free_pdf_url": None,
            "free_metadata_url": None},
        # otherwise links to a metadata page that doesn't have the PDF because
        # have to request a copy: https://openresearch-repository.anu.edu.au/handle/1885/103608
        "10.1126/science.aad2622": {
            "free_pdf_url": "https://lra.le.ac.uk/bitstream/2381/38048/6/Waters%20et%20al%20draft_post%20review_v2_clean%20copy.pdf"},
        # otherwise led to http://www.researchonline.mq.edu.au/vital/access/services/Download/mq:39727/DS01
        # and authorization error
        "10.1111/j.1461-0248.2008.01185.x": {
            "free_pdf_url": None},
        # override old-style webpage
        "10.1210/jc.2016-2141": {
            "free_pdf_url": "https://academic.oup.com/jcem/article-lookup/doi/10.1210/jc.2016-2141",
            "evidence": "hybrid manual"},
    }
    # Callers index freely by DOI, so keep the defaultdict(dict) contract:
    # unknown DOIs yield an empty dict rather than raising KeyError.
    override_dict = defaultdict(dict)
    for doi, fields in manual_overrides.items():
        override_dict[doi].update(fields)
    return override_dict
|
mit
|
Python
|
8206ea76804cf08298eeab8673b2326440aa8663
|
check for existing bonds before drawing
|
randlet/Orbis
|
orbis/gui/sketchpad.py
|
orbis/gui/sketchpad.py
|
import matplotlib
import matplotlib.patches
import numpy
import wx
from plots import Plot
#====================================================================================
class SketchPad(Plot):
    """Interactive canvas on which molecule atoms and bonds are sketched.

    A plain click adds an atom; pressing on one atom and releasing on a
    different one adds a bond between their centres (once only).
    """

    ATOM_RADIUS = 0.1      # radius of the circle drawn for each atom
    PICK_TOLERANCE = 5     # picker tolerance (points) handed to matplotlib

    def __init__(self, *args, **kwargs):
        """Create a single equal-aspect axes and reset click bookkeeping."""
        super(SketchPad, self).__init__(*args, **kwargs)
        self.axes = self.figure.add_subplot(1, 1, 1)
        self.axes.set_aspect("equal")
        self.up_atom = None
        self.down_atom = None

    def on_button_down(self, event):
        """Remember which atom (if any) the mouse press started on."""
        super(SketchPad, self).on_button_down(event)
        self.down_atom = self.atom_at_event_point(event)

    def atom_at_event_point(self, event):
        """Return the first atom patch containing *event*, or None."""
        return next(
            (patch for patch in self.axes.patches if patch.contains(event)[0]),
            None)

    def on_button_up(self, event):
        """Finish the gesture: add an atom, or a bond if one isn't there yet."""
        super(SketchPad, self).on_button_up(event)
        self.up_atom = self.atom_at_event_point(event)
        if self.new_atom_requested():
            self.add_atom()
            return
        if self.new_bond_requested() and not self.bond_exists(self.up_atom, self.down_atom):
            self.add_bond()

    def on_pick(self, event):
        """Delegate pick events to the base class."""
        super(SketchPad, self).on_pick(event)

    def new_atom_requested(self):
        """A plain click (not a pick of an existing artist) means a new atom."""
        return self.was_click() and not self.was_pick()

    def new_bond_requested(self):
        """A bond is requested when press and release hit two distinct atoms."""
        have_both_endpoints = None not in (self.up_atom, self.down_atom)
        distinct_endpoints = self.up_atom is not self.down_atom
        return have_both_endpoints and distinct_endpoints

    def bond_exists(self, atom_1, atom_2):
        """True if a line already joins the centres of *atom_1* and *atom_2*."""
        # Endpoints are sorted so direction of drawing doesn't matter.
        candidate = sorted([list(atom_1.xy), list(atom_2.xy)])
        return any(
            candidate == sorted(line.get_xydata().tolist())
            for line in self.axes.lines)

    def get_atom_locations(self):
        """returns xy points of all atoms on sketchpad"""
        return [atom.xy for atom in self.axes.patches]

    def add_atom(self):
        """Draw a new atom centred at the release point and refresh the canvas."""
        centre = (self.mouse_up_x, self.mouse_up_y)
        atom = matplotlib.patches.CirclePolygon(
            centre, self.ATOM_RADIUS, picker=self.PICK_TOLERANCE, resolution=40)
        self.axes.add_patch(atom)
        self.figure.canvas.draw()

    def add_bond(self):
        """Draw a line between the press-atom and the release-atom centres."""
        start_x, start_y = self.down_atom.xy
        end_x, end_y = self.up_atom.xy
        self.axes.plot([start_x, end_x], [start_y, end_y])
        self.figure.canvas.draw()
if __name__ == "__main__":
    # Manual smoke test: show an empty sketch pad inside a bare wx frame.
    application = wx.App()
    main_frame = wx.Frame(None)
    pad = SketchPad(main_frame)
    main_frame.Show()
    application.MainLoop()
|
import matplotlib
import matplotlib.patches
import numpy
import wx
from plots import Plot
#====================================================================================
class SketchPad(Plot):
    """Interactive canvas for sketching molecules by clicking atoms into place."""

    ATOM_RADIUS = 0.1      # width/height of the ellipse drawn for each atom
    PICK_TOLERANCE = 5     # picker tolerance (points) handed to matplotlib

    def __init__(self, *args, **kwargs):
        """Create a single equal-aspect axes to draw on."""
        super(SketchPad, self).__init__(*args, **kwargs)
        self.axes = self.figure.add_subplot(1, 1, 1)
        self.axes.set_aspect("equal")

    def on_button_up(self, event):
        """On mouse release, add an atom when the gesture was a plain click."""
        super(SketchPad, self).on_button_up(event)
        if not self.new_atom_requested():
            return
        self.add_atom()

    def new_atom_requested(self):
        """A plain click (not a pick of an existing artist) means a new atom."""
        return self.was_click() and not self.was_pick()

    def add_atom(self):
        """Draw a new atom centred at the release point and refresh the canvas."""
        centre = (self.mouse_up_x, self.mouse_up_y)
        atom = matplotlib.patches.Ellipse(
            centre, self.ATOM_RADIUS, self.ATOM_RADIUS, picker=self.PICK_TOLERANCE)
        self.axes.add_patch(atom)
        self.figure.canvas.draw()
if __name__ == "__main__":
    # Manual smoke test: show an empty sketch pad inside a bare wx frame.
    application = wx.App()
    main_frame = wx.Frame(None)
    pad = SketchPad(main_frame)
    main_frame.Show()
    application.MainLoop()
|
bsd-3-clause
|
Python
|
c55f21aa4925f6227086dedca2a3f839db98d8e1
|
implement unit tests for debug command
|
fretboardfreak/space,fretboardfreak/space
|
tests/lib/cmdline/commands/test_debug.py
|
tests/lib/cmdline/commands/test_debug.py
|
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import patch
import lib.cmdline.commands as commands
from .test_base import BaseCommandTest
class DebugTest(BaseCommandTest):
    """Unit tests for the Debug command and each of its actions."""

    def setUp(self):
        """Point the shared fixture at the Debug command and its alias."""
        super().setUp()
        self.command_class = commands.Debug
        self.alias_commands = ['do_dbg']

    # The default action is a private method, so it must be patched through
    # its name-mangled attribute.
    @patch('lib.cmdline.commands.Debug._Debug__print_state')
    def test_default_command_action(self, print_state_mock):
        """With no arguments the command falls back to printing state."""
        cmd = self.get_instance()
        cmd.do_debug('')
        self.assertTrue(print_state_mock.called)

    @patch('builtins.print')
    def test_print_state(self, print_mock):
        """--print-state prints the engine object."""
        cmd = self.get_instance()
        cmd.do_debug('--print-state')
        self.assertTrue(print_mock.called)
        print_mock.assert_called_with(cmd.engine)

    @patch('code.interact')
    def test_interactive(self, interact_mock):
        """--interact drops into an interactive interpreter."""
        cmd = self.get_instance()
        cmd.do_debug('--interact')
        self.assertTrue(interact_mock.called)

    def test_new_state(self):
        """--new-state asks the engine for a fresh game."""
        cmd = self.get_instance()
        cmd.do_debug('--new-state')
        self.assertTrue(self.mock_engine.new_game.called)
|
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import skip
import lib.cmdline.commands as commands
from .test_base import BaseCommandTest
class DebugTest(BaseCommandTest):
    """Placeholder tests for the Debug command (all skipped: not implemented)."""

    def setUp(self):
        """Point the shared fixture at the Debug command and its alias."""
        super().setUp()
        self.command_class = commands.Debug
        self.alias_commands = ['do_dbg']

    @skip('NI')
    def test_interactive(self):
        pass

    @skip('NI')
    def test_new_state(self):
        pass

    @skip('NI')
    def test_print_state(self):
        pass
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.