commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
5a33d1c5e52bf26eb90e53381a58ed89c9a1185e
|
Make 0.3.1
|
simplefin/siloscript,simplefin/siloscript,simplefin/siloscript
|
siloscript/version.py
|
siloscript/version.py
|
# Copyright (c) The SimpleFIN Team
# See LICENSE for details.
__version__ = "0.3.1"
|
# Copyright (c) The SimpleFIN Team
# See LICENSE for details.
__version__ = "0.4.0-dev"
|
apache-2.0
|
Python
|
87c7899f7ed14d64f2015ce6363bf50e7d5b5008
|
Update yle_articles collector
|
HIIT/mediacollection
|
sites/yle_articles.py
|
sites/yle_articles.py
|
import requests
def parse(api_request):
app_id = ""
app_key = ""
example_request = "https://articles.api.yle.fi/v2/articles.json?published_after=2016-12-20T12:00:00%2b0300&offset=0&limit=10"
#r = requests.get( api_request )
r = requests.get( example_request + "&app_id=" + app_id + "&app_key=" + app_key )
print r.json()
|
import requests
def parse(api_request):
app_id = "f3365695"
app_key = "7010dcef0cf2393423e747473b6068c"
example_request = "https://articles.api.yle.fi/v2/articles.json?published_after=2016-12-20T12:00:00%2b0300&offset=0&limit=10"
#r = requests.get( api_request )
r = requests.get( example_request + "&app_id=" + app_id + "&app_key=" + app_key )
print r.json()
|
mit
|
Python
|
8913a1ca25b51fc52b08187ab67a1e8763015d07
|
handle ndarray to matrix conversion
|
scikit-multilearn/scikit-multilearn
|
skmultilearn/utils.py
|
skmultilearn/utils.py
|
import numpy as np
import scipy.sparse as sp
SPARSE_FORMAT_TO_CONSTRUCTOR = {
"bsr": sp.bsr_matrix,
"coo": sp.coo_matrix,
"csc": sp.csc_matrix,
"csr": sp.csr_matrix,
"dia": sp.dia_matrix,
"dok": sp.dok_matrix,
"lil": sp.lil_matrix
}
def get_matrix_in_format(original_matrix, matrix_format):
if isinstance(original_matrix, np.ndarray):
return SPARSE_FORMAT_TO_CONSTRUCTOR[matrix_format](original_matrix)
if original_matrix.getformat() == matrix_format:
return original_matrix
return original_matrix.asformat(matrix_format)
def matrix_creation_function_for_format(sparse_format):
if sparse_format not in SPARSE_FORMAT_TO_CONSTRUCTOR:
return None
return SPARSE_FORMAT_TO_CONSTRUCTOR[sparse_format]
|
import scipy.sparse as sp
def get_matrix_in_format(original_matrix, matrix_format):
if original_matrix.getformat() == matrix_format:
return original_matrix
return original_matrix.asformat(matrix_format)
def matrix_creation_function_for_format(sparse_format):
SPARSE_FORMAT_TO_CONSTRUCTOR = {
"bsr": sp.bsr_matrix,
"coo": sp.coo_matrix,
"csc": sp.csc_matrix,
"csr": sp.csr_matrix,
"dia": sp.dia_matrix,
"dok": sp.dok_matrix,
"lil": sp.lil_matrix
}
if sparse_format not in SPARSE_FORMAT_TO_CONSTRUCTOR:
return None
return SPARSE_FORMAT_TO_CONSTRUCTOR[sparse_format]
|
bsd-2-clause
|
Python
|
ae70fccdc7fe5348e3729283866df4ed0c256beb
|
Fix tabs.
|
jettan/boot_wisp5,jettan/boot_wisp5,jettan/boot_wisp5,jettan/boot_wisp5,jettan/boot_wisp5
|
sllurp/sllurp/host.py
|
sllurp/sllurp/host.py
|
#!/usr/bin/python
from twisted.internet import reactor, defer
index = 0
# Read the hex file.
f = open("wisp_app.hex", 'r')
lines = f.readlines()
class Getter:
def processLine(self, line):
if self.d is None:
print "No callback given!"
return
d = self.d
self.d = None
global lines
global index
index = index + 1
print "Processing line " + str(index)
print line
if index < len(lines):
#d.callback(index)
else:
print "Finished!\n"
def sendLine(self, x):
global lines
global index
line = lines[index]
self.d = defer.Deferred()
# simulate a delayed result by asking the reactor to process the next line every second.
reactor.callLater(1, self.processLine, line)
self.d.addCallback(self.sendLine)
return self.d
def printData(d):
print "Entered printData()"
print d
def printError(failure):
import sys
sys.stderr.write(str(failure))
"""
Here starts the main program.
"""
g = Getter()
'''
for line in lines:
# Byte count of data in line.
print int(line[1:3],16)
# Start address to write line data.
print line[3:7]
# Record type.
print line[7:9]
# Data
print line[9:(len(line) - 3)]
'''
d = g.sendLine(index)
#d.addCallback(printData)
#d.addErrback(printError)
#reactor.callLater(4, reactor.stop)
reactor.run()
|
#!/usr/bin/python
from twisted.internet import reactor, defer
index = 0
# Read the hex file.
f = open("wisp_app.hex", 'r')
lines = f.readlines()
class Getter:
def processLine(self, line):
"""
The Deferred mechanism provides a mechanism to signal error
conditions. In this case, odd numbers are bad.
This function demonstrates a more complex way of starting
the callback chain by checking for expected results and
choosing whether to fire the callback or errback chain
"""
print "Entered processLine()\n"
if self.d is None:
print "Nowhere to put results"
return
d = self.d
self.d = None
print line
global lines
global index
index = index + 1
if index < len(lines):
d.callback(index)
else:
print "Finished!\n"
def sendLine(self, x):
"""
The Deferred mechanism allows for chained callbacks.
In this example, the output of gotResults is first
passed through _toHTML on its way to printData.
Again this function is a dummy, simulating a delayed result
using callLater, rather than using a real asynchronous
setup.
"""
print "Entered sendLine()\n"
global lines
global index
line = lines[index]
self.d = defer.Deferred()
# simulate a delayed result by asking the reactor to schedule
# gotResults in 2 seconds time
reactor.callLater(1, self.processLine, line)
self.d.addCallback(self.sendLine)
return self.d
def printData(d):
print "Entered printData()"
print d
def printError(failure):
import sys
sys.stderr.write(str(failure))
"""
Here starts the main program.
"""
g = Getter()
'''
for line in lines:
# Byte count of data in line.
print int(line[1:3],16)
# Start address to write line data.
print line[3:7]
# Record type.
print line[7:9]
# Data
print line[9:(len(line) - 3)]
'''
d = g.sendLine(index)
#d.addCallback(printData)
#d.addErrback(printError)
#reactor.callLater(4, reactor.stop)
reactor.run()
|
bsd-2-clause
|
Python
|
374981fd60c0116e861d598eec763cc2b8165189
|
Add -- to git log call
|
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
|
cs251tk/common/check_submit_date.py
|
cs251tk/common/check_submit_date.py
|
import os
from dateutil.parser import parse
from ..common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601', '--',
os.path.join(basedir, file['filename'])])
# If we didn't get an error and got an output, add date to array
if status == 'success' and res:
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
if not dates:
return "ERROR"
return min(dates).strftime("%x %X")
|
import os
from dateutil.parser import parse
from ..common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error and got an output, add date to array
if status == 'success' and res:
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
if not dates:
return "ERROR"
return min(dates).strftime("%x %X")
|
mit
|
Python
|
30d9d45612b760e2ce6c2d90e516ba8de58a0c12
|
put start_date back in ordering fields
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
api/v2/views/instance_history.py
|
api/v2/views/instance_history.py
|
from django.db.models import Q
from rest_framework import filters
import django_filters
from core.models import InstanceStatusHistory
from api.v2.serializers.details import InstanceStatusHistorySerializer
from api.v2.views.base import AuthReadOnlyViewSet
from api.v2.views.mixins import MultipleFieldLookup
class InstanceStatusHistoryFilter(django_filters.FilterSet):
instance = django_filters.MethodFilter(action='filter_instance_id')
created_by = django_filters.CharFilter('instance__created_by__username')
def filter_instance_id(self, queryset, value):
try:
int_val = int(value)
return queryset.filter(
Q(instance__provider_alias=int_val)
| Q(instance_id=int_val))
except ValueError:
#Dealing with a UUID
return queryset.filter(instance__provider_alias=value)
class Meta:
model = InstanceStatusHistory
fields = ['instance', 'created_by']
class InstanceStatusHistoryViewSet(MultipleFieldLookup, AuthReadOnlyViewSet):
"""
API endpoint that allows instance tags to be viewed
"""
queryset = InstanceStatusHistory.objects.all()
serializer_class = InstanceStatusHistorySerializer
ordering = ('-instance__start_date', 'instance__id')
ordering_fields = ('start_date', 'instance__id')
lookup_fields = ("id", "uuid")
filter_class = InstanceStatusHistoryFilter
filter_backends = (filters.OrderingFilter, filters.DjangoFilterBackend)
def get_queryset(self):
"""
Filter out tags for deleted instances
"""
user_id = self.request.user.id
if self.request.query_params.get('unique', "").lower() == 'true':
# filtering distinct instance__start_date effectively gives us a unique instance list. Also the order of fields in distinct()
# must match the order of fields in ordering set above
return InstanceStatusHistory.objects.filter(instance__created_by_id=user_id).distinct('instance__start_date')
return InstanceStatusHistory.objects.filter(instance__created_by_id=user_id)
|
from django.db.models import Q
from rest_framework import filters
import django_filters
from core.models import InstanceStatusHistory
from api.v2.serializers.details import InstanceStatusHistorySerializer
from api.v2.views.base import AuthReadOnlyViewSet
from api.v2.views.mixins import MultipleFieldLookup
class InstanceStatusHistoryFilter(django_filters.FilterSet):
instance = django_filters.MethodFilter(action='filter_instance_id')
created_by = django_filters.CharFilter('instance__created_by__username')
def filter_instance_id(self, queryset, value):
try:
int_val = int(value)
return queryset.filter(
Q(instance__provider_alias=int_val)
| Q(instance_id=int_val))
except ValueError:
#Dealing with a UUID
return queryset.filter(instance__provider_alias=value)
class Meta:
model = InstanceStatusHistory
fields = ['instance', 'created_by']
class InstanceStatusHistoryViewSet(MultipleFieldLookup, AuthReadOnlyViewSet):
"""
API endpoint that allows instance tags to be viewed
"""
queryset = InstanceStatusHistory.objects.all()
serializer_class = InstanceStatusHistorySerializer
ordering = ('-instance__start_date', 'instance__id')
ordering_fields = ('-instance__start_date', 'instance__id')
lookup_fields = ("id", "uuid")
filter_class = InstanceStatusHistoryFilter
filter_backends = (filters.OrderingFilter, filters.DjangoFilterBackend)
def get_queryset(self):
"""
Filter out tags for deleted instances
"""
user_id = self.request.user.id
if self.request.query_params.get('unique', "").lower() == 'true':
# filtering distinct instance__start_date effectively gives us a unique instance list. Also the order of fields in distinct()
# must match the order of fields in ordering set above
return InstanceStatusHistory.objects.filter(instance__created_by_id=user_id).distinct('instance__start_date')
return InstanceStatusHistory.objects.filter(instance__created_by_id=user_id)
|
apache-2.0
|
Python
|
a6ab9d8af09eace392a3d9320eb46af4ec6394c9
|
add test_report_progress()
|
alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl
|
tests/unit/loop/test_EventLoop.py
|
tests/unit/loop/test_EventLoop.py
|
# Tai Sakuma <[email protected]>
import sys
import pytest
try:
import unittest.mock as mock
except ImportError:
import mock
from alphatwirl.loop import EventLoop
from alphatwirl import progressbar
##__________________________________________________________________||
@pytest.fixture()
def events():
event1 = mock.Mock(name='event1')
event2 = mock.Mock(name='event2')
event3 = mock.Mock(name='event3')
return [event1, event2, event3]
@pytest.fixture()
def build_events(events):
ret = mock.Mock()
ret.return_value = events
return ret
@pytest.fixture()
def reader():
return mock.Mock()
@pytest.fixture()
def obj(build_events, reader):
return EventLoop(build_events, reader)
##__________________________________________________________________||
def test_name(build_events, reader):
obj = EventLoop(build_events, reader)
assert 'EventLoop' == obj.name
obj = EventLoop(build_events, reader, name='TTJets')
assert 'TTJets' == obj.name
def test_repr(obj):
repr(obj)
def test_call(obj, events, reader):
assert reader == obj()
assert [
mock.call.begin(events),
mock.call.event(events[0]),
mock.call.event(events[1]),
mock.call.event(events[2]),
mock.call.end()] == reader.method_calls
##__________________________________________________________________||
@pytest.fixture()
def report_progress(monkeypatch):
ret = mock.Mock()
module = sys.modules['alphatwirl.progressbar']
monkeypatch.setattr(module, 'report_progress', ret)
return ret
@pytest.fixture()
def ProgressReport(monkeypatch):
ret = mock.Mock()
module = sys.modules['alphatwirl.loop.EventLoop']
monkeypatch.setattr(module, 'ProgressReport', ret)
return ret
def test_report_progress(obj, report_progress, ProgressReport):
obj()
expected = [
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=0, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=1, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=2, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=3, total=3))
]
actual = report_progress.call_args_list
assert expected[0] == actual[0]
##__________________________________________________________________||
|
# Tai Sakuma <[email protected]>
import logging
import pytest
try:
import unittest.mock as mock
except ImportError:
import mock
from alphatwirl.loop import EventLoop
##__________________________________________________________________||
@pytest.fixture()
def events():
event1 = mock.Mock(name='event1')
event2 = mock.Mock(name='event2')
event3 = mock.Mock(name='event3')
return [event1, event2, event3]
@pytest.fixture()
def build_events(events):
ret = mock.Mock()
ret.return_value = events
return ret
@pytest.fixture()
def reader():
return mock.Mock()
@pytest.fixture()
def obj(build_events, reader):
return EventLoop(build_events, reader)
##__________________________________________________________________||
def test_name(build_events, reader):
obj = EventLoop(build_events, reader)
assert 'EventLoop' == obj.name
obj = EventLoop(build_events, reader, name='TTJets')
assert 'TTJets' == obj.name
def test_repr(obj):
repr(obj)
def test_call(obj, events, reader):
assert reader == obj()
assert [
mock.call.begin(events),
mock.call.event(events[0]),
mock.call.event(events[1]),
mock.call.event(events[2]),
mock.call.end()] == reader.method_calls
##__________________________________________________________________||
@pytest.fixture()
def report_progress(monkeypatch):
ret = mock.Mock()
module = sys.modules['alphatwirl.progressbar']
monkeypatch.setattr(module, 'report_progress', ret)
return ret
@pytest.fixture()
def ProgressReport(monkeypatch):
ret = mock.Mock()
module = sys.modules['alphatwirl.loop.EventLoop']
monkeypatch.setattr(module, 'ProgressReport', ret)
return ret
def test_report_progress(obj, report_progress, ProgressReport):
obj()
expected = [
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=0, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=1, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=2, total=3)),
mock.call(ProgressReport(taskid=obj.taskid, name='EventLoop', done=3, total=3))
]
actual = report_progress.call_args_list
assert expected[0] == actual[0]
##__________________________________________________________________||
|
bsd-3-clause
|
Python
|
042f005b8e22f9d8844e0c16329598ca13eb4567
|
Update formatting for better compatibility with unit tests.
|
stdweird/aquilon,stdweird/aquilon,stdweird/aquilon,guillaume-philippon/aquilon,quattor/aquilon,quattor/aquilon,guillaume-philippon/aquilon,quattor/aquilon,guillaume-philippon/aquilon
|
lib/python2.5/aquilon/aqdb/utils/table_admin.py
|
lib/python2.5/aquilon/aqdb/utils/table_admin.py
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
""" A collection of table level functions for maintenance """
from confirm import confirm
import sys
import os
if __name__ == '__main__':
DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.realpath(os.path.join(DIR, '..', '..', '..')))
import aquilon.aqdb.depends
from sqlalchemy import text
def get_table_list_from_db(engine):
"""
return a list of table names from the current
databases public schema
"""
sql='select table_name from user_tables'
execute = engine.execute
return [name for (name, ) in execute(text(sql))]
def get_seq_list_from_db(engine):
"""return a list of the sequence names from the current
databases public schema
"""
sql='select sequence_name from user_sequences'
execute = engine.execute
return [name for (name, ) in execute(text(sql))]
def drop_all_tables_and_sequences(db,option=None):
""" MetaData.drop_all() doesn't play nice with db's that have sequences.
you're alternative is to call this"""
if not db.dsn.startswith('ora'):
pass
if db.dsn.find('NYPO_AQUILON') >= 0:
sys.stderr.write(
'your DSN is on the production database, not permitted \n')
sys.exit(9)
msg = ("\nYou've asked to wipe out the \n%s\ndatabase. Please confirm."
% db.dsn)
if confirm(prompt=msg, resp=False):
execute = db.engine.execute
for table in get_table_list_from_db(db.engine):
try:
execute(text('DROP TABLE %s CASCADE CONSTRAINTS' %(table)))
except SQLError, e:
print >> sys.stderr, e
for seq in get_seq_list_from_db(db.engine):
try:
execute(text('DROP SEQUENCE %s'%(seq)))
except SQLError, e:
print >> sys.stderr, e
try:
execute(text('PURGE RECYCLEBIN'))
except SQLError, e:
print >> sys.stderr, e
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
""" A collection of table level functions for maintenance """
from confirm import confirm
import sys
import os
if __name__ == '__main__':
DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.realpath(os.path.join(DIR, '..', '..', '..')))
import aquilon.aqdb.depends
from sqlalchemy import text
def get_table_list_from_db(engine):
"""
return a list of table names from the current
databases public schema
"""
sql='select table_name from user_tables'
execute = engine.execute
return [name for (name, ) in execute(text(sql))]
def get_seq_list_from_db(engine):
"""return a list of the sequence names from the current
databases public schema
"""
sql='select sequence_name from user_sequences'
execute = engine.execute
return [name for (name, ) in execute(text(sql))]
def drop_all_tables_and_sequences(db,option=None):
""" MetaData.drop_all() doesn't play nice with db's that have sequences.
you're alternative is to call this"""
if not db.dsn.startswith('ora'):
pass
if db.dsn.endswith('@LNPO_AQUILON_NY'):
sys.stderr.write(
'your DSN is on the production database, not permitted \n')
sys.exit(9)
msg="You've asked to wipe out the %s database. Please confirm."%(db.dsn)
if confirm(prompt=msg, resp=False):
execute = db.engine.execute
for table in get_table_list_from_db(db.engine):
try:
execute(text('DROP TABLE %s CASCADE CONSTRAINTS' %(table)))
except SQLError, e:
print >> sys.stderr, e
for seq in get_seq_list_from_db(db.engine):
try:
execute(text('DROP SEQUENCE %s'%(seq)))
except SQLError, e:
print >> sys.stderr, e
try:
execute(text('PURGE RECYCLEBIN'))
except SQLError, e:
print >> sys.stderr, e
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
apache-2.0
|
Python
|
1c7b881b62f9d1931957322874f6c38d610c2cf4
|
Rename setting to match name of app
|
Perkville/django-append-url-to-sql,playfire/django-append-url-to-sql,lamby/django-append-url-to-sql
|
append_url_to_sql/models.py
|
append_url_to_sql/models.py
|
"""
:mod:`django-append-url-to-sql` --- Appends the request URL to SQL statements in Django
=======================================================================================
Whilst the `Django Debug Toolbar
<https://github.com/robhudson/django-debug-toolbar>`_ is invaluable for
development in a local environment, it cannot help you identify misbehaving
queries in production. To assist in this task, ``django-append-url-to-sql``
appends the request URL as a comment to every SQL statement that is executed.
For example::
SELECT "auth_user"."id", [..] WHERE "auth_user"."id" = 1 -- /login
This makes it possible to go from ``SELECT * FROM pg_stat_activity`` or ``SHOW
PROCESSLIST`` output to the view that is executing it.
If the current request URL cannot be determined, nothing is appended.
Installation
------------
1. Add ``append_url_to_sql`` to your ``INSTALLED_APPS``::
INSTALLED_APPS = (
...
'append_url_to_sql',
...
)
Configuration
-------------
``APPEND_URL_TO_SQL_ENABLED``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Default: ``True``
Use this setting to disable this functionality without having to remove the
application. This can be used to only append the SQL code only in specific
environments.
Links
-----
View/download code
https://github.com/playfire/django-append-url-to-sql
File a bug
https://github.com/playfire/django-append-url-to-sql/issues
"""
import sys
from django.conf import settings
from django.http import HttpRequest
from django.db.backends import util, BaseDatabaseWrapper
class CursorWrapper(util.CursorDebugWrapper):
def execute(self, sql, *args):
f = sys._getframe()
while f:
request = f.f_locals.get('request')
if isinstance(request, HttpRequest):
sql += ' -- %s' % repr(request.path)[2:-1].replace('%', '%%')
break
f = f.f_back
return self.cursor.execute(sql, *args)
if getattr(settings, 'APPEND_URL_TO_SQL_ENABLED', True):
old_cursor = BaseDatabaseWrapper.cursor
def cursor(self, *args, **kwargs):
return CursorWrapper(old_cursor(self, *args, **kwargs), self)
BaseDatabaseWrapper.cursor = cursor
|
"""
:mod:`django-append-url-to-sql` --- Appends the request URL to SQL statements in Django
=======================================================================================
Whilst the `Django Debug Toolbar
<https://github.com/robhudson/django-debug-toolbar>`_ is invaluable for
development in a local environment, it cannot help you identify misbehaving
queries in production. To assist in this task, ``django-append-url-to-sql``
appends the request URL as a comment to every SQL statement that is executed.
For example::
SELECT "auth_user"."id", [..] WHERE "auth_user"."id" = 1 -- /login
This makes it possible to go from ``SELECT * FROM pg_stat_activity`` or ``SHOW
PROCESSLIST`` output to the view that is executing it.
If the current request URL cannot be determined, nothing is appended.
Installation
------------
1. Add ``append_url_to_sql`` to your ``INSTALLED_APPS``::
INSTALLED_APPS = (
...
'append_url_to_sql',
...
)
Configuration
-------------
``APPEND_REQUEST_URL_TO_SQL_ENABLED``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Default: ``True``
Use this setting to disable this functionality without having to remove the
application. This can be used to only append the SQL code only in specific
environments.
Links
-----
View/download code
https://github.com/playfire/django-append-url-to-sql
File a bug
https://github.com/playfire/django-append-url-to-sql/issues
"""
import sys
from django.conf import settings
from django.http import HttpRequest
from django.db.backends import util, BaseDatabaseWrapper
class CursorWrapper(util.CursorDebugWrapper):
def execute(self, sql, *args):
f = sys._getframe()
while f:
request = f.f_locals.get('request')
if isinstance(request, HttpRequest):
sql += ' -- %s' % repr(request.path)[2:-1].replace('%', '%%')
break
f = f.f_back
return self.cursor.execute(sql, *args)
if getattr(settings, 'APPEND_REQUEST_URL_TO_SQL_ENABLED', True):
old_cursor = BaseDatabaseWrapper.cursor
def cursor(self, *args, **kwargs):
return CursorWrapper(old_cursor(self, *args, **kwargs), self)
BaseDatabaseWrapper.cursor = cursor
|
bsd-3-clause
|
Python
|
8f3b16e23bc29465d9b6cfc5e9afbe9c7e8727df
|
Bump to 1.2.0
|
kivy/pyjnius,kivy/pyjnius,kivy/pyjnius
|
jnius/__init__.py
|
jnius/__init__.py
|
'''
Pyjnius
=======
Accessing Java classes from Python.
All the documentation is available at: http://pyjnius.readthedocs.org
'''
__version__ = '1.2.0'
from .jnius import * # noqa
from .reflect import * # noqa
from six import with_metaclass
# XXX monkey patch methods that cannot be in cython.
# Cython doesn't allow to set new attribute on methods it compiled
HASHCODE_MAX = 2 ** 31 - 1
class PythonJavaClass_(with_metaclass(MetaJavaBase, PythonJavaClass)):
@java_method('()I', name='hashCode')
def hashCode(self):
return id(self) % HASHCODE_MAX
@java_method('()Ljava/lang/String;', name='hashCode')
def hashCode_(self):
return '{}'.format(self.hashCode())
@java_method('()Ljava/lang/String;', name='toString')
def toString(self):
return repr(self)
@java_method('(Ljava/lang/Object;)Z', name='equals')
def equals(self, other):
return self.hashCode() == other.hashCode()
PythonJavaClass = PythonJavaClass_
# from https://gist.github.com/tito/09c42fb4767721dc323d
import os
if "ANDROID_ARGUMENT" in os.environ:
# on android, catch all exception to ensure about a jnius.detach
import threading
import jnius
orig_thread_run = threading.Thread.run
def jnius_thread_hook(*args, **kwargs):
try:
return orig_thread_run(*args, **kwargs)
finally:
jnius.detach()
threading.Thread.run = jnius_thread_hook
|
'''
Pyjnius
=======
Accessing Java classes from Python.
All the documentation is available at: http://pyjnius.readthedocs.org
'''
__version__ = '1.1.5.dev0'
from .jnius import * # noqa
from .reflect import * # noqa
from six import with_metaclass
# XXX monkey patch methods that cannot be in cython.
# Cython doesn't allow to set new attribute on methods it compiled
HASHCODE_MAX = 2 ** 31 - 1
class PythonJavaClass_(with_metaclass(MetaJavaBase, PythonJavaClass)):
@java_method('()I', name='hashCode')
def hashCode(self):
return id(self) % HASHCODE_MAX
@java_method('()Ljava/lang/String;', name='hashCode')
def hashCode_(self):
return '{}'.format(self.hashCode())
@java_method('()Ljava/lang/String;', name='toString')
def toString(self):
return repr(self)
@java_method('(Ljava/lang/Object;)Z', name='equals')
def equals(self, other):
return self.hashCode() == other.hashCode()
PythonJavaClass = PythonJavaClass_
# from https://gist.github.com/tito/09c42fb4767721dc323d
import os
if "ANDROID_ARGUMENT" in os.environ:
# on android, catch all exception to ensure about a jnius.detach
import threading
import jnius
orig_thread_run = threading.Thread.run
def jnius_thread_hook(*args, **kwargs):
try:
return orig_thread_run(*args, **kwargs)
finally:
jnius.detach()
threading.Thread.run = jnius_thread_hook
|
mit
|
Python
|
d4479df64a2fd7a53327bc3ce79e48ec4cc30efc
|
Update course forum url
|
Kaggle/learntools,Kaggle/learntools
|
notebooks/feature_engineering_new/track_meta.py
|
notebooks/feature_engineering_new/track_meta.py
|
track = dict(
author_username="ryanholbrook",
course_name="Feature Engineering",
course_url="https://www.kaggle.com/learn/feature-engineering",
course_forum_url="https://www.kaggle.com/learn-forum/221677",
)
TOPICS = [
"What is Feature Engineering", # 1
"Mutual Information", # 2
"Creating Features", # 3
"Clustering with k-Means", # 4
"Principal Component Analysis", # 5
"Target Encoding", # 6
"Feature Engineering for House Prices", # Bonus
]
lessons = [{"topic": topic_name} for topic_name in TOPICS]
notebooks = [
dict(filename="tut1.ipynb", lesson_idx=0, type="tutorial"),
#dict(filename="ex1.ipynb", lesson_idx=0, type="exercise", scriptid=14393915),
dict(filename="tut2.ipynb", lesson_idx=1, type="tutorial"),
dict(filename="ex2.ipynb", lesson_idx=1, type="exercise", scriptid=14393925),
dict(filename="tut3.ipynb", lesson_idx=2, type="tutorial"),
dict(filename="ex3.ipynb", lesson_idx=2, type="exercise", scriptid=14393912),
dict(filename="tut4.ipynb", lesson_idx=3, type="tutorial"),
dict(filename="ex4.ipynb", lesson_idx=3, type="exercise", scriptid=14393920),
dict(filename="tut5.ipynb", lesson_idx=4, type="tutorial"),
dict(filename="ex5.ipynb", lesson_idx=4, type="exercise", scriptid=14393921),
dict(filename="tut6.ipynb", lesson_idx=5, type="tutorial"),
dict(filename="ex6.ipynb", lesson_idx=5, type="exercise", scriptid=14393917),
]
for nb in notebooks:
nb["dataset_sources"] = ["ryanholbrook/fe-course-data"]
notebooks.append(
dict(
filename="tut_bonus.ipynb",
lesson_idx=6,
type="tutorial",
competition_sources=["house-prices-advanced-regression-techniques"],
)
)
|
track = dict(
author_username="ryanholbrook",
course_name="Feature Engineering",
course_url="https://www.kaggle.com/learn/feature-engineering",
course_forum_url="https://www.kaggle.com/learn-forum/161443",
)
TOPICS = [
"What is Feature Engineering", # 1
"Mutual Information", # 2
"Creating Features", # 3
"Clustering with k-Means", # 4
"Principal Component Analysis", # 5
"Target Encoding", # 6
"Feature Engineering for House Prices", # Bonus
]
lessons = [{"topic": topic_name} for topic_name in TOPICS]
notebooks = [
dict(filename="tut1.ipynb", lesson_idx=0, type="tutorial"),
#dict(filename="ex1.ipynb", lesson_idx=0, type="exercise", scriptid=14393915),
dict(filename="tut2.ipynb", lesson_idx=1, type="tutorial"),
dict(filename="ex2.ipynb", lesson_idx=1, type="exercise", scriptid=14393925),
dict(filename="tut3.ipynb", lesson_idx=2, type="tutorial"),
dict(filename="ex3.ipynb", lesson_idx=2, type="exercise", scriptid=14393912),
dict(filename="tut4.ipynb", lesson_idx=3, type="tutorial"),
dict(filename="ex4.ipynb", lesson_idx=3, type="exercise", scriptid=14393920),
dict(filename="tut5.ipynb", lesson_idx=4, type="tutorial"),
dict(filename="ex5.ipynb", lesson_idx=4, type="exercise", scriptid=14393921),
dict(filename="tut6.ipynb", lesson_idx=5, type="tutorial"),
dict(filename="ex6.ipynb", lesson_idx=5, type="exercise", scriptid=14393917),
]
for nb in notebooks:
nb["dataset_sources"] = ["ryanholbrook/fe-course-data"]
notebooks.append(
dict(
filename="tut_bonus.ipynb",
lesson_idx=6,
type="tutorial",
competition_sources=["house-prices-advanced-regression-techniques"],
)
)
|
apache-2.0
|
Python
|
8de0c35cccc316a6e9bc6dc9cff04d37e6c975a9
|
Update track_meta
|
Kaggle/learntools,Kaggle/learntools
|
notebooks/feature_engineering_new/track_meta.py
|
notebooks/feature_engineering_new/track_meta.py
|
track = dict(
author_username='',
course_name="Feature Engineering",
course_url='https://www.kaggle.com/learn/feature-engineering',
course_forum_url='https://www.kaggle.com/learn-forum/',
)
TOPICS = ["What is Feature Engineering?", # 1
"Polynomial and Interaction Features", # 2
"Principal Components Analysis", # 3
"Feature Hashing", # 4
"Target Encoding", # 5
"Feature Selection", # 6
]
lessons = [{'topic': topic_name} for topic_name in TOPICS]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
scriptid=1,
),
dict(
filename='ex1.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
scriptid=1,
),
dict(
filename='ex2.ipynb',
lesson_idx=1,
type='exercise',
scriptid=1,
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
scriptid=1,
),
dict(
filename='ex3.ipynb',
lesson_idx=2,
type='exercise',
scriptid=1,
),
dict(
filename='tut4.ipynb',
lesson_idx=3,
type='tutorial',
scriptid=1,
),
dict(
filename='ex4.ipynb',
lesson_idx=3,
type='exercise',
scriptid=1,
),
dict(
filename='tut5.ipynb',
lesson_idx=4,
type='tutorial',
scriptid=1,
),
dict(
filename='ex5.ipynb',
lesson_idx=4,
type='exercise',
scriptid=1,
),
dict(
filename='tut6.ipynb',
lesson_idx=5,
type='tutorial',
scriptid=1,
),
dict(
filename='ex6.ipynb',
lesson_idx=5,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ['ryanholbrook/fe-course-data']
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='',
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='exemplar examples',
),
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
scriptid=1,
),
dict(
filename='ex1.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
|
apache-2.0
|
Python
|
957a381f81baf8cb9f7f4e3cbd8f437f1dbf858c
|
Use 'config' instead 'device' to speficy device type for session creation
|
tqchen/tinyflow,tqchen/tinyflow,tqchen/tinyflow
|
example/mnist_mlp_auto_shape_inference.py
|
example/mnist_mlp_auto_shape_inference.py
|
"""TinyFlow Example code.
Automatic variable creation and shape inductions.
The network structure is directly specified via forward node numbers
The variables are automatically created, and their shape infered by tf.infer_variable_shapes
"""
import tinyflow as tf
from tinyflow.datasets import get_mnist
# Create the model
x = tf.placeholder(tf.float32)
fc1 = tf.nn.linear(x, num_hidden=100, name="fc1", no_bias=False)
relu1 = tf.nn.relu(fc1)
fc2 = tf.nn.linear(relu1, num_hidden=10, name="fc2")
# define loss
label = tf.placeholder(tf.float32)
cross_entropy = tf.nn.mean_sparse_softmax_cross_entropy_with_logits(fc2, label)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.Session(config='gpu')
# Automatic variable shape inference API, infers the shape and initialize the weights.
known_shape = {x: [100, 28 * 28], label: [100]}
init_step = []
for v, name, shape in tf.infer_variable_shapes(
cross_entropy, feed_dict=known_shape):
init_step.append(tf.assign(v, tf.normal(shape)))
print("shape[%s]=%s" % (name, str(shape)))
sess.run(init_step)
# get the mnist dataset
mnist = get_mnist(flatten=True, onehot=False)
print_period = 1000
for epoch in range(10):
sum_loss = 0.0
num_batch = 600
for i in range(num_batch):
batch_xs, batch_ys = mnist.train.next_batch(100)
loss, _ = sess.run([cross_entropy, train_step], feed_dict={x: batch_xs, label:batch_ys})
sum_loss += loss
print("epoch[%d] cross_entropy=%g" % (epoch, sum_loss /num_batch))
correct_prediction = tf.equal(tf.argmax(fc2, 1), label)
accuracy = tf.reduce_mean(correct_prediction)
print(sess.run(accuracy, feed_dict={x: mnist.test.images, label: mnist.test.labels}))
|
"""TinyFlow Example code.
Automatic variable creation and shape inductions.
The network structure is directly specified via forward node numbers
The variables are automatically created, and their shape infered by tf.infer_variable_shapes
"""
import tinyflow as tf
from tinyflow.datasets import get_mnist
# Create the model
x = tf.placeholder(tf.float32)
fc1 = tf.nn.linear(x, num_hidden=100, name="fc1", no_bias=False)
relu1 = tf.nn.relu(fc1)
fc2 = tf.nn.linear(relu1, num_hidden=10, name="fc2")
# define loss
label = tf.placeholder(tf.float32)
cross_entropy = tf.nn.mean_sparse_softmax_cross_entropy_with_logits(fc2, label)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.Session(device='gpu')
# Automatic variable shape inference API, infers the shape and initialize the weights.
known_shape = {x: [100, 28 * 28], label: [100]}
init_step = []
for v, name, shape in tf.infer_variable_shapes(
cross_entropy, feed_dict=known_shape):
init_step.append(tf.assign(v, tf.normal(shape)))
print("shape[%s]=%s" % (name, str(shape)))
sess.run(init_step)
# get the mnist dataset
mnist = get_mnist(flatten=True, onehot=False)
print_period = 1000
for epoch in range(10):
sum_loss = 0.0
num_batch = 600
for i in range(num_batch):
batch_xs, batch_ys = mnist.train.next_batch(100)
loss, _ = sess.run([cross_entropy, train_step], feed_dict={x: batch_xs, label:batch_ys})
sum_loss += loss
print("epoch[%d] cross_entropy=%g" % (epoch, sum_loss /num_batch))
correct_prediction = tf.equal(tf.argmax(fc2, 1), label)
accuracy = tf.reduce_mean(correct_prediction)
print(sess.run(accuracy, feed_dict={x: mnist.test.images, label: mnist.test.labels}))
|
apache-2.0
|
Python
|
b483522f8af1d58a8ca8e25eb0ba44a98acf1df7
|
Fix Client call
|
TwilioDevEd/airtng-flask,TwilioDevEd/airtng-flask,TwilioDevEd/airtng-flask
|
airtng_flask/models/reservation.py
|
airtng_flask/models/reservation.py
|
from airtng_flask.models import app_db, auth_token, account_sid, phone_number
from flask import render_template
from twilio.rest import Client
db = app_db()
class Reservation(db.Model):
__tablename__ = "reservations"
id = db.Column(db.Integer, primary_key=True)
message = db.Column(db.String, nullable=False)
status = db.Column(db.Enum('pending', 'confirmed', 'rejected', name='reservation_status_enum'), default='pending')
guest_id = db.Column(db.Integer, db.ForeignKey('users.id'))
vacation_property_id = db.Column(db.Integer, db.ForeignKey('vacation_properties.id'))
guest = db.relationship("User", back_populates="reservations")
vacation_property = db.relationship("VacationProperty", back_populates="reservations")
def __init__(self, message, vacation_property, guest):
self.message = message
self.guest = guest
self.vacation_property = vacation_property
self.status = 'pending'
def confirm(self):
self.status = 'confirmed'
def reject(self):
self.status = 'rejected'
def __repr__(self):
return '<VacationProperty %r %r>' % self.id, self.name
def notify_host(self):
self._send_message(self.vacation_property.host.phone_number,
render_template('messages/sms_host.txt',
name=self.guest.name,
description=self.vacation_property.description,
message=self.message))
def notify_guest(self):
self._send_message(self.guest.phone_number,
render_template('messages/sms_guest.txt',
description=self.vacation_property.description,
status=self.status))
def _get_twilio_client(self):
return Client(account_sid(), auth_token())
def _send_message(self, to, message):
self._get_twilio_client().messages.create(
to=to,
from_=phone_number(),
body=message)
|
from airtng_flask.models import app_db, auth_token, account_sid, phone_number
from flask import render_template
from twilio.rest import Client
db = app_db()
class Reservation(db.Model):
__tablename__ = "reservations"
id = db.Column(db.Integer, primary_key=True)
message = db.Column(db.String, nullable=False)
status = db.Column(db.Enum('pending', 'confirmed', 'rejected', name='reservation_status_enum'), default='pending')
guest_id = db.Column(db.Integer, db.ForeignKey('users.id'))
vacation_property_id = db.Column(db.Integer, db.ForeignKey('vacation_properties.id'))
guest = db.relationship("User", back_populates="reservations")
vacation_property = db.relationship("VacationProperty", back_populates="reservations")
def __init__(self, message, vacation_property, guest):
self.message = message
self.guest = guest
self.vacation_property = vacation_property
self.status = 'pending'
def confirm(self):
self.status = 'confirmed'
def reject(self):
self.status = 'rejected'
def __repr__(self):
return '<VacationProperty %r %r>' % self.id, self.name
def notify_host(self):
self._send_message(self.vacation_property.host.phone_number,
render_template('messages/sms_host.txt',
name=self.guest.name,
description=self.vacation_property.description,
message=self.message))
def notify_guest(self):
self._send_message(self.guest.phone_number,
render_template('messages/sms_guest.txt',
description=self.vacation_property.description,
status=self.status))
def _get_twilio_client(self):
return TwilioRestClient(account_sid(), auth_token())
def _send_message(self, to, message):
self._get_twilio_client().messages.create(
to=to,
from_=phone_number(),
body=message)
|
mit
|
Python
|
95249bb773c57daa15e1b85765e5e75254f8ba6e
|
Update __init__.py
|
weijia/djangoautoconf,weijia/djangoautoconf
|
djangoautoconf/__init__.py
|
djangoautoconf/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Richard Wang'
__email__ = '[email protected]'
__version__ = '2.0.2'
# from .django_autoconf import DjangoAutoConf
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Richard Wang'
__email__ = '[email protected]'
__version__ = '2.0.1'
# from .django_autoconf import DjangoAutoConf
|
bsd-3-clause
|
Python
|
05190c5dc6fc680bea245184b74f46f781ce998c
|
Prepare for inital release
|
frague59/wagtailpolls,takeflight/wagtailpolls,frague59/wagtailpolls,frague59/wagtailpolls,takeflight/wagtailpolls,takeflight/wagtailpolls
|
wagtailpolls/__init__.py
|
wagtailpolls/__init__.py
|
__version__ = '0.1.0'
|
__version__ = '0.1'
|
bsd-3-clause
|
Python
|
7b80fdfe3487d3f5f32c69b03707963d3f8e1e3a
|
Update cpuinfo from b40bae2 to 9fa6219
|
tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow
|
third_party/cpuinfo/workspace.bzl
|
third_party/cpuinfo/workspace.bzl
|
"""Loads the cpuinfo library, used by XNNPACK."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
tf_http_archive(
name = "cpuinfo",
strip_prefix = "cpuinfo-9fa621933fc6080b96fa0f037cdc7cd2c69ab272",
sha256 = "810708948128be2da882a5a3ca61eb6db40186bac9180d205a7ece43597b5fc3",
urls = tf_mirror_urls("https://github.com/pytorch/cpuinfo/archive/9fa621933fc6080b96fa0f037cdc7cd2c69ab272.tar.gz"),
build_file = "//third_party/cpuinfo:cpuinfo.BUILD",
)
|
"""Loads the cpuinfo library, used by XNNPACK."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
tf_http_archive(
name = "cpuinfo",
strip_prefix = "cpuinfo-b40bae27785787b6dd70788986fd96434cf90ae2",
sha256 = "5794c7b37facc590018eddffec934c60aeb71165b59a375babe4f7be7f04723f",
urls = tf_mirror_urls("https://github.com/pytorch/cpuinfo/archive/b40bae27785787b6dd70788986fd96434cf90ae2.tar.gz"),
build_file = "//third_party/cpuinfo:cpuinfo.BUILD",
)
|
apache-2.0
|
Python
|
adafee9ea64501632331d2681f93ada9b24d05da
|
Fix publish with dict
|
amm0nite/unicornclient,amm0nite/unicornclient
|
unicornclient/mission.py
|
unicornclient/mission.py
|
import json
from . import message
class Mission():
def __init__(self, manager):
self.manager = manager
def send(self, msg: message.Message):
self.manager.sender.send(msg)
def publish(self, topic, data):
self.manager.mqtt_sender.publish(topic, self.serialize(data))
def post(self, name, data):
msg = message.Message({'type': 'mission', 'name': name})
msg.set_body(self.serialize(data))
self.send(msg)
def serialize(self, data):
if isinstance(data, dict):
data = json.dumps(data).encode()
return data
def forward(self, name, task):
self.manager.forward(name, task)
|
import json
from . import message
class Mission():
def __init__(self, manager):
self.manager = manager
def send(self, msg):
self.manager.sender.send(msg)
def publish(self, topic, msg):
self.manager.mqtt_sender.publish(topic, msg)
def post(self, name, data):
msg = message.Message({'type': 'mission', 'name': name})
if isinstance(data, dict):
msg.set_body(json.dumps(data).encode())
else:
msg.set_body(data)
self.send(msg)
def forward(self, name, task):
self.manager.forward(name, task)
|
mit
|
Python
|
83b30e9e911ae71512c48a4e54c924d79fb7c9d7
|
adjust inherit relation
|
note35/sinon,note35/sinon
|
lib/sinon/SinonStub.py
|
lib/sinon/SinonStub.py
|
import sys
sys.path.insert(0, '../')
from lib.sinon.util import ErrorHandler, Wrapper, CollectionHandler
from lib.sinon.SinonSpy import SinonSpy
class SinonStub(SinonSpy):
def __init__(self, obj=None, prop=None, func=None):
super(SinonStub, self).__init__(obj, prop)
self._prepare(func)
def _prepare(self, func):
self.stubfunc = func if func else Wrapper.emptyFunction
super(SinonStub, self).addWrapStub(self.stubfunc)
self.condition = {"args":[], "kwargs":[], "action": [], "oncall":[]}
self.cond_args = self.cond_kwargs = self.oncall = None
def _appendCondition(self, func):
self.condition["args"].append(self.cond_args)
self.condition["kwargs"].append(self.cond_kwargs)
self.condition["oncall"].append(self.oncall)
self.condition["action"].append(func)
self.cond_args = self.cond_kwargs = self.oncall = None
def withArgs(self, *args, **kwargs):
if args:
self.cond_args = args
if kwargs:
self.cond_kwargs = kwargs
return self
def onCall(self, n):
self.oncall = n
return self
def onFirstCall(self):
self.oncall = 1
return self
def onSecondCall(self):
self.oncall = 2
return self
def onThirdCall(self):
self.oncall = 3
return self
def returns(self, obj):
def returnFunction(*args, **kwargs):
return obj
if self.cond_args or self.cond_kwargs or self.oncall:
self._appendCondition(returnFunction)
super(SinonStub, self).addWrapStub(self.stubfunc, self.condition)
else:
super(SinonStub, self).addWrapStub(returnFunction)
return self
def throws(self, exceptions=Exception):
def exceptionFunction(*args, **kwargs):
raise exceptions
if self.cond_args or self.cond_kwargs or self.oncall:
self._appendCondition(exceptionFunction)
super(SinonStub, self).addWrapStub(self.stubfunc, self.condition)
else:
super(SinonStub, self).addWrapStub(exceptionFunction)
return self
|
import sys
sys.path.insert(0, '../')
from lib.sinon.util import ErrorHandler, Wrapper, CollectionHandler
from lib.sinon.SinonBase import SinonBase
class SinonStub(SinonBase):
def __init__(self, obj=None, prop=None, func=None):
super(SinonStub, self).__init__(obj, prop)
self.stubfunc = func if func else Wrapper.emptyFunction
super(SinonStub, self).addWrapStub(self.stubfunc)
self.condition = {"args":[], "kwargs":[], "action": [], "oncall":[]}
self.args = self.kwargs = self.oncall = None
def _appendCondition(self, func):
self.condition["args"].append(self.args)
self.condition["kwargs"].append(self.kwargs)
self.condition["oncall"].append(self.oncall)
self.condition["action"].append(func)
self.args = self.kwargs = self.oncall = None
def withArgs(self, *args, **kwargs):
if args:
self.args = args
if kwargs:
self.kwargs = kwargs
return self
def onCall(self, n):
self.oncall = n
return self
def onFirstCall(self):
self.oncall = 1
return self
def onSecondCall(self):
self.oncall = 2
return self
def onThirdCall(self):
self.oncall = 3
return self
def returns(self, obj):
def returnFunction(*args, **kwargs):
return obj
if self.args or self.kwargs or self.oncall:
self._appendCondition(returnFunction)
super(SinonStub, self).addWrapStub(self.stubfunc, self.condition)
else:
super(SinonStub, self).addWrapStub(returnFunction)
return self
def throws(self, exceptions=Exception):
def exceptionFunction(*args, **kwargs):
raise exceptions
if self.args or self.kwargs or self.oncall:
self._appendCondition(exceptionFunction)
super(SinonStub, self).addWrapStub(self.stubfunc, self.condition)
else:
super(SinonStub, self).addWrapStub(exceptionFunction)
return self
|
bsd-2-clause
|
Python
|
803a6e495966b5281b376b4e28f0bcb04f44ee50
|
Change args to api
|
johnbachman/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,bgyori/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra
|
indra/sources/sofia/sofia_api.py
|
indra/sources/sofia/sofia_api.py
|
import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
import openpyxl
from .processor import SofiaProcessor
def process_table(fname, sheet_name):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
|
bsd-2-clause
|
Python
|
124cef21de78c84aa32808d4287733e616df4095
|
Update colorbars test.
|
openmv/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,openmv/openmv,iabdalkader/openmv
|
usr/examples/15-Tests/colorbar.py
|
usr/examples/15-Tests/colorbar.py
|
# Colorbar Test Example
#
# This example is the color bar test run by each OpenMV Cam before being allowed
# out of the factory. The OMV sensors can output a color bar image which you
# can threshold to check the the camera bus is connected correctly.
import sensor, time
sensor.reset()
# Set sensor settings
sensor.set_brightness(0)
sensor.set_saturation(3)
sensor.set_gainceiling(8)
sensor.set_contrast(2)
# Set sensor pixel format
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.RGB565)
# Enable colorbar test mode
sensor.set_colorbar(True)
# Skip a few frames to allow the sensor settle down
for i in range(0, 30):
image = sensor.snapshot()
# Color bars thresholds
t = [lambda r, g, b: r < 70 and g < 70 and b < 70, # Black
lambda r, g, b: r < 70 and g < 70 and b > 200, # Blue
lambda r, g, b: r > 200 and g < 70 and b < 70, # Red
lambda r, g, b: r > 200 and g < 70 and b > 200, # Purple
lambda r, g, b: r < 70 and g > 200 and b < 70, # Green
lambda r, g, b: r < 70 and g > 200 and b > 200, # Aqua
lambda r, g, b: r > 200 and g > 200 and b < 70, # Yellow
lambda r, g, b: r > 200 and g > 200 and b > 200] # White
# color bars are inverted for OV7725
if (sensor.get_id() == sensor.OV7725):
t = t[::-1]
# 320x240 image with 8 color bars each one is approx 40 pixels.
# we start from the center of the frame buffer, and average the
# values of 10 sample pixels from the center of each color bar.
for i in range(0, 8):
avg = (0, 0, 0)
idx = 40*i+20 # center of colorbars
for off in range(0, 10): # avg 10 pixels
rgb = image.get_pixel(idx+off, 120)
avg = tuple(map(sum, zip(avg, rgb)))
if not t[i](avg[0]/10, avg[1]/10, avg[2]/10):
raise Exception("COLOR BARS TEST FAILED. "
"BAR#(%d): RGB(%d,%d,%d)"%(i+1, avg[0]/10, avg[1]/10, avg[2]/10))
print("COLOR BARS TEST PASSED...")
|
# Colorbar Test Example
#
# This example is the color bar test run by each OpenMV Cam before being allowed
# out of the factory. The OMV sensors can output a color bar image which you
# can threshold to check the the camera bus is connected correctly.
import sensor, time
sensor.reset()
# Set sensor settings
sensor.set_brightness(0)
sensor.set_saturation(0)
sensor.set_gainceiling(8)
sensor.set_contrast(2)
# Set sensor pixel format
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.RGB565)
# Enable colorbar test mode
sensor.set_colorbar(True)
# Skip a few frames to allow the sensor settle down
for i in range(0, 100):
image = sensor.snapshot()
# Color bars thresholds
t = [lambda r, g, b: r < 50 and g < 50 and b < 50, # Black
lambda r, g, b: r < 50 and g < 50 and b > 200, # Blue
lambda r, g, b: r > 200 and g < 50 and b < 50, # Red
lambda r, g, b: r > 200 and g < 50 and b > 200, # Purple
lambda r, g, b: r < 50 and g > 200 and b < 50, # Green
lambda r, g, b: r < 50 and g > 200 and b > 200, # Aqua
lambda r, g, b: r > 200 and g > 200 and b < 50, # Yellow
lambda r, g, b: r > 200 and g > 200 and b > 200] # White
# 320x240 image with 8 color bars each one is approx 40 pixels.
# we start from the center of the frame buffer, and average the
# values of 10 sample pixels from the center of each color bar.
for i in range(0, 8):
avg = (0, 0, 0)
idx = 40*i+20 # center of colorbars
for off in range(0, 10): # avg 10 pixels
rgb = image.get_pixel(idx+off, 120)
avg = tuple(map(sum, zip(avg, rgb)))
if not t[i](avg[0]/10, avg[1]/10, avg[2]/10):
raise Exception("COLOR BARS TEST FAILED. "
"BAR#(%d): RGB(%d,%d,%d)"%(i+1, avg[0]/10, avg[1]/10, avg[2]/10))
print("COLOR BARS TEST PASSED...")
|
mit
|
Python
|
60fe51f3e193dd42c24001cf5a01e689df12730b
|
support the limit on the max number of points
|
maxim5/hyper-engine
|
hyperengine/model/hyper_tuner.py
|
hyperengine/model/hyper_tuner.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'maxim'
import numpy as np
import time
from ..base import *
from ..spec import ParsedSpec
from ..bayesian.sampler import DefaultSampler
from ..bayesian.strategy import BayesianStrategy, BayesianPortfolioStrategy
strategies = {
'bayesian': lambda sampler, params: BayesianStrategy(sampler, **params),
'portfolio': lambda sampler, params: BayesianPortfolioStrategy(sampler, **params),
}
class HyperTuner(object):
def __init__(self, hyper_params_spec, solver_generator, **strategy_params):
self._solver_generator = solver_generator
self._parsed = ParsedSpec(hyper_params_spec)
info('Spec size:', self._parsed.size())
sampler = DefaultSampler()
sampler.add_uniform(self._parsed.size())
strategy_gen = as_function(strategy_params.get('strategy', 'bayesian'), presets=strategies)
self._strategy = strategy_gen(sampler, strategy_params)
self._timeout = strategy_params.get('timeout', 0)
self._max_points = strategy_params.get('max_points', None)
def tune(self):
info('Start hyper tuner')
while True:
if self._max_points is not None and len(self._strategy.values) >= self._max_points:
info('Maximum points reached: max_points=%d. Aborting hyper tuner' % self._max_points)
break
point = self._strategy.next_proposal()
hyper_params = self._parsed.instantiate(point)
solver = self._solver_generator(hyper_params)
accuracy = solver.train()
previous_max = np.max(self._strategy.values) if len(self._strategy.values) > 0 else -np.inf
self._strategy.add_point(point, accuracy)
index = len(self._strategy.values)
marker = '!' if accuracy > previous_max else ' '
info('%s [%d] accuracy=%.4f, params: %s' % (marker, index, accuracy, smart_str(hyper_params)))
info('Current top-%d:' % min(len(self._strategy.values), 5))
for value in sorted(self._strategy.values, reverse=True)[:5]:
info(' accuracy=%.4f' % value)
if self._timeout:
time.sleep(self._timeout)
solver.terminate()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'maxim'
import numpy as np
import time
from ..base import *
from ..spec import ParsedSpec
from ..bayesian.sampler import DefaultSampler
from ..bayesian.strategy import BayesianStrategy, BayesianPortfolioStrategy
strategies = {
'bayesian': lambda sampler, params: BayesianStrategy(sampler, **params),
'portfolio': lambda sampler, params: BayesianPortfolioStrategy(sampler, **params),
}
class HyperTuner(object):
def __init__(self, hyper_params_spec, solver_generator, **strategy_params):
self.solver_generator = solver_generator
self.parsed = ParsedSpec(hyper_params_spec)
info('Spec size:', self.parsed.size())
sampler = DefaultSampler()
sampler.add_uniform(self.parsed.size())
strategy_gen = as_function(strategy_params.get('strategy', 'bayesian'), presets=strategies)
self.strategy = strategy_gen(sampler, strategy_params)
self.timeout = strategy_params.get('timeout', 0)
def tune(self):
info('Start hyper tuner')
while True:
point = self.strategy.next_proposal()
hyper_params = self.parsed.instantiate(point)
solver = self.solver_generator(hyper_params)
accuracy = solver.train()
previous_max = np.max(self.strategy.values) if len(self.strategy.values) > 0 else -np.inf
self.strategy.add_point(point, accuracy)
index = len(self.strategy.values)
marker = '!' if accuracy > previous_max else ' '
info('%s [%d] accuracy=%.4f, params: %s' % (marker, index, accuracy, smart_str(hyper_params)))
info('Current top-%d:' % min(len(self.strategy.values), 5))
for value in sorted(self.strategy.values, reverse=True)[:5]:
info(' accuracy=%.4f' % value)
if self.timeout:
time.sleep(self.timeout)
solver.terminate()
|
apache-2.0
|
Python
|
f0e29748ff899d7e65d1f4169e890d3e3c4bda0e
|
Update instead of overriding `DATABASES` setting in `test` settings.
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
icekit/project/settings/_test.py
|
icekit/project/settings/_test.py
|
from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES['default'].update({
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
|
from ._base import *
# DJANGO ######################################################################
DATABASE_NAME = 'test_%s' % DATABASES['default']['NAME']
DATABASES = {
'default': {
'NAME': DATABASE_NAME,
'TEST': {
'NAME': DATABASE_NAME,
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
}
}
INSTALLED_APPS += ('icekit.tests', )
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
|
mit
|
Python
|
b34315f4b4c77dfcc4bc83901ca8786af1a3f12a
|
Add more content
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
ideascube/conf/kb_gin_conakry.py
|
ideascube/conf/kb_gin_conakry.py
|
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CODE = 'fr'
IDEASCUBE_NAME = 'Conakry'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'bsfcampus',
},
{
'id': 'koombookedu',
},
{
'id': 'wikipedia',
'languages': 'fr',
},
{
'id': 'cest-pas-sorcier',
},
{
'id': 'ted',
'sessions': [
('tedxgeneva2014.fr', 'Geneva 2014'),
('tedxlausanne2012.fr', 'Lausanne 2012'),
('tedxlausanne2013.fr', 'Lausanne 2013'),
('tedxlausanne2014.fr', 'Lausanne 2014'),
]
},
{
'id': 'universcience',
'languages': ['fr']
},
{
'id': 'maps',
'maps': [
(_('World'), 'world.map'),
('Burundi', 'burundi.map'),
]
},
]
|
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'fr'
IDEASCUBE_NAME = 'CONAKRY'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'koombookedu',
},
{
'id': 'bsfcampus',
},
]
|
agpl-3.0
|
Python
|
e4abc4dbde81b21d1d66439a482249887bfd56a7
|
Fix TypeError on callback
|
hasegaw/IkaLog,deathmetalland/IkaLog,deathmetalland/IkaLog,hasegaw/IkaLog,hasegaw/IkaLog,deathmetalland/IkaLog
|
ikalog/scenes/game/kill_combo.py
|
ikalog/scenes/game/kill_combo.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA, Shingo MINAMIYAMA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import cv2
from ikalog.scenes.scene import Scene
from ikalog.utils import *
class GameKillCombo(Scene):
def reset(self):
super(GameKillCombo, self).reset()
self.resetParams()
self.max_kill_streak = 0
self.max_kill_combo = 0
def resetParams(self):
self.chain_kill_combos = 0
self.kill_streak = 0
self.last_kill_msec = 0
def match_no_cache(self, context):
if not self.is_another_scene_matched(context, 'GameTimerIcon'):
return False
frame = context['engine']['frame']
if frame is None:
return False
def on_game_killed(self, context, params):
self.kill_streak += 1
context['game']['kill_streak'] = self.kill_streak
context['game']['max_kill_streak'] = max(self.kill_streak, context['game'].get('max_kill_streak', 0))
if (self.kill_streak > 1 and (context['engine']['msec'] - self.last_kill_msec) <= 5000):
self.chain_kill_combos += 1
context['game']['kill_combo'] = self.chain_kill_combos
context['game']['max_kill_combo'] = max(self.chain_kill_combos, context['game'].get('max_kill_combo', 0))
self._call_plugins('on_game_chained_kill_combo')
else:
self.chain_kill_combos = 0;
self.last_kill_msec = context['engine']['msec']
def on_game_dead(self, context):
self.resetParams()
def _analyze(self, context):
pass
def _init_scene(self, debug=False):
pass
if __name__ == "__main__":
GameKillCombo.main_func()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA, Shingo MINAMIYAMA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import cv2
from ikalog.scenes.scene import Scene
from ikalog.utils import *
class GameKillCombo(Scene):
def reset(self):
super(GameKillCombo, self).reset()
self.resetParams()
self.max_kill_streak = 0
self.max_kill_combo = 0
def resetParams(self):
self.chain_kill_combos = 0
self.kill_streak = 0
self.last_kill_msec = 0
def match_no_cache(self, context):
if not self.is_another_scene_matched(context, 'GameTimerIcon'):
return False
frame = context['engine']['frame']
if frame is None:
return False
def on_game_killed(self, context):
self.kill_streak += 1
context['game']['kill_streak'] = self.kill_streak
context['game']['max_kill_streak'] = max(self.kill_streak, context['game'].get('max_kill_streak', 0))
if (self.kill_streak > 1 and (context['engine']['msec'] - self.last_kill_msec) <= 5000):
self.chain_kill_combos += 1
context['game']['kill_combo'] = self.chain_kill_combos
context['game']['max_kill_combo'] = max(self.chain_kill_combos, context['game'].get('max_kill_combo', 0))
self._call_plugins('on_game_chained_kill_combo')
else:
self.chain_kill_combos = 0;
self.last_kill_msec = context['engine']['msec']
def on_game_dead(self, context):
self.resetParams()
def _analyze(self, context):
pass
def _init_scene(self, debug=False):
pass
if __name__ == "__main__":
GameKillCombo.main_func()
|
apache-2.0
|
Python
|
bafa26c28399b43d15e23d1fca66bf98938f329d
|
fix treasury yield bugs
|
johntut/MongoDisco,dcrosta/mongo-disco,sajal/MongoDisco,10genNYUITP/MongoDisco,mongodb/mongo-disco
|
examples/treasury_yield/treasury_yield.py
|
examples/treasury_yield/treasury_yield.py
|
#!/usr/bin/env python
# encoding: utf-8
import datetime
#from app.MongoSplitter import calculate_splits
#from disco.core import Job, result_iterator
#from mongodb_io import mongodb_output_stream, mongodb_input_stream
from job import DiscoJob
"""
Description: calculate the average 10 year treasury bond yield for given data.
Note: run parse_yield_historical.py first to populate the mongodb with data.
example record:
{ "_id" : { "$date" : 633571200000 },
"dayOfWeek" : "MONDAY",
"bc3Year" : 8.390000000000001,
"bc5Year" : 8.390000000000001,
"bc10Year" : 8.5,
"bc20Year" : None,
"bc1Month" : None,
"bc2Year" : 8.300000000000001,
"bc3Month" : 8,
"bc30Year" : 8.539999999999999,
"bc1Year" : 8.08,
"bc7Year" : 8.449999999999999,
"bc6Month" : 8.09 }
"""
#this is the config file for the mongosplitter
config = {
"input_uri": "mongodb://localhost/test.yield_historical.in",
"slaveOk": True,
"useShards": True,
"createInputSplits": True,
"useChunks": True}
def map(record, params):
year = record.get('_id').year
yield year, record['bc10Year']
def reduce(iter, params):
from disco.util import kvgroup
for year, bid_prices in kvgroup(sorted(iter)):
avg = sum(bid_prices) / len(bid_prices)
yield year, avg
if __name__ == '__main__':
DiscoJob(config=config, map=map, reduce=reduce).run()
'''
job = Job().run(input=calculate_splits(config),
map=map,
reduce=reduce,
map_input_stream=mongodb_input_stream)
for year, avg in result_iterator(job.wait(show=True)):
print "Average 10 Year treasury for %s was %s" % (year, avg)
'''
|
#!/usr/bin/env python
# encoding: utf-8
import datetime
from app.MongoSplitter import calculate_splits
from disco.core import Job, result_iterator
from mongodb_io import mongodb_output_stream, mongodb_input_stream
"""
Description: calculate the average 10 year treasury bond yield for given data.
Note: run parse_yield_historical.py first to populate the mongodb with data.
example record:
{ "_id" : { "$date" : 633571200000 },
"dayOfWeek" : "MONDAY",
"bc3Year" : 8.390000000000001,
"bc5Year" : 8.390000000000001,
"bc10Year" : 8.5,
"bc20Year" : None,
"bc1Month" : None,
"bc2Year" : 8.300000000000001,
"bc3Month" : 8,
"bc30Year" : 8.539999999999999,
"bc1Year" : 8.08,
"bc7Year" : 8.449999999999999,
"bc6Month" : 8.09 }
"""
#this is the config file for the mongosplitter
config = {
"inputURI": "mongodb://localhost/yield_historical.in",
"slaveOk": True,
"useShards": True,
"createInputSplits": True,
"useChunks": True}
def map(record, params):
time = record['_id']['$date'] / 1000
year = datetime.datetime.fromtimestamp(time).date().year
yield year, record['bc10year']
def reduce(iter, params):
from disco.util import kvgroup
for year, bid_prices in kvgroup(sorted(iter)):
avg = sum(bid_prices) / len(bid_prices)
yield year, avg
if __name__ == '__main__':
job = Job().run(input=calculate_splits(config),
map=map,
reduce=reduce,
map_input_stream=mongodb_input_stream)
for year, avg in result_iterator(job.wait(show=True)):
print "Average 10 Year treasury for %s was %s" % (year, avg)
|
apache-2.0
|
Python
|
e637e5f53990709ed654b661465685ad9d05a182
|
Update cluster config map key format
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
api/spawner/templates/constants.py
|
api/spawner/templates/constants.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
apache-2.0
|
Python
|
8171af80ab1bff2ffac4b85642217a37fb485d74
|
Rewrite serializer
|
manhg/django-rest-framework-gis,arjenvrielink/django-rest-framework-gis,djangonauts/django-rest-framework-gis,barseghyanartur/django-rest-framework-gis,illing2005/django-rest-framework-gis,nmandery/django-rest-framework-gis,bopo/django-rest-framework-gis,pglotov/django-rest-framework-gis,sh4wn/django-rest-framework-gis,nmandery/django-rest-framework-gis
|
rest_framework_gis/serializers.py
|
rest_framework_gis/serializers.py
|
# rest_framework_gis/serializers.py
from django.contrib.gis.db import models
from rest_framework.serializers import ModelSerializer
from .fields import GeometryField
class GeoModelSerializer(ModelSerializer):
pass
GeoModelSerializer.field_mapping.update({
models.GeometryField: GeometryField,
models.PointField: GeometryField,
models.LineStringField: GeometryField,
models.PolygonField: GeometryField,
models.MultiPointField: GeometryField,
models.MultiLineStringField: GeometryField,
models.MultiPolygonField: GeometryField,
models.GeometryCollectionField: GeometryField
})
|
# rest_framework_gis/serializers.py
from django.contrib.gis.db import models
from rest_framework.serializers import ModelSerializer
from .fields import GeometryField
class GeoModelSerializer(ModelSerializer):
def get_field(self, model_field):
"""
Creates a default instance of a basic non-relational field.
"""
kwargs = {}
kwargs['blank'] = model_field.blank
if model_field.null or model_field.blank:
kwargs['required'] = False
if isinstance(model_field, models.AutoField) or not model_field.editable:
kwargs['read_only'] = True
if model_field.has_default():
kwargs['required'] = False
kwargs['default'] = model_field.get_default()
if issubclass(model_field.__class__, models.TextField):
kwargs['widget'] = widgets.Textarea
# TODO: TypedChoiceField?
if model_field.flatchoices: # This ModelField contains choices
kwargs['choices'] = model_field.flatchoices
return ChoiceField(**kwargs)
field_mapping = {
models.AutoField: IntegerField,
models.FloatField: FloatField,
models.IntegerField: IntegerField,
models.PositiveIntegerField: IntegerField,
models.SmallIntegerField: IntegerField,
models.PositiveSmallIntegerField: IntegerField,
models.DateTimeField: DateTimeField,
models.EmailField: EmailField,
models.CharField: CharField,
models.URLField: URLField,
models.SlugField: SlugField,
models.TextField: CharField,
models.CommaSeparatedIntegerField: CharField,
models.BooleanField: BooleanField,
models.FileField: FileField,
models.ImageField: ImageField,
models.GeometryField: GeometryField,
models.PointField: GeometryField,
models.LineStringField: GeometryField,
models.PolygonField: GeometryField,
models.MultiPointField: GeometryField,
models.MultiLineStringField: GeometryField,
models.MultiPolygonField: GeometryField,
models.GeometryCollectionField: GeometryField
}
try:
return field_mapping[model_field.__class__](**kwargs)
except KeyError:
return ModelField(model_field=model_field, **kwargs)
|
mit
|
Python
|
70d009834123cb5a10788763fed3193017cc8162
|
Add a default null logger per python recommendations.
|
pebble/libpebble2
|
libpebble2/__init__.py
|
libpebble2/__init__.py
|
__author__ = 'katharine'
import logging
from .exceptions import *
logging.getLogger('libpebble2').addHandler(logging.NullHandler())
|
__author__ = 'katharine'
from .exceptions import *
|
mit
|
Python
|
eb9d297d14741f311cb4bf27c384077ba98cc789
|
Add missing slot.
|
marrow/mongo,djdduty/mongo,djdduty/mongo
|
web/db/mongo/__init__.py
|
web/db/mongo/__init__.py
|
# encoding: utf-8
"""MongoDB database connection extension."""
import re
from pymongo import MongoClient
from pymongo.errors import ConfigurationError
from .model import Model
from .resource import MongoDBResource
from .collection import MongoDBCollection
__all__ = ['Model', 'MongoDBResource', 'MongoDBCollection', 'MongoDBConnection']
log = __import__('logging').getLogger(__name__)
_safe_uri_replace = re.compile(r'(\w+)://(\w+):(?P<password>[^@]+)@')
class MongoDBConnection(object):
"""WebCore database extension connector for MongoDB databases.
This tiny class performs the work needed to populate the WebCore context with a MonogoDB database (or connection
if no default database is provided) on startup, using `pymongo`. In addition to performing initial configuration,
this extension adapts
"""
__slots__ = ('__name__', 'uri', 'config', 'client', 'db', 'alias')
provides = {'mongodb'}
def __init__(self, uri, alias=None, **config):
"""Prepare MongoDB client configuration.
The only required configuration option (passed positionally or by keyword) is `uri`, specifying the host to
connect to and optionally client credentials (username, password), default database, and additional options.
Extraneous keyword arguments will be stored and passed through to the `MongoClient` class instantiated on
startup.
"""
self.uri = uri
self.client = None
self.db = None
self.alias = alias
# Configure a few of our own defaults here, usually because we compare the value somewhere.
config.setdefault('event_listeners', []) # For logging purposes, we add some of our own handlers.
self.config = config
def start(self, context):
name = self.alias or self.__name__ # Either we were configured with an explicit name, or the DB ext infers.
log.info("Connecting context.db.{name} to MongoDB database.".format(name=name), extra=dict(
uri = _safe_uri_replace.sub(r'\1://\2@', self.uri),
config = self.config,
))
client = self.client = MongoClient(self.uri, **self.config)
try:
db = self.db = client.get_default_database()
except ConfigurationError:
db = self.db = None
if self.config.get('connect', True):
pass # Log extra details about the connection here.
context.db[name] = db if db is not None else client
def stop(self, context):
self.client.close()
del context.db[self.alias or self.__name__]
|
# encoding: utf-8
"""MongoDB database connection extension."""
import re
from pymongo import MongoClient
from pymongo.errors import ConfigurationError
from .model import Model
from .resource import MongoDBResource
from .collection import MongoDBCollection
__all__ = ['Model', 'MongoDBResource', 'MongoDBCollection', 'MongoDBConnection']
log = __import__('logging').getLogger(__name__)
_safe_uri_replace = re.compile(r'(\w+)://(\w+):(?P<password>[^@]+)@')
class MongoDBConnection(object):
"""WebCore database extension connector for MongoDB databases.
This tiny class performs the work needed to populate the WebCore context with a MonogoDB database (or connection
if no default database is provided) on startup, using `pymongo`. In addition to performing initial configuration,
this extension adapts
"""
__slots__ = ('__name__', 'uri', 'config', 'client', 'db')
provides = {'mongodb'}
def __init__(self, uri, alias=None, **config):
"""Prepare MongoDB client configuration.
The only required configuration option (passed positionally or by keyword) is `uri`, specifying the host to
connect to and optionally client credentials (username, password), default database, and additional options.
Extraneous keyword arguments will be stored and passed through to the `MongoClient` class instantiated on
startup.
"""
self.uri = uri
self.client = None
self.db = None
self.alias = alias
# Configure a few of our own defaults here, usually because we compare the value somewhere.
config.setdefault('event_listeners', []) # For logging purposes, we add some of our own handlers.
self.config = config
def start(self, context):
name = self.alias or self.__name__ # Either we were configured with an explicit name, or the DB ext infers.
log.info("Connecting context.db.{name} to MongoDB database.".format(name=name), extra=dict(
uri = _safe_uri_replace.sub(r'\1://\2@', self.uri),
config = self.config,
))
client = self.client = MongoClient(self.uri, **self.config)
try:
db = self.db = client.get_default_database()
except ConfigurationError:
db = self.db = None
if self.config.get('connect', True):
pass # Log extra details about the connection here.
context.db[name] = db if db is not None else client
def stop(self, context):
self.client.close()
del context.db[self.alias or self.__name__]
|
mit
|
Python
|
bf24abb4ffba4f63f641cc61e22357253cdca956
|
Fix migration script
|
DanielNeugebauer/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,liqd/adhocracy,alkadis/vcv,liqd/adhocracy,phihag/adhocracy,liqd/adhocracy,SysTheron/adhocracy,alkadis/vcv,SysTheron/adhocracy,liqd/adhocracy,phihag/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy
|
src/adhocracy/migration/versions/053_add_newsservice.py
|
src/adhocracy/migration/versions/053_add_newsservice.py
|
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import Boolean, DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
message_table = Table(
'message', metadata,
Column('id', Integer, primary_key=True),
Column('subject', Unicode(140), nullable=False),
Column('body', UnicodeText(), nullable=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('access_time', DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow),
Column('delete_time', DateTime, nullable=True),
Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
Column('sender_email', Unicode(255), nullable=False),
)
message_recipient_table = Table(
'message_recipient', metadata,
Column('id', Integer, primary_key=True),
Column('message_id', Integer, ForeignKey('message.id'), nullable=False),
Column('recipient_id', Integer, ForeignKey('user.id'), nullable=False),
Column('email_sent', Boolean, default=False),
)
user_table = Table(
'user', metadata,
Column('id', Integer, primary_key=True),
Column('user_name', Unicode(255), nullable=False, unique=True, index=True),
Column('display_name', Unicode(255), nullable=True, index=True),
Column('bio', UnicodeText(), nullable=True),
Column('email', Unicode(255), nullable=True, unique=True),
Column('email_priority', Integer, default=3),
Column('activation_code', Unicode(255), nullable=True, unique=False),
Column('reset_code', Unicode(255), nullable=True, unique=False),
Column('password', Unicode(80), nullable=False),
Column('locale', Unicode(7), nullable=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('access_time', DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow),
Column('delete_time', DateTime),
Column('banned', Boolean, default=False),
Column('no_help', Boolean, default=False, nullable=True),
Column('page_size', Integer, default=10, nullable=True),
Column('proposal_sort_order', Unicode(50), default=None, nullable=True),
Column('gender', Unicode(1), default=None),
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
message_table.create()
message_recipient_table.create()
email_messages = Column('email_messages', Boolean, default=True)
email_messages.create(user_table)
def downgrade(migrate_engine):
raise NotImplementedError()
|
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import Boolean, DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
message_table = Table(
'message', metadata,
Column('id', Integer, primary_key=True),
Column('subject', Unicode(140), nullable=False),
Column('body', UnicodeText(), nullable=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('access_time', DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow),
Column('delete_time', DateTime, nullable=True),
Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
Column('sender_email', Unicode(255), nullable=False),
)
message_recipient_table = Table(
'message_recipient', metadata,
Column('id', Integer, primary_key=True),
Column('message_id', Integer, ForeignKey('message.id'), nullable=False),
Column('recipient_id', Integer, ForeignKey('user.id'), nullable=False),
Column('email_sent', Boolean, default=False),
)
user_table = Table(
'user', metadata,
Column('id', Integer, primary_key=True),
Column('user_name', Unicode(255), nullable=False, unique=True, index=True),
Column('display_name', Unicode(255), nullable=True, index=True),
Column('bio', UnicodeText(), nullable=True),
Column('email', Unicode(255), nullable=True, unique=True),
Column('email_priority', Integer, default=3),
Column('activation_code', Unicode(255), nullable=True, unique=False),
Column('reset_code', Unicode(255), nullable=True, unique=False),
Column('password', Unicode(80), nullable=False),
Column('locale', Unicode(7), nullable=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('access_time', DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow),
Column('delete_time', DateTime),
Column('banned', Boolean, default=False),
Column('no_help', Boolean, default=False, nullable=True),
Column('page_size', Integer, default=10, nullable=True),
Column('proposal_sort_order', Unicode(50), default=None, nullable=True),
Column('gender', Unicode(1), default=None),
)
def upgrade(migrate_engine):
meta.bind = migrate_engine
message_table.create()
message_recipient_table.create()
email_messages = Column('email_messages', Boolean, default=True)
email_messages.create(user_table)
def downgrade(migrate_engine):
raise NotImplementedError()
|
agpl-3.0
|
Python
|
307d866bb6538a78effcc44e005a4dcb90a2a4b5
|
Increment to 0.5.4
|
ashleysommer/sanic,lixxu/sanic,yunstanford/sanic,Tim-Erwin/sanic,lixxu/sanic,jrocketfingers/sanic,yunstanford/sanic,lixxu/sanic,ashleysommer/sanic,r0fls/sanic,yunstanford/sanic,Tim-Erwin/sanic,channelcat/sanic,ashleysommer/sanic,lixxu/sanic,channelcat/sanic,r0fls/sanic,yunstanford/sanic,channelcat/sanic,jrocketfingers/sanic,channelcat/sanic
|
sanic/__init__.py
|
sanic/__init__.py
|
from sanic.app import Sanic
from sanic.blueprints import Blueprint
__version__ = '0.5.4'
__all__ = ['Sanic', 'Blueprint']
|
from sanic.app import Sanic
from sanic.blueprints import Blueprint
__version__ = '0.5.3'
__all__ = ['Sanic', 'Blueprint']
|
mit
|
Python
|
5fd62098bd2f2722876a0873d5856d70046d3889
|
Increment to 0.5.2
|
r0fls/sanic,ashleysommer/sanic,yunstanford/sanic,channelcat/sanic,lixxu/sanic,yunstanford/sanic,lixxu/sanic,lixxu/sanic,jrocketfingers/sanic,yunstanford/sanic,lixxu/sanic,yunstanford/sanic,ashleysommer/sanic,channelcat/sanic,Tim-Erwin/sanic,jrocketfingers/sanic,channelcat/sanic,channelcat/sanic,ashleysommer/sanic,r0fls/sanic,Tim-Erwin/sanic
|
sanic/__init__.py
|
sanic/__init__.py
|
from sanic.app import Sanic
from sanic.blueprints import Blueprint
__version__ = '0.5.2'
__all__ = ['Sanic', 'Blueprint']
|
from sanic.app import Sanic
from sanic.blueprints import Blueprint
__version__ = '0.5.1'
__all__ = ['Sanic', 'Blueprint']
|
mit
|
Python
|
035938d8c0f3cc2cda353286c0089ee02ffe3b87
|
Use dj six
|
kelvinwong-ca/django-likert-field,kelvinwong-ca/django-likert-field
|
likert_field/models.py
|
likert_field/models.py
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.six import string_types
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
@python_2_unicode_compatible
class LikertField(models.IntegerField):
"""A Likert field is simply stored as an IntegerField"""
description = _('Likert item field')
def __init__(self, *args, **kwargs):
"""LikertField stores items with no answer as NULL"""
if 'null' not in kwargs and not kwargs.get('null'):
kwargs['null'] = True
super(LikertField, self).__init__(*args, **kwargs)
def __str__(self):
return "%s" % force_text(self.description)
def get_prep_value(self, value):
"""
Perform preliminary non-db specific value checks and conversions.
The field expects a number as a string (ie. '2'). Unscored fields are
empty strings and are stored as NULL
"""
if value is None:
return None
if isinstance(value, string_types) and len(value) == 0:
return None
value = int(value)
if value < 0:
value = 0
return value
def formfield(self, **kwargs):
defaults = {
'min_value': 0,
'form_class': forms.LikertField
}
defaults.update(kwargs)
return super(LikertField, self).formfield(**defaults)
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from six import string_types
from django.db import models
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
@python_2_unicode_compatible
class LikertField(models.IntegerField):
"""A Likert field is simply stored as an IntegerField"""
description = _('Likert item field')
def __init__(self, *args, **kwargs):
if 'null' not in kwargs and not kwargs.get('null'):
kwargs['null'] = True
super(LikertField, self).__init__(*args, **kwargs)
def __str__(self):
return "%s" % force_text(self.description)
def get_prep_value(self, value):
"""
Perform preliminary non-db specific value checks and conversions.
The field expects a number as a string (ie. '2'). Unscored fields are
empty strings and are stored as NULL
"""
if value is None:
return None
if isinstance(value, string_types) and len(value) == 0:
return None
value = int(value)
if value < 0:
value = 0
return value
def formfield(self, **kwargs):
defaults = {
'min_value': 0,
'form_class': forms.LikertField
}
defaults.update(kwargs)
return super(LikertField, self).formfield(**defaults)
|
bsd-3-clause
|
Python
|
60156236836944205f3993badcf179aaa6e7ae54
|
Add an (unexposed) ResourceHandler so inheriting objects serialise better
|
mikesname/ehri-collections,mikesname/ehri-collections,mikesname/ehri-collections
|
ehriportal/portal/api/handlers.py
|
ehriportal/portal/api/handlers.py
|
"""
Piston handlers for notable resources.
"""
from piston.handler import BaseHandler
from portal import models
class ResourceHandler(BaseHandler):
model = models.Resource
class RepositoryHandler(BaseHandler):
model = models.Repository
class CollectionHandler(BaseHandler):
model = models.Collection
class PlaceHandler(BaseHandler):
model = models.Place
class ContactHandler(BaseHandler):
model = models.Contact
class AuthorityHandler(BaseHandler):
model = models.Authority
|
"""
Piston handlers for notable resources.
"""
from piston.handler import BaseHandler
from portal import models
class RepositoryHandler(BaseHandler):
model = models.Repository
class CollectionHandler(BaseHandler):
model = models.Collection
class PlaceHandler(BaseHandler):
model = models.Place
class ContactHandler(BaseHandler):
model = models.Contact
class AuthorityHandler(BaseHandler):
model = models.Authority
|
mit
|
Python
|
488e5dd9bcdcba26de98fdbcaba1e23e8b4a8188
|
use csv writer for listing scraper
|
ClintonKing/band-scraper,ClintonKing/band-scraper,ClintonKing/band-scraper
|
scrape_listing.py
|
scrape_listing.py
|
#!/usr/bin/env python
import csv
import sys
import requests
from models.listing import Listing
def scrape_listing(url):
writer = csv.writer(sys.stdout)
response = requests.get(url)
listing = Listing(response.content)
# print('Title: ' + listing.title)
# print('Price: ' + listing.price)
# print('Image URLs: ' + listing.imgs)
# print('Location: ' + listing.location)
# print('Description: ' + listing.description)
# print('Category: ' + listing.category)
# print('Manufacturer: ' + listing.manufacturer)
# print('Caliber: ' + listing.caliber)
# print('Action: ' + listing.action)
# print('Firearm Type: ' + listing.firearm_type)
# print('Listing Date: ' + listing.listed_date)
# print('Post ID: ' + listing.post_id)
# print('Registration: ' + str(listing.registered))
# print('Party Type: ' + listing.party)
writer.writerow([
listing.post_id,
listing.title,
listing.listed_date,
listing.price,
listing.location,
listing.description,
listing.registered,
listing.category,
listing.manufacturer,
listing.caliber,
listing.action,
listing.firearm_type,
listing.party,
listing.imgs
])
print('{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13}'.format(listing.title, listing.listed_date, listing.post_id, listing.price, listing.location, listing.description, listing.registered, listing.category, listing.manufacturer, listing.caliber, listing.action, listing.firearm_type, listing.party, listing.imgs))
if __name__ == '__main__':
if len(sys.argv) == 1:
print('url required')
sys.exit()
url = str(sys.argv[1])
scrape_listing(url=url)
|
#!/usr/bin/env python
import sys
import requests
from models.listing import Listing
def scrape_listing(url):
response = requests.get(url)
listing = Listing(response.content)
# print('Title: ' + listing.title)
# print('Price: ' + listing.price)
# print('Image URLs: ' + listing.imgs)
# print('Location: ' + listing.location)
# print('Description: ' + listing.description)
# print('Category: ' + listing.category)
# print('Manufacturer: ' + listing.manufacturer)
# print('Caliber: ' + listing.caliber)
# print('Action: ' + listing.action)
# print('Firearm Type: ' + listing.firearm_type)
# print('Listing Date: ' + listing.listed_date)
# print('Post ID: ' + listing.post_id)
# print('Registration: ' + str(listing.registered))
# print('Party Type: ' + listing.party)
print('{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13}'.format(listing.title, listing.listed_date, listing.post_id, listing.price, listing.location, listing.description, listing.registered, listing.category, listing.manufacturer, listing.caliber, listing.action, listing.firearm_type, listing.party, listing.imgs))
if __name__ == '__main__':
if len(sys.argv) == 1:
print('url required')
sys.exit()
url = str(sys.argv[1])
scrape_listing(url=url)
|
mit
|
Python
|
ca356ae7b85c9d88f42c5adc6227d0125ff49399
|
Update settings.py
|
BFriedland/UserDataBase,defzzd/UserDataBase
|
udbproject/settings.py
|
udbproject/settings.py
|
"""
Django settings for udbproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'c5d$g#)x!2s91v2nr@h9d21opa*p1&65z)i(#4%@62fm#f!!l-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'udb',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'udbproject.urls'
WSGI_APPLICATION = 'udbproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Seattle'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
"""
Django settings for udbproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'c5d$g#)x!2s91v2nr@h9d21opa*p1&65z)i(#4%@62fm#f!!l-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'udb',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'udbproject.urls'
WSGI_APPLICATION = 'udbproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'PST'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
mit
|
Python
|
844e1917e971e834f7c95064dc7ea31fc7cc0947
|
Make build_plugins.py bail on error
|
marianocarrazana/anticontainer,downthemall/anticontainer,marianocarrazana/anticontainer,marianocarrazana/anticontainer,downthemall/anticontainer,downthemall/anticontainer
|
build/build_plugins.py
|
build/build_plugins.py
|
from __future__ import print_function
import glob, os.path, sys
from mergeex import mergeex
try:
import simplejson as json
except ImportError:
import json
plugins = []
filters = []
for fileName in sorted(glob.glob('../plugins/*.json')):
try:
with open(fileName, 'rb') as f:
content = f.read().decode('utf-8')
plugin = json.loads(content)
plugin['date'] = int(os.path.getmtime(fileName) * 1000)
plugins.append(plugin)
filters.append(plugin['match'])
except IOError as e:
print('Could not open file {0}: {1}'.format(fileName, e), file=sys.stderr)
sys.exit(1)
except ValueError as e:
print('Could not load JSON from file {0}: {1}'.format(fileName, *e.args), file=sys.stderr)
sys.exit(1)
print('Writing combined plugins.')
with open('../modules/plugins.json', 'w') as f:
json.dump(plugins, f)
|
from __future__ import print_function
import glob, os.path, sys
from mergeex import mergeex
try:
import simplejson as json
except ImportError:
import json
plugins = []
filters = []
for fileName in sorted(glob.glob('../plugins/*.json')):
try:
with open(fileName, 'rb') as f:
content = f.read().decode('utf-8')
plugin = json.loads(content)
plugin['date'] = int(os.path.getmtime(fileName) * 1000)
plugins.append(plugin)
filters.append(plugin['match'])
except IOError as e:
print('Could not open file {0}: {1}'.format(fileName, e), file=sys.stderr)
except ValueError as e:
print('Could not load JSON from file {0}: {1}'.format(fileName, *e.args), file=sys.stderr)
print('Writing combined plugins.')
with open('../modules/plugins.json', 'w') as f:
json.dump(plugins, f)
|
mpl-2.0
|
Python
|
6aa7acba495648b710635b465d5b7cd955d9f476
|
remove tmp line
|
viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker
|
api/__database.py
|
api/__database.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3
import os
from core.config import _core_config
from core.config_builder import _core_default_config
from core.config_builder import _builder
from core.alert import warn
from core.alert import messages
def create_connection(language):
try:
return sqlite3.connect(os.path.join(os.path.dirname(os.path.dirname(__file__)),
_builder(_core_config(), _core_default_config())["api_db_name"]))
except:
warn(messages(language, 168))
return False
def submit_report_to_db(date, scan_id, report_filename, events_num, verbose, api_flag, report_type, graph_flag,
category, profile, scan_method, language, scan_cmd):
conn = create_connection(language)
if not conn:
return False
try:
c = conn.cursor()
c.execute("""
INSERT INTO reports (
date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd
)
VALUES (
'{0}', '{1}', '{2}', '{3}', '{4}',
'{5}', '{6}', '{7}', '{8}', '{9}',
'{10}', '{11}', '{12}'
);
""".format(date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd))
conn.commit()
conn.close()
except:
warn(messages(language, 168))
return False
return True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3
import os
from core.config import _core_config
from core.config_builder import _core_default_config
from core.config_builder import _builder
from core.alert import warn
from core.alert import messages
def create_connection(language):
try:
return sqlite3.connect(os.path.join(os.path.dirname(os.path.dirname(__file__)),
_builder(_core_config(), _core_default_config())["api_db_name"]))
except:
warn(messages(language, 168))
return False
def submit_report_to_db(date, scan_id, report_filename, events_num, verbose, api_flag, report_type, graph_flag,
category, profile, scan_method, language, scan_cmd):
conn = create_connection(language)
if not conn:
return False
try:
c = conn.cursor()
c.execute("""
INSERT INTO reports (
date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd
)
VALUES (
'{0}', '{1}', '{2}', '{3}', '{4}',
'{5}', '{6}', '{7}', '{8}', '{9}',
'{10}', '{11}', '{12}'
);
""".format(date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd))
conn.commit()
conn.close()
except:
warn(messages(language, 168))
print 2
return False
return True
|
apache-2.0
|
Python
|
591b0550e0724f3e515974fee02d8d40e070e52a
|
Bump version
|
markstory/lint-review,markstory/lint-review,markstory/lint-review
|
lintreview/__init__.py
|
lintreview/__init__.py
|
__version__ = '2.25.1'
|
__version__ = '2.25.0'
|
mit
|
Python
|
c0b3a1b40149e939e91c5483383f1a1c715a9b9c
|
Update ipc_lista1.7.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.7.py
|
lista1/ipc_lista1.7.py
|
#ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura em
|
#ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura
|
apache-2.0
|
Python
|
4fb6112552ab7969bddca7193dd51910be51d8b2
|
Update ipc_lista1.7.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.7.py
|
lista1/ipc_lista1.7.py
|
#ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura do quadrado em
|
#ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura do em
|
apache-2.0
|
Python
|
360ef0dec991d4486ec51f23ffb065d0225347fa
|
Update ipc_lista1.8.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.8.py
|
lista1/ipc_lista1.8.py
|
#ipc_lista1.8
#Professor: Jucimar
|
#ipc_lista1.8
#Professor:
|
apache-2.0
|
Python
|
93a91ac118ab4e7280562bd0cfac0ea964ae0a7e
|
remove auth_check import
|
wathsalav/xos,xmaruto/mcord,wathsalav/xos,open-cloud/xos,zdw/xos,cboling/xos,cboling/xos,cboling/xos,open-cloud/xos,opencord/xos,jermowery/xos,wathsalav/xos,zdw/xos,opencord/xos,zdw/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,opencord/xos,zdw/xos,jermowery/xos,cboling/xos,xmaruto/mcord,wathsalav/xos,jermowery/xos,jermowery/xos,open-cloud/xos
|
plstackapi/core/api/sites.py
|
plstackapi/core/api/sites.py
|
from types import StringTypes
from django.contrib.auth import authenticate
from plstackapi.openstack.manager import OpenStackManager
from plstackapi.core.models import Site
def _get_sites(filter):
if isinstance(filter, StringTypes) and filter.isdigit():
filter = int(filter)
if isinstance(filter, int):
sites = Site.objects.filter(id=filter)
elif isinstance(filter, StringTypes):
sites = Site.objects.filter(login_base=filter)
elif isinstance(filter, dict):
sites = Site.objects.filter(**filter)
else:
sites = []
return sites
def add_site(auth, fields):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
site = Site(**fields)
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.save()
return site
def update_site(auth, id, **fields):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
sites = _get_sites(id)
if not sites:
return
site = Site[0]
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.update(**fields)
return site
def delete_site(auth, filter={}):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
sites = _get_sites(id)
for site in sites:
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.delete()
return 1
def get_sites(auth, filter={}):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
sites = _get_sites(filter)
return sites
|
from types import StringTypes
from django.contrib.auth import authenticate
from plstackapi.openstack.manager import OpenStackManager
from plstackapi.core.api.auth import auth_check
from plstackapi.core.models import Site
def _get_sites(filter):
if isinstance(filter, StringTypes) and filter.isdigit():
filter = int(filter)
if isinstance(filter, int):
sites = Site.objects.filter(id=filter)
elif isinstance(filter, StringTypes):
sites = Site.objects.filter(login_base=filter)
elif isinstance(filter, dict):
sites = Site.objects.filter(**filter)
else:
sites = []
return sites
def add_site(auth, fields):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
site = Site(**fields)
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.save()
return site
def update_site(auth, id, **fields):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
sites = _get_sites(id)
if not sites:
return
site = Site[0]
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.update(**fields)
return site
def delete_site(auth, filter={}):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
auth['tenant'] = user.site.login_base
sites = _get_sites(id)
for site in sites:
site.os_manager = OpenStackManager(auth=auth, caller = user)
site.delete()
return 1
def get_sites(auth, filter={}):
user = authenticate(username=auth.get('username'),
password=auth.get('password'))
sites = _get_sites(filter)
return sites
|
apache-2.0
|
Python
|
273aeda221aa12aac7fe1eea51e0aed859cd9098
|
move fixme to right pos
|
obestwalter/mau-mau
|
sim.py
|
sim.py
|
import logging
from cardroom import Game, Table, Player, Stock, Waste, Card
log = logging.getLogger(__name__)
def play_game(players=3, cardsPerPlayer=5):
game = start_new_game(players, cardsPerPlayer)
while not game.over:
game.next_turn()
play_turn(game.player, game.table)
return game
def start_new_game(players, cardsPerPlayer):
players = invite_players(players)
deck = fetch_fresh_deck_of_cards()
ensure_sure_we_are_ok_to_play(players, cardsPerPlayer, deck)
table = set_the_table(deck)
for player in players:
deal_cards(player, table.stock, cardsPerPlayer)
return Game(players, table)
def invite_players(players):
"""Invite players to the game.
:type players: int or list of str
"""
try:
players = [Player(name) for name in players]
except TypeError:
players = [Player("Player %s" % (n)) for n in range(1, players + 1)]
log.debug("invited players are: %s", players)
return players
def fetch_fresh_deck_of_cards():
"""Magic a fresh deck of cards out of nothing from a definition"""
class Def:
values = [7, 8, 9, 10, 'Jack', 'Queen', 'King', 'Ace']
suits = ['diamonds', 'hearts', 'spades', 'clubs']
deck = Stock([Card(v, s) for v in Def.values for s in Def.suits])
log.debug(str(deck))
return deck
def ensure_sure_we_are_ok_to_play(players, cardsPerPlayer, deck):
assert len(players) > 1
assert len(players) * cardsPerPlayer <= len(deck)
def set_the_table(deck):
deck.shuffle()
stock = deck
upcard = stock.fetch_card()
waste = Waste()
return Table(stock, waste, upcard)
def deal_cards(player, stock, cardsPerPlayer):
deal = stock.fetch_cards(cardsPerPlayer)
player.hand = deal
log.debug(str(player))
def play_turn(player, table):
log.debug("upcard: %s; hand: %s", table.upcard, player.hand)
if not player.play_card(table.upcard, table):
# FIXME this could be more symmetric to what happens in play_card
# - draw_card returns boolean
# - if False (stock empty)
# - replenish stock
# - draw again
ensure_stock_is_replenished(table)
player.draw_card(table.stock)
def ensure_stock_is_replenished(table):
if table.stock.isEmpty:
table.stock = Stock(table.waste.cards)
table.waste = Waste()
table.stock.shuffle()
|
import logging
from cardroom import Game, Table, Player, Stock, Waste, Card
log = logging.getLogger(__name__)
def play_game(players=3, cardsPerPlayer=5):
game = start_new_game(players, cardsPerPlayer)
while not game.over:
game.next_turn()
play_turn(game.player, game.table)
return game
def start_new_game(players, cardsPerPlayer):
players = invite_players(players)
deck = fetch_fresh_deck_of_cards()
make_sure_we_are_ok_to_play(players, cardsPerPlayer, deck)
table = set_the_table(deck)
for player in players:
deal_cards(player, table.stock, cardsPerPlayer)
return Game(players, table)
def invite_players(players):
"""Invite players to the game.
:type players: int or list of str
"""
try:
players = [Player(name) for name in players]
except TypeError:
players = [Player("Player %s" % (n)) for n in range(1, players + 1)]
log.debug("invited players are: %s", players)
return players
def fetch_fresh_deck_of_cards():
"""Magic a fresh deck of cards out of nothing from a definition"""
class Def:
values = [7, 8, 9, 10, 'Jack', 'Queen', 'King', 'Ace']
suits = ['diamonds', 'hearts', 'spades', 'clubs']
deck = Stock([Card(v, s) for v in Def.values for s in Def.suits])
log.debug(str(deck))
return deck
def make_sure_we_are_ok_to_play(players, cardsPerPlayer, deck):
assert len(players) > 1
assert len(players) * cardsPerPlayer <= len(deck)
def set_the_table(deck):
deck.shuffle()
stock = deck
upcard = stock.fetch_card()
waste = Waste()
return Table(stock, waste, upcard)
def deal_cards(player, stock, cardsPerPlayer):
deal = stock.fetch_cards(cardsPerPlayer)
player.hand = deal
log.debug(str(player))
def play_turn(player, table):
log.debug("upcard: %s; hand: %s", table.upcard, player.hand)
if not player.play_card(table.upcard, table):
# FIXME this could be more symmetric to what happens in play_card
# - draw_card returns boolean
# - if False (stock empty)
# - replenish stock
# - draw again
ensure_stock_is_replenished(table)
player.draw_card(table.stock)
def ensure_stock_is_replenished(table):
if table.stock.isEmpty:
table.stock = Stock(table.waste.cards)
table.waste = Waste()
table.stock.shuffle()
|
mit
|
Python
|
51f4d40cf6750d35f10f37d939a2c30c5f26d300
|
Update script to write results to the database.
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
backend/scripts/updatedf.py
|
backend/scripts/updatedf.py
|
#!/usr/bin/env python
import hashlib
import os
import rethinkdb as r
def main():
conn = r.connect('localhost', 28015, db='materialscommons')
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
path = os.path.join(root, f)
with open(path) as fd:
data = fd.read()
hash = hashlib.md5(data).hexdigest()
s = os.stat(path).st_size
r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn)
print "%s:%s:%d" %(path, hash, s)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
mit
|
Python
|
599672acbf925cab634bc15ab47055aabb131efd
|
Fix xkcd text regex. Closes #46
|
webcomics/dosage,blade2005/dosage,peterjanes/dosage,wummel/dosage,wummel/dosage,mbrandis/dosage,peterjanes/dosage,mbrandis/dosage,blade2005/dosage,Freestila/dosage,Freestila/dosage,webcomics/dosage
|
dosagelib/plugins/x.py
|
dosagelib/plugins/x.py
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2013 Bastian Kleineidam
from re import compile
from ..scraper import _BasicScraper
from ..helpers import bounceStarter
from ..util import tagre
class xkcd(_BasicScraper):
url = 'http://xkcd.com/'
starter = bounceStarter(url, compile(tagre("a", "href", r'(/\d+/)', before="next")))
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '1'
imageSearch = compile(tagre("img", "src", r'(http://imgs\.xkcd\.com/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(/\d+/)', before="prev"))
help = 'Index format: n (unpadded)'
description = u'A webcomic of romance, sarcasm, math, and language.'
textSearch = compile(tagre("img", "title", r'([^"]+)', before=r'http://imgs\.xkcd\.com/comics/'))
adult = True
@classmethod
def namer(cls, imageUrl, pageUrl):
index = int(pageUrl.rstrip('/').rsplit('/', 1)[-1])
name = imageUrl.rsplit('/', 1)[-1].split('.')[0]
return '%03d-%s' % (index, name)
@classmethod
def imageUrlModifier(cls, url, data):
if url and '/large/' in data:
return url.replace(".png", "_large.png")
return url
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2013 Bastian Kleineidam
from re import compile
from ..scraper import _BasicScraper
from ..helpers import bounceStarter
from ..util import tagre
class xkcd(_BasicScraper):
url = 'http://xkcd.com/'
starter = bounceStarter(url, compile(tagre("a", "href", r'(/\d+/)', before="next")))
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '1'
imageSearch = compile(tagre("img", "src", r'(http://imgs\.xkcd\.com/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(/\d+/)', before="prev"))
help = 'Index format: n (unpadded)'
description = u'A webcomic of romance, sarcasm, math, and language.'
textSearch = compile(tagre("img", "title", r'([^"]+)'))
adult = True
@classmethod
def namer(cls, imageUrl, pageUrl):
index = int(pageUrl.rstrip('/').rsplit('/', 1)[-1])
name = imageUrl.rsplit('/', 1)[-1].split('.')[0]
return '%03d-%s' % (index, name)
@classmethod
def imageUrlModifier(cls, url, data):
if url and '/large/' in data:
return url.replace(".png", "_large.png")
return url
|
mit
|
Python
|
f0593b2d69730441b5a486e27ed6eb7001939bf4
|
Include unlimited features for enterprise
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/apps/accounting/bootstrap/config/user_buckets_august_2018.py
|
corehq/apps/accounting/bootstrap/config/user_buckets_august_2018.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from decimal import Decimal
from corehq.apps.accounting.models import (
FeatureType,
SoftwarePlanEdition,
UNLIMITED_FEATURE_USAGE
)
BOOTSTRAP_CONFIG = {
(SoftwarePlanEdition.COMMUNITY, False, False): {
'role': 'community_plan_v1',
'product_rate_monthly_fee': Decimal('0.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=10, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=0),
}
},
(SoftwarePlanEdition.STANDARD, False, False): {
'role': 'standard_plan_v0',
'product_rate_monthly_fee': Decimal('300.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=50, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.PRO, False, False): {
'role': 'pro_plan_v0',
'product_rate_monthly_fee': Decimal('600.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=250, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.ADVANCED, False, False): {
'role': 'advanced_plan_v0',
'product_rate_monthly_fee': Decimal('1200.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=500, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.ADVANCED, True, False): {
'role': 'advanced_plan_v0',
'product_rate_monthly_fee': Decimal('0.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=UNLIMITED_FEATURE_USAGE, per_excess_fee=Decimal('0.00')),
FeatureType.SMS: dict(monthly_limit=UNLIMITED_FEATURE_USAGE),
}
}
}
|
from __future__ import absolute_import
from __future__ import unicode_literals
from decimal import Decimal
from corehq.apps.accounting.models import (
FeatureType,
SoftwarePlanEdition,
)
BOOTSTRAP_CONFIG = {
(SoftwarePlanEdition.COMMUNITY, False, False): {
'role': 'community_plan_v1',
'product_rate_monthly_fee': Decimal('0.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=10, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=0),
}
},
(SoftwarePlanEdition.STANDARD, False, False): {
'role': 'standard_plan_v0',
'product_rate_monthly_fee': Decimal('300.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=50, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.PRO, False, False): {
'role': 'pro_plan_v0',
'product_rate_monthly_fee': Decimal('600.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=250, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.ADVANCED, False, False): {
'role': 'advanced_plan_v0',
'product_rate_monthly_fee': Decimal('1200.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=500, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=50),
}
},
(SoftwarePlanEdition.ADVANCED, True, False): {
'role': 'advanced_plan_v0',
'product_rate_monthly_fee': Decimal('0.00'),
'feature_rates': {
FeatureType.USER: dict(monthly_limit=10, per_excess_fee=Decimal('2.00')),
FeatureType.SMS: dict(monthly_limit=0),
}
}
}
|
bsd-3-clause
|
Python
|
205f3fb2f36f33c6d13b4541ad49522b799d358d
|
simplify the call to make file list
|
derwolfe/teiler,derwolfe/teiler
|
src/actions/server.py
|
src/actions/server.py
|
import sys
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from twisted.internet import task
from twisted.internet.protocol import DatagramProtocol
from . import utils
class Broadcaster(DatagramProtocol):
"""
Broadcast the ip to all of the listeners on the channel
"""
def __init__(self, address):
self.ip = address # shouldn't this be passed in
self.host = '224.0.0.5'
self.port = 8005
def startProtocol(self):
log.msg("Serving on {0}:8888 and broadcasting IP on 224.0.0.5:8005".format(self.ip))
self.transport.joinGroup(self.host)
self._call = task.LoopingCall(self.sendHeartbeat)
self._loop = self._call.start(5)
def sendHeartbeat(self):
message ='{0}:8888'.format(self.ip)
self.transport.write(message, (self.host, self.port))
def stopProtocol(self):
self._call.stop()
def main(serve_dir):
from twisted.internet import reactor
resource = File(serve_dir)
factory = Site(resource)
log.startLogging(sys.stdout)
serve_at = utils.get_live_interface()
# this is messy
# the program should expect to serve files at a specific location everytime.
utils.make_file_list(serve_dir)
log.msg("Starting fileserver on{0}:8888".format(serve_at))
reactor.listenTCP(8888, factory)
log.msg("Broadcasting")
reactor.listenMulticast(8005, Broadcaster(serve_at))
reactor.run()
if __name__ == "__main__":
main('./')
|
import sys
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from twisted.internet import task
from twisted.internet.protocol import DatagramProtocol
from . import utils
class Broadcaster(DatagramProtocol):
"""
Broadcast the ip to all of the listeners on the channel
"""
def __init__(self, address):
self.ip = address # shouldn't this be passed in
self.host = '224.0.0.5'
self.port = 8005
def startProtocol(self):
log.msg("Serving on {0}:8888 and broadcasting IP on 224.0.0.5:8005".format(self.ip))
self.transport.joinGroup(self.host)
self._call = task.LoopingCall(self.sendHeartbeat)
self._loop = self._call.start(5)
def sendHeartbeat(self):
message ='{0}:8888'.format(self.ip)
self.transport.write(message, (self.host, self.port))
def stopProtocol(self):
self._call.stop()
def main(serve_dir):
from twisted.internet import reactor
resource = File(serve_dir)
factory = Site(resource)
log.startLogging(sys.stdout)
serve_at = utils.get_live_interface()
# this is messy
# the program should expect to serve files at a specific location everytime.
utils.make_file_list(utils.list_files(serve_dir),
utils.list_dirs(serve_dir),
serve_dir)
log.msg("Starting fileserver on{0}:8888".format(serve_at))
reactor.listenTCP(8888, factory)
log.msg("Broadcasting")
reactor.listenMulticast(8005, Broadcaster(serve_at))
reactor.run()
if __name__ == "__main__":
main('./')
|
mit
|
Python
|
923d49c753acf7d8945d6b79efbdb08363e130a2
|
Bring test_frame_of_test_null_file up to date with new signature of frame_of_test().
|
pmclanahan/pytest-progressive,erikrose/nose-progressive,veo-labs/nose-progressive,olivierverdier/nose-progressive
|
noseprogressive/tests/test_utils.py
|
noseprogressive/tests/test_utils.py
|
from os import chdir, getcwd
from os.path import dirname, basename
from unittest import TestCase
from nose.tools import eq_
from noseprogressive.utils import human_path, frame_of_test
class UtilsTests(TestCase):
"""Tests for independent little bits and pieces"""
def test_human_path(self):
chdir(dirname(__file__))
eq_(human_path(__file__, getcwd()), basename(__file__))
def test_frame_of_test_null_file(self):
"""Make sure frame_of_test() doesn't crash when test_file is None."""
try:
frame_of_test((None, None, None), NotImplementedError,
NotImplementedError(), [('file', 333)])
except AttributeError:
self.fail('frame_of_test() raised AttributeError.')
|
from os import chdir, getcwd
from os.path import dirname, basename
from unittest import TestCase
from nose.tools import eq_
from noseprogressive.utils import human_path, frame_of_test
class UtilsTests(TestCase):
"""Tests for independent little bits and pieces"""
def test_human_path(self):
chdir(dirname(__file__))
eq_(human_path(__file__, getcwd()), basename(__file__))
def test_frame_of_test_null_file(self):
"""Make sure frame_of_test() doesn't crash when test_file is None."""
try:
frame_of_test((None, None, None), [('file', 333)])
except AttributeError:
self.fail('frame_of_test() raised AttributeError.')
|
mit
|
Python
|
d9800c562b81f4e118e9db96a68e301396af46f9
|
Add abstract job serializer
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
polyaxon/jobs/serializers.py
|
polyaxon/jobs/serializers.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from rest_framework import serializers, fields
from jobs.models import JobResources
class JobResourcesSerializer(serializers.ModelSerializer):
class Meta:
model = JobResources
exclude = ('id',)
class JobSerializer(serializers.ModelSerializer):
cpu = fields.DictField(allow_null=True)
memory = fields.DictField(allow_null=True)
gpu = fields.DictField(allow_null=True)
resources = JobResourcesSerializer(read_only=True)
class Meta:
fields = ('image', 'resources', 'cpu', 'memory', 'gpu')
extra_kwargs = {
'cpu': {'write_only': True},
'memory': {'write_only': True},
'gpu': {'write_only': True}}
@staticmethod
def _has_resources(validated_data):
cpu = validated_data['cpu']
memory = validated_data['memory']
gpu = validated_data['gpu']
if cpu is None and memory is None and gpu is None:
return False
return True
@staticmethod
def _get_resources(validated_data):
cpu = validated_data['cpu']
memory = validated_data['memory']
gpu = validated_data['gpu']
return {'cpu': cpu, 'memory': memory, 'gpu': gpu}
def _create_resources(self, validated_data):
if self._has_resources(validated_data):
resources = JobResourcesSerializer(data=self._get_resources(validated_data))
resources.is_valid(raise_exception=True)
return resources.save()
return None
def _update_resources(self, resources_instance, validated_data):
if self._has_resources(validated_data):
resources = JobResourcesSerializer(instance=resources_instance,
data=self._get_resources(validated_data))
resources.is_valid(raise_exception=True)
return resources.save()
return None
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from rest_framework import serializers
from jobs.models import JobResources
class JobResourcesSerializer(serializers.ModelSerializer):
class Meta:
model = JobResources
exclude = ('id',)
|
apache-2.0
|
Python
|
77c4b5a72ddad68717b6fb1291ce643f20a63e2d
|
Update SeleniumBase exceptions
|
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase
|
seleniumbase/common/exceptions.py
|
seleniumbase/common/exceptions.py
|
""" SeleniumBase Exceptions
NoSuchFileException => Called when self.assert_downloaded_file(...) fails.
NotUsingChromeException => Used by Chrome-only methods if not using Chrome.
OutOfScopeException => Used by BaseCase methods when setUp() is skipped.
TextNotVisibleException => Called when expected text fails to appear.
TimeLimitExceededException => Called when exceeding "--time-limit=SECONDS".
"""
from selenium.common.exceptions import WebDriverException
class NoSuchFileException(Exception):
pass
class NotUsingChromeException(WebDriverException):
pass
class OutOfScopeException(Exception):
pass
class TextNotVisibleException(WebDriverException):
pass
class TimeLimitExceededException(Exception):
pass
|
""" SeleniumBase Exceptions
NoSuchFileException => Used by self.assert_downloaded_file(...)
NotUsingChromeException => Used by Chrome-only methods if not using Chrome
OutOfScopeException => Used by BaseCase methods when setUp() is skipped
TimeLimitExceededException => Used by "--time-limit=SECONDS"
"""
class NoSuchFileException(Exception):
pass
class NotUsingChromeException(Exception):
pass
class OutOfScopeException(Exception):
pass
class TimeLimitExceededException(Exception):
pass
class TextNotVisibleException(Exception):
pass
|
mit
|
Python
|
e6af9d901f26fdf779a6a13319face483fe48a3b
|
Disable clickjacking protection on demos to display them in iframes
|
lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter
|
dwitter/dweet/views.py
|
dwitter/dweet/views.py
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from dwitter.models import Dweet
from django.views.decorators.clickjacking import xframe_options_exempt
@xframe_options_exempt
def fullscreen_dweet(request, dweet_id):
dweet = get_object_or_404(Dweet, id=dweet_id)
context = {'dweet': dweet
}
return render(request, 'dweet/dweet-id.html', context );
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from dwitter.models import Dweet
def fullscreen_dweet(request, dweet_id):
dweet = get_object_or_404(Dweet, id=dweet_id)
context = {'dweet': dweet
}
return render(request, 'dweet/dweet-id.html', context );
|
apache-2.0
|
Python
|
4d5a15a4a087ea8bcf458243da947f5e0934013b
|
Fix html not loading the initial value (#569)
|
UTNkar/moore,UTNkar/moore,UTNkar/moore,UTNkar/moore
|
src/blocks/widgets.py
|
src/blocks/widgets.py
|
from django import forms
from wagtail.utils.widgets import WidgetWithScript
class CodeMirrorWidget(WidgetWithScript, forms.Textarea):
def render_js_init(self, id, name, value):
js = """
document.addEventListener('DOMContentLoaded', function(){{
CodeMirror.fromTextArea(
document.getElementById("{id}"),
{{
lineWrapping: true,
indentUnit: 4,
mode: "htmlmixed",
autoRefresh: true
}}
)
}});
"""
return js.format(id=id)
@property
def media(self):
return forms.Media(
css={'all': ('libraries/codemirror/codemirror.css',)},
js=(
'libraries/codemirror/codemirror.js',
'libraries/codemirror/autorefresh.js',
'libraries/codemirror/xml.js',
'libraries/codemirror/css.js',
'libraries/codemirror/javascript.js',
'libraries/codemirror/htmlmixed.js',
)
)
|
from django import forms
from wagtail.utils.widgets import WidgetWithScript
class CodeMirrorWidget(WidgetWithScript, forms.Textarea):
def render_js_init(self, id, name, value):
js = """
CodeMirror.fromTextArea(
document.getElementById("{id}"),
{{
lineWrapping: true,
indentUnit: 4,
mode: "htmlmixed",
autoRefresh: true
}}
);
"""
return js.format(id=id)
@property
def media(self):
return forms.Media(
css={'all': ('libraries/codemirror/codemirror.css',)},
js=('libraries/codemirror/codemirror.js',
'libraries/codemirror/autorefresh.js',
'libraries/codemirror/xml.js',
'libraries/codemirror/css.js',
'libraries/codemirror/javascript.js',
'libraries/codemirror/htmlmixed.js')
)
|
agpl-3.0
|
Python
|
e77d142d73945bc55e893d0d6ca87c657f838558
|
hide top level import (#12353)
|
bokeh/bokeh,bokeh/bokeh,bokeh/bokeh,bokeh/bokeh,bokeh/bokeh
|
src/bokeh/__init__.py
|
src/bokeh/__init__.py
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Bokeh is a Python library for creating interactive visualizations for modern
web browsers.
Bokeh helps you build beautiful graphics, ranging from simple plots to complex
dashboards with streaming datasets. With Bokeh, you can create JavaScript-powered
visualizations without writing any JavaScript yourself.
Most of the functionality of Bokeh is accessed through submodules such as
|bokeh.plotting| and |bokeh.models|.
For full documentation, please visit https://docs.bokeh.org
----
The top-level ``bokeh`` module itself contains a few useful functions and
attributes:
.. attribute:: __version__
:annotation: = currently installed version of Bokeh
.. autofunction:: bokeh.license
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import importlib.metadata
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'__version__',
'license',
'sampledata',
)
__version__ = importlib.metadata.version("bokeh")
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def license():
''' Print the Bokeh license to the console.
Returns:
None
'''
from pathlib import Path
with open(Path(__file__).parent / 'LICENSE.txt') as lic:
print(lic.read())
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
del importlib.metadata
# expose sample data module
from . import sampledata; sampledata
# configure Bokeh logger
from .util import logconfig # isort:skip
del logconfig
# Configure warnings to always show nice mssages, despite Python's active
# efforts to hide them from users.
import warnings # isort:skip
from .util.warnings import BokehDeprecationWarning, BokehUserWarning # isort:skip
warnings.simplefilter('always', BokehDeprecationWarning)
warnings.simplefilter('always', BokehUserWarning)
original_formatwarning = warnings.formatwarning
def _formatwarning(message, category, filename, lineno, line=None):
from .util.warnings import BokehDeprecationWarning, BokehUserWarning
if category not in (BokehDeprecationWarning, BokehUserWarning):
return original_formatwarning(message, category, filename, lineno, line)
return "%s: %s\n" % (category.__name__, message)
warnings.formatwarning = _formatwarning
del _formatwarning
del BokehDeprecationWarning, BokehUserWarning
del warnings
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Bokeh is a Python library for creating interactive visualizations for modern
web browsers.
Bokeh helps you build beautiful graphics, ranging from simple plots to complex
dashboards with streaming datasets. With Bokeh, you can create JavaScript-powered
visualizations without writing any JavaScript yourself.
Most of the functionality of Bokeh is accessed through submodules such as
|bokeh.plotting| and |bokeh.models|.
For full documentation, please visit https://docs.bokeh.org
----
The top-level ``bokeh`` module itself contains a few useful functions and
attributes:
.. attribute:: __version__
:annotation: = currently installed version of Bokeh
.. autofunction:: bokeh.license
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import importlib.metadata
from pathlib import Path
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'__version__',
'license',
'sampledata',
)
__version__ = importlib.metadata.version("bokeh")
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def license():
''' Print the Bokeh license to the console.
Returns:
None
'''
with open(Path(__file__).parent / 'LICENSE.txt') as lic:
print(lic.read())
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
del importlib.metadata
# expose sample data module
from . import sampledata; sampledata
# configure Bokeh logger
from .util import logconfig # isort:skip
del logconfig
# Configure warnings to always show nice mssages, despite Python's active
# efforts to hide them from users.
import warnings # isort:skip
from .util.warnings import BokehDeprecationWarning, BokehUserWarning # isort:skip
warnings.simplefilter('always', BokehDeprecationWarning)
warnings.simplefilter('always', BokehUserWarning)
original_formatwarning = warnings.formatwarning
def _formatwarning(message, category, filename, lineno, line=None):
from .util.warnings import BokehDeprecationWarning, BokehUserWarning
if category not in (BokehDeprecationWarning, BokehUserWarning):
return original_formatwarning(message, category, filename, lineno, line)
return "%s: %s\n" % (category.__name__, message)
warnings.formatwarning = _formatwarning
del _formatwarning
del BokehDeprecationWarning, BokehUserWarning
del warnings
|
bsd-3-clause
|
Python
|
c358f467bbab9bd0366347f9a1bd10cb2e027bb8
|
use moksha widget template
|
fedora-infra/fedora-packages,Fale/fedora-packages,Fale/fedora-packages,fedora-infra/fedora-packages,Fale/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages
|
fedoracommunity/mokshaapps/packagemaintresource/controllers/root.py
|
fedoracommunity/mokshaapps/packagemaintresource/controllers/root.py
|
from moksha.lib.base import Controller
from moksha.lib.helpers import MokshaApp
from tg import expose, tmpl_context
from fedoracommunity.widgets import SubTabbedContainer
class TabbedNav(SubTabbedContainer):
tabs= (MokshaApp('Overview', 'fedoracommunity.packagemaint.overview'),
MokshaApp('Builds', 'fedoracommunity.builds'),
MokshaApp('Updates', 'fedoracommunity.updates'),
MokshaApp('Packages', 'fedoracommunity.packagemaint.packages'),
MokshaApp('Package Groups', 'fedoracommunity.packagemaint.packagegroups'),
)
class RootController(Controller):
def __init__(self):
self.widget = TabbedNav('packagemaintnav')
@expose('mako:moksha.templates.widget')
def index(self):
tmpl_context.widget = self.widget
return {'options':{}}
|
from moksha.lib.base import Controller
from moksha.lib.helpers import MokshaApp
from tg import expose, tmpl_context
from fedoracommunity.widgets import SubTabbedContainer
class TabbedNav(SubTabbedContainer):
tabs= (MokshaApp('Overview', 'fedoracommunity.packagemaint.overview'),
MokshaApp('Builds', 'fedoracommunity.builds'),
MokshaApp('Updates', 'fedoracommunity.updates'),
MokshaApp('Packages', 'fedoracommunity.packagemaint.packages'),
MokshaApp('Package Groups', 'fedoracommunity.packagemaint.packagegroups'),
)
class RootController(Controller):
def __init__(self):
self.widget = TabbedNav('packagemaintnav')
@expose('mako:fedoracommunity.mokshaapps.packagemaintresource.templates.index')
def index(self):
tmpl_context.widget = self.widget
return {}
|
agpl-3.0
|
Python
|
5b6aa3f6cca7ea83a53178be7b9e58892597ac0b
|
Add some logging to Auth
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
opwen_email_server/services/auth.py
|
opwen_email_server/services/auth.py
|
from abc import ABCMeta
from abc import abstractmethod
from functools import lru_cache
from typing import Callable
from typing import Optional
from azure.storage.table import TableService
from opwen_email_server.utils.log import LogMixin
class Auth(metaclass=ABCMeta):
@abstractmethod
def domain_for(self, client_id: str) -> Optional[str]:
raise NotImplementedError # pramga: no cover
class AzureAuth(Auth, LogMixin):
def __init__(self, account: str, key: str, table: str,
client: TableService=None,
client_factory: Callable[..., TableService]=TableService
) -> None:
self._account = account
self._key = key
self._table = table
self.__client = client
self._client_factory = client_factory
@property
def _client(self) -> TableService:
if self.__client is not None:
return self.__client
client = self._client_factory(self._account, self._key)
client.create_table(self._table)
self.__client = client
return client
def insert(self, client_id: str, domain: str):
self._client.insert_entity(self._table, {
'RowKey': client_id,
'PartitionKey': client_id,
'domain': domain,
})
self.log_debug('Registered client %s at domain %s', client_id, domain)
def domain_for(self, client_id):
try:
return self._domain_for_cached(client_id)
except KeyError:
return None
@lru_cache(maxsize=128)
def _domain_for_cached(self, client_id: str) -> str:
query = "PartitionKey eq '{0}' and RowKey eq '{0}'".format(client_id)
entities = self._client.query_entities(self._table, query)
for entity in entities:
domain = entity.get('domain')
if domain:
self.log_debug('Got domain %s for client %s', domain, client_id)
return domain
self.log_debug('Unrecognized client %s', client_id)
raise KeyError
|
from abc import ABCMeta
from abc import abstractmethod
from functools import lru_cache
from typing import Callable
from typing import Optional
from azure.storage.table import TableService
class Auth(metaclass=ABCMeta):
@abstractmethod
def domain_for(self, client_id: str) -> Optional[str]:
raise NotImplementedError # pramga: no cover
class AzureAuth(Auth):
def __init__(self, account: str, key: str, table: str,
client: TableService=None,
client_factory: Callable[..., TableService]=TableService
) -> None:
self._account = account
self._key = key
self._table = table
self.__client = client
self._client_factory = client_factory
@property
def _client(self) -> TableService:
if self.__client is not None:
return self.__client
client = self._client_factory(self._account, self._key)
client.create_table(self._table)
self.__client = client
return client
def insert(self, client_id: str, domain: str):
self._client.insert_entity(self._table, {
'RowKey': client_id,
'PartitionKey': client_id,
'domain': domain,
})
def domain_for(self, client_id):
try:
return self._domain_for_cached(client_id)
except KeyError:
return None
@lru_cache(maxsize=128)
def _domain_for_cached(self, client_id: str) -> str:
query = "PartitionKey eq '{0}' and RowKey eq '{0}'".format(client_id)
entities = self._client.query_entities(self._table, query)
for entity in entities:
domain = entity.get('domain')
if domain:
return domain
raise KeyError
|
apache-2.0
|
Python
|
4b2a29c484ddd5e2dfb4ad91bb0ae5c7681553c1
|
Bump version to 0.1.5
|
HighMileage/lacrm
|
lacrm/_version.py
|
lacrm/_version.py
|
__version_info__ = (0, 1, 5)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (0, 1, 4)
__version__ = '.'.join(map(str, __version_info__))
|
mit
|
Python
|
0cdac10ee51cc3e812ae9188606301e6be0644ee
|
Fix default url bug
|
CornellProjects/hlthpal,CornellProjects/hlthpal,CornellProjects/hlthpal,CornellProjects/hlthpal
|
web/project/main/urls.py
|
web/project/main/urls.py
|
from django.conf.urls import url, include
from rest_framework.authtoken import views as authviews
from rest_framework_jwt import views as jwt_views
from . import views
urlpatterns = [
url(r'^home/', views.index, name='index'),
# Authentication APIs
url(r'^api/auth', jwt_views.obtain_jwt_token, name="auth"),
url(r'^api/token-verify', jwt_views.verify_jwt_token, name="token-verify"),
url(r'^api/token-refresh', jwt_views.refresh_jwt_token, name="token-refresh"),
# User APIs
url(r'^api/register', views.UserCreateView.as_view(), name="register"),
url(r'^api/entity', views.EntityCreateView.as_view(), name="entity"),
url(r'^api/doctor', views.DoctorCreateView.as_view(), name="doctor"),
url(r'^api/login', views.UserLoginView.as_view(), name="login"),
url(r'^api/user', views.CurrentUserView.as_view(), name="user"),
url(r'^api/profile', views.UserProfileView.as_view(), name="profile"),
url(r'^api/record', views.RecordAPIView.as_view(), name="record"),
url(r'^api/questions', views.QuestionGetAPIView.as_view(), name="questions"),
url(r'^api/answer', views.AnswerAPIView.as_view(), name="answer"),
url(r'^api/symptom', views.SymptomAPIView.as_view(), name="symptom"),
url(r'^api/edit_symptom/(?P<record>\d+)/(?P<symptom>\d+)$', views.SymptomUpdateView.as_view(), name="edit_symptom"),
url(r'^api/edit_answer/(?P<record>\d+)/(?P<question>\d+)$', views.AnswerUpdateView.as_view(), name="edit_answer"),
url(r'^api/edit_record/(?P<pk>\d+)$', views.RecordUpdateView.as_view(), name="edit_record"),
url(r'^api/edit_question/(?P<pk>\d+)$', views.QuestionUpdateView.as_view(), name="edit_question"),
# Default URL
url(r'', views.index, name='index'),
]
|
from django.conf.urls import url, include
from rest_framework.authtoken import views as authviews
from rest_framework_jwt import views as jwt_views
from . import views
urlpatterns = [
url(r'', views.index, name='index'),
url(r'^home/', views.index, name='index'),
# Authentication APIs
url(r'^api/auth', jwt_views.obtain_jwt_token, name="auth"),
url(r'^api/token-verify', jwt_views.verify_jwt_token, name="token-verify"),
url(r'^api/token-refresh', jwt_views.refresh_jwt_token, name="token-refresh"),
# User APIs
url(r'^api/register', views.UserCreateView.as_view(), name="register"),
url(r'^api/entity', views.EntityCreateView.as_view(), name="entity"),
url(r'^api/doctor', views.DoctorCreateView.as_view(), name="doctor"),
url(r'^api/login', views.UserLoginView.as_view(), name="login"),
url(r'^api/user', views.CurrentUserView.as_view(), name="user"),
url(r'^api/profile', views.UserProfileView.as_view(), name="profile"),
url(r'^api/record', views.RecordAPIView.as_view(), name="record"),
url(r'^api/questions', views.QuestionGetAPIView.as_view(), name="questions"),
url(r'^api/answer', views.AnswerAPIView.as_view(), name="answer"),
url(r'^api/symptom', views.SymptomAPIView.as_view(), name="symptom"),
url(r'^api/edit_symptom/(?P<record>\d+)/(?P<symptom>\d+)$', views.SymptomUpdateView.as_view(), name="edit_symptom"),
url(r'^api/edit_answer/(?P<record>\d+)/(?P<question>\d+)$', views.AnswerUpdateView.as_view(), name="edit_answer"),
url(r'^api/edit_record/(?P<pk>\d+)$', views.RecordUpdateView.as_view(), name="edit_record"),
url(r'^api/edit_question/(?P<pk>\d+)$', views.QuestionUpdateView.as_view(), name="edit_question"),
]
|
apache-2.0
|
Python
|
9e0c83e751e72e3396a4729392b972834b25c8b7
|
Add TODO
|
jonhadfield/ansible-lookups
|
v2/aws_secgroup_ids_from_names.py
|
v2/aws_secgroup_ids_from_names.py
|
# (c) 2015, Jon Hadfield <[email protected]>
"""
Description: This lookup takes an AWS region and a list of one or more
security Group Names and returns a list of matching security Group IDs.
Example Usage:
{{ lookup('aws_secgroup_ids_from_names', ('eu-west-1', ['nginx_group', 'mysql_group'])) }}
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import codecs
from ansible.errors import *
from ansible.plugins.lookup import LookupBase
try:
import boto
import boto.ec2
except ImportError:
raise AnsibleError("aws_secgroup_ids_from_names lookup cannot be run without boto installed")
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
if isinstance(terms, basestring):
terms = [terms]
sg_list = []
region = terms[0]
group_names = terms[1]
conn = boto.ec2.connect_to_region(region)
#TODO: Use OR filter rather than making multiple calls
for group_name in group_names:
filters = {'group_name': group_name}
sg = conn.get_all_security_groups(filters=filters)
if sg and sg[0]:
sg_list.append(sg[0].id)
return sg_list
|
# (c) 2015, Jon Hadfield <[email protected]>
"""
Description: This lookup takes an AWS region and a list of one or more
security Group Names and returns a list of matching security Group IDs.
Example Usage:
{{ lookup('aws_secgroup_ids_from_names', ('eu-west-1', ['nginx_group', 'mysql_group'])) }}
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import codecs
from ansible.errors import *
from ansible.plugins.lookup import LookupBase
try:
import boto
import boto.ec2
except ImportError:
raise AnsibleError("aws_secgroup_ids_from_names lookup cannot be run without boto installed")
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
if isinstance(terms, basestring):
terms = [terms]
sg_list = []
region = terms[0]
group_names = terms[1]
conn = boto.ec2.connect_to_region(region)
for group_name in group_names:
filters = {'group_name': group_name}
sg = conn.get_all_security_groups(filters=filters)
if sg and sg[0]:
sg_list.append(sg[0].id)
return sg_list
|
mit
|
Python
|
1337c5269d97dc6f1cd47aed838cf26c6b488be2
|
bump version
|
houqp/shell.py
|
shell/__init__.py
|
shell/__init__.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__title__ = 'shell'
__version__ = '0.0.7'
__author__ = 'Qingping Hou'
__license__ = 'MIT'
from .run_cmd import RunCmd
from .input_stream import InputStream
from .api import instream, cmd, pipe_all, ex, p, ex_all
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__title__ = 'shell'
__version__ = '0.0.6'
__author__ = 'Qingping Hou'
__license__ = 'MIT'
from .run_cmd import RunCmd
from .input_stream import InputStream
from .api import instream, cmd, pipe_all, ex, p, ex_all
|
mit
|
Python
|
21ab430368ee262377c77f1ecc24b645377dd520
|
Revert "Bug Fix: sort keys when creating json data to send"
|
imtapps/generic-request-signer,imtapps/generic-request-signer
|
generic_request_signer/client.py
|
generic_request_signer/client.py
|
import six
from datetime import date
import json
import decimal
if six.PY3:
import urllib.request as urllib
else:
import urllib2 as urllib
from generic_request_signer import response, factory
def json_encoder(obj):
if isinstance(obj, date):
return str(obj.isoformat())
if isinstance(obj, decimal.Decimal):
return str(obj)
class Client(object):
def __init__(self, api_credentials):
self.api_credentials = api_credentials
def get_factory(self, files):
if files:
return factory.MultipartSignedRequestFactory
return factory.SignedRequestFactory
def _get_response(self, http_method, endpoint, data=None, files=None, timeout=15, **request_kwargs):
headers = request_kwargs.get("headers", {})
if not isinstance(data, str) and headers.get("Content-Type") == "application/json":
data = json.dumps(data, default=json_encoder)
try:
http_response = urllib.urlopen(
self._get_request(http_method, endpoint, data, files, **request_kwargs), timeout=timeout)
except urllib.HTTPError as e:
http_response = e
return response.Response(http_response)
def _get_request(self, http_method, endpoint, data=None, files=None, **request_kwargs):
factory_class = self.get_factory(files)
request_factory = factory_class(http_method, self._client_id, self._private_key, data, files)
service_url = self._get_service_url(endpoint)
return request_factory.create_request(service_url, **request_kwargs)
def _get_service_url(self, endpoint):
return self._base_url + endpoint
@property
def _base_url(self):
return self.api_credentials.base_url
@property
def _client_id(self):
return self.api_credentials.client_id
@property
def _private_key(self):
return self.api_credentials.private_key
|
import six
from datetime import date
import json
import decimal
from apysigner import DefaultJSONEncoder
if six.PY3:
import urllib.request as urllib
else:
import urllib2 as urllib
from generic_request_signer import response, factory
def json_encoder(obj):
if isinstance(obj, date):
return str(obj.isoformat())
if isinstance(obj, decimal.Decimal):
return str(obj)
class Client(object):
def __init__(self, api_credentials):
self.api_credentials = api_credentials
def get_factory(self, files):
if files:
return factory.MultipartSignedRequestFactory
return factory.SignedRequestFactory
def _get_response(self, http_method, endpoint, data=None, files=None, timeout=15, **request_kwargs):
headers = request_kwargs.get("headers", {})
if not isinstance(data, str) and headers.get("Content-Type") == "application/json":
data = json.dumps(data, default=DefaultJSONEncoder, sort_keys=True)
try:
http_response = urllib.urlopen(
self._get_request(http_method, endpoint, data, files, **request_kwargs), timeout=timeout)
except urllib.HTTPError as e:
http_response = e
return response.Response(http_response)
def _get_request(self, http_method, endpoint, data=None, files=None, **request_kwargs):
factory_class = self.get_factory(files)
request_factory = factory_class(http_method, self._client_id, self._private_key, data, files)
service_url = self._get_service_url(endpoint)
return request_factory.create_request(service_url, **request_kwargs)
def _get_service_url(self, endpoint):
return self._base_url + endpoint
@property
def _base_url(self):
return self.api_credentials.base_url
@property
def _client_id(self):
return self.api_credentials.client_id
@property
def _private_key(self):
return self.api_credentials.private_key
|
bsd-2-clause
|
Python
|
b28ca4abf8a6986b96bfb89cf8737c8f737fee4e
|
update boto import to use boto3 (#1000)
|
openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms
|
global_settings/wagtail_hooks.py
|
global_settings/wagtail_hooks.py
|
import boto3
import wagtail.admin.rich_text.editors.draftail.features as draftail_features
from wagtail.admin.rich_text.converters.html_to_contentstate import InlineStyleElementHandler
from wagtail.core import hooks
from django.urls import reverse
from wagtail.admin.menu import MenuItem
from .models import CloudfrontDistribution
@hooks.register('register_rich_text_features')
def register_strikethrough_feature(features):
"""
Registering the `superscript` feature, which uses the `SUPERSCRIPT` Draft.js inline style type,
and is stored as HTML with an `<sup>` tag.
"""
feature_name = 'superscript'
type_ = 'SUPERSCRIPT'
tag = 'sup'
control = {
'type': type_,
'label': '^',
'description': 'Superscript',
}
features.register_editor_plugin(
'draftail', feature_name, draftail_features.InlineStyleFeature(control)
)
db_conversion = {
'from_database_format': {tag: InlineStyleElementHandler(type_)},
'to_database_format': {'style_map': {type_: tag}},
}
features.default_features.append(feature_name)
features.register_converter_rule('contentstate', feature_name, db_conversion)
@hooks.register('after_edit_page')
def purge_cloudfront_caches(page, request):
try:
distribution = CloudfrontDistribution.objects.all()[0]
client = boto3.client('cloudfront')
response = client.create_invalidation(
DistributionId=distribution.distribution_id,
InvalidationBatch={
'Paths': {
'Quantity': 1,
'Items': [
'/apps/cms/api/*' # invalidate the entire cache for the website
],
},
'CallerReference': str(time()).replace(".", "")
}
)
except CloudfrontDistribution.DoesNotExist:
return
@hooks.register('register_settings_menu_item')
def register_500_menu_item():
return MenuItem('Generate 500', reverse('throw_error'), classnames='icon icon-warning', order=10000)
|
import boto
import wagtail.admin.rich_text.editors.draftail.features as draftail_features
from wagtail.admin.rich_text.converters.html_to_contentstate import InlineStyleElementHandler
from wagtail.core import hooks
from django.urls import reverse
from wagtail.admin.menu import MenuItem
from .models import CloudfrontDistribution
@hooks.register('register_rich_text_features')
def register_strikethrough_feature(features):
"""
Registering the `superscript` feature, which uses the `SUPERSCRIPT` Draft.js inline style type,
and is stored as HTML with an `<sup>` tag.
"""
feature_name = 'superscript'
type_ = 'SUPERSCRIPT'
tag = 'sup'
control = {
'type': type_,
'label': '^',
'description': 'Superscript',
}
features.register_editor_plugin(
'draftail', feature_name, draftail_features.InlineStyleFeature(control)
)
db_conversion = {
'from_database_format': {tag: InlineStyleElementHandler(type_)},
'to_database_format': {'style_map': {type_: tag}},
}
features.default_features.append(feature_name)
features.register_converter_rule('contentstate', feature_name, db_conversion)
@hooks.register('after_edit_page')
def purge_cloudfront_caches(page, request):
try:
distribution = CloudfrontDistribution.objects.all()[0]
client = boto3.client('cloudfront')
response = client.create_invalidation(
DistributionId=distribution.distribution_id,
InvalidationBatch={
'Paths': {
'Quantity': 1,
'Items': [
'/apps/cms/api/*' # invalidate the entire cache for the website
],
},
'CallerReference': str(time()).replace(".", "")
}
)
except CloudfrontDistribution.DoesNotExist:
return
@hooks.register('register_settings_menu_item')
def register_500_menu_item():
return MenuItem('Generate 500', reverse('throw_error'), classnames='icon icon-warning', order=10000)
|
agpl-3.0
|
Python
|
009ab26737923cfff97ba37a035dcff7639135b1
|
Replace all_pages_in_directory with concat_pdf_pages
|
shunghsiyu/pdf-processor
|
Util.py
|
Util.py
|
"""Collection of Helper Functions"""
import os
from fnmatch import fnmatch
from PyPDF2 import PdfFileReader
def pdf_file(filename):
"""Test whether or the the filename ends with '.pdf'."""
return fnmatch(filename, '*.pdf')
def all_pdf_files_in_directory(path):
"""Return a list of of PDF files in a directory."""
return [filename for filename in os.listdir(path) if pdf_file(filename)]
def concat_pdf_pages(files):
"""A generator that yields one PDF page a time for all pages in the PDF files."""
for input_file in files:
for page in PdfFileReader(input_file).pages:
yield page
def split_on_condition(iterable, predicate):
"""Split a iterable into chunks, where the first item in the chunk will be the
evaluate to True with predicate function, and the rest of the items in the chunk
evaluates to False."""
it = iter(iterable)
# Initialize the chunk list with an item
# StopIteration will be thrown if there are no further items in the iterator
chunk = [it.next()]
while True:
try:
item = it.next()
if predicate(item):
# If the next item should be in a new chunk then return the current chunk
yield chunk
# Then rest the chunk list
chunk = [item]
else:
# Simply append the item to current chunk if it doesn't match the predicate
chunk.append(item)
except StopIteration:
# If the end of the iterator is reached then simply return the current chunk
yield chunk
break
|
"""Collection of Helper Functions"""
import os
from fnmatch import fnmatch
from PyPDF2 import PdfFileReader
def pdf_file(filename):
"""Test whether or the the filename ends with '.pdf'."""
return fnmatch(filename, '*.pdf')
def all_pdf_files_in_directory(path):
"""Return a list of of PDF files in a directory."""
return [filename for filename in os.listdir(path) if pdf_file(filename)]
def all_pages_in_directory(path):
"""A generator that yields one PDF page a time for all the PDF in the directory."""
for filename in sorted(all_pdf_files_in_directory(path)):
with open(filename, 'rb') as input_file:
for page in PdfFileReader(input_file).pages:
yield page
def split_on_condition(iterable, predicate):
"""Split a iterable into chunks, where the first item in the chunk will be the
evaluate to True with predicate function, and the rest of the items in the chunk
evaluates to False."""
it = iter(iterable)
# Initialize the chunk list with an item
# StopIteration will be thrown if there are no further items in the iterator
chunk = [it.next()]
while True:
try:
item = it.next()
if predicate(item):
# If the next item should be in a new chunk then return the current chunk
yield chunk
# Then rest the chunk list
chunk = [item]
else:
# Simply append the item to current chunk if it doesn't match the predicate
chunk.append(item)
except StopIteration:
# If the end of the iterator is reached then simply return the current chunk
yield chunk
break
|
mit
|
Python
|
d16373609b2f30c6ffa576c1269c529f12c9622c
|
Switch to fast method for personal timetable
|
uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi
|
backend/uclapi/timetable/urls.py
|
backend/uclapi/timetable/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^personal$', views.get_personal_timetable_fast),
url(r'^bymodule$', views.get_modules_timetable),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^personal_fast$', views.get_personal_timetable_fast),
url(r'^personal$', views.get_personal_timetable),
url(r'^bymodule$', views.get_modules_timetable),
]
|
mit
|
Python
|
22785c709956365ac51bc3b79135e6debc6418ae
|
Exclude legacy objc API tests properly.
|
ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,TimothyGu/libilbc
|
all.gyp
|
all.gyp
|
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'variables': {
'include_examples%': 1,
'include_tests%': 1,
'webrtc_root_additional_dependencies': [],
},
'targets': [
{
'target_name': 'All',
'type': 'none',
'dependencies': [
'webrtc/webrtc.gyp:*',
'<@(webrtc_root_additional_dependencies)',
],
'conditions': [
['include_examples==1', {
'dependencies': [
'webrtc/webrtc_examples.gyp:*',
],
}],
['(OS=="ios" or (OS=="mac" and target_arch!="ia32")) and include_tests==1', {
'dependencies': [
'talk/app/webrtc/legacy_objc_api_tests.gyp:*',
],
}],
],
},
],
}
|
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'variables': {
'include_examples%': 1,
'include_tests%': 1,
'webrtc_root_additional_dependencies': [],
},
'targets': [
{
'target_name': 'All',
'type': 'none',
'dependencies': [
'webrtc/webrtc.gyp:*',
'<@(webrtc_root_additional_dependencies)',
],
'conditions': [
['include_examples==1', {
'dependencies': [
'webrtc/webrtc_examples.gyp:*',
],
}],
['OS=="ios" or (OS=="mac" and target_arch!="ia32") and include_tests==1', {
'dependencies': [
'talk/app/webrtc/legacy_objc_api_tests.gyp:*',
],
}],
],
},
],
}
|
bsd-3-clause
|
Python
|
e89e721225e916f4c2514f4a6568571abfc2acc0
|
Add slides frame simibar
|
speed-of-light/pyslider
|
lib/plotter/matching/__init__.py
|
lib/plotter/matching/__init__.py
|
__all__ = ["core", "single_matching_plotter"]
from lib.exp.evaluator.ground_truth import GroundTruth as GT
from core import MatchingPlotterBase
class MatchingPlotter(MatchingPlotterBase):
def __init__(self, root, name):
"""
Try to show one matching pairs
use set_data to set matched results:
array of `sid`, `fid`, `matches`
"""
MatchingPlotterBase.__init__(self, root, name)
def __match_info(self, ax, df=None, sid=-1, fid=0):
self.set_matched_pair(sid, fid)
view = self.get_view()
ax.imshow(view[:, :, [2, 1, 0]])
info = "S-{}, F-{}, df: {:5.2f}({})".\
format(sid, fid, df.dist.mean(), len(df))
ax.set_title(info)
def result_grid(self, fig, row=4, col=4, from_=1):
start = from_ - 1
end = from_+(row*col) - 1
for mi, mc in enumerate(self.df[start:end], 1):
ax = fig.add_subplot(15, 4, mi)
self.__match_info(ax, **mc)
def frame_slides_relation(self, ax, matches, answer):
"""
Print frame to slides relation by input function
"""
x = [s["sid"] for s in matches]
y = [s["df"].dist.mean() for s in matches]
ax.plot(x, y)
def slides_frames_similarity(self, sids, fids, sims):
pass
def slice_bar(self, ax, x, y, z, start, size, cmm):
end = start+size
gt = GT(self.root, self.name)
for fi, mv, fid in zip(range(1, size+1), z[start: end], y[start:end]):
cr = [cmm(fi*3./size)]*len(mv)
asid = int(gt.answer(fid))
fac = 1
if asid > 0:
print asid, fid
cr[asid-1] = '#FF5698'
else:
cr = ['#aa77FF']*len(mv)
mv = mv/max(mv)
fac = max(mv)
ax.bar(x, mv, fid, zdir='y', color=cr, alpha=0.4)
mi = min(xrange(len(mv)), key=mv.__getitem__)
ax.bar([x[mi]], [mv[mi]*fac/2.0], fid,
zdir='y', color=['#44FF32'], alpha=.8)
ax.view_init(elev=60., azim=120)
|
__all__ = ["core", "single_matching_plotter"]
from core import MatchingPlotterBase
class MatchingPlotter(MatchingPlotterBase):
def __init__(self, root, name):
"""
Try to show one matching pairs
use set_data to set matched results:
array of `sid`, `fid`, `matches`
"""
MatchingPlotterBase.__init__(self, root, name)
def __match_info(self, ax, df=None, sid=-1, fid=0):
self.set_matched_pair(sid, fid)
view = self.get_view()
ax.imshow(view[:, :, [2, 1, 0]])
info = "S-{}, F-{}, df: {:5.2f}({})".\
format(sid, fid, df.dist.mean(), len(df))
ax.set_title(info)
def result_grid(self, fig, row=4, col=4, from_=1):
start = from_ - 1
end = from_+(row*col) - 1
for mi, mc in enumerate(self.df[start:end], 1):
ax = fig.add_subplot(15, 4, mi)
self.__match_info(ax, **mc)
def frame_slides_relation(self, ax, matches, answer):
"""
Print frame to slides relation by input function
"""
x = [s["sid"] for s in matches]
y = [s["df"].dist.mean() for s in matches]
ax.plot(x, y)
def slides_frames_similarity(self, sids, fids, sims):
pass
|
agpl-3.0
|
Python
|
cd59979ab446d7613ec7df5d5737539464918edf
|
Fix span boundary handling in Spanish noun_chunks (#5860)
|
explosion/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy
|
spacy/lang/es/syntax_iterators.py
|
spacy/lang/es/syntax_iterators.py
|
# coding: utf8
from __future__ import unicode_literals
from ...symbols import NOUN, PROPN, PRON, VERB, AUX
from ...errors import Errors
def noun_chunks(doclike):
doc = doclike.doc
if not doc.is_parsed:
raise ValueError(Errors.E029)
if not len(doc):
return
np_label = doc.vocab.strings.add("NP")
left_labels = ["det", "fixed", "neg"] # ['nunmod', 'det', 'appos', 'fixed']
right_labels = ["flat", "fixed", "compound", "neg"]
stop_labels = ["punct"]
np_left_deps = [doc.vocab.strings.add(label) for label in left_labels]
np_right_deps = [doc.vocab.strings.add(label) for label in right_labels]
stop_deps = [doc.vocab.strings.add(label) for label in stop_labels]
for token in doclike:
if token.pos in [PROPN, NOUN, PRON]:
left, right = noun_bounds(
doc, token, np_left_deps, np_right_deps, stop_deps
)
yield left.i, right.i + 1, np_label
token = right
token = next_token(token)
def is_verb_token(token):
return token.pos in [VERB, AUX]
def next_token(token):
try:
return token.nbor()
except IndexError:
return None
def noun_bounds(doc, root, np_left_deps, np_right_deps, stop_deps):
left_bound = root
for token in reversed(list(root.lefts)):
if token.dep in np_left_deps:
left_bound = token
right_bound = root
for token in root.rights:
if token.dep in np_right_deps:
left, right = noun_bounds(
doc, token, np_left_deps, np_right_deps, stop_deps
)
if list(
filter(
lambda t: is_verb_token(t) or t.dep in stop_deps,
doc[left_bound.i : right.i],
)
):
break
else:
right_bound = right
return left_bound, right_bound
SYNTAX_ITERATORS = {"noun_chunks": noun_chunks}
|
# coding: utf8
from __future__ import unicode_literals
from ...symbols import NOUN, PROPN, PRON, VERB, AUX
from ...errors import Errors
def noun_chunks(doclike):
doc = doclike.doc
if not doc.is_parsed:
raise ValueError(Errors.E029)
if not len(doc):
return
np_label = doc.vocab.strings.add("NP")
left_labels = ["det", "fixed", "neg"] # ['nunmod', 'det', 'appos', 'fixed']
right_labels = ["flat", "fixed", "compound", "neg"]
stop_labels = ["punct"]
np_left_deps = [doc.vocab.strings.add(label) for label in left_labels]
np_right_deps = [doc.vocab.strings.add(label) for label in right_labels]
stop_deps = [doc.vocab.strings.add(label) for label in stop_labels]
token = doc[0]
while token and token.i < len(doclike):
if token.pos in [PROPN, NOUN, PRON]:
left, right = noun_bounds(
doc, token, np_left_deps, np_right_deps, stop_deps
)
yield left.i, right.i + 1, np_label
token = right
token = next_token(token)
def is_verb_token(token):
return token.pos in [VERB, AUX]
def next_token(token):
try:
return token.nbor()
except IndexError:
return None
def noun_bounds(doc, root, np_left_deps, np_right_deps, stop_deps):
left_bound = root
for token in reversed(list(root.lefts)):
if token.dep in np_left_deps:
left_bound = token
right_bound = root
for token in root.rights:
if token.dep in np_right_deps:
left, right = noun_bounds(
doc, token, np_left_deps, np_right_deps, stop_deps
)
if list(
filter(
lambda t: is_verb_token(t) or t.dep in stop_deps,
doc[left_bound.i : right.i],
)
):
break
else:
right_bound = right
return left_bound, right_bound
SYNTAX_ITERATORS = {"noun_chunks": noun_chunks}
|
mit
|
Python
|
db67db3cea880e40d1982149fea86699c15b5f75
|
change append to add (for the set in part 1)
|
robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions
|
day3.py
|
day3.py
|
#!/usr/local/bin/python3
from collections import namedtuple
with open('day3_input.txt') as f:
instructions = f.read().rstrip()
Point = namedtuple('Point', ['x', 'y'])
location = Point(0, 0)
visited = {location}
def new_loc(current_loc, instruction):
if instruction == '^':
xy = current_loc.x, current_loc.y + 1
elif instruction == 'v':
xy = current_loc.x, current_loc.y - 1
elif instruction == '>':
xy = current_loc.x + 1, current_loc.y
elif instruction == '<':
xy = current_loc.x - 1, current_loc.y
return Point(*xy)
for char in instructions:
location = new_loc(location, char)
visited.add(location)
print('At least one present:', len(visited))
# Part two
santa_loc = Point(0, 0)
robo_loc = Point(0, 0)
visited = {santa_loc, robo_loc}
for idx, char in enumerate(instructions):
if idx % 2 == 0: # Santa
santa_loc = new_loc(santa_loc, char)
visited.add(santa_loc)
else: # robot
robo_loc = new_loc(robo_loc, char)
visited.add(robo_loc)
print('At least one present with santa and robot:', len(visited))
|
#!/usr/local/bin/python3
from collections import namedtuple
with open('day3_input.txt') as f:
instructions = f.read().rstrip()
Point = namedtuple('Point', ['x', 'y'])
location = Point(0, 0)
visited = {location}
def new_loc(current_loc, instruction):
if instruction == '^':
xy = current_loc.x, current_loc.y + 1
elif instruction == 'v':
xy = current_loc.x, current_loc.y - 1
elif instruction == '>':
xy = current_loc.x + 1, current_loc.y
elif instruction == '<':
xy = current_loc.x - 1, current_loc.y
return Point(*xy)
for char in instructions:
location = new_loc(location, char)
visited.append(location)
print('At least one present:', len(visited))
# Part two
santa_loc = Point(0, 0)
robo_loc = Point(0, 0)
visited = {santa_loc, robo_loc}
for idx, char in enumerate(instructions):
if idx % 2 == 0: # Santa
santa_loc = new_loc(santa_loc, char)
visited.add(santa_loc)
else: # robot
robo_loc = new_loc(robo_loc, char)
visited.add(robo_loc)
print('At least one present with santa and robot:', len(visited))
|
mit
|
Python
|
db713e62eafb29c1a968e16b997a4e8f49156c78
|
Correct config for touchscreen
|
sumpfgottheit/pdu1800
|
config.py
|
config.py
|
__author__ = 'Florian'
from util import get_lan_ip
#################
# CONFIGURATION #
#################
# CHANGE FROM HERE
#
UDP_PORT = 18877
IP = get_lan_ip()
BUF_SIZE = 4096
TIMEOUT_IN_SECONDS = 0.1
#
SCREEN_WIDTH = 320
SCREEN_HEIGHT = 240
SCREEN_DEEP = 32
#
LABEL_RIGHT = 0
LABEL_LEFT = 1
ALIGN_CENTER = 0
ALIGN_RIGHT = 1
ALIGN_LEFT = 2
VALIGN_CENTER = 0
VALIGN_TOP = 1
VALIGN_BOTTOM = 2
#
# Stop changing. Of course - you can do, but it should not be necessary
#
FONT = 'assets/DroidSansMono.ttf'
# set up the colors
BLACK = ( 0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = ( 0, 255, 0)
BLUE = ( 0, 0, 255)
CYAN = ( 0, 255, 255)
MAGENTA= (255, 0, 255)
YELLOW = (255, 255, 0)
RPM_YELLOW = (230, 230, 40)
GREY = (214, 214, 214)
BACKGROUND_COLOR = BLACK
FOREGROUND_COLOR = WHITE
#
#
#
import os, sys
if sys.platform == 'darwin':
# Display on Laptop Screen on the left
os.environ['SDL_VIDEO_WINDOW_POS'] = "%d,%d" % (-400,100)
from datastream import MockBaseDataStream
datastream = MockBaseDataStream()
#from datastream import PDU1800DataStream
#datastream = PDU1800DataStream(ip=IP, port=UDP_PORT)
elif sys.platform == 'linux2':
if os.path.isfile('/etc/pointercal'):
os.environ["TSLIB_CALIBFILE"] = '/etc/pointercal'
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
from evdev import InputDevice, list_devices
devices = map(InputDevice, list_devices())
eventX=""
for dev in devices:
if dev.name == "ADS7846 Touchscreen":
eventX = dev.fn
os.environ["SDL_MOUSEDEV"] = eventX
from datastream import PDU1800DataStream
datastream = PDU1800DataStream(ip=IP, port=UDP_PORT)
#
|
__author__ = 'Florian'
from util import get_lan_ip
#################
# CONFIGURATION #
#################
# CHANGE FROM HERE
#
UDP_PORT = 18877
IP = get_lan_ip()
BUF_SIZE = 4096
TIMEOUT_IN_SECONDS = 0.1
#
SCREEN_WIDTH = 320
SCREEN_HEIGHT = 240
SCREEN_DEEP = 32
#
LABEL_RIGHT = 0
LABEL_LEFT = 1
ALIGN_CENTER = 0
ALIGN_RIGHT = 1
ALIGN_LEFT = 2
VALIGN_CENTER = 0
VALIGN_TOP = 1
VALIGN_BOTTOM = 2
#
# Stop changing. Of course - you can do, but it should not be necessary
#
FONT = 'assets/DroidSansMono.ttf'
# set up the colors
BLACK = ( 0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = ( 0, 255, 0)
BLUE = ( 0, 0, 255)
CYAN = ( 0, 255, 255)
MAGENTA= (255, 0, 255)
YELLOW = (255, 255, 0)
RPM_YELLOW = (230, 230, 40)
GREY = (214, 214, 214)
BACKGROUND_COLOR = BLACK
FOREGROUND_COLOR = WHITE
#
#
#
import os, sys
if sys.platform == 'darwin':
# Display on Laptop Screen on the left
os.environ['SDL_VIDEO_WINDOW_POS'] = "%d,%d" % (-400,100)
#from datastream import MockBaseDataStream
#datastream = MockBaseDataStream()
from datastream import PDU1800DataStream
datastream = PDU1800DataStream(ip=IP, port=UDP_PORT)
elif sys.platform == 'linux2':
from evdev import InputDevice, list_devices
devices = map(InputDevice, list_devices())
eventX=""
for dev in devices:
if dev.name == "ADS7846 Touchscreen":
eventX = dev.fn
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
os.environ["SDL_MOUSEDEV"] = eventX
if os.path.isfile('/etc/pointercal'):
os.environ["TSLIB_CALIBFILE"] = '/etc/pointercal'
from datastream import PDU1800DataStream
datastream = PDU1800DataStream(ip=IP, port=UDP_PORT)
#
|
mit
|
Python
|
68593e359d5bb79c096d584c83df1ff55262a686
|
use with
|
victorhaggqvist/ledman
|
config.py
|
config.py
|
# coding=utf-8
from configparser import ConfigParser
import os
__author__ = 'Victor Häggqvist'
class Config:
confdir = os.path.dirname(os.path.realpath(__file__))
config_file = os.path.join(confdir, 'ledman.conf')
default = """
[gpio]
red=22
green=27
blue=17
[default_level]
red=0
green=0.3
blue=0.5
[server]
keys=testkeychangeme
"""
def __init__(self):
config = ConfigParser()
if not os.path.isfile(self.config_file):
self.init_config()
config.read(self.config_file)
self.GPIO_RED = config.get('gpio', 'red') # 22
self.GPIO_GREEN = config.get('gpio', 'green') # 27
self.GPIO_BLUE = config.get('gpio', 'blue') # 17
self.RED_DEFAULT = config.get('default_level', 'red') # 0
self.GREEN_DEFAULT = config.get('default_level', 'green') # 0.3
self.BLUE_DEFAULT = config.get('default_level', 'blue') # 0.5
keys = config.get('server', 'keys')
self.keys = []
for k in keys.split(','):
self.keys.append(k)
def init_config(self):
with open(self.config_file, 'w+') as f:
f.write(self.default)
|
# coding=utf-8
from configparser import ConfigParser
import os
__author__ = 'Victor Häggqvist'
class Config:
confdir = os.path.dirname(os.path.realpath(__file__))
config_file = os.path.join(confdir, 'ledman.conf')
default = """
[gpio]
red=22
green=27
blue=17
[default_level]
red=0
green=0.3
blue=0.5
[server]
keys=testkeychangeme
"""
def __init__(self):
config = ConfigParser()
if not os.path.isfile(self.config_file):
self.init_config()
config.read(self.config_file)
self.GPIO_RED = config.get('gpio', 'red') # 22
self.GPIO_GREEN = config.get('gpio', 'green') # 27
self.GPIO_BLUE = config.get('gpio', 'blue') # 17
self.RED_DEFAULT = config.get('default_level', 'red') # 0
self.GREEN_DEFAULT = config.get('default_level', 'green') # 0.3
self.BLUE_DEFAULT = config.get('default_level', 'blue') # 0.5
keys = config.get('server', 'keys')
self.keys = []
for k in keys.split(','):
self.keys.append(k)
def init_config(self):
f = open(self.config_file, 'w+')
f.write(self.default)
f.close()
|
mit
|
Python
|
0812ec319291b709613152e9e1d781671047a428
|
Make server ignore missing environment variables
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
config.py
|
config.py
|
import os
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL', 'sqlite://')
ACCESS_TOKEN = os.environ.get('ACCESS_TOKEN')
PAGE_ID = os.environ.get('PAGE_ID')
APP_ID = os.environ.get('APP_ID')
VERIFY_TOKEN = os.environ.get('VERIFY_TOKEN')
|
import os
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
ACCESS_TOKEN = os.environ['ACCESS_TOKEN']
PAGE_ID = os.environ['PAGE_ID']
APP_ID = os.environ['APP_ID']
VERIFY_TOKEN = os.environ['VERIFY_TOKEN']
|
mit
|
Python
|
d7e03596f8bf1e886e984c0ea98334af878a15e2
|
Use __future__.print_function so syntax is valid on Python 3
|
enthought/Meta,gutomaia/Meta
|
meta/bytecodetools/print_code.py
|
meta/bytecodetools/print_code.py
|
'''
Created on May 10, 2012
@author: sean
'''
from __future__ import print_function
from .bytecode_consumer import ByteCodeConsumer
from argparse import ArgumentParser
class ByteCodePrinter(ByteCodeConsumer):
def generic_consume(self, instr):
print(instr)
def main():
parser = ArgumentParser()
parser.add_argument()
if __name__ == '__main__':
main()
|
'''
Created on May 10, 2012
@author: sean
'''
from .bytecode_consumer import ByteCodeConsumer
from argparse import ArgumentParser
class ByteCodePrinter(ByteCodeConsumer):
def generic_consume(self, instr):
print instr
def main():
parser = ArgumentParser()
parser.add_argument()
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
1f343e52abb67ab2f85836b10dadb3cb34a95379
|
fix login issue with django 1.7: check_for_test_cookie is deprecated and removed in django 1.7.
|
AndyHelix/django-xadmin,Keleir/django-xadmin,huaishan/django-xadmin,vincent-fei/django-xadmin,tvrcopgg/edm_xadmin,t0nyren/django-xadmin,huaishan/django-xadmin,cupen/django-xadmin,alexsilva/django-xadmin,cupen/django-xadmin,marguslaak/django-xadmin,sshwsfc/xadmin,merlian/django-xadmin,t0nyren/django-xadmin,hochanh/django-xadmin,marguslaak/django-xadmin,f1aky/xadmin,sshwsfc/django-xadmin,AndyHelix/django-xadmin,zhiqiangYang/django-xadmin,zhiqiangYang/django-xadmin,zhiqiangYang/django-xadmin,merlian/django-xadmin,vincent-fei/django-xadmin,vincent-fei/django-xadmin,tvrcopgg/edm_xadmin,marguslaak/django-xadmin,tvrcopgg/edm_xadmin,merlian/django-xadmin,marguslaak/django-xadmin,tvrcopgg/edm_xadmin,AndyHelix/django-xadmin,zhiqiangYang/django-xadmin,alexsilva/django-xadmin,sshwsfc/django-xadmin,sshwsfc/django-xadmin,cupen/django-xadmin,merlian/django-xadmin,Keleir/django-xadmin,f1aky/xadmin,hochanh/django-xadmin,cupen/django-xadmin,hochanh/django-xadmin,iedparis8/django-xadmin,taxido/django-xadmin,taxido/django-xadmin,f1aky/xadmin,vincent-fei/django-xadmin,alexsilva/django-xadmin,sshwsfc/xadmin,alexsilva/django-xadmin,sshwsfc/xadmin,taxido/django-xadmin,hochanh/django-xadmin,AndyHelix/django-xadmin,huaishan/django-xadmin,iedparis8/django-xadmin,taxido/django-xadmin,t0nyren/django-xadmin,iedparis8/django-xadmin,sshwsfc/xadmin,Keleir/django-xadmin,huaishan/django-xadmin,t0nyren/django-xadmin,Keleir/django-xadmin,f1aky/xadmin,sshwsfc/django-xadmin
|
xadmin/forms.py
|
xadmin/forms.py
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy, ugettext as _
from xadmin.util import User
ERROR_MESSAGE = ugettext_lazy("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class AdminAuthenticationForm(AuthenticationForm):
"""
A custom authentication form used in the admin app.
"""
this_is_the_login_form = forms.BooleanField(
widget=forms.HiddenInput, initial=1,
error_messages={'required': ugettext_lazy("Please log in again, because your session has expired.")})
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
self.user_cache = authenticate(
username=username, password=password)
if self.user_cache is None:
if u'@' in username:
# Mistakenly entered e-mail address instead of username? Look it up.
try:
user = User.objects.get(email=username)
except (User.DoesNotExist, User.MultipleObjectsReturned):
# Nothing to do here, moving along.
pass
else:
if user.check_password(password):
message = _("Your e-mail address is not your username."
" Try '%s' instead.") % user.username
raise forms.ValidationError(message)
elif not self.user_cache.is_active or not self.user_cache.is_staff:
raise forms.ValidationError(message)
return self.cleaned_data
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy, ugettext as _
from xadmin.util import User
ERROR_MESSAGE = ugettext_lazy("Please enter the correct username and password "
"for a staff account. Note that both fields are case-sensitive.")
class AdminAuthenticationForm(AuthenticationForm):
"""
A custom authentication form used in the admin app.
"""
this_is_the_login_form = forms.BooleanField(
widget=forms.HiddenInput, initial=1,
error_messages={'required': ugettext_lazy("Please log in again, because your session has expired.")})
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
message = ERROR_MESSAGE
if username and password:
self.user_cache = authenticate(
username=username, password=password)
if self.user_cache is None:
if u'@' in username:
# Mistakenly entered e-mail address instead of username? Look it up.
try:
user = User.objects.get(email=username)
except (User.DoesNotExist, User.MultipleObjectsReturned):
# Nothing to do here, moving along.
pass
else:
if user.check_password(password):
message = _("Your e-mail address is not your username."
" Try '%s' instead.") % user.username
raise forms.ValidationError(message)
elif not self.user_cache.is_active or not self.user_cache.is_staff:
raise forms.ValidationError(message)
self.check_for_test_cookie()
return self.cleaned_data
|
bsd-3-clause
|
Python
|
7b6542d58bbe788587b47e282ef393eda461f267
|
add get method in UserAPI
|
hexa4313/velov-companion-server,hexa4313/velov-companion-server
|
api/route/user.py
|
api/route/user.py
|
from flask import request
from flask.ext import restful
from flask.ext.restful import marshal_with
from route.base import api
from flask.ext.bcrypt import generate_password_hash
from model.base import db
from model.user import User, user_marshaller
class UserAPI(restful.Resource):
@marshal_with(user_marshaller)
def post(self):
data = request.get_json()
hashed_password = generate_password_hash(data['password'])
user = User(data['first_name'], data['last_name'], data['email'], hashed_password, data['birthday'])
db.session.add(user)
db.session.commit()
return user
@marshal_with(user_marshaller)
def get(self):
user = User.query.all()
return user
api.add_resource(UserAPI, "/user")
|
from flask import request
from flask.ext import restful
from flask.ext.restful import marshal_with
from route.base import api
from flask.ext.bcrypt import generate_password_hash
from model.base import db
from model.user import User, user_marshaller
class UserAPI(restful.Resource):
@marshal_with(user_marshaller)
def post(self):
data = request.get_json()
hashed_password = generate_password_hash(data['password'])
user = User(data['first_name'], data['last_name'], data['email'], hashed_password, data['birthday'])
db.session.add(user)
db.session.commit()
return user
api.add_resource(UserAPI, "/user")
|
mit
|
Python
|
346a7d18ef6dc063e2802a0347709700a1543902
|
update 影视列表
|
wangtai/us-show-time-table,wangtai/us-show-time-table
|
1/showics/models.py
|
1/showics/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Last modified: Wang Tai ([email protected])
"""docstring
"""
__revision__ = '0.1'
from django.db import models
class ShowTableIcs(models.Model):
uid = models.CharField(max_length=255, unique=True, primary_key=True)
title = models.CharField(max_length=255, null=False)
description = models.CharField(max_length=255)
date = models.DateField()
class Meta(object):
db_table = 'show_table_ics'
class ShowList(models.Model):
show_id = models.CharField(max_length=255, primary_key=True)
title = models.CharField(max_length=255, unique=True, null=False)
class Meta(object):
db_table = 'show_list'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Last modified: Wang Tai ([email protected])
"""docstring
"""
__revision__ = '0.1'
from django.db import models
class ShowTableIcs(models.Model):
# uid
uid = models.CharField(max_length=255, unique=True, primary_key=True)
# title
title = models.CharField(max_length=255, null=False)
# description
description = models.CharField(max_length=255)
# date
date = models.DateField()
class Meta(object):
db_table = 'show_table_ics'
|
apache-2.0
|
Python
|
5b2cc6ed06045bbe219f9cf81317c1c1a5bac714
|
add missing docstring in ttls
|
iksaif/biggraphite,criteo/biggraphite,iksaif/biggraphite,Thib17/biggraphite,criteo/biggraphite,Thib17/biggraphite,criteo/biggraphite,Thib17/biggraphite,iksaif/biggraphite,iksaif/biggraphite,criteo/biggraphite,Thib17/biggraphite
|
biggraphite/drivers/ttls.py
|
biggraphite/drivers/ttls.py
|
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time constants and functions used by accessors."""
import dateutil
import time
MINUTE = 60
HOUR = 60 * MINUTE
DAY = 24 * HOUR
DEFAULT_UPDATED_ON_TTL_SEC = 3 * DAY
def str_to_datetime(str_repr):
"""Convert a string into a datetime."""
if not str_repr:
return None
return dateutil.parser.parse(str_repr)
def str_to_timestamp(str_repr):
"""Convert a string into a timestamp."""
if not str_repr:
return None
datetime_tuple = str_to_datetime(str_repr)
ts = time.mktime(datetime_tuple.timetuple())
return ts
|
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time constants and functions used by accessors."""
import dateutil
import time
MINUTE = 60
HOUR = 60 * MINUTE
DAY = 24 * HOUR
DEFAULT_UPDATED_ON_TTL_SEC = 3 * DAY
def str_to_datetime(str_repr):
if not str_repr:
return None
return dateutil.parser.parse(str_repr)
def str_to_timestamp(str_repr):
if not str_repr:
return None
datetime_tuple = str_to_datetime(str_repr)
ts = time.mktime(datetime_tuple.timetuple())
return ts
|
apache-2.0
|
Python
|
c1d35c37bb51943c28f58b4dc8005b775b7076c4
|
Clean the terp file
|
gisce/openobject-server,xrg/openerp-server,splbio/openobject-server,vnc-biz/openerp-server,MarkusTeufelberger/openobject-server,MarkusTeufelberger/openobject-server,vnc-biz/openerp-server,splbio/openobject-server,xrg/openerp-server,ovnicraft/openerp-server,gisce/openobject-server,MarkusTeufelberger/openobject-server,ovnicraft/openerp-server,gisce/openobject-server,splbio/openobject-server
|
bin/addons/base/__terp__.py
|
bin/addons/base/__terp__.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base',
'version': '1.1',
'category': 'Generic Modules/Base',
'description': """The kernel of OpenERP, needed for all installation.""",
'author': 'Tiny',
'website': 'http://www.openerp.com',
'depends': [],
'init_xml': [
'base_data.xml',
'base_menu.xml',
'security/base_security.xml',
'res/res_security.xml',
'maintenance/maintenance_security.xml'
],
'update_xml': [
'base_update.xml',
'ir/wizard/wizard_menu_view.xml',
'ir/ir.xml',
'ir/workflow/workflow_view.xml',
'module/module_wizard.xml',
'module/module_view.xml',
'module/module_data.xml',
'module/module_report.xml',
'res/res_request_view.xml',
'res/res_lang_view.xml',
'res/partner/partner_report.xml',
'res/partner/partner_view.xml',
'res/partner/partner_wizard.xml',
'res/bank_view.xml',
'res/country_view.xml',
'res/res_currency_view.xml',
'res/partner/crm_view.xml',
'res/partner/partner_data.xml',
'res/ir_property_view.xml',
'security/base_security.xml',
'maintenance/maintenance_view.xml',
'security/ir.model.access.csv'
],
'demo_xml': ['base_demo.xml', 'res/partner/partner_demo.xml', 'res/partner/crm_demo.xml'],
'installable': True,
'active': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Base",
"version" : "1.1",
"author" : "Tiny",
"website" : "http://www.openerp.com",
"category" : "Generic Modules/Base",
"description": "The kernel of OpenERP, needed for all installation.",
"depends" : [],
"init_xml" : [
"base_data.xml",
"base_menu.xml",
"security/base_security.xml",
"res/res_security.xml",
"maintenance/maintenance_security.xml",
],
"demo_xml" : [
"base_demo.xml",
"res/partner/partner_demo.xml",
"res/partner/crm_demo.xml",
],
"update_xml" : [
"base_update.xml",
"ir/wizard/wizard_menu_view.xml",
"ir/ir.xml",
"ir/workflow/workflow_view.xml",
"module/module_wizard.xml",
"module/module_view.xml",
"module/module_data.xml",
"module/module_report.xml",
"res/res_request_view.xml",
"res/res_lang_view.xml",
"res/partner/partner_report.xml",
"res/partner/partner_view.xml",
"res/partner/partner_wizard.xml",
"res/bank_view.xml",
"res/country_view.xml",
"res/res_currency_view.xml",
"res/partner/crm_view.xml",
"res/partner/partner_data.xml",
"res/ir_property_view.xml",
"security/base_security.xml",
"maintenance/maintenance_view.xml",
"security/ir.model.access.csv",
],
"active": True,
"installable": True,
}
|
agpl-3.0
|
Python
|
ca5c3648ad5f28090c09ecbbc0e008c51a4ce708
|
Add a new dev (optional) parameter and use it
|
sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server
|
bin/push/silent_ios_push.py
|
bin/push/silent_ios_push.py
|
import json
import logging
import argparse
import emission.net.ext_service.push.notify_usage as pnu
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(prog="silent_ios_push")
parser.add_argument("interval",
help="specify the sync interval that the phones have subscribed to",
type=int)
parser.add_argument("-d", "--dev", action="store_true", default=False)
args = parser.parse_args()
logging.debug("About to send notification to phones with interval %d" % args.interval)
response = pnu.send_silent_notification_to_ios_with_interval(args.interval, dev=args.dev)
pnu.display_response(response)
|
import json
import logging
import argparse
import emission.net.ext_service.push.notify_usage as pnu
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(prog="silent_ios_push")
parser.add_argument("interval",
help="specify the sync interval that the phones have subscribed to",
type=int)
args = parser.parse_args()
logging.debug("About to send notification to phones with interval %d" % args.interval)
response = pnu.send_silent_notification_to_ios_with_interval(args.interval, dev=True)
pnu.display_response(response)
|
bsd-3-clause
|
Python
|
a33b8222959cc14a4c89658e6d7aa6ff07f27c0c
|
remove commented code
|
neuropycon/ephypype
|
ephypype/import_ctf.py
|
ephypype/import_ctf.py
|
"""Import ctf."""
# -------------------- nodes (Function)
def convert_ds_to_raw_fif(ds_file):
"""CTF .ds to .fif and save result in pipeline folder structure."""
import os
import os.path as op
from nipype.utils.filemanip import split_filename as split_f
from mne.io import read_raw_ctf
_, basename, ext = split_f(ds_file)
# print(subj_path, basename, ext)
raw = read_raw_ctf(ds_file)
raw_fif_file = os.path.abspath(basename + "_raw.fif")
if not op.isfile(raw_fif_file):
raw = read_raw_ctf(ds_file)
raw.save(raw_fif_file)
else:
print(('*** RAW FIF file %s exists!!!' % raw_fif_file))
return raw_fif_file
|
"""Import ctf."""
# -------------------- nodes (Function)
def convert_ds_to_raw_fif(ds_file):
"""CTF .ds to .fif and save result in pipeline folder structure."""
import os
import os.path as op
from nipype.utils.filemanip import split_filename as split_f
from mne.io import read_raw_ctf
_, basename, ext = split_f(ds_file)
# print(subj_path, basename, ext)
raw = read_raw_ctf(ds_file)
# raw_fif_file = os.path.abspath(basename + "_raw.fif")
# raw.save(raw_fif_file)
# return raw_fif_file
raw_fif_file = os.path.abspath(basename + "_raw.fif")
if not op.isfile(raw_fif_file):
raw = read_raw_ctf(ds_file)
raw.save(raw_fif_file)
else:
print(('*** RAW FIF file %s exists!!!' % raw_fif_file))
return raw_fif_file
|
bsd-3-clause
|
Python
|
697d3c4c80574d82e8aa37e2a13cbaeefdad255c
|
bump version
|
cenkalti/kuyruk,cenkalti/kuyruk
|
kuyruk/__init__.py
|
kuyruk/__init__.py
|
from __future__ import absolute_import
import logging
from kuyruk.kuyruk import Kuyruk
from kuyruk.worker import Worker
from kuyruk.task import Task
from kuyruk.config import Config
__version__ = '0.13.2'
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings on startup
null_handler = NullHandler()
logging.getLogger('kuyruk').addHandler(null_handler)
logging.getLogger('pika').addHandler(null_handler)
|
from __future__ import absolute_import
import logging
from kuyruk.kuyruk import Kuyruk
from kuyruk.worker import Worker
from kuyruk.task import Task
from kuyruk.config import Config
__version__ = '0.13.1'
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings on startup
null_handler = NullHandler()
logging.getLogger('kuyruk').addHandler(null_handler)
logging.getLogger('pika').addHandler(null_handler)
|
mit
|
Python
|
50b189888a0ff68f1cc4db1615991d1afe364854
|
Update cigar_party.py
|
RCoon/CodingBat,RCoon/CodingBat
|
Python/Logic_1/cigar_party.py
|
Python/Logic_1/cigar_party.py
|
# When squirrels get together for a party, they like to have cigars. A squirrel
# party is successful when the number of cigars is between 40 and 60, inclusive.
# Unless it is the weekend, in which case there is no upper bound on the number
# of cigars. Return True if the party with the given values is successful, or
# False otherwise.
# cigar_party(30, False) --> False
# cigar_party(50, False) --> True
# cigar_party(70, True) --> True
def cigar_party(cigars, is_weekend):
return (is_weekend and cigars >= 40) or (39 < cigars < 61)
print(cigar_party(30, False))
print(cigar_party(50, False))
print(cigar_party(70, True))
|
# When squirrels get together for a party, they like to have cigars. A squirrel
# party is successful when the number of cigars is between 40 and 60, inclusive.
# Unless it is the weekend, in which case there is no upper bound on the number
# of cigars. Return True if the party with the given values is successful, or
# False otherwise.
# cigar_party(30, False) → False
# cigar_party(50, False) → True
# cigar_party(70, True) → True
def cigar_party(cigars, is_weekend):
return (is_weekend and cigars >= 40) or (39 < cigars < 61)
print(cigar_party(30, False))
print(cigar_party(50, False))
print(cigar_party(70, True))
|
mit
|
Python
|
4d3d4e457c5886ace69250de1c5f4f696604d43b
|
Fix cal_seqs with no delay
|
BBN-Q/QGL,BBN-Q/QGL
|
QGL/BasicSequences/helpers.py
|
QGL/BasicSequences/helpers.py
|
# coding=utf-8
from itertools import product
import operator
from ..PulsePrimitives import Id, X, MEAS
from ..ControlFlow import qwait
from functools import reduce
def create_cal_seqs(qubits, numRepeats, measChans=None, waitcmp=False, delay=None):
"""
Helper function to create a set of calibration sequences.
Parameters
----------
qubits : logical channels, e.g. (q1,) or (q1,q2) (tuple)
numRepeats = number of times to repeat calibration sequences (int)
waitcmp = True if the sequence contains branching
delay: optional time between state preparation and measurement (s)
"""
if measChans is None:
measChans = qubits
calSet = [Id, X]
#Make all combination for qubit calibration states for n qubits and repeat
cal_seqs = [reduce(operator.mul, [p(q) for p, q in zip(pulseSet, qubits)])
for pulseSet in product(calSet, repeat=len(qubits))
for _ in range(numRepeats)]
#Add on the measurement operator.
measBlock = reduce(operator.mul, [MEAS(q) for q in qubits])
#Add optional delay
full_cal_seqs = [[seq, Id(qubits[0], delay), measBlock] if delay else [seq, measBlock] for seq in cal_seqs]
if waitcmp:
[cal_seq.append(qwait('CMP')) for cal_seq in full_cal_seqs]
return full_cal_seqs
def cal_descriptor(qubits, numRepeats):
states = ['0', '1']
# generate state set in same order as we do above in create_cal_seqs()
state_set = [reduce(operator.add, s) for s in product(states, repeat=len(qubits))]
descriptor = {
'name': 'calibration',
'unit': 'state',
'partition': 2,
'points': []
}
for state in state_set:
descriptor['points'] += [state] * numRepeats
return descriptor
def time_descriptor(times, desired_units="us"):
if desired_units == "s":
scale = 1
elif desired_units == "ms":
scale = 1e3
elif desired_units == "us" or desired_units == u"μs":
scale = 1e6
elif desired_units == "ns":
scale = 1e9
axis_descriptor = {
'name': 'time',
'unit': desired_units,
'points': list(scale * times),
'partition': 1
}
return axis_descriptor
|
# coding=utf-8
from itertools import product
import operator
from ..PulsePrimitives import Id, X, MEAS
from ..ControlFlow import qwait
from functools import reduce
def create_cal_seqs(qubits, numRepeats, measChans=None, waitcmp=False, delay=None):
"""
Helper function to create a set of calibration sequences.
Parameters
----------
qubits : logical channels, e.g. (q1,) or (q1,q2) (tuple)
numRepeats = number of times to repeat calibration sequences (int)
waitcmp = True if the sequence contains branching
delay: optional time between state preparation and measurement (s)
"""
if measChans is None:
measChans = qubits
calSet = [Id, X]
#Make all combination for qubit calibration states for n qubits and repeat
calSeqs = [reduce(operator.mul, [p(q) for p, q in zip(pulseSet, qubits)])
for pulseSet in product(calSet, repeat=len(qubits))
for _ in range(numRepeats)]
#Add on the measurement operator.
measBlock = reduce(operator.mul, [MEAS(q) for q in qubits])
return [[seq, Id(qubits[0], delay), measBlock, qwait('CMP')] if waitcmp else [seq, Id(qubits[0], delay), measBlock]
for seq in calSeqs]
def cal_descriptor(qubits, numRepeats):
states = ['0', '1']
# generate state set in same order as we do above in create_cal_seqs()
state_set = [reduce(operator.add, s) for s in product(states, repeat=len(qubits))]
descriptor = {
'name': 'calibration',
'unit': 'state',
'partition': 2,
'points': []
}
for state in state_set:
descriptor['points'] += [state] * numRepeats
return descriptor
def time_descriptor(times, desired_units="us"):
if desired_units == "s":
scale = 1
elif desired_units == "ms":
scale = 1e3
elif desired_units == "us" or desired_units == u"μs":
scale = 1e6
elif desired_units == "ns":
scale = 1e9
axis_descriptor = {
'name': 'time',
'unit': desired_units,
'points': list(scale * times),
'partition': 1
}
return axis_descriptor
|
apache-2.0
|
Python
|
fc975bd573d439490a65bb72ff5f6c69b2b0a771
|
Update loudness_zwicker_lowpass_intp.py
|
Eomys/MoSQITo
|
mosqito/functions/loudness_zwicker/loudness_zwicker_lowpass_intp.py
|
mosqito/functions/loudness_zwicker/loudness_zwicker_lowpass_intp.py
|
# -*- coding: utf-8 -*-
"""
@date Created on Fri May 22 2020
@author martin_g for Eomys
"""
# Standard library imports
import math
import numpy as np
#Needed for the loudness_zwicker_lowpass_intp_ea function
from scipy import signal
def loudness_zwicker_lowpass_intp(loudness, tau, sample_rate):
"""1st order low-pass with linear interpolation of signal for
increased precision
Parameters
----------
loudness : numpy.ndarray
Loudness vs. time
tau : float
Filter parameter
sample_rate : int
Louness signal sampling frequency
Outputs
-------
filt_loudness : numpy.ndarray
Filtered loudness
"""
filt_loudness = np.zeros(np.shape(loudness))
# Factor for virtual upsampling/inner iterations
lp_iter = 24
num_samples = np.shape(loudness)[0]
a1 = math.exp(-1 / (sample_rate * lp_iter * tau))
b0 = 1 - a1
y1 = 0
for i in range(num_samples):
x0 = loudness[i]
y1 = b0 * x0 + a1 * y1
filt_loudness[i] = y1
# Linear interpolation steps between current and next sample
if i < num_samples - 1:
xd = (loudness[i + 1] - x0) / lp_iter
# Inner iterations/interpolation
# Must add a -1 because is repeating the twice the first value at the initial of the first for loop.
for ii in range(lp_iter-1):
x0 += xd
y1 = b0 * x0 + a1 * y1
return filt_loudness
def loudness_zwicker_lowpass_intp_ea(loudness, tau, sample_rate):
"""1st order low-pass with linear interpolation of signal for
increased precision
Parameters
----------
loudness : numpy.ndarray
Loudness vs. time
tau : float
Filter parameter
sample_rate : int
Louness signal sampling frequency
Outputs
-------
filt_loudness : numpy.ndarray
Filtered loudness
"""
filt_loudness = np.zeros(np.shape(loudness))
# Factor for virtual upsampling/inner iterations
lp_iter = 24
num_samples = np.shape(loudness)[0]
a1 = math.exp(-1 / (sample_rate * lp_iter * tau))
b0 = 1 - a1
y1 = 0
delta = np.copy(loudness)
delta = np.roll(delta,-1)
delta [-1] = 0
delta = (delta - loudness) / lp_iter
ui_delta = np.zeros(loudness.shape[0]*lp_iter).reshape(loudness.shape[0],lp_iter)
ui_delta [:,0] = loudness
#Create the array complete of deltas to apply the filter.
for i_in in np.arange(1, lp_iter):
ui_delta [:,i_in] = delta + ui_delta [:,i_in-1]
# Rechape into a vector.
ui_delta = ui_delta.reshape(lp_iter*num_samples)
# Sustituir este bucle for por scipy.signal.lfilter https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html
# ui_delta_filt = scipy.signal.lfilter (b0 , a0, ui_delta )
#filt_loudness = ui_delta_filt.reshape(loudness.shape[0],lp_iter).T[:,0]
# Apply the filter.
ui_delta = signal.lfilter([b0], [1,-a1], ui_delta, axis=- 1, zi=None)
# Reshape again to recover the first col.
ui_delta = ui_delta.reshape(loudness.shape[0],lp_iter)
filt_loudness = ui_delta[:,0]
return filt_loudness
|
# -*- coding: utf-8 -*-
"""
@date Created on Fri May 22 2020
@author martin_g for Eomys
"""
# Standard library imports
import math
import numpy as np
def loudness_zwicker_lowpass_intp(loudness, tau, sample_rate):
"""1st order low-pass with linear interpolation of signal for
increased precision
Parameters
----------
loudness : numpy.ndarray
Loudness vs. time
tau : float
Filter parameter
sample_rate : int
Louness signal sampling frequency
Outputs
-------
filt_loudness : numpy.ndarray
Filtered loudness
"""
filt_loudness = np.zeros(np.shape(loudness))
# Factor for virtual upsampling/inner iterations
lp_iter = 24
num_samples = np.shape(loudness)[0]
a1 = math.exp(-1 / (sample_rate * lp_iter * tau))
b0 = 1 - a1
y1 = 0
for i in range(num_samples):
x0 = loudness[i]
y1 = b0 * x0 + a1 * y1
filt_loudness[i] = y1
# Linear interpolation steps between current and next sample
if i < num_samples - 1:
xd = (loudness[i + 1] - x0) / lp_iter
# Inner iterations/interpolation
for ii in range(lp_iter):
x0 += xd
y1 = b0 * x0 + a1 * y1
return filt_loudness
|
apache-2.0
|
Python
|
4748fd514fcafd9a0536b24069bf3365cb60a926
|
Bump development version number
|
lpomfrey/django-debreach,lpomfrey/django-debreach
|
debreach/__init__.py
|
debreach/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.3.1'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.3.0'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
|
bsd-2-clause
|
Python
|
6e3cd31c7efbea71b5f731429c24e946ce6fc476
|
Bump version
|
lpomfrey/django-debreach,lpomfrey/django-debreach
|
debreach/__init__.py
|
debreach/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '0.2.0'
version_info = version.StrictVersion(__version__).version
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '0.1.1'
version_info = version.StrictVersion(__version__).version
|
bsd-2-clause
|
Python
|
21149eb8d128c405d0b69991d1855e99ced951c7
|
Test fixed: WorkbenchUser is auto created by signal, so creating it separately is not required
|
MOOCworkbench/MOOCworkbench,MOOCworkbench/MOOCworkbench,MOOCworkbench/MOOCworkbench
|
ExperimentsManager/tests.py
|
ExperimentsManager/tests.py
|
from django.test import TestCase
from .models import Experiment
from UserManager.models import WorkbenchUser
from django.contrib.auth.models import User
from django.test import Client
class ExperimentTestCase(TestCase):
def setUp(self):
self.user = User.objects.create_user('test', '[email protected]', 'test')
self.workbench_user = WorkbenchUser.objects.get(user=self.user)
self.experiment = Experiment.objects.create(title='Experiment', description='test', version='1.0', owner=self.workbench_user)
def test_index_not_signed_in(self):
c = Client()
response = c.get('/experiments/')
self.assertEqual(response.status_code, 302)
def test_index_signed_in(self):
c = Client()
c.login(username='test', password='test')
response = c.get('/experiments/')
self.assertIsNotNone(response.context['table'])
|
from django.test import TestCase
from .models import Experiment
from UserManager.models import WorkbenchUser
from django.contrib.auth.models import User
from django.test import Client
class ExperimentTestCase(TestCase):
def setUp(self):
self.user = User.objects.create_user('test', '[email protected]', 'test')
self.workbench_user = WorkbenchUser.objects.create(netid='jlmdegoede', user=self.user)
self.experiment = Experiment.objects.create(title='Experiment', description='test', version='1.0', owner=self.workbench_user)
def test_index_not_signed_in(self):
c = Client()
response = c.get('/experiments/')
self.assertEqual(response.status_code, 302)
def test_index_signed_in(self):
c = Client()
c.login(username='test', password='test')
response = c.get('/experiments/')
self.assertIsNotNone(response.context['table'])
|
mit
|
Python
|
f149baa8ca7a401f8d2d390d84fc85960edd743d
|
Work in progress
|
petrveprek/dius
|
dius.py
|
dius.py
|
#!python3
# Copyright (c) 2016 Petr Veprek
"""Disk Usage"""
import math
import operator
import os
import string
import sys
import time
TITLE = "Disk Usage"
VERSION = "0.0.0"
VERBOSE = False
WIDTH = 80
COUNT = 20
def now(on="on", at="at"):
return "{}{} {}{}".format(on + " " if on != "" else "", time.strftime("%Y-%m-%d"), at + " " if at != "" else "", time.strftime("%H:%M:%S"))
def neat(str, max):
str = "".join([char if char in string.printable else "_" for char in str])
if len(str) > max: str = str[:max-3] + "..."
return str
def digits(max):
return math.ceil(math.log10(max))
def main():
print("{} {}".format(TITLE, VERSION))
if VERBOSE:
print("\a", end="")
print("Python {}".format(sys.version))
print("Command '{}'".format(sys.argv[0]))
print("Arguments {}".format(sys.argv[1:]))
print("Executed {}".format(now()))
start = time.time()
top = os.getcwd()
print("Analyzing {}".format(top))
usage = {}
for path, dirs, files in os.walk(top):
print("\rScanning {: <{}}".format(neat(path, WIDTH), WIDTH), end="")
usage[path] = sum(map(os.path.getsize, filter(os.path.isfile, map(lambda file: os.path.join(path, file), files))))
print("\r {: <{}}\r".format("", WIDTH), end="")
usage = sorted(usage.items(), key=operator.itemgetter(1), reverse=True)
for i, (path, size) in enumerate(usage[:COUNT]):
print("{:{}}/{} {:{}} {}".format(i+1, digits(COUNT), len(usage), size, digits(usage[0][1]), path))
if VERBOSE:
elapsed = time.time() - start
seconds = round(elapsed)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
weeks, days = divmod(days, 7)
print("Completed {}".format(now()))
print("Elapsed {:d}w {:d}d {:d}h {:d}m {:d}s ({:,.3f}s)".format(weeks, days, hours, minutes, seconds, elapsed))
print("\a", end="")
if '__main__' == __name__:
main()
|
#!python3
# Copyright (c) 2016 Petr Veprek
"""Disk Usage"""
import math
import operator
import os
import string
import sys
import time
TITLE = "Disk Usage"
VERSION = "0.0.0"
VERBOSE = False
def now(on="on", at="at"):
return "{}{} {}{}".format(on + " " if on != "" else "", time.strftime("%Y-%m-%d"), at + " " if at != "" else "", time.strftime("%H:%M:%S"))
def neat(str, max):
str = "".join([char if char in string.printable else "_" for char in str])
if len(str) > max: str = str[:max-3] + "..."
return str
def digits(max):
return math.ceil(math.log10(max))
def main():
print("{} {}".format(TITLE, VERSION))
if VERBOSE:
print("\a", end="")
print("Python {}".format(sys.version))
print("Command '{}'".format(sys.argv[0]))
print("Arguments {}".format(sys.argv[1:]))
print("Executed {}".format(now()))
start = time.time()
top = os.getcwd()
top="./Petr/Docs/_Documents" #####################################################
print("Analyzing {}".format(top))
usage = {}
for path, dirs, files in os.walk(top):
print("\rScanning {: <80}".format(neat(path, 80)), end="")
usage[path] = sum(map(os.path.getsize, filter(os.path.isfile, map(lambda file: os.path.join(path, file), files))))
print("\r {: <80}\r".format(""), end="")
usage = sorted(usage.items(), key=operator.itemgetter(1), reverse=True)
for i, (path, size) in enumerate(usage[:20]):
print("{:{}}/{} {} {}".format(i+1, digits(20), len(usage), size, path))
if VERBOSE:
elapsed = time.time() - start
seconds = round(elapsed)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
weeks, days = divmod(days, 7)
print("Completed {}".format(now()))
print("Elapsed {:d}w {:d}d {:d}h {:d}m {:d}s ({:,.3f}s)".format(weeks, days, hours, minutes, seconds, elapsed))
print("\a", end="")
if '__main__' == __name__:
main()
|
mit
|
Python
|
9ec49083879831d7b2cfd863ea139e0e86d42c36
|
Bump release version
|
lpomfrey/django-debreach,lpomfrey/django-debreach
|
debreach/__init__.py
|
debreach/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.4.0'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.3.1'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
|
bsd-2-clause
|
Python
|
206e8c2da4677532add03deadac03e88a7cd0da8
|
update __init__
|
cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,openai/cleverhans
|
cleverhans/__init__.py
|
cleverhans/__init__.py
|
"""The CleverHans adversarial example library"""
from cleverhans.devtools.version import append_dev_version
# If possible attach a hex digest to the version string to keep track of
# changes in the development branch
__version__ = append_dev_version('3.0.0')
|
"""The CleverHans adversarial example library"""
from cleverhans.devtools.version import append_dev_version
# If possible attach a hex digest to the version string to keep track of
# changes in the development branch
__version__ = append_dev_version('2.0.0')
|
mit
|
Python
|
878db5485946935f8784c6c9f15decbe15c0dfbc
|
Remove catchall redirect
|
DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange
|
democracylab/urls.py
|
democracylab/urls.py
|
"""democracylab URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic.base import RedirectView
from . import views
urlpatterns = [
url(r'^signup/$', views.signup, name='signup'),
url(r'^login/$', views.login_view, name='login_view'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
url(
r'^password_reset/$',
views.password_reset,
name="password_reset",
),
url(
r'^change_password/$',
views.change_password,
name="change_password",
),
url(
r'^verify_user/(?P<user_id>[0-9]+)/(?P<token>[0-9a-z\-]+)$',
views.verify_user,
name="verify_user"
),
url(
r'^verify_user/$',
views.send_verification_email,
name="send_verification_email"
),
url(r'^', include('civictechprojects.urls')),
url(r'^$', RedirectView.as_view(url='/index/', permanent=False)),
url(r'^admin/', admin.site.urls),
url(r'^platform$', RedirectView.as_view(url='http://connect.democracylab.org/platform/', permanent=False)),
# url(r'^.*$', RedirectView.as_view(url='/index/', permanent=False)),
# url(
# r'check_email/(?P<user_email>.*)$',
# views.check_email,
# name="check_email"
# )
]
|
"""democracylab URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic.base import RedirectView
from . import views
urlpatterns = [
url(r'^signup/$', views.signup, name='signup'),
url(r'^login/$', views.login_view, name='login_view'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
url(
r'^password_reset/$',
views.password_reset,
name="password_reset",
),
url(
r'^change_password/$',
views.change_password,
name="change_password",
),
url(
r'^verify_user/(?P<user_id>[0-9]+)/(?P<token>[0-9a-z\-]+)$',
views.verify_user,
name="verify_user"
),
url(
r'^verify_user/$',
views.send_verification_email,
name="send_verification_email"
),
url(r'^', include('civictechprojects.urls')),
url(r'^$', RedirectView.as_view(url='/index/', permanent=False)),
url(r'^admin/', admin.site.urls),
url(r'^platform$', RedirectView.as_view(url='http://connect.democracylab.org/platform/', permanent=False)),
url(r'^.*$', RedirectView.as_view(url='/index/', permanent=False)),
# url(
# r'check_email/(?P<user_email>.*)$',
# views.check_email,
# name="check_email"
# )
]
|
mit
|
Python
|
a82b3b5ba8d6fba12df1a3c1993325955da893b6
|
Fix a typo in comment. Thanks for tmm1 for watching after me.
|
xadjmerripen/carbon,criteo-forks/carbon,JeanFred/carbon,mleinart/carbon,benburry/carbon,graphite-project/carbon,protochron/carbon,graphite-server/carbon,obfuscurity/carbon,iain-buclaw-sociomantic/carbon,benburry/carbon,piotr1212/carbon,mleinart/carbon,lyft/carbon,pratX/carbon,deniszh/carbon,criteo-forks/carbon,pratX/carbon,krux/carbon,deniszh/carbon,graphite-server/carbon,krux/carbon,xadjmerripen/carbon,iain-buclaw-sociomantic/carbon,kharandziuk/carbon,cbowman0/carbon,lyft/carbon,johnseekins/carbon,JeanFred/carbon,johnseekins/carbon,kharandziuk/carbon,pu239ppy/carbon,cbowman0/carbon,piotr1212/carbon,protochron/carbon,pu239ppy/carbon,graphite-project/carbon,obfuscurity/carbon
|
lib/carbon/util.py
|
lib/carbon/util.py
|
import os
import pwd
from os.path import abspath, basename, dirname, join
from twisted.python.util import initgroups
from twisted.scripts.twistd import runApp
from twisted.scripts._twistd_unix import daemonize
daemonize = daemonize # Backwards compatibility
def dropprivs(user):
uid, gid = pwd.getpwnam(user)[2:4]
initgroups(uid, gid)
os.setregid(gid, gid)
os.setreuid(uid, uid)
return (uid, gid)
def run_twistd_plugin(filename):
from carbon.conf import get_parser
from twisted.scripts.twistd import ServerOptions
bin_dir = dirname(abspath(filename))
root_dir = dirname(bin_dir)
storage_dir = join(root_dir, 'storage')
os.environ.setdefault('GRAPHITE_ROOT', root_dir)
os.environ.setdefault('GRAPHITE_STORAGE_DIR', storage_dir)
program = basename(filename).split('.')[0]
# First, parse command line options as the legacy carbon scripts used to
# do.
parser = get_parser(program)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
return
# This isn't as evil as you might think
__builtins__["instance"] = options.instance
__builtins__["program"] = program
# Then forward applicable options to either twistd or to the plugin itself.
twistd_options = ["--no_save"]
# If no reactor was selected yet, try to use the epoll reactor if
# available.
try:
from twisted.internet import epollreactor
twistd_options.append("--reactor=epoll")
except:
pass
if options.debug:
twistd_options.extend(["-n", "--logfile", "-"])
if options.profile:
twistd_options.append("--profile")
if options.pidfile:
twistd_options.extend(["--pidfile", options.pidfile])
# Now for the plugin-specific options.
twistd_options.append(program)
if options.debug:
twistd_options.append("--debug")
for option_name, option_value in vars(options).items():
if (option_value is not None and
option_name not in ("debug", "profile", "pidfile")):
twistd_options.extend(["--%s" % option_name.replace("_", "-"),
option_value])
# Finally, append extra args so that twistd has a chance to process them.
twistd_options.extend(args)
config = ServerOptions()
config.parseOptions(twistd_options)
runApp(config)
|
import os
import pwd
from os.path import abspath, basename, dirname, join
from twisted.python.util import initgroups
from twisted.scripts.twistd import runApp
from twisted.scripts._twistd_unix import daemonize
daemonize = daemonize # Backwards compatibility
def dropprivs(user):
uid, gid = pwd.getpwnam(user)[2:4]
initgroups(uid, gid)
os.setregid(gid, gid)
os.setreuid(uid, uid)
return (uid, gid)
def run_twistd_plugin(filename):
from carbon.conf import get_parser
from twisted.scripts.twistd import ServerOptions
bin_dir = dirname(abspath(filename))
root_dir = dirname(bin_dir)
storage_dir = join(root_dir, 'storage')
os.environ.setdefault('GRAPHITE_ROOT', root_dir)
os.environ.setdefault('GRAPHITE_STORAGE_DIR', storage_dir)
program = basename(filename).split('.')[0]
# First, parse command line options as the legacy carbon scripts used to
# do.
parser = get_parser(program)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
return
# This isn't as evil as you might think
__builtins__["instance"] = options.instance
__builtins__["program"] = program
# Then forward applicable options to either twistd or to the plugin itself.
twistd_options = ["--no_save"]
# If no reactor was selected yet, try to use the epool reactor if
# available.
try:
from twisted.internet import epollreactor
twistd_options.append("--reactor=epoll")
except:
pass
if options.debug:
twistd_options.extend(["-n", "--logfile", "-"])
if options.profile:
twistd_options.append("--profile")
if options.pidfile:
twistd_options.extend(["--pidfile", options.pidfile])
# Now for the plugin-specific options.
twistd_options.append(program)
if options.debug:
twistd_options.append("--debug")
for option_name, option_value in vars(options).items():
if (option_value is not None and
option_name not in ("debug", "profile", "pidfile")):
twistd_options.extend(["--%s" % option_name.replace("_", "-"),
option_value])
# Finally, append extra args so that twistd has a chance to process them.
twistd_options.extend(args)
config = ServerOptions()
config.parseOptions(twistd_options)
runApp(config)
|
apache-2.0
|
Python
|
e611e9518945fa38165e8adf7103561f438b70b1
|
Add subcommand to process directory
|
gsong/interdiagram
|
interdiagram/bin/interdiagram.py
|
interdiagram/bin/interdiagram.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
from typing import Iterable, List, TypeVar
from typing.io import IO
import click
import yaml
from ..models import Diagram
click.disable_unicode_literals_warning = True
FileType = TypeVar('FileType', IO, Path)
def _is_file_obj(
f: FileType
) -> bool:
read_attr = getattr(f, 'read', None)
has_read_method = callable(read_attr)
return has_read_method
def _draw_files(
files: Iterable[FileType],
output_file: str
) -> None:
diagram = Diagram()
for f in files:
# TODO: Validate against schema
if not _is_file_obj(f):
f = f.open() # type: ignore
diagram.process_spec(yaml.load(f))
diagram.draw(output_file)
# TODO: Correct documentation schema once it's frozen
@click.group()
def cli():
"""Generate interaction/sitemap diagram."""
@cli.command('dir')
@click.argument(
'directory',
type=click.Path(exists=True, file_okay=False, resolve_path=True)
)
@click.argument('output-file', type=click.Path(resolve_path=True))
def directory(
directory: str,
output_file: str
) -> None:
"""Specify a directory where YAML files reside."""
files = Path(directory).glob('**/*.y*ml')
_draw_files(files, output_file)
@cli.command()
@click.argument('yaml-file', nargs=-1, type=click.File())
@click.argument('output-file', type=click.Path(resolve_path=True))
def files(
yaml_file: List[IO],
output_file: str
) -> None:
"""Specify individual YAML files.
Example: interdiagram data1.yaml data2.yaml output.pdf
The YAML spec is in the following format:
\b
sections: # App sections (pages)
Home: # Unique key for section
actions: # List of call to actions
- Sign up: # Action name
- Sign Up # Reference to another section or component
- Login:
- Login
- Search for registry: # Could be empty
components: # List of components in this section
- Experience cards:
- Experience Card
components: # Reusable components
Experience Card:
actions:
- Go to detail:
- Add to registry:
"""
_draw_files(yaml_file, output_file)
if __name__ == '__main__':
cli()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import List
import click
import yaml
from ..models import Diagram
click.disable_unicode_literals_warning = True
# TODO: Correct documentation schema once it's frozen
@click.command()
@click.argument('yaml-file', nargs=-1, type=click.File())
@click.argument('output-file', type=click.Path(resolve_path=True))
def cli(
yaml_file: List,
output_file: str
) -> None:
"""Generate interaction/sitemap diagram.
Example: interdiagram data1.yaml data2.yaml output.pdf
The YAML spec is in the following format:
\b
sections: # App sections (pages)
Home: # Unique key for section
actions: # List of call to actions
- Sign up: # Action name
- Sign Up # Reference to another section or component
- Login:
- Login
- Search for registry: # Could be empty
components: # List of components in this section
- Experience cards:
- Experience Card
components: # Reusable components
Experience Card:
actions:
- Go to detail:
- Add to registry:
"""
diagram = Diagram()
for f in yaml_file:
# TODO: Validate against schema
diagram.process_spec(yaml.load(f))
diagram.draw(output_file)
if __name__ == '__main__':
cli()
|
mit
|
Python
|
2163478d2d927c4e50fcef65a88ca9c81b9d245b
|
Remove print from tests
|
jabooth/menpodetect,yuxiang-zhou/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect
|
menpodetect/tests/opencv_test.py
|
menpodetect/tests/opencv_test.py
|
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
|
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
print takeo_copy.landmarks
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
|
bsd-3-clause
|
Python
|
e8ad2ca0fc2ddec71645bef31686d9de2001dd88
|
add translate type
|
sloria/modular-odm,chrisseto/modular-odm,icereval/modular-odm,CenterForOpenScience/modular-odm
|
modularodm/fields/StringField.py
|
modularodm/fields/StringField.py
|
from . import Field
from ..validators import StringValidator
class StringField(Field):
# default = ''
translate_type = str
validate = StringValidator()
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs)
|
from . import Field
from ..validators import StringValidator
class StringField(Field):
# default = ''
validate = StringValidator()
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs)
|
apache-2.0
|
Python
|
df5ac0a7f2246e5fbbb5f7d87903a5232e94fe87
|
Test deprecation.
|
faassen/morepath,taschini/morepath,morepath/morepath
|
morepath/tests/test_autosetup.py
|
morepath/tests/test_autosetup.py
|
from collections import namedtuple
from morepath.autosetup import (
caller_module, caller_package, autoscan,
morepath_packages, import_package)
from base.m import App
import morepath
import pytest
def setup_module(module):
with pytest.deprecated_call():
morepath.disable_implicit()
def test_import():
import base
import sub
import entrypoint
from ns import real
from ns import real2
import under_score
# Pacakges to be ignored
import no_mp
from ns import nomp
import no_mp_sub
found = set(morepath_packages())
assert {base, entrypoint, real, real2, sub, under_score} <= found
assert {no_mp, nomp, no_mp_sub}.isdisjoint(found)
def test_load_distribution():
Distribution = namedtuple('Distribution', ['project_name'])
assert import_package(Distribution('base')).m.App is App
with pytest.raises(morepath.error.AutoImportError):
import_package(Distribution('inexistant-package'))
def invoke(callable):
"Add one frame to stack, no other purpose."
return callable()
def test_caller_module():
import sys
assert caller_module(1) == sys.modules[__name__]
assert invoke(caller_module) == sys.modules[__name__]
def test_caller_package():
import sys
assert caller_package(1) == sys.modules[__package__]
assert invoke(caller_package) == sys.modules[__package__]
def test_autoscan(monkeypatch):
import sys
for k in 'base.m', 'entrypoint.app', 'under_score.m':
monkeypatch.delitem(sys.modules, k, raising=False)
autoscan()
assert 'base.m' in sys.modules
assert 'entrypoint.app' in sys.modules
assert 'under_score.m' in sys.modules
|
from collections import namedtuple
from morepath.autosetup import (
caller_module, caller_package, autoscan,
morepath_packages, import_package)
from base.m import App
import morepath
import pytest
def setup_module(module):
morepath.disable_implicit()
def test_import():
import base
import sub
import entrypoint
from ns import real
from ns import real2
import under_score
# Pacakges to be ignored
import no_mp
from ns import nomp
import no_mp_sub
found = set(morepath_packages())
assert {base, entrypoint, real, real2, sub, under_score} <= found
assert {no_mp, nomp, no_mp_sub}.isdisjoint(found)
def test_load_distribution():
Distribution = namedtuple('Distribution', ['project_name'])
assert import_package(Distribution('base')).m.App is App
with pytest.raises(morepath.error.AutoImportError):
import_package(Distribution('inexistant-package'))
def invoke(callable):
"Add one frame to stack, no other purpose."
return callable()
def test_caller_module():
import sys
assert caller_module(1) == sys.modules[__name__]
assert invoke(caller_module) == sys.modules[__name__]
def test_caller_package():
import sys
assert caller_package(1) == sys.modules[__package__]
assert invoke(caller_package) == sys.modules[__package__]
def test_autoscan(monkeypatch):
import sys
for k in 'base.m', 'entrypoint.app', 'under_score.m':
monkeypatch.delitem(sys.modules, k, raising=False)
autoscan()
assert 'base.m' in sys.modules
assert 'entrypoint.app' in sys.modules
assert 'under_score.m' in sys.modules
|
bsd-3-clause
|
Python
|
e1184f70abd477ae2d0c304321231c908c67882b
|
add comment to authorize() that uname and pw are saved in plain text
|
MSLNZ/msl-package-manager
|
msl/package_manager/authorize.py
|
msl/package_manager/authorize.py
|
"""
Create the GitHub authorization file.
"""
import getpass
from .utils import log, get_username, _get_input, _GITHUB_AUTH_PATH
WARNING_MESSAGE = """
Your username and password are saved in plain text in the file that
is created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
"""
def authorize(username=None, password=None):
"""
Create the GitHub authorization file.
When requesting information about the MSL repositories_ that are
available on GitHub there is a limit to how often you can send
requests to the GitHub API. If you have a GitHub account and
include your username and password with each request then this
limit is increased.
.. important::
Calling this function will create a file that contains your GitHub
username and password so that GitHub requests are authorized. Your
username and password are saved in plain text in the file that is
created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
.. versionadded:: 2.3.0
.. _repositories: https://github.com/MSLNZ
Parameters
----------
username : :class:`str`, optional
The GitHub username. If :data:`None` then you will be
asked for the `username`.
password : :class:`str`, optional
The GitHub password. If :data:`None` then you will be
asked for the `password`.
"""
if username is None:
default = get_username()
try:
username = _get_input('Enter your GitHub username [default: {}]: '.format(default))
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
else:
if not username:
username = default
if password is None:
try:
password = getpass.getpass('Enter your GitHub password: ')
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
if not username:
log.warning('You must enter a username. Did not create GitHub authorization file.')
return
if not password:
log.warning('You must enter a password. Did not create GitHub authorization file.')
return
with open(_GITHUB_AUTH_PATH, 'w') as fp:
fp.write(username + ':' + password)
log.warning(WARNING_MESSAGE)
log.info('GitHub credentials saved to ' + _GITHUB_AUTH_PATH)
|
"""
Create the GitHub authorization file.
"""
import getpass
from .utils import log, get_username, _get_input, _GITHUB_AUTH_PATH
WARNING_MESSAGE = """
Your username and password are saved in plain text in the file that
is created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
"""
def authorize(username=None, password=None):
"""
Create the GitHub authorization file.
When requesting information about the MSL repositories_ that are
available on GitHub there is a limit to how often you can send
requests to the GitHub API. If you have a GitHub account and
include your username and password with each request then this
limit is increased.
Calling this function will create a file that contains your GitHub
username and password so that GitHub requests are authorized.
.. versionadded:: 2.3.0
.. _repositories: https://github.com/MSLNZ
Parameters
----------
username : :class:`str`, optional
The GitHub username. If :data:`None` then you will be
asked for the `username`.
password : :class:`str`, optional
The GitHub password. If :data:`None` then you will be
asked for the `password`.
"""
if username is None:
default = get_username()
try:
username = _get_input('Enter your GitHub username [default: {}]: '.format(default))
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
else:
if not username:
username = default
if password is None:
try:
password = getpass.getpass('Enter your GitHub password: ')
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
if not username:
log.warning('You must enter a username. Did not create GitHub authorization file.')
return
if not password:
log.warning('You must enter a password. Did not create GitHub authorization file.')
return
with open(_GITHUB_AUTH_PATH, 'w') as fp:
fp.write(username + ':' + password)
log.warning(WARNING_MESSAGE)
log.info('GitHub credentials saved to ' + _GITHUB_AUTH_PATH)
|
mit
|
Python
|
47b97cf311c36b993b59235dedc06993a6d58b6f
|
make TestVecSim subclass object
|
12yujim/pymtl,cfelton/pymtl,tj93/pymtl,jck/pymtl,tj93/pymtl,12yujim/pymtl,tj93/pymtl,jjffryan/pymtl,jck/pymtl,cornell-brg/pymtl,cornell-brg/pymtl,Glyfina-Fernando/pymtl,jck/pymtl,jjffryan/pymtl,jjffryan/pymtl,Glyfina-Fernando/pymtl,12yujim/pymtl,cornell-brg/pymtl,Glyfina-Fernando/pymtl,cfelton/pymtl,cfelton/pymtl
|
new_pmlib/TestVectorSimulator.py
|
new_pmlib/TestVectorSimulator.py
|
#=========================================================================
# TestVectorSimulator
#=========================================================================
# This class simplifies creating unit tests which simply set certain
# inputs and then check certain outputs every cycle. A user simply needs
# to instantiate and elaborate the model, create a list of test vectors,
# and create two helper functions (one to set the model inputs from the
# test vector and one to verify the model outputs against the test
# vector).
#
# Each test vector should be a list of values, so a collection of test
# vectors is just a list of lists. Each test vector specifies the
# inputs/outputs corresponding to a specific cycle in sequence.
#
from new_pymtl import *
class TestVectorSimulator( object ):
#-----------------------------------------------------------------------
# Constructor
#-----------------------------------------------------------------------
def __init__( self, model, test_vectors,
set_inputs_func, verify_outputs_func, wait_cycles = 0 ):
self.model = model
self.set_inputs_func = set_inputs_func
self.verify_outputs_func = verify_outputs_func
self.test_vectors = test_vectors
self.vcd_file_name = None
self.wait_cycles = wait_cycles
#-----------------------------------------------------------------------
# Dump VCD
#-----------------------------------------------------------------------
def dump_vcd( self, vcd_file_name ):
self.vcd_file_name = vcd_file_name
#-----------------------------------------------------------------------
# Run test
#-----------------------------------------------------------------------
def run_test( self, ):
# Create a simulator using the simulation tool
sim = SimulationTool( self.model )
# Dump vcd
if self.vcd_file_name != None:
sim.dump_vcd( self.vcd_file_name )
# Iterate setting the inputs and verifying the outputs each cycle
print ""
sim.reset()
for test_vector in self.test_vectors:
# Set inputs
self.set_inputs_func( self.model, test_vector )
# Evaluate combinational concurrent blocks in simulator
if self.wait_cycles == 0: sim.eval_combinational()
else:
for i in xrange(self.wait_cycles):
sim.cycle()
# Print the line trace
sim.print_line_trace()
# Verify outputs
self.verify_outputs_func( self.model, test_vector )
# Tick the simulator one cycle
sim.cycle()
# Add a couple extra ticks so that the VCD dump is nicer
sim.cycle()
sim.cycle()
sim.cycle()
|
#=========================================================================
# TestVectorSimulator
#=========================================================================
# This class simplifies creating unit tests which simply set certain
# inputs and then check certain outputs every cycle. A user simply needs
# to instantiate and elaborate the model, create a list of test vectors,
# and create two helper functions (one to set the model inputs from the
# test vector and one to verify the model outputs against the test
# vector).
#
# Each test vector should be a list of values, so a collection of test
# vectors is just a list of lists. Each test vector specifies the
# inputs/outputs corresponding to a specific cycle in sequence.
#
from new_pymtl import *
class TestVectorSimulator:
#-----------------------------------------------------------------------
# Constructor
#-----------------------------------------------------------------------
def __init__( self, model, test_vectors,
set_inputs_func, verify_outputs_func, wait_cycles = 0 ):
self.model = model
self.set_inputs_func = set_inputs_func
self.verify_outputs_func = verify_outputs_func
self.test_vectors = test_vectors
self.vcd_file_name = None
self.wait_cycles = wait_cycles
#-----------------------------------------------------------------------
# Dump VCD
#-----------------------------------------------------------------------
def dump_vcd( self, vcd_file_name ):
self.vcd_file_name = vcd_file_name
#-----------------------------------------------------------------------
# Run test
#-----------------------------------------------------------------------
def run_test( self, ):
# Create a simulator using the simulation tool
sim = SimulationTool( self.model )
# Dump vcd
if self.vcd_file_name != None:
sim.dump_vcd( self.vcd_file_name )
# Iterate setting the inputs and verifying the outputs each cycle
print ""
sim.reset()
for test_vector in self.test_vectors:
# Set inputs
self.set_inputs_func( self.model, test_vector )
# Evaluate combinational concurrent blocks in simulator
if self.wait_cycles == 0: sim.eval_combinational()
else:
for i in xrange(self.wait_cycles):
sim.cycle()
# Print the line trace
sim.print_line_trace()
# Verify outputs
self.verify_outputs_func( self.model, test_vector )
# Tick the simulator one cycle
sim.cycle()
# Add a couple extra ticks so that the VCD dump is nicer
sim.cycle()
sim.cycle()
sim.cycle()
|
bsd-3-clause
|
Python
|
0cd4862062bbe19aec5bb2a23563e03eb8ca8cb7
|
Fix stable release script
|
Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary
|
make_stable_release.py
|
make_stable_release.py
|
from robot.libdoc import libdoc
from src.WhiteLibrary.version import VERSION
import git
import sys
VERSION_FILE = './src/WhiteLibrary/version.py'
def change_stable(from_stable, to_stable):
with open(VERSION_FILE, 'r') as file:
filedata = file.read()
filedata = filedata.replace('STABLE = {0}'.format(from_stable), 'STABLE = {0}'.format(to_stable))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
def change_version_number(ver):
with open(VERSION_FILE, 'r') as file:
filedata = file.read()
filedata = filedata.replace('VERSION = "{0}"'.format(VERSION), 'VERSION = "{0}"'.format(ver))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
repo = git.Repo('.')
change_stable("False", "True")
new_version = sys.argv[1]
change_version_number(new_version)
libdoc("./src/WhiteLibrary", "./docs/keywords.html", version=new_version)
ver = "v{}".format(new_version)
repo.git.add(VERSION_FILE)
repo.git.add('./docs/keywords.html')
repo.git.commit(m='Making stable release: {0}'.format(ver))
tag = repo.create_tag(ver, message='New stable version: "{0}"'.format(ver))
repo.remotes.origin.push(tag)
repo.git.push()
change_stable("True", "False")
repo.git.add(VERSION_FILE)
repo.git.commit(m='Back to unstable release')
repo.git.push()
|
from robot.libdoc import libdoc
from src.WhiteLibrary.version import VERSION
import git
import sys
VERSION_FILE = './src/WhiteLibrary/version.py'
def change_stable(from_stable, to_stable):
with open(VERSION_FILE, 'r') as file :
filedata = file.read()
filedata = filedata.replace('STABLE = {0}'.format(from_stable), 'STABLE = {0}'.format(to_stable)
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
def change_version_number(ver):
with open(VERSION_FILE, 'r') as file :
filedata = file.read()
filedata = filedata.replace('VERSION = "{0}"'.format(VERSION), 'VERSION = "{0}"'.format(ver))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
repo = git.Repo( '.' )
change_stable("False", "True")
print(str(sys.argv))
change_version_number(sys.argv[1])
libdoc("./src/WhiteLibrary", "./docs/keywords.html", version=VERSION)
ver = "v".format(VERSION)
repo.git.add(VERSION_FILE)
repo.git.add('./docs/keywords.html')
repo.git.commit( m='Making stable release: {0}'.format(ver) )
tag = repo.git.create_tag(ver, message='New stable version: "{0}"'.format(ver))
repo.git.push(tag)
change_stable("True", "False")
repo.git.add(VERSION_FILE)
repo.git.commit( m='Back to unstable release' )
repo.git.push()
|
apache-2.0
|
Python
|
58bab9291c85edc3f13d3dc0659eff3c17201eb1
|
Improve pixelcnn namings and comments
|
israelg99/eva
|
eva/models/pixelcnn.py
|
eva/models/pixelcnn.py
|
from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense, Reshape, Lambda
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from keras.optimizers import Nadam
import keras.backend.tensorflow_backend as K
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, build=True):
width, height, channels = input_shape
# TODO: Make it scalable to any amount of channels.
input_map = Input(shape=input_shape, name='input_map')
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same', name='masked2d_A')(input_map)
model = ResidualBlockList(filters, blocks)(model)
model = PReLU()(model)
model = MaskedConvolution2D(filters, 1, 1)(model)
model = PReLU()(model)
model = MaskedConvolution2D(3*256, 1, 1, name='channels_mult_palette')(model)
model = Reshape((input_shape[0], input_shape[1], 256, input_shape[2]), name='palette_channels')(model)
# TODO: Make it scalable to any amount of channels.
red = Lambda(lambda x: x[:, :, :, :, 0], name='red_extract')(model)
red = Reshape((input_shape[0] * input_shape[1], 256), name='hw_red-palette')(red)
red = Activation('softmax', name='red')(red)
green = Lambda(lambda x: x[:, :, :, :, 1], name='green_extract')(model)
green = Reshape((input_shape[0] * input_shape[1], 256), name='hw_green-palette')(green)
green = Activation('softmax', name='green')(green)
blue = Lambda(lambda x: x[:, :, :, :, 2], name='blue_extract')(model)
blue = Reshape((input_shape[0] * input_shape[1], 256), name='hw_blue-palette')(blue)
blue = Activation('softmax', name='blue')(blue)
# TODO: Make is scalable to any amount of channels.
if build:
model = Model(input=input_map, output=[red, green, blue])
model.compile(optimizer=Nadam(),
loss={ 'red': 'sparse_categorical_crossentropy',
'green': 'sparse_categorical_crossentropy',
'blue': 'sparse_categorical_crossentropy'})
return model
|
from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense, Reshape, Lambda
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from keras.optimizers import Nadam
import keras.backend.tensorflow_backend as K
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, build=True):
width, height, channels = input_shape
input_map = Input(shape=input_shape, name='input_map')
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same')(input_map)
model = ResidualBlockList(model, filters, blocks)
model = PReLU()(model)
model = MaskedConvolution2D(filters, 1, 1)(model)
model = PReLU()(model)
model = MaskedConvolution2D(3*256, 1, 1)(model)
# TODO: Make it scalable to any amount of channels.
model = Reshape((input_shape[0], input_shape[1], 256, input_shape[2]))(model)
# TODO: Make it scalable to any amount of channels.
red = Lambda(lambda x: x[:, :, :, :, 0])(model)
red = Reshape((input_shape[0] * input_shape[1], 256))(red)
red = Activation('softmax', name='red')(red)
green = Lambda(lambda x: x[:, :, :, :, 1])(model)
green = Reshape((input_shape[0] * input_shape[1], 256))(green)
green = Activation('softmax', name='green')(green)
blue = Lambda(lambda x: x[:, :, :, :, 2])(model)
blue = Reshape((input_shape[0] * input_shape[1], 256))(blue)
blue = Activation('softmax', name='blue')(blue)
# TODO: Make is scalable to any amount of channels.
if build:
model = Model(input=input_map, output=[red, green, blue])
model.compile(optimizer=Nadam(),
loss={ 'red': 'sparse_categorical_crossentropy',
'green': 'sparse_categorical_crossentropy',
'blue': 'sparse_categorical_crossentropy'})
return model
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.