commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---|
db22f7a508524409f5e03fdbcbf6a394670ebbde
|
Use built-in auth views
|
GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,GNOME/extensions-web,magcius/sweettooth
|
sweettooth/auth/urls.py
|
sweettooth/auth/urls.py
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('',
url(r'login/$', 'django.contrib.auth.views.login',
dict(template_name='login.html'), name='login'),
url(r'logout/$', 'django.contrib.auth.views.logout',
dict(template_name='logout.html'), name='logout'),
url(r'register/$', 'auth.views.register', name='register'),
)
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('',
url(r'login/$', 'django.contrib.auth.views.login', dict(template_name='login.html'), name='login'),
url(r'logout/$', 'django.contrib.auth.views.logout', name='logout'),
url(r'register/$', 'auth.views.register', name='register'),
)
|
agpl-3.0
|
Python
|
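Editor's note on the row above: `patterns()` and dotted-string view references were removed in later Django releases, so the snippet no longer runs as written on modern Django. A minimal sketch of the equivalent urlconf, assuming Django 2.0+ (the class-based auth views replaced the function views referenced here):

# Hedged sketch: modern equivalent of the urlconf above (assumes Django >= 2.0).
from django.urls import path
from django.contrib.auth import views as auth_views

urlpatterns = [
    # LoginView/LogoutView replace the 'django.contrib.auth.views.login' and
    # '...logout' function views referenced by dotted string in the original.
    path('login/', auth_views.LoginView.as_view(template_name='login.html'), name='login'),
    path('logout/', auth_views.LogoutView.as_view(template_name='logout.html'), name='logout'),
]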
32fba62d157953eaeea6e5885a7ea860632a1945
|
rename filter function and set the second parameter as required
|
adnedelcu/SyncSettings,mfuentesg/SyncSettings
|
sync_settings/helper.py
|
sync_settings/helper.py
|
# -*- coding: utf-8 -*-
import os, re
from urllib import parse
def getDifference (setA, setB):
return list(filter(lambda el: el not in setB, setA))
def getHomePath (fl = ""):
if isinstance(fl, str) and fl != "":
return joinPath((os.path.expanduser('~'), fl))
return os.path.expanduser('~')
def existsPath(path, isFolder = False):
opath = os.path
if isinstance(path, str) and path != "" and opath.exists(path):
if (isFolder and opath.isdir(path)): return True
if (not isFolder and opath.isfile(path)): return True
return False
def joinPath (pathTuple):
if isinstance(pathTuple, tuple) and len(pathTuple) > 1:
return os.path.join(*pathTuple)
return None
def getFiles (path):
if existsPath(path, True):
f = []
for root, dirs, files in os.walk(path):
f.extend([joinPath((root, file)) for file in files])
return f
return []
def excludeFilesByPatterns (elements, patterns):
isValidElements = isinstance(elements, list) and len(elements) > 0
isValidPattern = isinstance(patterns, list) and len(patterns) > 0
results = []
if isValidElements and isValidPattern:
for element in elements:
for pattern in patterns:
extension = '.' + element.split(os.extsep)[-1]
filename = os.path.basename(element)
if element.startswith(pattern) and existsPath(pattern, True) and existsPath(joinPath((pattern, filename))):
results.append(element)
elif (extension == pattern or element == pattern) and existsPath(element):
results.append(element)
return getDifference(elements, results)
return elements
def encodePath(path):
if isinstance(path, str) and len(path) > 0:
return parse.quote(path)
return None
def decodePath(path):
if isinstance(path, str) and len(path) > 0:
return parse.unquote(path)
return None
|
# -*- coding: utf-8 -*-
import os, re
from urllib import parse
def getDifference (setA, setB):
return list(filter(lambda el: el not in setB, setA))
def getHomePath (fl = ""):
if isinstance(fl, str) and fl != "":
return joinPath((os.path.expanduser('~'), fl))
return os.path.expanduser('~')
def existsPath(path, isFolder = False):
opath = os.path
if isinstance(path, str) and path != "" and opath.exists(path):
if (isFolder and opath.isdir(path)): return True
if (not isFolder and opath.isfile(path)): return True
return False
def joinPath (pathTuple):
if isinstance(pathTuple, tuple) and len(pathTuple) > 1:
return os.path.join(*pathTuple)
return None
def getFiles (path):
if existsPath(path, True):
f = []
for root, dirs, files in os.walk(path):
f.extend([joinPath((root, file)) for file in files])
return f
return []
def excludeByPatterns (elements, patterns = []):
isValidElements = isinstance(elements, list) and len(elements) > 0
isValidPattern = isinstance(patterns, list) and len(patterns) > 0
results = []
if isValidElements and isValidPattern:
for element in elements:
for pattern in patterns:
extension = '.' + element.split(os.extsep)[-1]
filename = os.path.basename(element)
if element.startswith(pattern) and existsPath(pattern, True) and existsPath(joinPath((pattern, filename))):
results.append(element)
elif (extension == pattern or element == pattern) and existsPath(element):
results.append(element)
return getDifference(elements, results)
return elements
def encodePath(path):
if isinstance(path, str) and len(path) > 0:
return parse.quote(path)
return None
def decodePath(path):
if isinstance(path, str) and len(path) > 0:
return parse.unquote(path)
return None
|
mit
|
Python
|
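To illustrate the helper module in the row above, a short usage sketch of the renamed `excludeFilesByPatterns`. The import path is an assumption from the file layout, and note the helper only excludes entries that actually exist on disk, because it routes through `existsPath`:

# Hedged usage sketch for sync_settings/helper.py (import path assumed).
from sync_settings import helper

files = helper.getFiles(helper.getHomePath('.config'))  # walk ~/.config
kept = helper.excludeFilesByPatterns(files, ['.log'])   # drop existing *.log files
print(len(kept), 'files kept')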
c3c703c6d8b434da40beef6202bf2cbdc01e50a1
|
Add configured tests
|
Farama-Foundation/Gymnasium,dianchen96/gym,Farama-Foundation/Gymnasium,dianchen96/gym
|
gym/wrappers/tests/test_wrappers.py
|
gym/wrappers/tests/test_wrappers.py
|
import gym
from gym import error
from gym import wrappers
from gym.wrappers import SkipWrapper
import tempfile
import shutil
def test_skip():
every_two_frame = SkipWrapper(2)
env = gym.make("FrozenLake-v0")
env = every_two_frame(env)
obs = env.reset()
env.render()
def test_configured():
env = gym.make("FrozenLake-v0")
env = wrappers.TimeLimit(env)
env.configure()
# Make sure all layers of wrapping are configured
assert env._configured
assert env.env._configured
env.close()
def test_double_configured():
env = gym.make("FrozenLake-v0")
every_two_frame = SkipWrapper(2)
env = every_two_frame(env)
env = wrappers.TimeLimit(env)
env.configure()
# Make sure all layers of wrapping are configured
assert env._configured
assert env.env._configured
assert env.env.env._configured
env.close()
def test_no_double_wrapping():
temp = tempfile.mkdtemp()
try:
env = gym.make("FrozenLake-v0")
env = wrappers.Monitor(env, temp)
try:
env = wrappers.Monitor(env, temp)
except error.DoubleWrapperError:
pass
else:
assert False, "Should not allow double wrapping"
env.close()
finally:
shutil.rmtree(temp)
|
import gym
from gym import error
from gym import wrappers
from gym.wrappers import SkipWrapper
import tempfile
import shutil
def test_skip():
every_two_frame = SkipWrapper(2)
env = gym.make("FrozenLake-v0")
env = every_two_frame(env)
obs = env.reset()
env.render()
def test_no_double_wrapping():
temp = tempfile.mkdtemp()
try:
env = gym.make("FrozenLake-v0")
env = wrappers.Monitor(env, temp)
try:
env = wrappers.Monitor(env, temp)
except error.DoubleWrapperError:
pass
else:
assert False, "Should not allow double wrapping"
env.close()
finally:
shutil.rmtree(temp)
if __name__ == '__main__':
test_no_double_wrapping()
|
mit
|
Python
|
fa0174185832fac608cc1b65255231a73aac630a
|
fix evacuate call on branched client
|
lcostantino/healing-os,lcostantino/healing-os
|
healing/handler_plugins/evacuate.py
|
healing/handler_plugins/evacuate.py
|
from healing.handler_plugins import base
from healing import exceptions
from healing.openstack.common import log as logging
from healing import utils
LOG = logging.getLogger(__name__)
class Evacuate(base.HandlerPluginBase):
"""evacuate VM plugin.
Data format in action_meta is:
'evacuate_host': True if evacuating the entire host
"""
DESCRIPTION = "Evacuate VM (shared storage)"
NAME = "evacuate"
def start(self, ctx, data):
""" do something... spawn thread?
:param data ActionData Object
shared_storage?
"""
if not self.can_execute(data):
raise exceptions.ActionInProgress()
self.register_action(data)
try:
client = utils.get_nova_client(ctx)
lista = client.servers.evacuate(server=data.target_resource,
host=None, on_shared_storage=True)
self.current_action.output = "Output: " + str(lista)
except Exception as e:
LOG.exception(e)
self.current_action.output = e.message
self.stop(data, True)
return None
self.stop(data)
return self.current_action.id
def stop(self, data, error=False, message=None):
#this will work if not in thread probably, if we change this
#add the id to the data and context
if error:
self.current_action.error()
else:
self.current_action.stop()
self.current_action.save()
LOG.debug("Task stopped")
def can_execute(self, data, ctx=None):
"""
:param data ActionData Obj
move to parent?
"""
return super(Evacuate, self).can_execute(data, ctx=ctx)
|
from healing.handler_plugins import base
from healing import exceptions
from healing.openstack.common import log as logging
from healing import utils
LOG = logging.getLogger(__name__)
class Evacuate(base.HandlerPluginBase):
"""evacuate VM plugin.
Data format in action_meta is:
'evacuate_host': True if evacuating the entire host
"""
DESCRIPTION = "Evacuate VM (shared storage)"
NAME = "evacuate"
def start(self, ctx, data):
""" do something... spawn thread?
:param data ActionData Object
shared_storage?
"""
if not self.can_execute(data):
raise exceptions.ActionInProgress()
self.register_action(data)
try:
client = utils.get_nova_client(ctx)
lista = client.servers.evacuate(data.target_resource,
on_shared_storage=True,
find_host=True)
self.current_action.output = "Output: " + str(lista)
except Exception as e:
LOG.exception(e)
self.current_action.output = e.message
self.stop(data, True)
return None
self.stop(data)
return self.current_action.id
def stop(self, data, error=False, message=None):
#this will work if not in thread probably, if we change this
#add the id to the data and context
if error:
self.current_action.error()
else:
self.current_action.stop()
self.current_action.save()
LOG.debug("Task stopped")
def can_execute(self, data, ctx=None):
"""
:param data ActionData Obj
move to parent?
"""
return super(Evacuate, self).can_execute(data, ctx=ctx)
|
apache-2.0
|
Python
|
5336ff3967f4e297237045ca0914ae5257e3a767
|
fix csv output in one autoplot
|
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
|
htdocs/plotting/auto/scripts/p92.py
|
htdocs/plotting/auto/scripts/p92.py
|
import psycopg2.extras
import pyiem.nws.vtec as vtec
import datetime
import pandas as pd
def get_description():
""" Return a dict describing how to call this plotter """
d = dict()
d['data'] = True
d['cache'] = 3600
d['description'] = """This map depicts the number of days since a
Weather Forecast Office has issued a given VTEC product."""
d['arguments'] = [
dict(type='phenomena', name='phenomena',
default='TO', label='Select Watch/Warning Phenomena Type:'),
dict(type='significance', name='significance',
default='W', label='Select Watch/Warning Significance Level:'),
]
return d
def plotter(fdict):
""" Go """
import matplotlib
matplotlib.use('agg')
from pyiem.plot import MapPlot
utc = datetime.datetime.utcnow()
bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
pgconn = psycopg2.connect(database='postgis', host='iemdb', user='nobody')
cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
phenomena = fdict.get('phenomena', 'TO')
significance = fdict.get('significance', 'W')
cursor.execute("""
select wfo, extract(days from ('TODAY'::date - max(issue))) as m
from warnings where significance = %s and phenomena = %s
GROUP by wfo ORDER by m ASC
""", (significance, phenomena))
data = {}
rows = []
for row in cursor:
wfo = row[0] if row[0] != 'JSJ' else 'SJU'
rows.append(dict(wfo=wfo, days=row[1]))
data[wfo] = max([row[1], 0])
df = pd.DataFrame(rows)
df.set_index('wfo', inplace=True)
m = MapPlot(sector='nws', axisbg='white', nocaption=True,
title='Days since Last %s %s by NWS Office' % (
vtec._phenDict.get(phenomena, phenomena),
vtec._sigDict.get(significance, significance)),
subtitle='Valid %s' % (utc.strftime("%d %b %Y %H%M UTC"),))
m.fill_cwas(data, bins=bins, ilabel=True, units='Days',
lblformat='%.0f')
return m.fig, df
|
import psycopg2.extras
import pyiem.nws.vtec as vtec
import datetime
import pandas as pd
def get_description():
""" Return a dict describing how to call this plotter """
d = dict()
d['data'] = True
d['cache'] = 3600
d['description'] = """This map depicts the number of days since a
Weather Forecast Office has issued a given VTEC product."""
d['arguments'] = [
dict(type='phenomena', name='phenomena',
default='TO', label='Select Watch/Warning Phenomena Type:'),
dict(type='significance', name='significance',
default='W', label='Select Watch/Warning Significance Level:'),
]
return d
def plotter(fdict):
""" Go """
import matplotlib
matplotlib.use('agg')
from pyiem.plot import MapPlot
utc = datetime.datetime.utcnow()
bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
pgconn = psycopg2.connect(database='postgis', host='iemdb', user='nobody')
cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
phenomena = fdict.get('phenomena', 'TO')
significance = fdict.get('significance', 'W')
cursor.execute("""
select wfo, extract(days from ('TODAY'::date - max(issue))) as m
from warnings where significance = %s and phenomena = %s
GROUP by wfo ORDER by m ASC
""", (significance, phenomena))
data = {}
rows = []
for row in cursor:
wfo = row[0] if row[0] != 'JSJ' else 'SJU'
rows.append(dict(wfo=wfo, days=row[1]))
data[wfo] = max([row[1], 0])
df = pd.DataFrame(rows)
m = MapPlot(sector='nws', axisbg='white', nocaption=True,
title='Days since Last %s %s by NWS Office' % (
vtec._phenDict.get(phenomena, phenomena),
vtec._sigDict.get(significance, significance)),
subtitle='Valid %s' % (utc.strftime("%d %b %Y %H%M UTC"),))
m.fill_cwas(data, bins=bins, ilabel=True, units='Days',
lblformat='%.0f')
return m.fig, df
|
mit
|
Python
|
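The fix in the row above adds `df.set_index('wfo', inplace=True)` before returning the DataFrame, so the CSV output is keyed by forecast office instead of a meaningless 0..n-1 index. A standalone illustration with toy data (not real IEM output):

# Toy demonstration of why set_index fixes the CSV output.
import pandas as pd

df = pd.DataFrame([dict(wfo='SJU', days=3), dict(wfo='DMX', days=10)])
print(df.to_csv())                 # first column is the integer index 0,1,...
df.set_index('wfo', inplace=True)
print(df.to_csv())                 # first column is now the WFO identifier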
a8d639cbac2439c0079b86b72dd3daee6505e9d0
|
Update version file
|
noxdafox/clipspy,noxdafox/clipspy
|
version.py
|
version.py
|
"""Versioning controlled via Git Tag, check setup.py"""
__version__ = "0.3.3"
|
"""Versioning controlled via Git Tag, check setup.py"""
__version__ = "0.3.2"
|
bsd-3-clause
|
Python
|
137b20e4aa779be3c97c500ab485126085492ce5
|
comment format
|
azatoth/pywikipedia
|
pywikibot/families/scratchpad_wikia_family.py
|
pywikibot/families/scratchpad_wikia_family.py
|
# -*- coding: utf-8 -*-
from pywikibot import family
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = 'scratchpad_wikia'
self.langs = {
'de':'de.mini.wikia.com',
'en':'scratchpad.wikia.com',
'fr':'bloc-notes.wikia.com',
'ja':'ja.scratchpad.wikia.com',
'zh':'zh.scratchpad.wikia.com',
}
# A few selected big languages for things that we do not want to loop
# over all languages. This is only needed by the titletranslate.py
# module, so if you carefully avoid the options, you could get away
# without these for another wikimedia family.
self.languages_by_size = ['en','de']
def version(self, code):
return "1.14.0"
def scriptpath(self, code):
return ''
|
# -*- coding: utf-8 -*-
from pywikibot import family
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = 'scratchpad_wikia'
self.langs = {
'de':'de.mini.wikia.com',
'en':'scratchpad.wikia.com',
'fr':'bloc-notes.wikia.com',
'ja':'ja.scratchpad.wikia.com',
'zh':'zh.scratchpad.wikia.com',
}
# A few selected big languages for things that we do not want
# to loop over all languages. This is only needed by the
# titletranslate.py module, so if you carefully avoid the
# options, you could get away without these for another
# wikimedia family.
self.languages_by_size = ['en','de']
def version(self, code):
return "1.14.0"
def scriptpath(self, code):
return ''
|
mit
|
Python
|
c898b68fa8d81963b7a5282e67ecb28764bbd0a3
|
Add comment explaining mocking
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
tests/app/models/test_contact_list.py
|
tests/app/models/test_contact_list.py
|
from datetime import datetime
from app.models.contact_list import ContactList
from app.models.job import PaginatedJobs
def test_created_at():
created_at = ContactList({'created_at': '2016-05-06T07:08:09.061258'}).created_at
assert isinstance(created_at, datetime)
assert created_at.isoformat() == '2016-05-06T08:08:09.061258+01:00'
def test_get_jobs(mock_get_jobs):
contact_list = ContactList({'id': 'a', 'service_id': 'b'})
assert isinstance(contact_list.get_jobs(page=123), PaginatedJobs)
# mock_get_jobs mocks the underlying API client method, not
# contact_list.get_jobs
mock_get_jobs.assert_called_once_with(
'b',
contact_list_id='a',
statuses={
'finished',
'sending limits exceeded',
'ready to send',
'scheduled',
'sent to dvla',
'pending',
'in progress',
},
page=123,
)
|
from datetime import datetime
from app.models.contact_list import ContactList
from app.models.job import PaginatedJobs
def test_created_at():
created_at = ContactList({'created_at': '2016-05-06T07:08:09.061258'}).created_at
assert isinstance(created_at, datetime)
assert created_at.isoformat() == '2016-05-06T08:08:09.061258+01:00'
def test_get_jobs(mock_get_jobs):
contact_list = ContactList({'id': 'a', 'service_id': 'b'})
assert isinstance(contact_list.get_jobs(page=123), PaginatedJobs)
mock_get_jobs.assert_called_once_with(
'b',
contact_list_id='a',
statuses={
'finished',
'sending limits exceeded',
'ready to send',
'scheduled',
'sent to dvla',
'pending',
'in progress',
},
page=123,
)
|
mit
|
Python
|
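The comment added in the row above clarifies that `mock_get_jobs` patches the underlying API client rather than `ContactList.get_jobs`. A hedged sketch of how such a fixture is typically wired with pytest-mock; the patched dotted path is an assumption, not taken from the repository:

# Hypothetical pytest-mock fixture; the patch target is illustrative only.
import pytest

@pytest.fixture
def mock_get_jobs(mocker):
    # Patching the client method lets the test assert on the exact call args.
    return mocker.patch('app.job_api_client.get_jobs')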
39c34860fa9992f38892aa026c5b0c6547bd4b23
|
Fix flaky evergreen test
|
theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs
|
tests/content/test_content_manager.py
|
tests/content/test_content_manager.py
|
from django.test import override_settings
from django.utils import timezone
from bulbs.campaigns.models import Campaign
from bulbs.content.models import Content
from bulbs.utils.test import make_content, BaseIndexableTestCase
from example.testcontent.models import TestContentObj, TestContentObjTwo
class ContentManagerTestCase(BaseIndexableTestCase):
def setUp(self):
super(ContentManagerTestCase, self).setUp()
campaign = Campaign.objects.create(
sponsor_name="TheCobbler",
start_date=timezone.now() - timezone.timedelta(days=5),
end_date=timezone.now() + timezone.timedelta(days=5)
)
make_content(TestReadingListObj, evergreen=True, published=timezone.now(), _quantity=50)
make_content(TestContentObj, campaign=campaign, published=timezone.now(), _quantity=50)
Content.search_objects.refresh()
def test_sponsored(self):
sponsored = Content.search_objects.sponsored().extra(from_=0, size=50)
qs = TestContentObj.objects.filter(campaign__isnull=False)
self.assertEqual(qs.count(), sponsored.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in sponsored])
)
def test_evergreen(self):
evergreen = Content.search_objects.evergreen().extra(from_=0, size=50)
qs = Content.objects.filter(evergreen=True)
self.assertEqual(qs.count(), evergreen.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in evergreen])
)
@override_settings(VIDEO_DOC_TYPE=TestContentObjTwo.search_objects.mapping.doc_type)
def test_evergreen_video(self):
make_content(TestContentObjTwo, evergreen=True, published=self.now, _quantity=12)
make_content(TestContentObjTwo, published=self.now, _quantity=12)
Content.search_objects.refresh()
evergreen = Content.search_objects.evergreen_video().extra(from_=0, size=50)
qs = TestContentObjTwo.objects.filter(evergreen=True)
self.assertEqual(12, evergreen.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in evergreen])
)
|
from django.test import override_settings
from django.utils import timezone
from bulbs.campaigns.models import Campaign
from bulbs.content.models import Content
from bulbs.utils.test import make_content, BaseIndexableTestCase
from example.testcontent.models import TestContentObj, TestContentObjTwo
class ContentManagerTestCase(BaseIndexableTestCase):
def setUp(self):
super(ContentManagerTestCase, self).setUp()
campaign = Campaign.objects.create(
sponsor_name="TheCobbler",
start_date=timezone.now() - timezone.timedelta(days=5),
end_date=timezone.now() + timezone.timedelta(days=5)
)
make_content(evergreen=True, published=timezone.now(), _quantity=50)
make_content(TestContentObj, campaign=campaign, published=timezone.now(), _quantity=50)
Content.search_objects.refresh()
def test_sponsored(self):
sponsored = Content.search_objects.sponsored().extra(from_=0, size=50)
qs = TestContentObj.objects.filter(campaign__isnull=False)
self.assertEqual(qs.count(), sponsored.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in sponsored])
)
def test_evergreen(self):
evergreen = Content.search_objects.evergreen().extra(from_=0, size=50)
qs = Content.objects.filter(evergreen=True)
self.assertEqual(qs.count(), evergreen.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in evergreen])
)
@override_settings(VIDEO_DOC_TYPE=TestContentObjTwo.search_objects.mapping.doc_type)
def test_evergreen_video(self):
make_content(TestContentObjTwo, evergreen=True, published=self.now, _quantity=12)
make_content(TestContentObjTwo, published=self.now, _quantity=12)
Content.search_objects.refresh()
evergreen = Content.search_objects.evergreen_video().extra(from_=0, size=50)
qs = TestContentObjTwo.objects.filter(evergreen=True)
self.assertEqual(12, evergreen.count())
self.assertEqual(
sorted([obj.id for obj in qs]),
sorted([obj.id for obj in evergreen])
)
|
mit
|
Python
|
8c8bc1ef8e3ba7519d4612856a420ed410974e12
|
add redactor to installed apps settings
|
opps/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps
|
opps/core/__init__.py
|
opps/core/__init__.py
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('redactor',)
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
mit
|
Python
|
6a02c5e1844ad7d1b9ae50cd5dbae6975fb685ee
|
Make internal error more clear
|
stefanseefeld/numba,shiquanwang/numba,gdementen/numba,seibert/numba,numba/numba,cpcloud/numba,shiquanwang/numba,gdementen/numba,gdementen/numba,cpcloud/numba,sklam/numba,numba/numba,jriehl/numba,IntelLabs/numba,seibert/numba,ssarangi/numba,GaZ3ll3/numba,pombredanne/numba,pitrou/numba,gdementen/numba,pombredanne/numba,stonebig/numba,pitrou/numba,seibert/numba,gmarkall/numba,jriehl/numba,stonebig/numba,GaZ3ll3/numba,sklam/numba,stuartarchibald/numba,stefanseefeld/numba,pitrou/numba,stuartarchibald/numba,gmarkall/numba,numba/numba,sklam/numba,IntelLabs/numba,shiquanwang/numba,pitrou/numba,gdementen/numba,ssarangi/numba,ssarangi/numba,pitrou/numba,stonebig/numba,cpcloud/numba,GaZ3ll3/numba,sklam/numba,numba/numba,seibert/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,IntelLabs/numba,stonebig/numba,stuartarchibald/numba,cpcloud/numba,pombredanne/numba,gmarkall/numba,jriehl/numba,ssarangi/numba,jriehl/numba,gmarkall/numba,cpcloud/numba,sklam/numba,pombredanne/numba,stefanseefeld/numba,pombredanne/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,stefanseefeld/numba,seibert/numba,IntelLabs/numba,GaZ3ll3/numba,stefanseefeld/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba
|
numba/error.py
|
numba/error.py
|
import traceback
def format_pos(node):
if node is not None and hasattr(node, 'lineno'):
return "%s:%s: " % (node.lineno, node.col_offset)
else:
return ""
class NumbaError(Exception):
"Some error happened during compilation"
def __init__(self, node, msg=None, *args):
if msg is None:
node, msg = None, node
self.node = node
self.msg = msg
self.args = args
def __str__(self):
try:
pos = format_pos(self.node)
msg = "%s%s %s" % (pos, self.msg, " ".join(map(str, self.args)))
return msg.rstrip()
except:
traceback.print_exc()
return "<internal error creating numba error message>"
class InternalError(NumbaError):
"Indicates a compiler bug"
class _UnknownAttribute(Exception):
pass
|
import traceback
def format_pos(node):
if node is not None and hasattr(node, 'lineno'):
return "%s:%s: " % (node.lineno, node.col_offset)
else:
return ""
class NumbaError(Exception):
"Some error happened during compilation"
def __init__(self, node, msg=None, *args):
if msg is None:
node, msg = None, node
self.node = node
self.msg = msg
self.args = args
def __str__(self):
try:
pos = format_pos(self.node)
msg = "%s%s %s" % (pos, self.msg, " ".join(map(str, self.args)))
return msg.rstrip()
except:
traceback.print_exc()
return ""
class InternalError(NumbaError):
"Indicates a compiler bug"
class _UnknownAttribute(Exception):
pass
|
bsd-2-clause
|
Python
|
429bf52eb482955cfe195708898ce275e1a72dcb
|
Validate input.
|
devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django
|
src/devilry_qualifiesforexam/devilry_qualifiesforexam/rest/preview.py
|
src/devilry_qualifiesforexam/devilry_qualifiesforexam/rest/preview.py
|
from djangorestframework.views import View
from djangorestframework.permissions import IsAuthenticated
from djangorestframework.response import ErrorResponse
from djangorestframework import status as statuscodes
from django.shortcuts import get_object_or_404
from devilry_qualifiesforexam.pluginhelpers import create_sessionkey
from devilry.apps.core.models import Period
from devilry.utils.groups_groupedby_relatedstudent_and_assignment import GroupsGroupedByRelatedStudentAndAssignment
from devilry_subjectadmin.rest.auth import IsPeriodAdmin
class Preview(View):
"""
Generate the data required to provide a preview for the qualified for exam wizard.
# GET
## Parameters
The following parameters are required:
- ``periodid``: The ID of the period. Supplied as the last part of the URL-path.
404 is returned unless the user is admin on this period.
- ``pluginsessionid``: Forwarded from the first page of the wizard. It is an ID
used to lookup the output from the plugin.
## Returns
An object/dict with the following attributes:
- ``pluginoutput``: The serialized output from the plugin.
- ``perioddata``: All results for all students on the period.
"""
permissions = (IsAuthenticated, IsPeriodAdmin)
def get(self, request, id):
pluginsessionid = self.request.GET.get('pluginsessionid', None)
if not pluginsessionid:
raise ErrorResponse(statuscodes.HTTP_400_BAD_REQUEST,
{'detail': '``pluginsessionid`` is a required parameter'})
period = get_object_or_404(Period, pk=id)
previewdata = self.request.session[create_sessionkey(pluginsessionid)]
grouper = GroupsGroupedByRelatedStudentAndAssignment(period)
return {
'perioddata': grouper.serialize(),
'pluginoutput': previewdata.serialize()
}
|
from djangorestframework.views import View
from djangorestframework.permissions import IsAuthenticated
from django.shortcuts import get_object_or_404
from devilry_qualifiesforexam.pluginhelpers import create_sessionkey
from devilry.apps.core.models import Period
from devilry.utils.groups_groupedby_relatedstudent_and_assignment import GroupsGroupedByRelatedStudentAndAssignment
from devilry_subjectadmin.rest.auth import IsPeriodAdmin
class Preview(View):
"""
Generate the data required to provide a preview for the qualified for exam wizard.
# GET
## Parameters
The following parameters are required:
- ``periodid``: The ID of the period. Supplied as the last part of the URL-path.
404 is returned unless the user is admin on this period.
- ``pluginsessionid``: Forwarded from the first page of the wizard. It is an ID
used to lookup the output from the plugin.
## Returns
An object/dict with the following attributes:
- ``pluginoutput``: The serialized output from the plugin.
- ``perioddata``: All results for all students on the period.
"""
permissions = (IsAuthenticated, IsPeriodAdmin)
def get(self, request, id):
pluginsessionid = self.request.GET['pluginsessionid']
period = get_object_or_404(Period, pk=id)
previewdata = self.request.session[create_sessionkey(pluginsessionid)]
grouper = GroupsGroupedByRelatedStudentAndAssignment(period)
return {
'perioddata': grouper.serialize(),
'pluginoutput': previewdata.serialize()
}
|
bsd-3-clause
|
Python
|
8ab7ad1f6aee485c64a7e1347c76e628cc820ba8
|
add some docker Builder args
|
gopythongo/gopythongo,gopythongo/gopythongo
|
src/py/gopythongo/builders/docker.py
|
src/py/gopythongo/builders/docker.py
|
# -* encoding: utf-8 *-
import argparse
import gopythongo.shared.docker_args
from gopythongo.utils import print_info, highlight
from gopythongo.builders import BaseBuilder
from typing import Any
class DockerBuilder(BaseBuilder):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@property
def builder_name(self) -> str:
return "docker"
def add_args(self, parser: argparse.ArgumentParser) -> None:
gopythongo.shared.docker_args.add_shared_args(parser)
gp_docker = parser.add_argument_group("Docker Builder options")
gp_docker.add_argument("--docker-buildfile", dest="docker_buildfile", default=None,
help="Specify a Dockerfile to build the the build environment. The build commands will "
"then be executed inside the resulting container.")
gp_docker.add_argument("--docker-leave-containers", dest="docker_leave_containers", action="store_true",
default=False, env_var="DOCKER_LEAVE_CONTAINERS",
help="After creating a build environment and a runtime container, if this option is "
"used, GoPythonGo will not use 'docker rm' and 'docker rmi' to clean up the "
"resulting containers.")
def validate_args(self, args: argparse.Namespace) -> None:
gopythongo.shared.docker_args.validate_shared_args(args)
def build(self, args: argparse.Namespace) -> None:
print_info("Building with %s" % highlight("docker"))
builder_class = DockerBuilder
|
# -* encoding: utf-8 *-
import argparse
import gopythongo.shared.docker_args
from gopythongo.utils import print_info, highlight
from gopythongo.builders import BaseBuilder
from typing import Any
class DockerBuilder(BaseBuilder):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@property
def builder_name(self) -> str:
return u"docker"
def add_args(self, parser: argparse.ArgumentParser) -> None:
gopythongo.shared.docker_args.add_shared_args(parser)
def validate_args(self, args: argparse.Namespace) -> None:
gopythongo.shared.docker_args.validate_shared_args(args)
def build(self, args: argparse.Namespace) -> None:
print_info("Building with %s" % highlight("docker"))
builder_class = DockerBuilder
|
mpl-2.0
|
Python
|
6248a0b813fc6598d964639ad696ecd506015918
|
Rename to TaarifaAPI
|
gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa
|
taarifa_api/settings.py
|
taarifa_api/settings.py
|
"""Global API configuration."""
from os import environ
from urlparse import urlparse
from schemas import facility_schema, request_schema, resource_schema, \
service_schema
API_NAME = 'TaarifaAPI'
URL_PREFIX = 'api'
if 'EVE_DEBUG' in environ:
DEBUG = True
if 'MONGOLAB_URI' in environ:
url = urlparse(environ['MONGOLAB_URI'])
MONGO_HOST = url.hostname
MONGO_PORT = url.port
MONGO_USERNAME = url.username
MONGO_PASSWORD = url.password
MONGO_DBNAME = url.path[1:]
else:
MONGO_DBNAME = API_NAME
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
# Enable reads (GET), edits (PATCH) and deletes of individual items
# (defaults to read-only item access).
ITEM_METHODS = ['GET', 'PATCH', 'DELETE']
services = {
"schema": service_schema,
}
requests = {
"schema": request_schema,
"source": "requests",
"key": "service_code",
}
facilities = {
"item_title": "facility",
"schema": facility_schema,
}
resources = {
"schema": resource_schema,
"versioning": True,
"source": "resources",
"key": "facility_code",
}
DOMAIN = {
'services': services,
'requests': requests,
'facilities': facilities,
'resources': resources,
}
# FIXME: Temporarily allow CORS requests for development purposes
X_DOMAINS = "*"
|
"""Global API configuration."""
from os import environ
from urlparse import urlparse
from schemas import facility_schema, request_schema, resource_schema, \
service_schema
API_NAME = 'Taarifa'
URL_PREFIX = 'api'
if 'EVE_DEBUG' in environ:
DEBUG = True
if 'MONGOLAB_URI' in environ:
url = urlparse(environ['MONGOLAB_URI'])
MONGO_HOST = url.hostname
MONGO_PORT = url.port
MONGO_USERNAME = url.username
MONGO_PASSWORD = url.password
MONGO_DBNAME = url.path[1:]
else:
MONGO_DBNAME = API_NAME
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
# Enable reads (GET), edits (PATCH) and deletes of individual items
# (defaults to read-only item access).
ITEM_METHODS = ['GET', 'PATCH', 'DELETE']
services = {
"schema": service_schema,
}
requests = {
"schema": request_schema,
"source": "requests",
"key": "service_code",
}
facilities = {
"item_title": "facility",
"schema": facility_schema,
}
resources = {
"schema": resource_schema,
"versioning": True,
"source": "resources",
"key": "facility_code",
}
DOMAIN = {
'services': services,
'requests': requests,
'facilities': facilities,
'resources': resources,
}
# FIXME: Temporarily allow CORS requests for development purposes
X_DOMAINS = "*"
|
apache-2.0
|
Python
|
059a799b9c347b6abfcd2daa3678d98cd0884210
|
Add "no cover" to teardown() and handle_address_delete() on TiedModelRealtimeSignalProcessor. These are never called.
|
OpenVolunteeringPlatform/django-ovp-search
|
ovp_search/signals.py
|
ovp_search/signals.py
|
from django.db import models
from haystack import signals
from ovp_projects.models import Project
from ovp_organizations.models import Organization
from ovp_core.models import GoogleAddress
class TiedModelRealtimeSignalProcessor(signals.BaseSignalProcessor):
"""
TiedModelRealTimeSignalProcessor handles updates to an index tied to a model
We need to be able to detect changes to a model and rebuild another index,
such as detecting changes to GoogleAddress and updating the index
for projects and organizations.
"""
attach_to = [
(Project, 'handle_save', 'handle_delete'),
(Organization, 'handle_save', 'handle_delete'),
(GoogleAddress, 'handle_address_save', 'handle_address_delete'),
]
m2m = [
Project.causes.through, Project.skills.through, Organization.causes.through
]
def setup(self):
for item in self.attach_to:
models.signals.post_save.connect(getattr(self, item[1]), sender=item[0])
models.signals.post_delete.connect(getattr(self, item[1]), sender=item[0])
for item in self.m2m:
models.signals.m2m_changed.connect(self.handle_m2m, sender=item)
# never really called
def teardown(self): # pragma: no cover
for item in self.attach_to:
models.signals.post_save.disconnect(getattr(self, item[1]), sender=item[0])
models.signals.post_delete.disconnect(getattr(self, item[1]), sender=item[0])
for item in self.m2m:
models.signals.m2m_changed.disconnect(self.handle_m2m, sender=item)
def handle_address_save(self, sender, instance, **kwargs):
""" Custom handler for address save """
objects = self.find_associated_with_address(instance)
for obj in objects:
self.handle_save(obj.__class__, obj)
# this function is never really called on sqlite dbs
def handle_address_delete(self, sender, instance, **kwargs): # pragma: no cover
""" Custom handler for address delete """
objects = self.find_associated_with_address(instance)
for obj in objects:
self.handle_delete(obj.__class__, obj)
def handle_m2m(self, sender, instance, **kwargs):
""" Handle many to many relationships """
self.handle_save(instance.__class__, instance)
def find_associated_with_address(self, instance):
""" Returns list with projects and organizations associated with given address """
objects = []
objects += list(Project.objects.filter(address=instance))
objects += list(Organization.objects.filter(address=instance))
return objects
|
from django.db import models
from haystack import signals
from ovp_projects.models import Project
from ovp_organizations.models import Organization
from ovp_core.models import GoogleAddress
class TiedModelRealtimeSignalProcessor(signals.BaseSignalProcessor):
"""
TiedModelRealTimeSignalProcessor handles updates to an index tied to a model
We need to be able to detect changes to a model and rebuild another index,
such as detecting changes to GoogleAddress and updating the index
for projects and organizations.
"""
attach_to = [
(Project, 'handle_save', 'handle_delete'),
(Organization, 'handle_save', 'handle_delete'),
(GoogleAddress, 'handle_address_save', 'handle_address_delete'),
]
m2m = [
Project.causes.through, Project.skills.through, Organization.causes.through
]
def setup(self):
for item in self.attach_to:
models.signals.post_save.connect(getattr(self, item[1]), sender=item[0])
models.signals.post_delete.connect(getattr(self, item[1]), sender=item[0])
for item in self.m2m:
models.signals.m2m_changed.connect(self.handle_m2m, sender=item)
def teardown(self):
for item in self.attach_to:
models.signals.post_save.disconnect(getattr(self, item[1]), sender=item[0])
models.signals.post_delete.disconnect(getattr(self, item[1]), sender=item[0])
for item in self.m2m:
models.signals.m2m_changed.disconnect(self.handle_m2m, sender=item)
def handle_address_save(self, sender, instance, **kwargs):
""" Custom handler for address save """
objects = self.find_associated_with_address(instance)
for obj in objects:
self.handle_save(obj.__class__, obj)
def handle_address_delete(self, sender, instance, **kwargs):
""" Custom handler for address delete """
objects = self.find_associated_with_address(instance)
for obj in objects:
self.handle_delete(obj.__class__, obj)
def handle_m2m(self, sender, instance, **kwargs):
""" Handle many to many relationships """
self.handle_save(instance.__class__, instance)
def find_associated_with_address(self, instance):
""" Returns list with projects and organizations associated with given address """
objects = []
objects += list(Project.objects.filter(address=instance))
objects += list(Organization.objects.filter(address=instance))
return objects
|
agpl-3.0
|
Python
|
9cbdc64bcc1144b8ca7d32d08aa5d36afa7f1e73
|
index command - reflected _log_id_short change
|
lukas-linhart/pageobject
|
pageobject/commands/index.py
|
pageobject/commands/index.py
|
def index(self, value):
"""
Return index of the first child containing the specified value.
:param str value: text value to look for
:returns: index of the first child containing the specified value
:rtype: int
:raises ValueError: if the value is not found
"""
self.logger.info('getting index of text "{}" within page object list {}'.format(value, self._log_id_short))
self.logger.debug('getting index of text "{}" within page object list; {}'.format(value, self._log_id_long))
index = self.text_values.index(value)
self.logger.info('index of text "{}" within page object list {} is {}'.format(value, self._log_id_short, index))
self.logger.debug('index of text "{}" within page object is {}; {}'.format(value, index, self._log_id_long))
return index
|
def index(self, value):
"""
Return index of the first child containing the specified value.
:param str value: text value to look for
:returns: index of the first child containing the specified value
:rtype: int
:raises ValueError: if the value is not found
"""
self.logger.info('getting index of "{}" within {}'.format(value, self._log_id_short))
self.logger.debug('getting index of "{}" within page object; {}'.format(value, self._log_id_long))
index = self.text_values.index(value)
self.logger.info('index of "{}" within {} is {}'.format(value, self._log_id_short, index))
self.logger.debug('index of "{}" within page object is {}; {}'.format(value, index, self._log_id_long))
return index
|
mit
|
Python
|
6143e6b015ed0435dc747b8d4242d47dca79c7a8
|
improve busydialog handling
|
phil65/script.module.kodi65
|
lib/kodi65/busyhandler.py
|
lib/kodi65/busyhandler.py
|
# -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <[email protected]>
# This program is Free Software see LICENSE file for details
import xbmcgui
from kodi65 import utils
import traceback
from functools import wraps
class BusyHandler(object):
"""
Class to deal with busydialog handling
"""
def __init__(self, *args, **kwargs):
self.busy = 0
self.enabled = True
self.dialog = xbmcgui.DialogBusy()
def enable(self):
"""
Enables busydialog handling
"""
self.enabled = True
def disable(self):
"""
Disables busydialog handling
"""
self.enabled = False
def show_busy(self):
"""
Increase busycounter and open busydialog if needed
"""
if not self.enabled:
return None
if self.busy == 0:
self.dialog.create()
self.busy += 1
def set_progress(self, percent):
self.dialog.update(percent)
def hide_busy(self):
"""
Decrease busycounter and close busydialog if needed
"""
if not self.enabled:
return None
self.busy = max(0, self.busy - 1)
if self.busy == 0:
self.dialog.close()
def set_busy(self, func):
"""
Decorator to show busy dialog while function is running
"""
@wraps(func)
def decorator(cls, *args, **kwargs):
self.show_busy()
result = None
try:
result = func(cls, *args, **kwargs)
except Exception:
utils.log(traceback.format_exc())
utils.notify("Error", "please contact add-on author")
finally:
self.hide_busy()
return result
return decorator
busyhandler = BusyHandler()
|
# -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <[email protected]>
# This program is Free Software see LICENSE file for details
import xbmc
from kodi65 import utils
import traceback
from functools import wraps
class BusyHandler(object):
"""
Class to deal with busydialog handling
"""
def __init__(self, *args, **kwargs):
self.busy = 0
self.enabled = True
def enable(self):
"""
Enables busydialog handling
"""
self.enabled = True
def disable(self):
"""
Disables busydialog handling
"""
self.enabled = False
def show_busy(self):
"""
Increase busycounter and open busydialog if needed
"""
if not self.enabled:
return None
if self.busy == 0:
xbmc.executebuiltin("ActivateWindow(busydialog)")
self.busy += 1
def hide_busy(self):
"""
Decrease busycounter and close busydialog if needed
"""
if not self.enabled:
return None
self.busy = max(0, self.busy - 1)
if self.busy == 0:
xbmc.executebuiltin("Dialog.Close(busydialog)")
def set_busy(self, func):
"""
Decorator to show busy dialog while function is running
"""
@wraps(func)
def decorator(cls, *args, **kwargs):
self.show_busy()
result = None
try:
result = func(cls, *args, **kwargs)
except Exception:
utils.log(traceback.format_exc())
utils.notify("Error", "please contact add-on author")
finally:
self.hide_busy()
return result
return decorator
busyhandler = BusyHandler()
|
lgpl-2.1
|
Python
|
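A minimal usage sketch of the decorator API shown in the row above; this only runs inside Kodi, since `xbmcgui`/`xbmc` are Kodi built-ins, and the import path is assumed from the file layout:

# Hedged add-on sketch: the busydialog opens while fetch() runs and is closed
# again in the decorator's finally block, even if fetch() raises.
from kodi65.busyhandler import busyhandler

class Api(object):
    @busyhandler.set_busy
    def fetch(self):
        return [i * i for i in range(100000)]  # stand-in for real work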
767a50052895cf10386f01bab83941a2141c30f1
|
fix json test and add json from string test
|
Mappy/mapnik,mbrukman/mapnik,yiqingj/work,mapycz/python-mapnik,cjmayo/mapnik,mapycz/mapnik,mapnik/python-mapnik,stefanklug/mapnik,tomhughes/python-mapnik,qianwenming/mapnik,sebastic/python-mapnik,whuaegeanse/mapnik,pramsey/mapnik,yohanboniface/python-mapnik,whuaegeanse/mapnik,manz/python-mapnik,Airphrame/mapnik,Uli1/mapnik,stefanklug/mapnik,tomhughes/python-mapnik,qianwenming/mapnik,lightmare/mapnik,zerebubuth/mapnik,whuaegeanse/mapnik,strk/mapnik,mbrukman/mapnik,naturalatlas/mapnik,pramsey/mapnik,yiqingj/work,pnorman/mapnik,mapnik/mapnik,davenquinn/python-mapnik,mapnik/mapnik,mapycz/mapnik,kapouer/mapnik,rouault/mapnik,mapnik/python-mapnik,mbrukman/mapnik,qianwenming/mapnik,tomhughes/python-mapnik,Airphrame/mapnik,tomhughes/mapnik,mapycz/python-mapnik,qianwenming/mapnik,naturalatlas/mapnik,rouault/mapnik,stefanklug/mapnik,jwomeara/mapnik,lightmare/mapnik,Uli1/mapnik,Airphrame/mapnik,zerebubuth/mapnik,Mappy/mapnik,stefanklug/mapnik,naturalatlas/mapnik,rouault/mapnik,yiqingj/work,rouault/mapnik,pnorman/mapnik,davenquinn/python-mapnik,mapnik/mapnik,mapnik/mapnik,garnertb/python-mapnik,jwomeara/mapnik,tomhughes/mapnik,kapouer/mapnik,garnertb/python-mapnik,whuaegeanse/mapnik,cjmayo/mapnik,strk/mapnik,strk/mapnik,pnorman/mapnik,Uli1/mapnik,kapouer/mapnik,davenquinn/python-mapnik,jwomeara/mapnik,Uli1/mapnik,mbrukman/mapnik,tomhughes/mapnik,CartoDB/mapnik,naturalatlas/mapnik,Airphrame/mapnik,lightmare/mapnik,tomhughes/mapnik,mapnik/python-mapnik,jwomeara/mapnik,yiqingj/work,Mappy/mapnik,CartoDB/mapnik,yohanboniface/python-mapnik,manz/python-mapnik,yohanboniface/python-mapnik,cjmayo/mapnik,cjmayo/mapnik,CartoDB/mapnik,sebastic/python-mapnik,zerebubuth/mapnik,manz/python-mapnik,Mappy/mapnik,mapycz/mapnik,garnertb/python-mapnik,qianwenming/mapnik,pramsey/mapnik,lightmare/mapnik,sebastic/python-mapnik,kapouer/mapnik,strk/mapnik,pramsey/mapnik,pnorman/mapnik
|
tests/python_tests/datasource_test.py
|
tests/python_tests/datasource_test.py
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik2
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_field_listing():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
fields = lyr.datasource.fields()
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
def test_total_feature_count_shp():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 10)
def test_total_feature_count_json():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Ogr(file='../data/json/points.json',layer_by_index=0)
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 5)
def test_reading_json_from_string():
json = open('../data/json/points.json','r').read()
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Ogr(file=json,layer_by_index=0)
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 5)
def test_feature_envelope():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
for feat in features:
env = feat.envelope()
contains = lyr.envelope().contains(env)
eq_(contains, True)
intersects = lyr.envelope().contains(env)
eq_(intersects, True)
def test_feature_attributes():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
feat = features[0]
attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
eq_(feat.attributes, attrs)
eq_(lyr.datasource.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
eq_(lyr.datasource.field_types(),[float,int,str])
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik2
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_field_listing():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
fields = lyr.datasource.fields()
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
def test_total_feature_count_shp():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 10)
def test_total_feature_count_json():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Ogr(file='../data/json/points.json',layer_by_index=0)
features = lyr.datasource.all_features()
num_feats = len(features)
eq_(num_feats, 3)
def test_feature_envelope():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
for feat in features:
env = feat.envelope()
contains = lyr.envelope().contains(env)
eq_(contains, True)
intersects = lyr.envelope().contains(env)
eq_(intersects, True)
def test_feature_attributes():
lyr = mapnik2.Layer('test')
lyr.datasource = mapnik2.Shapefile(file='../data/shp/poly.shp')
features = lyr.datasource.all_features()
feat = features[0]
attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
eq_(feat.attributes, attrs)
eq_(lyr.datasource.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
eq_(lyr.datasource.field_types(),[float,int,str])
|
lgpl-2.1
|
Python
|
05855c934624c667053635a8ab8679c54426e49f
|
Rewrite the initialization of Release.eol_date.
|
django/djangoproject.com,django/djangoproject.com,django/djangoproject.com,nanuxbe/django,django/djangoproject.com,xavierdutreilh/djangoproject.com,xavierdutreilh/djangoproject.com,nanuxbe/django,nanuxbe/django,django/djangoproject.com,xavierdutreilh/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,nanuxbe/django
|
releases/migrations/0003_populate_release_eol_date.py
|
releases/migrations/0003_populate_release_eol_date.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
def set_eol_date(apps, schema_editor):
Release = apps.get_model('releases', 'Release')
# Set the EOL date of all releases to the date of the following release
# except for the final one in the 0 series and in each 1.x series.
releases = list(Release.objects.all().order_by('major', 'minor', 'micro',
'status', 'iteration'))
for previous, current in zip(releases[:-1], releases[1:]):
if current.major != previous.major:
continue
if current.major == 1 and previous.minor != current.minor:
continue
previous.eol_date = current.date
previous.save()
# Set the EOL date of final releases the 0 series and in each 1.x series.
for version, eol_date in [
('0.96.5', datetime.date(2008, 9, 3)), # 1.0 release
('1.0.4', datetime.date(2010, 5, 17)), # 1.2 release
('1.1.4', datetime.date(2011, 3, 23)), # 1.3 release
('1.2.7', datetime.date(2012, 3, 23)), # 1.4 release
('1.3.7', datetime.date(2013, 2, 26)), # 1.5 release
('1.4.22', datetime.date(2015, 10, 1)), # end of LTS support
('1.5.12', datetime.date(2014, 9, 2)), # 1.7 release
('1.6.11', datetime.date(2015, 4, 1)), # 1.8 release
# 1.7.10 and 1.8.5 are still supported at the time of writing.
]:
# This pattern ignores missing releases e.g. during tests.
Release.objects.filter(version=version).update(eol_date=eol_date)
def unset_eol_date(apps, schema_editor):
Release = apps.get_model('releases', 'Release')
Release.objects.update(eol_date=None)
class Migration(migrations.Migration):
dependencies = [
('releases', '0002_release_eol_date'),
]
operations = [
migrations.RunPython(set_eol_date, unset_eol_date),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
def set_eol_date(apps, schema_editor):
Release = apps.get_model('releases', 'Release')
# List of EOL dates for releases for which docs are published.
for version, eol_date in [
('1.4', datetime.date(2015, 10, 1)),
('1.5', datetime.date(2014, 9, 2)),
('1.6', datetime.date(2015, 4, 1)),
]:
Release.objects.filter(version=version).update(eol_date=eol_date)
def unset_eol_date(apps, schema_editor):
Release = apps.get_model('releases', 'Release')
Release.objects.update(eol_date=None)
class Migration(migrations.Migration):
dependencies = [
('releases', '0002_release_eol_date'),
]
operations = [
migrations.RunPython(set_eol_date, unset_eol_date),
]
|
bsd-3-clause
|
Python
|
ab93ea01dacc0fbd63fac91b1afcf5af1b711c2f
|
correct latest migration
|
mohrm/umklapp_site,mohrm/umklapp_site,mohrm/umklapp_site
|
umklapp/migrations/0009_teller_hasleft.py
|
umklapp/migrations/0009_teller_hasleft.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-31 20:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('umklapp', '0008_auto_20160528_2332'),
]
operations = [
migrations.AddField(
model_name='teller',
name='hasLeft',
field=models.BooleanField(default=False),
preserve_default=False,
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-31 19:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('umklapp', '0008_auto_20160528_2332'),
]
operations = [
migrations.AddField(
model_name='teller',
name='hasLeft',
field=models.BooleanField(default=True),
preserve_default=False,
),
]
|
mit
|
Python
|
c5bfd55147e7fb18264f601c34e180453974f55e
|
DEBUG messages deleted
|
avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf
|
vt_manager/src/python/agent/provisioning/ProvisioningDispatcher.py
|
vt_manager/src/python/agent/provisioning/ProvisioningDispatcher.py
|
'''
@author: msune
Provisioning dispatcher. Selects appropriate Driver for VT tech
'''
from communications.XmlRpcClient import XmlRpcClient
from utils.VmMutexStore import VmMutexStore
import threading
class ProvisioningDispatcher:
@staticmethod
def __getProvisioningDispatcher(vtype):
#Import of Dispatchers must go here to avoid a circular import dependency
from xen.provisioning.XenProvisioningDispatcher import XenProvisioningDispatcher
if vtype == "xen":
return XenProvisioningDispatcher
else:
raise Exception("Virtualization type not supported by the agent")
@staticmethod
def __dispatchAction(dispatcher,action,vm):
#Inventory
if action.type_ == "create":
return dispatcher.createVMfromImage(action.id,vm)
if action.type_ == "modify" :
return dispatcher.modifyVM(action.id,vm)
if action.type_ == "delete" :
return dispatcher.deleteVM(action.id,vm)
#Scheduling
if action.type_ == "start":
return dispatcher.startVM(action.id,vm)
if action.type_ == "reboot" :
return dispatcher.rebootVM(action.id,vm)
if action.type_ == "stop" :
return dispatcher.stopVM(action.id,vm)
if action.type_ == "hardStop" :
return dispatcher.hardStopVM(action.id,vm)
raise Exception("Unknown action type")
@staticmethod
def processProvisioning(provisioning):
for action in provisioning.action:
vm = action.server.virtual_machines[0]
try:
dispatcher = ProvisioningDispatcher.__getProvisioningDispatcher(vm.virtualization_type)
except Exception as e:
XmlRpcClient.sendAsyncProvisioningActionStatus(action.id,"FAILED",str(e))
print e
return
try:
#Acquire VM lock
VmMutexStore.lock(vm)
#Send async notification
XmlRpcClient.sendAsyncProvisioningActionStatus(action.id,"ONGOING","")
ProvisioningDispatcher.__dispatchAction(dispatcher,action,vm)
except Exception as e:
#TODO improve this trace
print e
raise e
finally:
#Release VM lock
VmMutexStore.unlock(vm)
##Abstract methods definition for ProvisioningDispatchers
#Inventory
@staticmethod
def createVMfromImage(id,vm):
raise Exception("Abstract method cannot be called")
@staticmethod
def modifyVM(id,vm):
raise Exception("Abstract method cannot be called")
@staticmethod
def deleteVM(id,vm):
raise Exception("Abstract method cannot be called")
#Scheduling
def startVM(id,vm):
raise Exception("Abstract method cannot be called")
def rebootVM(id,vm):
raise Exception("Abstract method cannot be called")
def stopVM(id,vm):
raise Exception("Abstract method cannot be called")
def hardStopVM(id,vm):
raise Exception("Abstract method cannot be called")
|
'''
@author: msune
Provisioning dispatcher. Selects appropriate Driver for VT tech
'''
from communications.XmlRpcClient import XmlRpcClient
from utils.VmMutexStore import VmMutexStore
import threading
class ProvisioningDispatcher:
@staticmethod
def __getProvisioningDispatcher(vtype):
#Import of Dispatchers must go here to avoid a circular import dependency
from xen.provisioning.XenProvisioningDispatcher import XenProvisioningDispatcher
if vtype == "xen":
return XenProvisioningDispatcher
else:
raise Exception("Virtualization type not supported by the agent")
@staticmethod
def __dispatchAction(dispatcher,action,vm):
#Inventory
if action.type_ == "create":
return dispatcher.createVMfromImage(action.id,vm)
if action.type_ == "modify" :
return dispatcher.modifyVM(action.id,vm)
if action.type_ == "delete" :
return dispatcher.deleteVM(action.id,vm)
#Scheduling
if action.type_ == "start":
return dispatcher.startVM(action.id,vm)
if action.type_ == "reboot" :
return dispatcher.rebootVM(action.id,vm)
if action.type_ == "stop" :
return dispatcher.stopVM(action.id,vm)
if action.type_ == "hardStop" :
return dispatcher.hardStopVM(action.id,vm)
raise Exception("Unknown action type")
@staticmethod
def processProvisioning(provisioning):
print "******************************************************************\n
LEODEBUG: CURRENT THREAD: "+str(threading.currentThread().get_ident())+"\n
*******************************************************************"
for action in provisioning.action:
vm = action.server.virtual_machines[0]
try:
dispatcher = ProvisioningDispatcher.__getProvisioningDispatcher(vm.virtualization_type)
except Exception as e:
XmlRpcClient.sendAsyncProvisioningActionStatus(action.id,"FAILED",str(e))
print e
return
try:
#Acquire VM lock
VmMutexStore.lock(vm)
#Send async notification
XmlRpcClient.sendAsyncProvisioningActionStatus(action.id,"ONGOING","")
ProvisioningDispatcher.__dispatchAction(dispatcher,action,vm)
except Exception as e:
#TODO improve this trace
print e
raise e
finally:
#Release VM lock
VmMutexStore.unlock(vm)
##Abstract methods definition for ProvisioningDispatchers
#Inventory
@staticmethod
def createVMfromImage(id,vm):
raise Exception("Abstract method cannot be called")
@staticmethod
def modifyVM(id,vm):
raise Exception("Abstract method cannot be called")
@staticmethod
def deleteVM(id,vm):
raise Exception("Abstract method cannot be called")
#Scheduling
def startVM(id,vm):
raise Exception("Abstract method cannot be called")
def rebootVM(id,vm):
raise Exception("Abstract method cannot be called")
def stopVM(id,vm):
raise Exception("Abstract method cannot be called")
def hardStopVM(id,vm):
raise Exception("Abstract method cannot be called")
|
bsd-3-clause
|
Python
|
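A side note on the record above: the dispatcher declares its abstract inventory and scheduling methods by raising Exception at call time. A minimal sketch of the same contract using Python's abc module (class and method names are invented; the Python 2 metaclass spelling matches the record's era):

from abc import ABCMeta, abstractmethod
class BaseDispatcher(object):
    # Instantiating a subclass that misses an override fails early
    # with TypeError, instead of raising at call time.
    __metaclass__ = ABCMeta
    @abstractmethod
    def startVM(self, vm_id, vm):
        pass
class XenDispatcher(BaseDispatcher):
    def startVM(self, vm_id, vm):
        return "started %s" % vm_id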
b78165d68e1e01e722b746e926a36b5680debdfa
|
remove email filter and refactor
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
web/impact/impact/v1/views/mentor_program_office_hour_list_view.py
|
web/impact/impact/v1/views/mentor_program_office_hour_list_view.py
|
# MIT License
# Copyright (c) 2019 MassChallenge, Inc.
from impact.v1.views.base_list_view import BaseListView
from impact.v1.helpers import (
MentorProgramOfficeHourHelper,
)
class MentorProgramOfficeHourListView(BaseListView):
view_name = "office_hour"
helper_class = MentorProgramOfficeHourHelper
def filter(self, queryset):
allowed_params = ['mentor_id', 'finalist_id']
param_items = self.request.query_params.items()
if not param_items:
return queryset
filter_values = {
key: value for (key, value) in param_items
if key in allowed_params}
return queryset.filter(**filter_values)
|
# MIT License
# Copyright (c) 2019 MassChallenge, Inc.
from impact.v1.views.base_list_view import BaseListView
from impact.v1.helpers import (
MentorProgramOfficeHourHelper,
)
LOOKUPS = {
'mentor_email': 'mentor__email__icontains',
'mentor_id': 'mentor_id',
'finalist_email': 'finalist__email__icontains',
'finalist_id': 'finalist_id',
}
class MentorProgramOfficeHourListView(BaseListView):
view_name = "office_hour"
helper_class = MentorProgramOfficeHourHelper
def filter(self, queryset):
if self.request.query_params.keys():
filter_values = self._get_filter()
return queryset.filter(**filter_values)
return queryset
def _get_filter(self):
query_params = self.request.query_params.dict()
query_filter = {
LOOKUPS[key]: value for key, value in query_params.items()
if key in LOOKUPS.keys()
}
return query_filter
|
mit
|
Python
|
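A side note on the filtering idiom in the record above: collecting whitelisted query parameters into a dict and expanding it into queryset.filter(**kwargs) is a common Django pattern. A minimal sketch with invented names:

ALLOWED_PARAMS = {'mentor_id', 'finalist_id'}
def filter_by_params(queryset, query_params):
    # Keep only recognized keys; unknown parameters are ignored.
    filters = {k: v for k, v in query_params.items() if k in ALLOWED_PARAMS}
    return queryset.filter(**filters) if filters else queryset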
05b7f56bdfa600e72d4cca5a4c51324ff3c94d4d
|
Update file distancematrixtest.py
|
ajnebro/pyMSAScoring
|
pymsascoring/distancematrix/test/distancematrixtest.py
|
pymsascoring/distancematrix/test/distancematrixtest.py
|
import unittest
from pymsascoring.distancematrix.distancematrix import DistanceMatrix
__author__ = "Antonio J. Nebro"
class TestMethods(unittest.TestCase):
def setUp(self):
pass
def test_should_default_gap_penalty_be_minus_eight(self):
matrix = DistanceMatrix()
self.assertEqual(-8, matrix.get_gap_penalty())
def test_should_constructor__modify_the_gap_penalty(self):
matrix = DistanceMatrix(-10)
self.assertEqual(-10, matrix.get_gap_penalty())
def test_should_get_distance_return_the_gap_penalty_if_a_char_is_a_gap(self):
matrix = DistanceMatrix()
self.assertEqual(matrix.get_gap_penalty(), matrix.get_distance('A', '-'))
self.assertEqual(matrix.get_gap_penalty(), matrix.get_distance('-', 'B'))
def test_should_get_distance_return_one_if_the_two_chars_are_gaps(self):
matrix = DistanceMatrix()
self.assertEqual(1, matrix.get_distance('-', '-'))
if __name__ == '__main__':
unittest.main()
|
import unittest
__author__ = "Antonio J. Nebro"
class TestMethods(unittest.TestCase):
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
bd32faf934bd26957a16a0aa2ac092c5759d2342
|
annotate new test
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
python/ql/test/experimental/dataflow/fieldflow/test.py
|
python/ql/test/experimental/dataflow/fieldflow/test.py
|
# These are defined so that we can evaluate the test code.
NONSOURCE = "not a source"
SOURCE = "source"
def is_source(x):
return x == "source" or x == b"source" or x == 42 or x == 42.0 or x == 42j
def SINK(x):
if is_source(x):
print("OK")
else:
print("Unexpected flow", x)
def SINK_F(x):
if is_source(x):
print("Unexpected flow", x)
else:
print("OK")
# Preamble
class MyObj(object):
def __init__(self, foo):
self.foo = foo
def setFoo(self, foo):
self.foo = foo
class NestedObj(object):
def __init__(self):
self.obj = MyObj("OK")
def getObj(self):
return self.obj
def setFoo(obj, x):
SINK_F(obj.foo)
obj.foo = x
def test_example1():
myobj = MyObj("OK")
setFoo(myobj, SOURCE)
SINK(myobj.foo)
def test_example1_method():
myobj = MyObj("OK")
myobj.setFoo(SOURCE)
SINK(myobj.foo) # Flow not found
def test_example2():
x = SOURCE
a = NestedObj()
a.obj.foo = x
SINK(a.obj.foo)
def test_example2_method():
x = SOURCE
a = NestedObj()
a.getObj().foo = x
SINK(a.obj.foo) # Flow missing
def test_example3():
obj = MyObj(SOURCE)
SINK(obj.foo)
def test_example3_kw():
obj = MyObj(foo=SOURCE)
SINK(obj.foo)
def fields_with_local_flow(x):
obj = MyObj(x)
a = obj.foo
return a
def test_fields():
SINK(fields_with_local_flow(SOURCE))
|
# These are defined so that we can evaluate the test code.
NONSOURCE = "not a source"
SOURCE = "source"
def is_source(x):
return x == "source" or x == b"source" or x == 42 or x == 42.0 or x == 42j
def SINK(x):
if is_source(x):
print("OK")
else:
print("Unexpected flow", x)
def SINK_F(x):
if is_source(x):
print("Unexpected flow", x)
else:
print("OK")
# Preamble
class MyObj(object):
def __init__(self, foo):
self.foo = foo
def setFoo(self, foo):
self.foo = foo
class NestedObj(object):
def __init__(self):
self.obj = MyObj("OK")
def getObj(self):
return self.obj
def setFoo(obj, x):
SINK_F(obj.foo)
obj.foo = x
def test_example1():
myobj = MyObj("OK")
setFoo(myobj, SOURCE)
SINK(myobj.foo)
def test_example1_method():
myobj = MyObj("OK")
myobj.setFoo(SOURCE)
SINK(myobj.foo)
def test_example2():
x = SOURCE
a = NestedObj()
a.obj.foo = x
SINK(a.obj.foo)
def test_example2_method():
x = SOURCE
a = NestedObj()
a.getObj().foo = x
SINK(a.obj.foo) # Flow missing
def test_example3():
obj = MyObj(SOURCE)
SINK(obj.foo)
def test_example3_kw():
obj = MyObj(foo=SOURCE)
SINK(obj.foo)
def fields_with_local_flow(x):
obj = MyObj(x)
a = obj.foo
return a
def test_fields():
SINK(fields_with_local_flow(SOURCE))
|
mit
|
Python
|
091ebd935c6145ac233c03bedeb52c65634939f4
|
Include the version-detecting code to allow PyXML to override the "standard" xml package. Require at least PyXML 0.6.1.
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Lib/xml/__init__.py
|
Lib/xml/__init__.py
|
"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$"[1:-1].split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplus is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
|
"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
|
mit
|
Python
|
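A side note on the mechanism in the record above: assigning into sys.modules changes which object later imports of a name resolve to, which is how PyXML can shadow the stdlib package. A self-contained sketch with an invented module name:

import sys
import types
replacement = types.ModuleType("demo_pkg")
replacement.version_info = (1, 0)
sys.modules["demo_pkg"] = replacement
import demo_pkg  # resolves to the replacement object
assert demo_pkg.version_info == (1, 0)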
3a27568211c07cf614aa9865a2f08d2a9b9bfb71
|
Return errors in json only
|
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
|
dinosaurs/views.py
|
dinosaurs/views.py
|
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def write_error(self, status_code, **kwargs):
self.finish({
"code": status_code,
"message": self._reason,
})
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
try:
ret, passwd = api.create_email(connection, email)
except api.YandexException as e:
if e.message != 'occupied':
raise
self.write({})
raise tornado.web.HTTPError(http.FORBIDDEN)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
ret, passwd = api.create_email(connection, email)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
|
mit
|
Python
|
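A side note on the record above: overriding RequestHandler.write_error is what makes raised HTTPErrors come back as JSON rather than Tornado's default HTML error page. A minimal sketch (handler name and status code are invented):

import tornado.web
class JsonErrorHandler(tornado.web.RequestHandler):
    def write_error(self, status_code, **kwargs):
        # finish() with a dict is serialized as JSON by Tornado.
        self.finish({"code": status_code, "message": self._reason})
    def get(self):
        raise tornado.web.HTTPError(400)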
c9f25b7fb983c3d635ab7f13f350a53422059a8c
|
Handle errors in reloaded code
|
edne/pineal
|
cpp/pineal-run.py
|
cpp/pineal-run.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
from time import sleep
from sys import argv
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import hy
from pineal.hy_utils import run_hy_code
logger = logging.getLogger("pineal-run")
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
def run_code(ns, history):
"Run last code in the history, if available"
if history:
try:
run_hy_code(history[-1], ns)
except Exception as e:
logger.info("Error evaluating code")
logger.error(e)
history.pop()
run_code(ns, history)
else:
logger.error("Empty history, there is no valid code")
def update_file(file_name, ns, history):
"Update running code, saving in the history"
logger.info("Updating file")
with open(file_name) as f:
code = f.read()
history.append(code)
run_code(ns, history)
def watch_file(file_name, action, *args, **kwargs):
"Return a watchdog observer, it will call the action callback"
def on_modified(event):
"File-changed event"
logger.info("%s changed", file_name)
if event.src_path == file_name:
action(file_name, *args, **kwargs)
handler = FileSystemEventHandler()
handler.on_modified = on_modified
observer = Observer()
base_path = os.path.split(file_name)[0]
observer.schedule(handler, path=base_path)
observer.start()
return observer
def main(file_name):
"Main function"
ns = {} # namespace
history = [] # handle old versions of code
update_file(file_name, ns, history)
watcher = watch_file(file_name, update_file, ns, history)
try:
while True:
try:
ns["loop"]()
except Exception as e:
logger.error(e)
history.pop()
run_code(ns, history)
sleep(1.0/120)
except KeyboardInterrupt:
watcher.stop()
watcher.join()
if __name__ == "__main__":
if argv[1:]:
main(argv[1])
else:
print("Usage: ", argv[0], "filename")
|
#!/usr/bin/env python
from __future__ import print_function
import os
from time import sleep
from sys import argv
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import hy
from pineal.hy_utils import run_hy_code
def update_file(file_name, ns, history):
"Update running code, saving in the history"
print("Updating file") # TODO logging
with open(file_name) as f:
code = f.read()
history.append(code)
try:
run_hy_code(code, ns)
except e:
print(e)
history.pop() # TODO test and debug this
def watch_file(file_name, action, *args, **kwargs):
"Return a watchdog observer, it will call the action callback"
def on_modified(event):
"File-changed event"
print("File changed") # TODO logging
if event.src_path == file_name:
action(file_name, *args, **kwargs)
handler = FileSystemEventHandler()
handler.on_modified = on_modified
observer = Observer()
base_path = os.path.split(file_name)[0]
observer.schedule(handler, path=base_path)
observer.start()
return observer
def main(file_name):
"Main function"
ns = {} # namespace
history = [] # handle old versions of code
update_file(file_name, ns, history)
watcher = watch_file(file_name, update_file, ns, history)
try:
while True:
ns["loop"]()
sleep(1.0/120)
except KeyboardInterrupt:
watcher.stop()
watcher.join()
if __name__ == "__main__":
if argv[1:]:
main(argv[1])
else:
print("Usage: ", argv[0], "filename")
|
agpl-3.0
|
Python
|
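A side note on the recovery strategy in the record above: keeping every loaded source version in a history list allows rolling back to the last version that evaluates cleanly. A framework-free sketch of that loop, with invented names and plain exec standing in for the project's Hy evaluator:

def run_last_good(ns, history):
    # Try the newest source first; drop versions that raise.
    while history:
        try:
            exec(history[-1], ns)
            return True
        except Exception as e:
            print("dropping broken version:", e)
            history.pop()
    return False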
f574e19b14ff861c45f6c66c64a2570bdb0e3a3c
|
Apply change of file name
|
tosh1ki/NicoCrawler
|
crawl_comments.py
|
crawl_comments.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = '''
Crawl comment from nicovideo.jp
Usage:
crawl_comments.py [--sqlite <sqlite>] [--csv <csv>]
Options:
--sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3]
--csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv]
'''
from docopt import docopt
from nicocrawler.nicocrawler import NicoCrawler
if __name__ == '__main__':
# Get the command-line arguments
args = docopt(__doc__)
sqlite_path = args['--sqlite']
csv_path = args['--csv']
ncrawler = NicoCrawler()
ncrawler.connect_sqlite(sqlite_path)
url = 'http://ch.nicovideo.jp/2016winter_anime'
df = ncrawler.get_all_video_url_of_season(url)
ncrawler.initialize_csv_from_db(csv_path)
# # Fetch the videos ranked 1-300 in the daily ranking
# url = 'http://www.nicovideo.jp/ranking/fav/daily/all'
# ncrawler.initialize_csv_from_url(url, csv_path, max_page=3)
# ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = '''
Crawl comment from nicovideo.jp
Usage:
main_crawl.py [--sqlite <sqlite>] [--csv <csv>]
Options:
--sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3]
--csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv]
'''
from docopt import docopt
from nicocrawler.nicocrawler import NicoCrawler
if __name__ == '__main__':
# Get the command-line arguments
args = docopt(__doc__)
sqlite_path = args['--sqlite']
csv_path = args['--csv']
ncrawler = NicoCrawler()
ncrawler.connect_sqlite(sqlite_path)
url = 'http://ch.nicovideo.jp/2016winter_anime'
df = ncrawler.get_all_video_url_of_season(url)
ncrawler.initialize_csv_from_db(csv_path)
# # Fetch the videos ranked 1-300 in the daily ranking
# url = 'http://www.nicovideo.jp/ranking/fav/daily/all'
# ncrawler.initialize_csv_from_url(url, csv_path, max_page=3)
# ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
|
mit
|
Python
|
3bc4fa33c3ec9272fed565260677518dcf5957fe
|
change version to 0.10.0.dev0
|
espdev/csaps
|
csaps/_version.py
|
csaps/_version.py
|
# -*- coding: utf-8 -*-
__version__ = '0.10.0.dev0'
|
# -*- coding: utf-8 -*-
__version__ = '0.9.0'
|
mit
|
Python
|
3bb9c0aacdfff372e41d7a8d4c43e71535bff930
|
Remove perf regression in not yet finished size estimation code
|
amitsela/incubator-beam,wangyum/beam,rangadi/beam,jbonofre/incubator-beam,amarouni/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,apache/beam,lukecwik/incubator-beam,peihe/incubator-beam,apache/beam,lukecwik/incubator-beam,sammcveety/incubator-beam,sammcveety/incubator-beam,jbonofre/beam,jasonkuster/beam,eljefe6a/incubator-beam,chamikaramj/beam,vikkyrk/incubator-beam,mxm/incubator-beam,RyanSkraba/beam,manuzhang/beam,RyanSkraba/beam,chamikaramj/beam,apache/beam,rangadi/incubator-beam,yk5/beam,vikkyrk/incubator-beam,mxm/incubator-beam,chamikaramj/beam,jasonkuster/beam,eljefe6a/incubator-beam,tgroh/incubator-beam,manuzhang/incubator-beam,chamikaramj/beam,jbonofre/incubator-beam,markflyhigh/incubator-beam,ravwojdyla/incubator-beam,eljefe6a/incubator-beam,chamikaramj/beam,apache/beam,charlesccychen/incubator-beam,apache/beam,apache/beam,rangadi/beam,rangadi/beam,peihe/incubator-beam,apache/beam,robertwb/incubator-beam,robertwb/incubator-beam,iemejia/incubator-beam,vikkyrk/incubator-beam,amarouni/incubator-beam,amitsela/beam,markflyhigh/incubator-beam,sammcveety/incubator-beam,lukecwik/incubator-beam,charlesccychen/beam,staslev/beam,wangyum/beam,tgroh/beam,RyanSkraba/beam,wtanaka/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,charlesccychen/beam,yk5/beam,iemejia/incubator-beam,manuzhang/beam,xsm110/Apache-Beam,amitsela/beam,RyanSkraba/beam,yk5/beam,chamikaramj/beam,robertwb/incubator-beam,charlesccychen/beam,rangadi/incubator-beam,RyanSkraba/beam,markflyhigh/incubator-beam,wangyum/beam,dhalperi/incubator-beam,RyanSkraba/beam,dhalperi/beam,lukecwik/incubator-beam,amitsela/beam,tgroh/beam,chamikaramj/beam,charlesccychen/beam,lukecwik/incubator-beam,apache/beam,RyanSkraba/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,jasonkuster/incubator-beam,markflyhigh/incubator-beam,jbonofre/beam,apache/beam,ravwojdyla/incubator-beam,wtanaka/beam,chamikaramj/beam,lukecwik/incubator-beam,charlesccychen/beam,tgroh/incubator-beam,jasonkuster/incubator-beam,charlesccychen/beam,charlesccychen/incubator-beam,wangyum/beam,dhalperi/incubator-beam,staslev/beam,apache/beam,chamikaramj/incubator-beam,charlesccychen/incubator-beam,jasonkuster/beam,jbonofre/beam,markflyhigh/incubator-beam,rangadi/beam,robertwb/incubator-beam,rangadi/beam,staslev/incubator-beam,robertwb/incubator-beam,amitsela/incubator-beam,manuzhang/beam,manuzhang/incubator-beam,jbonofre/beam,lukecwik/incubator-beam,rangadi/beam,ravwojdyla/incubator-beam,tgroh/beam,chamikaramj/incubator-beam,dhalperi/beam,lukecwik/incubator-beam,xsm110/Apache-Beam,apache/beam,robertwb/incubator-beam,xsm110/Apache-Beam,robertwb/incubator-beam,rangadi/incubator-beam,tgroh/beam,charlesccychen/beam,wtanaka/beam,robertwb/incubator-beam,rangadi/beam,chamikaramj/beam,peihe/incubator-beam,staslev/beam,dhalperi/beam,staslev/incubator-beam
|
sdks/python/google/cloud/dataflow/worker/opcounters.py
|
sdks/python/google/cloud/dataflow/worker/opcounters.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Counters collect the progress of the Worker for reporting to the service."""
from __future__ import absolute_import
from google.cloud.dataflow.utils.counters import Counter
class OperationCounters(object):
"""The set of basic counters to attach to an Operation."""
def __init__(self, counter_factory, step_name, coder, output_index):
self.element_counter = counter_factory.get_counter(
'%s-out%d-ElementCount' % (step_name, output_index), Counter.SUM)
self.mean_byte_counter = counter_factory.get_counter(
'%s-out%d-MeanByteCount' % (step_name, output_index), Counter.MEAN)
self.coder = coder
def update_from(self, windowed_value, coder=None):
"""Add one value to this counter."""
self.element_counter.update(1)
# TODO(silviuc): Implement estimated size sampling.
# TODO(gildea):
# Actually compute the encoded size of this value.
# In spirit, something like this:
# if coder is None:
# coder = self.coder
# coder.store_estimated_size(windowed_value, byte_size_accumulator)
# but will need to do sampling.
def update_collect(self):
"""Collects the accumulated size estimates.
Now that the element has been processed, we ask our accumulator
for the total and store the result in a counter.
"""
# TODO(silviuc): Implement estimated size sampling.
pass
def __str__(self):
return '<%s [%s]>' % (self.__class__.__name__,
', '.join([str(x) for x in self.__iter__()]))
def __repr__(self):
return '<%s %s at %s>' % (self.__class__.__name__,
[x for x in self.__iter__()], hex(id(self)))
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Counters collect the progress of the Worker for reporting to the service."""
from __future__ import absolute_import
from google.cloud.dataflow.utils.counters import Accumulator
from google.cloud.dataflow.utils.counters import Counter
class OperationCounters(object):
"""The set of basic counters to attach to an Operation."""
def __init__(self, counter_factory, step_name, coder, output_index):
self.element_counter = counter_factory.get_counter(
'%s-out%d-ElementCount' % (step_name, output_index), Counter.SUM)
self.mean_byte_counter = counter_factory.get_counter(
'%s-out%d-MeanByteCount' % (step_name, output_index), Counter.MEAN)
self.coder = coder
self._active_accumulators = []
def update_from(self, windowed_value, coder=None):
"""Add one value to this counter."""
self.element_counter.update(1)
byte_size_accumulator = Accumulator(self.mean_byte_counter.name)
self._active_accumulators.append(byte_size_accumulator)
# TODO(gildea):
# Actually compute the encoded size of this value.
# In spirit, something like this:
# if coder is None:
# coder = self.coder
# coder.store_estimated_size(windowed_value, byte_size_accumulator)
# but will need to do sampling.
def update_collect(self):
"""Collects the accumulated size estimates.
Now that the element has been processed, we ask our accumulator
for the total and store the result in a counter.
"""
for pending in self._active_accumulators:
self.mean_byte_counter.update(pending.total)
self._active_accumulators = []
def __str__(self):
return '<%s [%s]>' % (self.__class__.__name__,
', '.join([str(x) for x in self.__iter__()]))
def __repr__(self):
return '<%s %s at %s>' % (self.__class__.__name__,
[x for x in self.__iter__()], hex(id(self)))
|
apache-2.0
|
Python
|
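A side note on the record above: the removed per-element Accumulator objects were the overhead; a mean counter only needs a running sum and a count. A minimal sketch of that idea, unrelated to the actual Dataflow Counter API:

class MeanCounter(object):
    def __init__(self):
        self.total = 0.0
        self.count = 0
    def update(self, value):
        self.total += value
        self.count += 1
    def value(self):
        # Mean of all updates so far; 0 when nothing was recorded.
        return self.total / self.count if self.count else 0.0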
aab7c01c94088594258e33e3074f76d8735b8c2e
|
Add default config and config schema
|
swak/mopidy,quartz55/mopidy,liamw9534/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,vrs01/mopidy,vrs01/mopidy,pacificIT/mopidy,jodal/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,bencevans/mopidy,abarisain/mopidy,vrs01/mopidy,ZenithDK/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,bencevans/mopidy,ali/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,bencevans/mopidy,bacontext/mopidy,rawdlite/mopidy,vrs01/mopidy,jmarsik/mopidy,quartz55/mopidy,jcass77/mopidy,diandiankan/mopidy,rawdlite/mopidy,jcass77/mopidy,kingosticks/mopidy,dbrgn/mopidy,adamcik/mopidy,jmarsik/mopidy,pacificIT/mopidy,abarisain/mopidy,jcass77/mopidy,quartz55/mopidy,dbrgn/mopidy,priestd09/mopidy,hkariti/mopidy,kingosticks/mopidy,mokieyue/mopidy,mokieyue/mopidy,dbrgn/mopidy,diandiankan/mopidy,jmarsik/mopidy,adamcik/mopidy,jodal/mopidy,hkariti/mopidy,mokieyue/mopidy,tkem/mopidy,ali/mopidy,mopidy/mopidy,dbrgn/mopidy,tkem/mopidy,woutervanwijk/mopidy,rawdlite/mopidy,mopidy/mopidy,quartz55/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,priestd09/mopidy,tkem/mopidy,jodal/mopidy,adamcik/mopidy,mopidy/mopidy,ZenithDK/mopidy,priestd09/mopidy,diandiankan/mopidy,hkariti/mopidy,jmarsik/mopidy,bencevans/mopidy,bacontext/mopidy,bacontext/mopidy,pacificIT/mopidy,pacificIT/mopidy,swak/mopidy,diandiankan/mopidy,ali/mopidy,tkem/mopidy,ZenithDK/mopidy,swak/mopidy,glogiotatidis/mopidy,swak/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,ali/mopidy
|
mopidy/frontends/mpd/__init__.py
|
mopidy/frontends/mpd/__init__.py
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
from mopidy.utils import config, formatting
default_config = """
[ext.mpd]
# If the MPD extension should be enabled or not
enabled = true
# Which address the MPD server should bind to
#
# 127.0.0.1
# Listens only on the IPv4 loopback interface
# ::1
# Listens only on the IPv6 loopback interface
# 0.0.0.0
# Listens on all IPv4 interfaces
# ::
# Listens on all interfaces, both IPv4 and IPv6
hostname = 127.0.0.1
# Which TCP port the MPD server should listen to
port = 6600
# The password required for connecting to the MPD server
password =
# The maximum number of concurrent connections the MPD server will accept
max_connections = 20
# Number of seconds an MPD client can stay inactive before the connection is
# closed by the server
connection_timeout = 60
"""
__doc__ = """The MPD server frontend.
MPD stands for Music Player Daemon. MPD is an independent project and server.
Mopidy implements the MPD protocol, and is thus compatible with clients for the
original MPD server.
**Dependencies:**
- None
**Default config:**
.. code-block:: ini
%(config)s
**Usage:**
The frontend is enabled by default.
**Limitations:**
This is a non exhaustive list of MPD features that Mopidy doesn't support.
Items on this list will probably not be supported in the near future.
- Toggling of audio outputs is not supported
- Channels for client-to-client communication are not supported
- Stickers are not supported
- Crossfade is not supported
- Replay gain is not supported
- ``count`` does not provide any statistics
- ``stats`` does not provide any statistics
- ``list`` does not support listing tracks by genre
- ``decoders`` does not provide information about available decoders
The following items are currently not supported, but should be added in the
near future:
- Modifying stored playlists is not supported
- ``tagtypes`` is not supported
- Browsing the file system is not supported
- Live update of the music database is not supported
""" % {'config': formatting.indent(default_config)}
class Extension(ext.Extension):
name = 'Mopidy-MPD'
version = mopidy.__version__
def get_default_config(self):
return default_config
def get_config_schema(self):
schema = config.ExtensionConfigSchema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
schema['password'] = config.String(optional=True, secret=True)
schema['max_connections'] = config.Integer(minimum=1)
schema['connection_timeout'] = config.Integer(minimum=1)
return schema
def validate_environment(self):
pass
def get_frontend_classes(self):
from .actor import MpdFrontend
return [MpdFrontend]
|
from __future__ import unicode_literals
import mopidy
from mopidy import ext
__doc__ = """The MPD server frontend.
MPD stands for Music Player Daemon. MPD is an independent project and server.
Mopidy implements the MPD protocol, and is thus compatible with clients for the
original MPD server.
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
- :attr:`mopidy.settings.MPD_SERVER_PASSWORD`
**Usage:**
The frontend is enabled by default.
**Limitations:**
This is a non exhaustive list of MPD features that Mopidy doesn't support.
Items on this list will probably not be supported in the near future.
- Toggling of audio outputs is not supported
- Channels for client-to-client communication are not supported
- Stickers are not supported
- Crossfade is not supported
- Replay gain is not supported
- ``count`` does not provide any statistics
- ``stats`` does not provide any statistics
- ``list`` does not support listing tracks by genre
- ``decoders`` does not provide information about available decoders
The following items are currently not supported, but should be added in the
near future:
- Modifying stored playlists is not supported
- ``tagtypes`` is not supported
- Browsing the file system is not supported
- Live update of the music database is not supported
"""
class Extension(ext.Extension):
name = 'Mopidy-MPD'
version = mopidy.__version__
def get_default_config(self):
return '[ext.mpd]'
def validate_config(self, config):
pass
def validate_environment(self):
pass
def get_frontend_classes(self):
from .actor import MpdFrontend
return [MpdFrontend]
|
apache-2.0
|
Python
|
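A side note on the record above: the ini-format default config is paired with a typed schema that validates values at load time. A stdlib-only sketch of the same idea (section and key names copied from the record, the range checks invented):

import configparser
DEFAULT = "[ext.mpd]\nhostname = 127.0.0.1\nport = 6600\nmax_connections = 20\n"
parser = configparser.ConfigParser()
parser.read_string(DEFAULT)
port = parser.getint("ext.mpd", "port")
max_connections = parser.getint("ext.mpd", "max_connections")
assert 0 < port < 65536 and max_connections >= 1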
317926c18ac2e139d2018acd767d10b4f53428f3
|
Remove unneeded post method from CreateEnvProfile view
|
ezPy-co/ezpy,alibulota/Package_Installer,ezPy-co/ezpy,alibulota/Package_Installer
|
installer/installer_config/views.py
|
installer/installer_config/views.py
|
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
|
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = form_class(request.POST)
if form.is_valid():
config_profile = form.save(commit=False)
config_profile.user = request.user
config_profile.save()
return HttpResponseRedirect(reverse('profile:profile'))
return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
# import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
|
mit
|
Python
|
c24dbc2d4d8b59a62a68f326edb350b3c633ea25
|
Change the comment of InterleavingMethod.evaluate
|
mpkato/interleaving
|
interleaving/interleaving_method.py
|
interleaving/interleaving_method.py
|
class InterleavingMethod(object):
'''
Interleaving
'''
def interleave(self, k, a, b):
'''
k: the maximum length of resultant interleaving
a: a list of document IDs
b: a list of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def multileave(self, k, *lists):
'''
k: the maximum length of resultant multileaving
*lists: lists of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def evaluate(self, ranking, clicks):
'''
ranking: an instance of Ranking generated by Balanced.interleave
clicks: a list of indices clicked by a user
Return a list of pairs of ranker indices
in which element (i, j) indicates i won j.
e.g. a result [(1, 0), (2, 1), (2, 0)] indicates
ranker 1 won ranker 0, and ranker 2 won ranker 0 as well as ranker 1.
'''
raise NotImplementedError()
|
class InterleavingMethod(object):
'''
Interleaving
'''
def interleave(self, k, a, b):
'''
k: the maximum length of resultant interleaving
a: a list of document IDs
b: a list of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def multileave(self, k, *lists):
'''
k: the maximum length of resultant multileaving
*lists: lists of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def evaluate(self, ranking, clicks):
'''
ranking: an instance of Ranking generated by Balanced.interleave
clicks: a list of indices clicked by a user
Return one of the following tuples:
- (1, 0): Ranking 'a' won
- (0, 1): Ranking 'b' won
- (0, 0): Tie
'''
raise NotImplementedError()
|
mit
|
Python
|
e94af78bbeae26933d987494e628b18e201f8da2
|
fix logger error message
|
uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server
|
spotseeker_server/management/commands/sync_techloan.py
|
spotseeker_server/management/commands/sync_techloan.py
|
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from schema import Schema
from .techloan.techloan import Techloan
from .techloan.spotseeker import Spots
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync techloan data from the cte"
_settings_scheme = Schema({
'server_host': str,
'oauth_key': str,
'oauth_secret': str,
'oauth_user': str,
})
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **options):
try:
self._settings_scheme.validate(
settings.SPOTSEEKER_TECHLOAN_UPDATER)
except Exception as ex:
logger.error(f"Settings misconfigured: {str(ex)}")
return
techloan = self.get_techloan()
spots = self.get_spots()
self.sync_techloan_to_spots(techloan, spots)
def get_techloan(self):
return Techloan.from_cte_api()
def get_spots(self):
return Spots.from_spotseeker_server(
settings.SPOTSEEKER_TECHLOAN_UPDATER)
def sync_techloan_to_spots(self, techloan, spots):
spots.sync_with_techloan(techloan)
spots.upload_data()
|
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from schema import Schema
from .techloan.techloan import Techloan
from .techloan.spotseeker import Spots
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Sync techloan data from the cte"
_settings_scheme = Schema({
'server_host': str,
'oauth_key': str,
'oauth_secret': str,
'oauth_user': str,
})
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **options):
try:
self._settings_scheme.validate(
settings.SPOTSEEKER_TECHLOAN_UPDATER)
except Exception as ex:
logger.error("Settings misconfigured: ", ex)
return
techloan = self.get_techloan()
spots = self.get_spots()
self.sync_techloan_to_spots(techloan, spots)
def get_techloan(self):
return Techloan.from_cte_api()
def get_spots(self):
return Spots.from_spotseeker_server(
settings.SPOTSEEKER_TECHLOAN_UPDATER)
def sync_techloan_to_spots(self, techloan, spots):
spots.sync_with_techloan(techloan)
spots.upload_data()
|
apache-2.0
|
Python
|
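A side note on the fix in the record above: the stdlib logger treats extra positional arguments as lazy %-format arguments, so logger.error("Settings misconfigured: ", ex) never interpolates the exception; with no %s placeholder the formatting step fails and logging reports an internal error instead of the message. A small illustration of the two working forms:

import logging
logging.basicConfig(level=logging.ERROR)
log = logging.getLogger("demo")
err = ValueError("bad key")
log.error("Settings misconfigured: %s", err)   # lazy %-formatting, correct
log.error(f"Settings misconfigured: {err}")    # eager f-string, as in the commit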
a9bcbe8bf69403dbf7780843fe362cf8e1f02c95
|
update tree topo
|
TakeshiTseng/SDN-Work,TakeshiTseng/SDN-Work,TakeshiTseng/SDN-Work,TakeshiTseng/SDN-Work
|
mininet/tree/tree.py
|
mininet/tree/tree.py
|
#!/usr/bin/env python
from mininet.cli import CLI
from mininet.link import Link
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.term import makeTerm
from functools import partial
def ofp_version(switch, protocols):
protocols_str = ','.join(protocols)
command = 'ovs-vsctl set Bridge %s protocols=%s' % (switch, protocols_str)
switch.cmd(command.split(' '))
if '__main__' == __name__:
net = Mininet(controller=partial( RemoteController, ip='10.42.0.27', port=6633 ))
c0 = net.addController('c0')
s1 = net.addSwitch('s1')
s2 = net.addSwitch('s2')
s3 = net.addSwitch('s3')
s4 = net.addSwitch('s4')
s5 = net.addSwitch('s5')
s6 = net.addSwitch('s6')
h1 = net.addHost('h1')
h2 = net.addHost('h2')
h3 = net.addHost('h3')
h4 = net.addHost('h4')
net.addLink(s1, h1)
net.addLink(s2, h2)
net.addLink(s5, h3)
net.addLink(s6, h4)
net.addLink(s1, s2)
net.addLink(s2, s3)
net.addLink(s2, s4)
net.addLink(s4, s5)
net.addLink(s4, s6)
net.build()
c0.start()
s1.start([c0])
s2.start([c0])
s3.start([c0])
s4.start([c0])
s5.start([c0])
s6.start([c0])
ofp_version(s1, ['OpenFlow13'])
ofp_version(s2, ['OpenFlow13'])
ofp_version(s3, ['OpenFlow13'])
ofp_version(s4, ['OpenFlow13'])
ofp_version(s5, ['OpenFlow13'])
ofp_version(s6, ['OpenFlow13'])
CLI(net)
net.stop()
|
#!/usr/bin/env python
from mininet.cli import CLI
from mininet.link import Link
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.term import makeTerm
def ofp_version(switch, protocols):
protocols_str = ','.join(protocols)
command = 'ovs-vsctl set Bridge %s protocols=%s' % (switch, protocols_str)
switch.cmd(command.split(' '))
if '__main__' == __name__:
net = Mininet(controller=RemoteController)
c0 = net.addController('c0')
s1 = net.addSwitch('s1')
s2 = net.addSwitch('s2')
s3 = net.addSwitch('s3')
s4 = net.addSwitch('s4')
s5 = net.addSwitch('s5')
s6 = net.addSwitch('s6')
h1 = net.addHost('h1')
h2 = net.addHost('h2')
h3 = net.addHost('h3')
h4 = net.addHost('h4')
Link(s1, h1)
Link(s2, h2)
Link(s5, h3)
Link(s6, h4)
Link(s1, s2)
Link(s2, s3)
Link(s2, s4)
Link(s4, s5)
Link(s4, s6)
net.build()
c0.start()
s1.start([c0])
s2.start([c0])
s3.start([c0])
s4.start([c0])
s5.start([c0])
s6.start([c0])
ofp_version(s1, ['OpenFlow13'])
ofp_version(s2, ['OpenFlow13'])
ofp_version(s3, ['OpenFlow13'])
ofp_version(s4, ['OpenFlow13'])
ofp_version(s5, ['OpenFlow13'])
ofp_version(s6, ['OpenFlow13'])
CLI(net)
net.stop()
|
mit
|
Python
|
e2e57a89b63943857eb2954d0c5bdcf8e2191ff4
|
simplify logic for player count requirement
|
SupaHam/mark2,SupaHam/mark2
|
mk2/plugins/alert.py
|
mk2/plugins/alert.py
|
import os
import random
from mk2.plugins import Plugin
from mk2.events import Hook, StatPlayerCount
class Alert(Plugin):
interval = Plugin.Property(default=200)
command = Plugin.Property(default="say {message}")
path = Plugin.Property(default="alerts.txt")
min_pcount = Plugin.Property(default=0)
messages = []
def setup(self):
self.register(self.count_check, StatPlayerCount)
if self.path and os.path.exists(self.path):
f = open(self.path, 'r')
for l in f:
l = l.strip()
if l:
self.messages.append(l)
f.close()
def count_check(self, event):
self.requirements_met = event.players_current >= self.min_pcount
def server_started(self, event):
if self.messages:
self.repeating_task(self.repeater, self.interval)
def repeater(self, event):
if self.requirements_met:
self.send_format(self.command, message=random.choice(self.messages))
|
import os
import random
from mk2.plugins import Plugin
from mk2.events import Hook, StatPlayerCount
class Alert(Plugin):
interval = Plugin.Property(default=200)
command = Plugin.Property(default="say {message}")
path = Plugin.Property(default="alerts.txt")
min_pcount = Plugin.Property(default=0)
messages = []
def setup(self):
self.register(self.count_check, StatPlayerCount)
if self.path and os.path.exists(self.path):
f = open(self.path, 'r')
for l in f:
l = l.strip()
if l:
self.messages.append(l)
f.close()
def count_check(self, event):
if event.players_current >= self.min_pcount:
self.requirements_met = True
else:
self.requirements_met = False
def server_started(self, event):
if self.messages:
self.repeating_task(self.repeater, self.interval)
def repeater(self, event):
if self.requirements_met:
self.send_format(self.command, message=random.choice(self.messages))
|
mit
|
Python
|
1519d9dc2f483671aee0f92252dd839a4d7af9c3
|
Add About page TemplateView
|
xanv/painindex
|
painindex_app/urls.py
|
painindex_app/urls.py
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, FormView
from painindex_app import views
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
url(r'^about/$', TemplateView.as_view(template_name='painindex_app/about.html'), name='about'),
url(r'^painsource/(?P<painsource_id>\d+)/$', views.painsource_detail, name='painsource_detail'),
# url(r'^painreport/new/$', views.painreport_form, name='painreport'),
url(r'^painreport/new/$', views.PainReportView.as_view(), name='painreport'),
)
|
from django.conf.urls import patterns, url
from django.views.generic import TemplateView, FormView
from painindex_app import views
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
url(r'^painsource/(?P<painsource_id>\d+)/$', views.painsource_detail, name='painsource_detail'),
# url(r'^painreport/new/$', views.painreport_form, name='painreport'),
url(r'^painreport/new/$', views.PainReportView.as_view(), name='painreport'),
)
|
mit
|
Python
|
40705a39292d0080126933b2318d20ef1a4499a2
|
Remove obsolete input.
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
lobster/cmssw/data/job.py
|
lobster/cmssw/data/job.py
|
#!/usr/bin/env python
import json
import os
import pickle
import shutil
import subprocess
import sys
fragment = """import FWCore.ParameterSet.Config as cms
process.source.fileNames = cms.untracked.vstring({input_files})
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange({lumis})"""
def edit_process_source(cmssw_config_file, files, lumis):
with open(cmssw_config_file, 'a') as config:
frag = fragment.format(input_files=repr([str(f) for f in files]), lumis=[str(l) for l in lumis])
print "--- config file fragment:"
print frag
print "---"
config.write(frag)
(config, data) = sys.argv[1:]
with open(data, 'rb') as f:
(args, files, lumis) = pickle.load(f)
configfile = config.replace(".py", "_mod.py")
shutil.copy2(config, configfile)
env = os.environ
env['X509_USER_PROXY'] = 'proxy'
edit_process_source(configfile, files, lumis)
# exit_code = subprocess.call('python "{0}" {1} > cmssw.log 2>&1'.format(configfile, ' '.join(map(repr, args))), shell=True, env=env)
exit_code = subprocess.call('cmsRun -j report.xml "{0}" {1} > cmssw.log 2>&1'.format(configfile, ' '.join(map(repr, args))), shell=True, env=env)
sys.exit(exit_code)
|
#!/usr/bin/env python
import base64
import json
import os
import pickle
import shutil
import subprocess
import sys
fragment = """import FWCore.ParameterSet.Config as cms
process.source.fileNames = cms.untracked.vstring({input_files})
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))
process.source.lumisToProcess = cms.untracked.VLuminosityBlockRange({lumis})"""
def edit_process_source(cmssw_config_file, files, lumis):
with open(cmssw_config_file, 'a') as config:
frag = fragment.format(input_files=repr([str(f) for f in files]), lumis=[str(l) for l in lumis])
print "--- config file fragment:"
print frag
print "---"
config.write(frag)
(config, data) = sys.argv[1:]
with open(data, 'rb') as f:
(args, files, lumis) = pickle.load(f)
configfile = config.replace(".py", "_mod.py")
shutil.copy2(config, configfile)
env = os.environ
env['X509_USER_PROXY'] = 'proxy'
edit_process_source(configfile, files, lumis)
# exit_code = subprocess.call('python "{0}" {1} > cmssw.log 2>&1'.format(configfile, ' '.join(map(repr, args))), shell=True, env=env)
exit_code = subprocess.call('cmsRun -j report.xml "{0}" {1} > cmssw.log 2>&1'.format(configfile, ' '.join(map(repr, args))), shell=True, env=env)
sys.exit(exit_code)
|
mit
|
Python
|
85769162560d83a58ccc92f818559ddd3dce2a09
|
Fix another bug in the authentication
|
layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious
|
pages/index.py
|
pages/index.py
|
import web
from modules.base import renderer
from modules.login import loginInstance
from modules.courses import Course
#Index page
class IndexPage:
#Simply display the page
def GET(self):
if loginInstance.isLoggedIn():
userInput = web.input();
if "logoff" in userInput:
loginInstance.disconnect();
return renderer.index(False)
else:
return renderer.main(Course.GetAllCoursesIds())
else:
return renderer.index(False)
#Try to log in
def POST(self):
userInput = web.input();
if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password):
return renderer.main(Course.GetAllCoursesIds())
else:
return renderer.index(True)
|
import web
from modules.base import renderer
from modules.login import loginInstance
from modules.courses import Course
#Index page
class IndexPage:
#Simply display the page
def GET(self):
if loginInstance.isLoggedIn():
userInput = web.input();
if "logoff" in userInput:
loginInstance.disconnect();
return renderer.index(False)
else:
courses = Course.GetAllCoursesIds()
return renderer.main(courses)
else:
return renderer.index(False)
#Try to log in
def POST(self):
userInput = web.input();
if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password):
return renderer.main()
else:
return renderer.index(True)
|
agpl-3.0
|
Python
|
1363c12251cb6aaad37f2b3be6890f70e7f80a66
|
Fix invalid syntax
|
caioariede/django-location-field,caioariede/django-location-field,Mixser/django-location-field,undernewmanagement/django-location-field,janusnic/django-location-field,caioariede/django-location-field,Mixser/django-location-field,undernewmanagement/django-location-field,recklessromeo/django-location-field,recklessromeo/django-location-field,janusnic/django-location-field,Mixser/django-location-field,voodmania/django-location-field,undernewmanagement/django-location-field,voodmania/django-location-field,recklessromeo/django-location-field,voodmania/django-location-field,janusnic/django-location-field
|
location_field/widgets.py
|
location_field/widgets.py
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
GOOGLE_MAPS_V3_APIKEY = getattr(settings, 'GOOGLE_MAPS_V3_APIKEY', None)
GOOGLE_API_JS = '//maps.google.com/maps/api/js?sensor=false'
if GOOGLE_MAPS_V3_APIKEY:
GOOGLE_API_JS = '{0}&key={1}'.format(GOOGLE_API_JS, GOOGLE_MAPS_V3_APIKEY)
class LocationWidget(widgets.TextInput):
def __init__(self, attrs=None, based_fields=None, zoom=None, suffix='', **kwargs):
self.based_fields = based_fields
self.zoom = zoom
self.suffix = suffix
super(LocationWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
if value is not None:
if isinstance(value, basestring):
lat, lng = value.split(',')
else:
lng = value.x
lat = value.y
value = '%s,%s' % (
float(lat),
float(lng),
)
else:
value = ''
if '-' not in name:
prefix = ''
else:
prefix = name[:name.rindex('-') + 1]
based_fields = ','.join(
map(lambda f: '#id_' + prefix + f.name, self.based_fields))
attrs = attrs or {}
attrs['data-location-widget'] = name
attrs['data-based-fields'] = based_fields
attrs['data-zoom'] = self.zoom
attrs['data-suffix'] = self.suffix
attrs['data-map'] = '#map_' + name
text_input = super(LocationWidget, self).render(name, value, attrs)
map_div = u'''
<div style="margin:4px 0 0 0">
<label></label>
<div id="map_%(name)s" style="width: 500px; height: 250px"></div>
</div>
'''
return mark_safe(text_input + map_div % {'name': name})
class Media:
# Use schemaless URL so it works with both, http and https websites
js = (
GOOGLE_API_JS,
settings.STATIC_URL + 'location_field/js/form.js',
)
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
GOOGLE_MAPS_V3_APIKEY = getattr(settings, 'GOOGLE_MAPS_V3_APIKEY', None)
GOOGLE_API_JS = '//maps.google.com/maps/api/js?sensor=false'
if GOOGLE_MAPS_V3_APIKEY:
GOOGLE_API_JS = '{0}&key={0}'.format(GOOGLE_API_JS, GOOGLE_MAPS_V3_APIKEY))
class LocationWidget(widgets.TextInput):
def __init__(self, attrs=None, based_fields=None, zoom=None, suffix='', **kwargs):
self.based_fields = based_fields
self.zoom = zoom
self.suffix = suffix
super(LocationWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
if value is not None:
if isinstance(value, basestring):
lat, lng = value.split(',')
else:
lng = value.x
lat = value.y
value = '%s,%s' % (
float(lat),
float(lng),
)
else:
value = ''
if '-' not in name:
prefix = ''
else:
prefix = name[:name.rindex('-') + 1]
based_fields = ','.join(
map(lambda f: '#id_' + prefix + f.name, self.based_fields))
attrs = attrs or {}
attrs['data-location-widget'] = name
attrs['data-based-fields'] = based_fields
attrs['data-zoom'] = self.zoom
attrs['data-suffix'] = self.suffix
attrs['data-map'] = '#map_' + name
text_input = super(LocationWidget, self).render(name, value, attrs)
map_div = u'''
<div style="margin:4px 0 0 0">
<label></label>
<div id="map_%(name)s" style="width: 500px; height: 250px"></div>
</div>
'''
return mark_safe(text_input + map_div % {'name': name})
class Media:
# Use schemaless URL so it works with both, http and https websites
js = (
GOOGLE_API_JS,
settings.STATIC_URL + 'location_field/js/form.js',
)
|
mit
|
Python
|
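A side note on the URL-building line in the record above: repeating the positional index {0} interpolates the first argument twice and silently drops the key, which is why the corrected template uses {0} and {1}. A tiny illustration with a placeholder key:

js = "//maps.google.com/maps/api/js?sensor=false"
key = "PLACEHOLDER_KEY"
print("{0}&key={0}".format(js, key))  # repeats the URL, key is lost
print("{0}&key={1}".format(js, key))  # intended: URL, then key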
30c8e4d7a1e6e237772aa89256b83ec37a015803
|
increment version
|
MediaMath/t1-python
|
terminalone/metadata.py
|
terminalone/metadata.py
|
# -*- coding: utf-8 -*-
__name__ = 'TerminalOne'
__author__ = 'MediaMath'
__copyright__ = 'Copyright 2015, MediaMath'
__license__ = 'Apache License, Version 2.0'
__version__ = '1.9.9'
__maintainer__ = 'MediaMath Developer Relations'
__email__ = '[email protected]'
__status__ = 'Stable'
__url__ = 'http://www.mediamath.com'
__description__ = "A package for interacting with MediaMath's TerminalOne API."
|
# -*- coding: utf-8 -*-
__name__ = 'TerminalOne'
__author__ = 'MediaMath'
__copyright__ = 'Copyright 2015, MediaMath'
__license__ = 'Apache License, Version 2.0'
__version__ = '1.9.8'
__maintainer__ = 'MediaMath Developer Relations'
__email__ = '[email protected]'
__status__ = 'Stable'
__url__ = 'http://www.mediamath.com'
__description__ = "A package for interacting with MediaMath's TerminalOne API."
|
apache-2.0
|
Python
|
1fca3a48b0617b19554ab55c54db322090a69c3d
|
Add with statement tests
|
tyrannosaurus/python-libmagic
|
magic/tests/test_magic.py
|
magic/tests/test_magic.py
|
import unittest
import magic
import magic.flags
class MagicTestCase(unittest.TestCase):
def setUp(self):
self.magic = magic.Magic()
def test_get_version(self):
self.assertTrue(isinstance(self.magic.version, int))
def test_from_buffer(self):
mimetype = self.magic.from_buffer("ehlo")
self.assertEqual(mimetype, "ASCII text, with no line terminators")
def test_from_file(self):
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "ASCII text")
def test_with(self):
with magic.Magic(mimetype=True) as m:
mimetype = m.from_file("/etc/passwd")
self.assertEqual(mimetype, "text/plain")
def test_set_flags(self):
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "ASCII text")
self.magic.set_flags(magic.flags.MAGIC_MIME_TYPE)
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "text/plain")
|
import unittest
import magic
import magic.flags
class MagicTestCase(unittest.TestCase):
def setUp(self):
self.magic = magic.Magic()
def test_get_version(self):
self.assertTrue(isinstance(self.magic.version, int))
def test_from_buffer(self):
mimetype = self.magic.from_buffer("ehlo")
self.assertEqual(mimetype, "ASCII text, with no line terminators")
def test_from_file(self):
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "ASCII text")
def test_set_flags(self):
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "ASCII text")
self.magic.set_flags(magic.flags.MAGIC_MIME_TYPE)
mimetype = self.magic.from_file("/etc/passwd")
self.assertEqual(mimetype, "text/plain")
|
mit
|
Python
|
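A side note on the with-statement test added above: it relies on Magic implementing the context-manager protocol. A generic sketch of that protocol with an invented resource class:

class Resource(object):
    def __enter__(self):
        # The value returned here is what "as" binds.
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Release resources; returning False lets exceptions propagate.
        return False
with Resource() as r:
    assert r is not None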
436aa56758403b96aa4c0038db6d2a24047cfa16
|
fix bug
|
youtaya/mothertree,youtaya/mothertree,youtaya/mothertree,youtaya/mothertree
|
monthertree/monthertree/wsgi.py
|
monthertree/monthertree/wsgi.py
|
"""
WSGI config for monthertree project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monthertree.settings")
from django.conf import settings
sys.path.append(settings.PROJECT_DIR)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for monthertree project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
import sys
sys.path.append('/home/jinxp/Documents/shell/mothertree/monthertree/')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monthertree.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
mit
|
Python
|
6d8dbb6621da2ddfffd58303131eb6cda345e37c
|
Make person experience the default tab for ZA
|
hzj123/56th,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,geoffkilpin/pombola,ken-muturi/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola
|
pombola/south_africa/urls.py
|
pombola/south_africa/urls.py
|
from django.conf.urls import patterns, include, url
from pombola.core.views import PersonDetailSub
from pombola.south_africa.views import LatLonDetailView,SAPlaceDetailSub
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
url(r'^person/(?P<slug>[-\w]+)/$', PersonDetailSub.as_view(), { 'sub_page': 'experience' }, name='person'),
)
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView,SAPlaceDetailSub
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
agpl-3.0
|
Python
|
76289f734f622227c44487d8f44879e078dbdcb3
|
Improve gzweb launcher
|
cyrillg/ros-playground,cyrillg/ros-playground
|
src/deedee_tutorials/src/deedee_tutorials/launcher.py
|
src/deedee_tutorials/src/deedee_tutorials/launcher.py
|
#! /usr/bin/env python
import rospy
import subprocess
class MainLauncher:
''' Node spawning the environment with respect to the global configs
'''
def __init__(self):
rospy.init_node("middleware_spawner")
rospy.sleep(0.5)
# Configs
self.configs = {"robot_name": "deedee",
"sim_plant": "true",
"autonomous": "true"}
self.cmd = ""
self.retrieve_config()
self.build_cmd()
self.spawn()
def retrieve_config(self):
for setting in self.configs.keys():
self.configs[setting] = rospy.get_param("/{}".format(setting))
def build_cmd(self):
self.cmd = "roslaunch deedee_tutorials follow_waypoints.launch"
for setting in self.configs.keys():
self.cmd += " {}:={}".format(setting, self.configs[setting])
def spawn(self):
subprocess.call(self.cmd, shell=True)
class GzwebManager:
''' Node managing the gzweb web client with respect to the global configs
'''
def __init__(self):
rospy.init_node("gzweb_manager")
rospy.sleep(0.5)
# Configs
self.configs = {"gzweb_enable": "true",
"gzweb_path": ""}
self.retrieve_config()
if self.configs["gzweb_enable"]:
self.cmd = "{}/start_gzweb.sh".format(self.configs["gzweb_path"])
subprocess.call("{}/start_gzweb.sh".format(self.configs["gzweb_path"]),
shell=True)
rospy.on_shutdown(self.shutdown_hook)
rospy.spin()
def retrieve_config(self):
gzweb_params = rospy.get_param("gzweb")
for setting in self.configs.keys():
self.configs[setting] = gzweb_params[setting]
def shutdown_hook(self):
print "Stopping webserver!"
subprocess.call("{}/stop_gzweb.sh".format(self.configs["gzweb_path"]),
shell=True)
|
#! /usr/bin/env python
import rospy
import subprocess
class MainLauncher:
''' Node spawning the environment with respect to the global configs
'''
def __init__(self):
rospy.init_node("middleware_spawner")
rospy.sleep(0.5)
# Configs
self.configs = {"robot_name": "deedee",
"sim_plant": "true",
"autonomous": "true"}
self.cmd = ""
self.retrieve_config()
self.build_cmd()
self.spawn()
def retrieve_config(self):
for setting in self.configs.keys():
self.configs[setting] = rospy.get_param("/{}".format(setting))
def build_cmd(self):
self.cmd = "roslaunch deedee_tutorials follow_waypoints.launch"
for setting in self.configs.keys():
self.cmd += " {}:={}".format(setting, self.configs[setting])
def spawn(self):
subprocess.call(self.cmd, shell=True)
class GzwebManager:
''' Node managing the gzweb web client with respect to the global configs
'''
def __init__(self):
rospy.init_node("gzweb_manager")
rospy.sleep(0.5)
# Configs
self.configs = {"gzweb_enable": "true",
"gzweb_path": ""}
self.retrieve_config()
self.cmd = "{}/start_gzweb.sh".format(self.configs["gzweb_path"])
if self.configs["gzweb_enable"]:
subprocess.call("{}/start_gzweb.sh".format(self.configs["gzweb_path"]),
shell=True)
rospy.on_shutdown(self.shutdown_hook)
rospy.spin()
def retrieve_config(self):
gzweb_params = rospy.get_param("gzweb")
for setting in self.configs.keys():
self.configs[setting] = gzweb_params[setting]
def shutdown_hook(self):
print "Stopping webserver!"
subprocess.call("{}/stop_gzweb.sh".format(self.configs["gzweb_path"]),
shell=True)
|
mit
|
Python
|
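A ROS-free sketch of the start/stop pairing the reworked GzwebManager establishes, with atexit standing in for rospy.on_shutdown and echo placeholders for the real gzweb scripts:
# Start a long-lived helper and guarantee its stop script runs at
# interpreter exit, mirroring the on_shutdown registration above.
import atexit
import subprocess

def stop_gzweb():
    subprocess.call("echo stop_gzweb.sh", shell=True)   # placeholder command

subprocess.call("echo start_gzweb.sh", shell=True)      # placeholder command
atexit.register(stop_gzweb)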
61c6f174b1e406955c3e881217ff863d6ff6c3ce
|
Fix validate/sanitize functions for click
|
thombashi/pathvalidate
|
pathvalidate/click.py
|
pathvalidate/click.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
import click
from ._common import PathType
from ._file import sanitize_filename, sanitize_filepath, validate_filename, validate_filepath
from .error import ValidationError
def validate_filename_arg(ctx, param, value) -> str:
if not value:
return ""
try:
validate_filename(value)
except ValidationError as e:
raise click.BadParameter(str(e))
return value
def validate_filepath_arg(ctx, param, value) -> str:
if not value:
return ""
try:
validate_filepath(value)
except ValidationError as e:
raise click.BadParameter(str(e))
return value
def sanitize_filename_arg(ctx, param, value) -> PathType:
if not value:
return ""
return sanitize_filename(value)
def sanitize_filepath_arg(ctx, param, value) -> PathType:
if not value:
return ""
return sanitize_filepath(value)
def filename(ctx, param, value):
# Deprecated
if not value:
return None
try:
validate_filename(value)
except ValidationError as e:
raise click.BadParameter(e)
return sanitize_filename(value)
def filepath(ctx, param, value):
# Deprecated
if not value:
return None
try:
validate_filepath(value)
except ValidationError as e:
raise click.BadParameter(e)
return sanitize_filepath(value)
|
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
import click
from ._common import PathType
from ._file import sanitize_filename, sanitize_filepath, validate_filename, validate_filepath
from .error import ValidationError
def validate_filename_arg(ctx, param, value) -> None:
if not value:
return
try:
validate_filename(value)
except ValidationError as e:
raise click.BadParameter(str(e))
def validate_filepath_arg(ctx, param, value) -> None:
if not value:
return
try:
validate_filepath(value)
except ValidationError as e:
raise click.BadParameter(str(e))
def sanitize_filename_arg(ctx, param, value) -> PathType:
if not value:
return ""
return sanitize_filename(value)
def sanitize_filepath_arg(ctx, param, value) -> PathType:
if not value:
return ""
return sanitize_filepath(value)
def filename(ctx, param, value):
# Deprecated
if not value:
return None
try:
validate_filename(value)
except ValidationError as e:
raise click.BadParameter(e)
return sanitize_filename(value)
def filepath(ctx, param, value):
# Deprecated
if not value:
return None
try:
validate_filepath(value)
except ValidationError as e:
raise click.BadParameter(e)
return sanitize_filepath(value)
|
mit
|
Python
|
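The root cause in the click fix: whatever an option callback returns becomes the parameter's final value, so the old `-> None` versions silently blanked every argument they validated. A self-contained sketch of that contract (the command and option names are invented; only click's documented callback signature is relied on):
import click

def upper_name(ctx, param, value):
    # The return value becomes the option's value; returning None, as the
    # unfixed callbacks effectively did, discards the user's input.
    return value.upper() if value else value

@click.command()
@click.option("--name", callback=upper_name)
def greet(name):
    click.echo("hello %s" % name)

if __name__ == "__main__":
    greet()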
b65283984b1be7e8bb88d3281bb3654a3dd12233
|
Make sure test setup is run for subdirectories
|
n0ano/ganttclient
|
nova/tests/scheduler/__init__.py
|
nova/tests/scheduler/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
apache-2.0
|
Python
|
|
aeb69479a6bf5492411e82bbcb77331daa8da819
|
add a test for the monitor
|
lapisdecor/bzoing
|
tests/test_bzoing.py
|
tests/test_bzoing.py
|
"""
test_bzoing
----------------------------------
Tests for `bzoing` module.
"""
import unittest
from bzoing.tasks import Bzoinq, Monitor
import time
class TestTasksAndMonitor(unittest.TestCase):
def test_creating_task(self):
a = Bzoinq()
a.create_task()
self.assertTrue(len(a.task_list) == 1)
def test_delete_task(self):
a = Bzoinq()
a.create_task()
the_id = a.task_list[0].id
a.remove_task(the_id)
self.assertTrue(len(a.task_list) == 0)
def test_monitor(self):
import datetime
a = Bzoinq()
b = Monitor(a)
b.start()
first_time = datetime.datetime.now() + datetime.timedelta(seconds=10)
a.create_task("My test task", alarm=first_time)
# sleep a bit to see if alarm works
time.sleep(15)
# check the if task was removed from task list
self.assertTrue(len(a.task_list) == 0)
# kill Monitor
b.stop()
if __name__ == '__main__':
unittest.main()
|
"""
test_bzoing
----------------------------------
Tests for `bzoing` module.
"""
import unittest
from bzoing.tasks import Bzoinq, Monitor
import time
class TestTasksAndMonitor(unittest.TestCase):
def test_creating_task(self):
a = Bzoinq()
a.create_task()
self.assertTrue(len(a.task_list) == 1)
def test_delete_task(self):
a = Bzoinq()
a.create_task()
the_id = a.task_list[0].id
a.remove_task(the_id)
self.assertTrue(len(a.task_list) == 0)
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
148826f75072576d7f0d0f206e3d1dba34688720
|
Refactor getLongestWord to simplify maximum collection and reduce number of conditionals
|
alkaitz/general-programming
|
stream_processor/stream_processor.py
|
stream_processor/stream_processor.py
|
'''
Created on Aug 7, 2017
@author: alkaitz
'''
import heapq
'''
You have a function that will be called with a stream of strings.
Every time you receive a new word, you should return the length of the longest
word that you have received that has showed in the string only once. Ex:
f("Yes") -> 3
f("No") -> 3
f("Yes") -> 2
'''
working_set = []
heapq.heapify(working_set)
repeated = set()
def process(str):
includeWord(str)
return getLongestWord()
'''
Structure will be sorted by negative numbers to transform it from a min heap to a max heap.
Storing the tuple, to provide right sorting.
None returned if data set is empty (all received words have appeared more than once)
'''
def includeWord(str):
if str not in repeated:
lenPlusStringTuple = (-len(str),str)
if lenPlusStringTuple not in working_set:
heapq.heappush(working_set, lenPlusStringTuple)
else:
working_set.remove(lenPlusStringTuple)
repeated.add(str)
def getLongestWord():
if len(working_set) > 0:
(length, _) = (working_set[0])
return -length
return None
if __name__ == '__main__':
assert(process("Hello") == 5)
assert(process("Hello") == None)
assert(process("Hello") == None)
assert(process("Hallo") == 5)
assert(process("Bye") == 5)
assert(process("By") == 5)
assert(process("B") == 5)
assert(process("Hallo") == 3)
assert(process("By") == 3)
assert(process("Bye") == 1)
print "Successful"
|
'''
Created on Aug 7, 2017
@author: alkaitz
'''
import heapq
'''
You have a function that will be called with a stream of strings.
Every time you receive a new word, you should return the length of the longest
word that you have received that has appeared in the stream only once. Ex:
f("Yes") -> 3
f("No") -> 3
f("Yes") -> 2
'''
working_set = []
heapq.heapify(working_set)
repeated = set()
def process(str):
includeWord(str)
return getLongestWord(str)
'''
Structure will be sorted by negative numbers to transform it from a min heap to a max heap.
Storing the tuple, to provide right sorting.
None returned if data set is empty (all received words have appeared more than once)
'''
def includeWord(str):
if str not in repeated:
lenPlusStringTuple = (-len(str),str)
if lenPlusStringTuple not in working_set:
heapq.heappush(working_set, lenPlusStringTuple)
else:
working_set.remove(lenPlusStringTuple)
repeated.add(str)
def getLongestWord(str):
(length, _) = (working_set[0]) if working_set else (None, None)
return -length if length else None
if __name__ == '__main__':
assert(process("Hello") == 5)
assert(process("Hello") == None)
assert(process("Hello") == None)
assert(process("Hallo") == 5)
assert(process("Bye") == 5)
assert(process("By") == 5)
assert(process("B") == 5)
assert(process("Hallo") == 3)
assert(process("By") == 3)
assert(process("Bye") == 1)
print "Successful"
|
mit
|
Python
|
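The (-len, word) tuples above are the standard way to get a max-heap out of heapq, which only implements a min-heap. The trick in isolation:
# Standalone illustration of the max-heap-by-negation technique.
import heapq

heap = []
for word in ["Hello", "By", "Bye"]:
    # The smallest tuple is the longest word; the word itself breaks
    # length ties deterministically.
    heapq.heappush(heap, (-len(word), word))

neg_len, longest = heap[0]          # peek without popping
assert (-neg_len, longest) == (5, "Hello")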
6d267faaf9d18e58b24cf93906961b152ef0fcb7
|
build vehicle list based on if make is provided
|
sitture/trade-motors,sitture/trade-motors,sitture/trade-motors,sitture/trade-motors,sitture/trade-motors
|
src/vehicles/views.py
|
src/vehicles/views.py
|
from django.shortcuts import render, render_to_response, RequestContext
# import the custom context processor
from vehicles.context_processor import global_context_processor
from vehicles.models import Vehicle, VehicleMake, Category
def home_page(request):
return render_to_response("home_page.html", locals(),
context_instance=RequestContext(request, processors=[global_context_processor]))
def category_page(request, slug):
# check if make slug parameter is passed into the url
vehicle_make_slug = request.GET.get('make', None)
# get category by slug
category = Category.objects.get_category_by_slug(slug)
# get all the vehicles by the category and make (if provided)
vehicles_list = None
if vehicle_make_slug is not None:
# get make by slug
make = VehicleMake.objects.get_make_by_slug(vehicle_make_slug)
vehicles_list = Vehicle.objects.get_vehicles_by_category_and_make(
category, make)
else:
vehicles_list = Vehicle.objects.get_vehicles_by_category(category)
return render_to_response("home_page.html", locals(),
context_instance=RequestContext(request, processors=[global_context_processor]))
def get_makes_in_category(category):
makes_in_category = []
# get all the vehicle objects by category
vehicles_in_category = Vehicle.objects.get_vehicles_by_category(category=category)
for vehicle in vehicles_in_category:
makes_in_category.append(vehicle.make)
# remove duplicate makes from the list
makes_in_category = list(set(makes_in_category))
makes_in_category = sorted(makes_in_category, key=lambda x:x.v_make)
return makes_in_category
|
from django.shortcuts import render, render_to_response, RequestContext
# import the custom context processor
from vehicles.context_processor import global_context_processor
from vehicles.models import Vehicle, Category
def home_page(request):
return render_to_response("home_page.html", locals(),
context_instance=RequestContext(request, processors=[global_context_processor]))
def category_page(request, slug):
# check if make parameter is passed into the url
vehicle_make = request.GET.get('make', None)
# get category by slug
category = Category.objects.get_category_by_slug(slug)
# get all the vehicles by the category and make (if provided)
vehicles_list = None
if vehicle_make is not None:
vehicles_list = Vehicle.objects.get_vehicles_by_category_and_make(
category, vehicle_make)
else:
vehicles_list = Vehicle.objects.get_vehicles_by_category(category)
return render_to_response("home_page.html", locals(),
context_instance=RequestContext(request, processors=[global_context_processor]))
def get_makes_in_category(category):
makes_in_category = []
# get all the vehicle objects by category
vehicles_in_category = Vehicle.objects.get_vehicles_by_category(category=category)
for vehicle in vehicles_in_category:
makes_in_category.append(vehicle.make)
# remove duplicate makes from the list
makes_in_category = list(set(makes_in_category))
makes_in_category = sorted(makes_in_category, key=lambda x:x.v_make)
return makes_in_category
|
mit
|
Python
|
39ab86b500cc28420aa0062395adc9e6ddf2017c
|
allow reading from multiple configuration files
|
OpenTouch/vsphere-client
|
src/vsphere/config.py
|
src/vsphere/config.py
|
import sys
from os import path
from ConfigParser import ConfigParser
VSPHERE_CFG_FILE = "vsphere.conf"
unix_platforms = [
"darwin",
"Linux"
]
class EsxConfig:
def __init__(self):
ok = False
# specific configuration
local_cfg = VSPHERE_CFG_FILE
# user-global configuration
user_cfg = ""
if sys.platform in unix_platforms:
user_cfg = path.join(path.expanduser("~"), '.{0}'.format(VSPHERE_CFG_FILE))
# system-wide configuration
system_cfg = ""
if sys.platform in unix_platforms:
system_cfg = path.join(path.expanduser("/etc/vsphere"), VSPHERE_CFG_FILE)
files = [ local_cfg, user_cfg, system_cfg ]
for f in files:
if path.exists(f):
parser = ConfigParser()
parser.read(f)
ok = True
break
if ok:
self.vs_host = parser.get('server', 'host')
self.vs_user = parser.get('server', 'user')
self.vs_password = parser.get('server', 'password')
self.vs_dc = parser.get('server', 'dc')
|
from ConfigParser import ConfigParser
class EsxConfig:
def __init__(self):
parser = ConfigParser()
parser.read("vsphere.conf")
self.vs_host = parser.get('server', 'host')
self.vs_user = parser.get('server', 'user')
self.vs_password = parser.get('server', 'password')
self.vs_dc = parser.get('server', 'dc')
|
apache-2.0
|
Python
|
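For comparison, ConfigParser can do the multi-file search natively: read() accepts a list, skips missing files, and returns the ones it parsed. Note the precedence differs from the loop above, which stops at the first existing file, whereas read() parses every file and later ones override earlier ones:
# Alternative sketch using ConfigParser's built-in multi-file support
# (the user-level path below is hypothetical).
from ConfigParser import ConfigParser   # configparser on Python 3

parser = ConfigParser()
found = parser.read(["/etc/vsphere/vsphere.conf",
                     "/home/user/.vsphere.conf",
                     "vsphere.conf"])
if not found:
    raise IOError("no vsphere.conf could be read")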
170a50eeca4249a488cc9d0c69876c5f2708b743
|
use two-tail for testing significance and right_tail for redundancy checking
|
anzev/hedwig,anzev/hedwig
|
stats/significance.py
|
stats/significance.py
|
'''
Significance testing methods.
@author: [email protected]
'''
from fisher import pvalue
def is_redundant(rule, new_rule):
'''
Computes the redundancy coefficient of a new rule compared to its
immediate generalization.
Rules with a coeff > 1 are deemed non-redundant.
'''
return _fisher(new_rule).right_tail > _fisher(rule).right_tail
def fisher(rule):
'''
Fisher's p-value for one rule.
'''
return _fisher(rule).two_tail
def _fisher(rule):
'''
Fisher's p-value for one rule.
'''
N = float(len(rule.kb.examples))
nX = float(rule.coverage)
nY = rule.kb.distribution[rule.target]
nXY = rule.distribution[rule.target]
nXnotY = nX - nXY
nnotXY = nY - nXY
nnotXnotY = N - nXnotY - nnotXY
return pvalue(nXY, nXnotY, nnotXY, nnotXnotY)
def apply_fisher(ruleset):
'''
Fisher's exact test to test rule significance.
'''
for rule in ruleset:
rule.pval = fisher(rule)
|
'''
Significance testing methods.
@author: [email protected]
'''
from fisher import pvalue
def is_redundant(rule, new_rule):
'''
Computes the redundancy coefficient of a new rule compared to its
immediate generalization.
Rules with a coeff > 1 are deemed non-redundant.
'''
return fisher(new_rule) > fisher(rule)
def fisher(rule):
'''
Fisher's p-value for one rule.
'''
N = float(len(rule.kb.examples))
nX = float(rule.coverage)
nY = rule.kb.distribution[rule.target]
nXY = rule.distribution[rule.target]
nXnotY = nX - nXY
nnotXY = nY - nXY
nnotXnotY = N - nXnotY - nnotXY
return pvalue(nXY, nXnotY, nnotXY, nnotXnotY).right_tail
def apply_fisher(ruleset):
'''
Fisher's exact test to test rule significance.
'''
for rule in ruleset:
rule.pval = fisher(rule)
|
mit
|
Python
|
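For reference, the same two tails are available from SciPy if the fisher package is unavailable; fisher_exact returns an odds ratio and a p-value, and the alternative keyword selects the tail (the 2x2 counts below are invented):
from scipy.stats import fisher_exact

table = [[8, 2],    # nXY,    nXnotY
         [1, 9]]    # nnotXY, nnotXnotY
_, p_two_sided = fisher_exact(table, alternative="two-sided")  # ~ two_tail
_, p_right = fisher_exact(table, alternative="greater")        # ~ right_tail
print(p_two_sided, p_right)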
8907993e48a59ce39dab1cdb359e287f527b7642
|
Add --verbose parameter
|
stb-tester/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester
|
stbt_control_relay.py
|
stbt_control_relay.py
|
#!/usr/bin/python
"""
Allows using any of the stbt remote control backends remotely using the lirc
protocol.
Presents the same socket protocol as lircd but sending keypresses using any of
stbt's controls. This allows for example controlling a roku over its HTTP
interface from some software that only speaks lirc.
Example usage:
$ stbt control-relay file:example
Listens on `/var/run/lirc/lircd` for lirc clients. Keypresses sent will be
written to the file example. So
$ irsend SEND_ONCE stbt KEY_UP
Will write the text "KEY_UP" to the file `example`.
$ stbt control-relay --input=lircd:lircd.sock \\
roku:192.168.1.13 samsung:192.168.1.14
Listens on lircd.sock and will forward keypresses to the roku at 192.168.1.13
using its HTTP protocol and to the Samsung TV at 192.168.1.14 using its TCP
protocol. So
$ irsend -d lircd.sock SEND_ONCE stbt KEY_OK
Will press KEY_OK on both the Samsung and the roku devices simultaneously.
"""
import argparse
import signal
import sys
from _stbt.control import MultiRemote, uri_to_remote, uri_to_remote_recorder
from _stbt.logging import argparser_add_verbose_argument, debug
def main(argv):
parser = argparse.ArgumentParser(
epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"--input", default="lircd", help="""The source of remote control
presses. Values are the same as stbt record's --control-recorder.""")
parser.add_argument("output", nargs="+", help="""One or more remote control
configurations. Values are the same as stbt run's --control.""")
argparser_add_verbose_argument(parser)
args = parser.parse_args(argv[1:])
signal.signal(signal.SIGTERM, lambda _signo, _stack_frame: sys.exit(0))
r = MultiRemote(uri_to_remote(x) for x in args.output)
listener = uri_to_remote_recorder(args.input)
for key in listener:
debug("Received %s" % key)
try:
r.press(key)
except Exception as e: # pylint: disable=broad-except
sys.stderr.write("Error pressing key %r: %s\n" % (key, e))
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
#!/usr/bin/python
"""
Allows using any of the stbt remote control backends remotely using the lirc
protocol.
Presents the same socket protocol as lircd but sending keypresses using any of
stbt's controls. This allows for example controlling a roku over its HTTP
interface from some software that only speaks lirc.
Example usage:
$ stbt control-relay file:example
Listens on `/var/run/lirc/lircd` for lirc clients. Keypresses sent will be
written to the file example. So
$ irsend SEND_ONCE stbt KEY_UP
Will write the text "KEY_UP" to the file `example`.
$ stbt control-relay --input=lircd:lircd.sock \\
roku:192.168.1.13 samsung:192.168.1.14
Listens on lircd.sock and will forward keypresses to the roku at 192.168.1.13
using its HTTP protocol and to the Samsung TV at 192.168.1.14 using its TCP
protocol. So
$ irsend -d lircd.sock SEND_ONCE stbt KEY_OK
Will press KEY_OK on both the Samsung and the roku devices simultaneously.
"""
import argparse
import signal
import sys
from _stbt.control import MultiRemote, uri_to_remote, uri_to_remote_recorder
def main(argv):
parser = argparse.ArgumentParser(
epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"--input", default="lircd", help="""The source of remote control
presses. Values are the same as stbt record's --control-recorder.""")
parser.add_argument("output", nargs="+", help="""One or more remote control
configurations. Values are the same as stbt run's --control.""")
args = parser.parse_args(argv[1:])
signal.signal(signal.SIGTERM, lambda _signo, _stack_frame: sys.exit(0))
r = MultiRemote(uri_to_remote(x) for x in args.output)
listener = uri_to_remote_recorder(args.input)
for key in listener:
sys.stderr.write("Received %s\n" % key)
try:
r.press(key)
except Exception as e: # pylint: disable=broad-except
sys.stderr.write("Error pressing key %r: %s\n" % (key, e))
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
lgpl-2.1
|
Python
|
84ee720fd2d8403de5f49c54fc41bfcb67a78f78
|
Add missing vat alias for Turkey
|
arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum
|
stdnum/tr/__init__.py
|
stdnum/tr/__init__.py
|
# __init__.py - collection of Turkish numbers
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Turkish numbers."""
from stdnum.tr import vkn as vat # noqa: F401
|
# __init__.py - collection of Turkish numbers
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Turkish numbers."""
|
lgpl-2.1
|
Python
|
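The one-liner is a deliberate python-stdnum convention: each country package re-exports its national VAT-number module under the common name vat, and the noqa: F401 tag silences flake8's unused-import warning for the re-export. A caller-side sketch of what it enables (assumes a python-stdnum version containing this commit is installed):
from stdnum.tr import vat

# The alias is the real module; callers get one uniform name per country.
assert vat.__name__ == "stdnum.tr.vkn"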
f02eb748d33b621368198c10a965b27ee31effca
|
update tutorial section link
|
freme-project/Documentation,freme-project/Documentation,freme-project/Documentation,freme-project/Documentation,freme-project/freme-project.github.io,freme-project/Documentation,freme-project/freme-project.github.io,freme-project/freme-project.github.io,freme-project/freme-project.github.io
|
swagger/yamlscript.py
|
swagger/yamlscript.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# This script, when run, parses the file "swagger.yaml" and strips it down to only
# include those paths and methods specified in the included variable.
#
# As of now, it is called with every "jekyll build" - see jekyll-freme/_plugins/jekyll-pages-directory.rb
# line: "exec(python swagger/yamlscript.py)"
#
# To be able to import yaml, on linux, run "sudo pip install PyYAML"
#
# Author: Jonathan Sauder ([email protected])
#
def main():
import yaml,os,sys
try:
with open(os.path.dirname(__file__)+"/swagger.yaml","r") as f:
full=yaml.safe_load(f.read())
except IOError:
raise Exception("\n\tException Handled in /swagger/yamlscript.py:"+ os.path.dirname(__file__)+"/swagger.yaml could not be found. The generation of a simple API-Doc was skipped")
sys.exit(1)
except yaml.scanner.ScannerError:
raise Exception("\n\tException Handled in /swagger/yamlscript.py: The YAML File at "+ os.path.dirname(__file__)+"/swagger.yaml is invalid! The generation of a simple API-Doc was skipped")
sys.exit(1)
included_paths={
"/e-entity/freme-ner/documents": ["post"],
"/e-entity/dbpedia-spotlight/documents": ["post"],
"/e-publishing/html": ["post"],
"/e-link/documents/": ["post"],
"/e-translation/tilde": ["post"],
"/e-terminology/tilde": ["post"],
"/e-link/explore": ["post"]
}
for path in full["paths"].keys():
if path not in included_paths:
del full["paths"][path]
else:
for method in included_paths[path]:
if method not in full["paths"][path].keys():
del full["paths"][path][method]
# else:
# full["paths"][path][method]['tags']=["Enrichment Endpoints"]
full["tags"]=[x for x in full["tags"] if x["name"]!="General Information"]
full['info']['description']="This section only covers the most important endpoints of FREME: the enrichment endpoints.<br><br> The endpoints can be used to access FREME e-Services via common HTTP requests.<br><br> A full documentation of all e-Service endpoints, including all parameters, is provided <a href=\"full.html\">here</a>. For usage examples, see the <a href=\"../tutorials/overview.html\">tutorial section</a>."
with open(os.path.dirname(__file__)+"/simple.yaml",'w') as f:
f.write(yaml.dump(full))
return 0
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# This script, when run, parses the file "swagger.yaml" and strips it down to only
# include those paths and methods specified in the included variable.
#
# As of now, it is called with every "jekyll build" - see jekyll-freme/_plugins/jekyll-pages-directory.rb
# line: "exec(python swagger/yamlscript.py)"
#
# To be able to import yaml, on linux, run "sudo pip install PyYAML"
#
# Author: Jonathan Sauder ([email protected])
#
def main():
import yaml,os,sys
try:
with open(os.path.dirname(__file__)+"/swagger.yaml","r") as f:
full=yaml.safe_load(f.read())
except IOError:
raise Exception("\n\tException Handled in /swagger/yamlscript.py:"+ os.path.dirname(__file__)+"/swagger.yaml could not be found. The generation of a simple API-Doc was skipped")
sys.exit(1)
except yaml.scanner.ScannerError:
raise Exception("\n\tException Handled in /swagger/yamlscript.py: The YAML File at "+ os.path.dirname(__file__)+"/swagger.yaml is invalid! The generation of a simple API-Doc was skipped")
sys.exit(1)
included_paths={
"/e-entity/freme-ner/documents": ["post"],
"/e-entity/dbpedia-spotlight/documents": ["post"],
"/e-publishing/html": ["post"],
"/e-link/documents/": ["post"],
"/e-translation/tilde": ["post"],
"/e-terminology/tilde": ["post"],
"/e-link/explore": ["post"]
}
for path in full["paths"].keys():
if path not in included_paths:
del full["paths"][path]
else:
for method in included_paths[path]:
if method not in full["paths"][path].keys():
del full["paths"][path][method]
# else:
# full["paths"][path][method]['tags']=["Enrichment Endpoints"]
full["tags"]=[x for x in full["tags"] if x["name"]!="General Information"]
full['info']['description']="This section only covers the most important endpoints of FREME: the enrichment endpoints.<br><br> The endpoints can be used to access FREME e-Services via common HTTP requests.<br><br> A full documentation of all e-Service endpoints, including all parameters, is provided <a href=\"full.html\">here</a>. For usage examples, see the <a href=\"../Tutorials/overview.html\">tutorial section</a>."
with open(os.path.dirname(__file__)+"/simple.yaml",'w') as f:
f.write(yaml.dump(full))
return 0
if __name__ == '__main__':
main()
|
apache-2.0
|
Python
|
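One portability note for reuse: the filtering loop relies on Python 2, where dict.keys() returns a list; under Python 3 it is a live view and deleting during the loop raises RuntimeError. A portable sketch with abbreviated data:
# Snapshot the keys before deleting so the loop works on Python 2 and 3.
included_paths = {"/e-link/explore": ["post"]}                    # abbreviated
full = {"paths": {"/e-link/explore": {"post": {}}, "/old": {"get": {}}}}

for path in list(full["paths"]):
    if path not in included_paths:
        del full["paths"][path]
assert list(full["paths"]) == ["/e-link/explore"]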
86b698a228ddf1309e8f2006726724af05c5fca1
|
bump version
|
pinax/symposion,miurahr/symposion,pydata/symposion,pyconca/2013-web,python-spain/symposion,pyconau2017/symposion,NelleV/pyconfr-test,miurahr/symposion,faulteh/symposion,pyohio/symposion,pyohio/symposion,NelleV/pyconfr-test,mbrochh/symposion,TheOpenBastion/symposion,euroscipy/symposion,toulibre/symposion,mbrochh/symposion,euroscipy/symposion,python-spain/symposion,TheOpenBastion/symposion,pyconau2017/symposion,pydata/symposion,toulibre/symposion,pinax/symposion,pyconca/2013-web,faulteh/symposion
|
symposion/__init__.py
|
symposion/__init__.py
|
__version__ = "1.0b1.dev12"
|
__version__ = "1.0b1.dev11"
|
bsd-3-clause
|
Python
|
18d551d2495fc122edb142e416a06ce4129da1f7
|
Update urls.py
|
BoraDowon/Life3.0,BoraDowon/Life3.0,BoraDowon/Life3.0
|
life3/config/urls.py
|
life3/config/urls.py
|
"""life3.0 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.conf.urls import include
from life3.dashboard import views as dashboard_view
urlpatterns = [
url(r'^$', dashboard_view.api_home),
url(r'^dashboard/api/', include('life3.dashboard.urls')),
url(r'^login/', include('life3.login.urls')),
]
|
"""life3.0 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.conf.urls import include
from life3.dashboard import views as dashboard_view
urlpatterns = [
url(r'^$/', dashboard_view.api_home),
url(r'^dashboard/api/', include('life3.dashboard.urls')),
url(r'^login/', include('life3.login.urls')),
]
|
mit
|
Python
|
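The dropped `$/` was not cosmetic: `$` asserts end-of-string, so nothing can follow it and r'^$/' can never match, leaving the root URL dead. A two-line check:
import re
assert re.search(r'^$/', '/') is None     # the old pattern matches nothing
assert re.search(r'^$', '') is not None   # the fixed pattern matches the root path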
ac3a9211725a0538c8c8f7899d86e4e22ceebb71
|
Update binary_search.py
|
ueg1990/aids
|
aids/sorting_and_searching/binary_search.py
|
aids/sorting_and_searching/binary_search.py
|
'''
In this module, we implement binary search in Python both
recursively and iteratively
Assumption: Array is sorted
Time complexity: O(log n)
'''
def binary_search_recursive(arr, left, right, value):
'''
Recursive implementation of binary search of a sorted array
Return index of the value found else return None
'''
if arr and left <= right:
middle = left + (right - left) / 2
if arr[middle] == value:
return middle
if arr[middle] > value:
return binary_search_recursive(arr, left, middle - 1, value)
return binary_search_recursive(arr, middle + 1, right, value)
return None
def binary_search_iterative(arr, left, right, value):
'''
Iterative implementation of binary search of a sorted array
Return index of the value of found else return None
'''
if arr:
while left <= right:
middle = left + (right - left) / 2
if arr[middle] == value:
return middle
elif arr[middle] > value:
right = middle - 1
else:
left = middle + 1
return None
|
'''
In this module, we implement binary search in Python both
recursively and iteratively
Assumption: Array is sorted
Time complexity: O(log n)
'''
def binary_search_recursive(arr, left, right, value):
'''
Recursive implementation of binary search of a sorted array
Return index of the value found else return None
'''
if arr and left <= right:
middle = (left + right) / 2
if arr[middle] == value:
return middle
if arr[middle] > value:
return binary_search_recursive(arr, left, middle - 1, value)
return binary_search_recursive(arr, middle + 1, right, value)
return None
def binary_search_iterative(arr, left, right, value):
'''
Iterative implementation of binary search of a sorted array
Return index of the value of found else return None
'''
if arr:
while left <= right:
middle = (left + right) / 2 # left + (right - left) / 2
if arr[middle] == value:
return middle
elif arr[middle] > value:
right = middle - 1
else:
left = middle + 1
return None
|
mit
|
Python
|
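The midpoint rewrite is the classic overflow-safe form. Python integers cannot overflow, so the practical hazard in this file is different: under Python 3 the `/` above would produce a float index. The portable form uses floor division:
# left + (right - left) // 2: equivalent to (left + right) // 2 for
# Python ints, survives fixed-width integer languages, and // keeps the
# result an int on Python 3 (the file above uses Python 2's integer /).
def midpoint(left, right):
    return left + (right - left) // 2

assert midpoint(0, 9) == 4
assert isinstance(midpoint(0, 9), int)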
1433106d2e36a08f79b4b2c67e07c1fdd361bda6
|
fix MAINTENANCE_MODE logic
|
DemocracyClub/electionleaflets,JustinWingChungHui/electionleaflets,JustinWingChungHui/electionleaflets,JustinWingChungHui/electionleaflets,JustinWingChungHui/electionleaflets,DemocracyClub/electionleaflets,DemocracyClub/electionleaflets
|
electionleaflets/urls.py
|
electionleaflets/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.views.generic import TemplateView
admin.autodiscover()
from leaflets.feeds import *
from core.views import HomeView, MaintenanceView
MAINTENANCE_MODE = getattr(settings, 'MAINTENANCE_MODE', False)
if MAINTENANCE_MODE:
urlpatterns = patterns(
'',
url(r'.*', MaintenanceView.as_view(), name='maintenance_view'),
)
else:
urlpatterns = patterns(
'',
url(r'^$', HomeView.as_view(), name='home'),
url(r'^leaflets', include('leaflets.urls')),
url(r'^parties', include('parties.urls')),
url(r'^constituencies', include('constituencies.urls')),
url(r'^analysis', include('analysis.urls')),
url(r'^tags', include('tags.urls')),
url(r'^categories', include('categories.urls')),
url(r'^api/', include('api.urls')),
# Feeds
url(r'^feeds/latest/$', LatestLeafletsFeed(), name='latest_feed'),
# url(r'^feeds/party/(?P<party_slug>[\w_\-\.]+)/$', PartyFeed(), name='party_feed'),
# url(r'^feeds/attacking/(?P<party_slug>[\w_\-\.]+)/$', AttackingPartyFeed(), name='attacking_party_feed'),
url(r'^feeds/constituency/(?P<cons_slug>[\w_\-\.]+)/$', ConstituencyFeed(), name='constituency_feed'),
url(r'^feeds/category/(?P<cat_slug>[\w_\-\.]+)/$', CategoryFeed(), name='category_feed'),
url(r'^feeds/tag/(?P<tag_slug>[\w_\-\.]+)/$', TagFeed(), name='tag_feed'),
# Individual urls
url(r'^about/$', TemplateView.as_view(template_name='core/about.html'), name='about'),
url(r'^report/(?P<id>\d+)/sent/$', TemplateView.as_view(template_name='core/report_sent.html'), name='report_abuse_sent'),
url(r'^report/(?P<id>\d+)/$', 'core.views.report_abuse', name='report_abuse'),
# Administration URLS
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('allauth.urls')),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.views.generic import TemplateView
admin.autodiscover()
from leaflets.feeds import *
from core.views import HomeView, MaintenanceView
if getattr(settings, 'MAINTENANCE_MODE', None):
urlpatterns = patterns(
'',
url(r'.*', MaintenanceView.as_view(), name='maintenance_view'),
)
else:
urlpatterns = patterns(
'',
url(r'^$', HomeView.as_view(), name='home'),
url(r'^leaflets', include('leaflets.urls')),
url(r'^parties', include('parties.urls')),
url(r'^constituencies', include('constituencies.urls')),
url(r'^analysis', include('analysis.urls')),
url(r'^tags', include('tags.urls')),
url(r'^categories', include('categories.urls')),
url(r'^api/', include('api.urls')),
# Feeds
url(r'^feeds/latest/$', LatestLeafletsFeed(), name='latest_feed'),
# url(r'^feeds/party/(?P<party_slug>[\w_\-\.]+)/$', PartyFeed(), name='party_feed'),
# url(r'^feeds/attacking/(?P<party_slug>[\w_\-\.]+)/$', AttackingPartyFeed(), name='attacking_party_feed'),
url(r'^feeds/constituency/(?P<cons_slug>[\w_\-\.]+)/$', ConstituencyFeed(), name='constituency_feed'),
url(r'^feeds/category/(?P<cat_slug>[\w_\-\.]+)/$', CategoryFeed(), name='category_feed'),
url(r'^feeds/tag/(?P<tag_slug>[\w_\-\.]+)/$', TagFeed(), name='tag_feed'),
# Individual urls
url(r'^about/$', TemplateView.as_view(template_name='core/about.html'), name='about'),
url(r'^report/(?P<id>\d+)/sent/$', TemplateView.as_view(template_name='core/report_sent.html'), name='report_abuse_sent'),
url(r'^report/(?P<id>\d+)/$', 'core.views.report_abuse', name='report_abuse'),
# Administration URLS
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('allauth.urls')),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
mit
|
Python
|
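The refactor reduces the feature toggle to a single explicit boolean. The pattern in isolation, with a stand-in for django.conf.settings:
class _Settings(object):      # stand-in so the sketch runs without Django
    pass

settings = _Settings()
# A missing setting and a falsy setting collapse to the same branch value.
MAINTENANCE_MODE = getattr(settings, 'MAINTENANCE_MODE', False)
assert MAINTENANCE_MODE is False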
bc9bbe0075f8a6571179e2310a9cfeaff89652b2
|
Remove unused argument
|
nerevu/riko,nerevu/riko
|
modules/pipeunion.py
|
modules/pipeunion.py
|
# pipeunion.py
#
from pipe2py import util
def pipe_union(context, _INPUT, **kwargs):
"""This operator merges up to 5 source together.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- _OTHER1 - another source generator
_OTHER2 etc.
Yields (_OUTPUT):
union of all source items
"""
#TODO the multiple sources should be pulled in parallel
# check David Beazely for suggestions (co-routines with queues?)
# or maybe use multiprocessing and Queues (perhaps over multiple servers too)
#Single thread and sequential pulling will do for now...
for item in _INPUT:
if item == True: #i.e. this is being fed forever, i.e. not a real source so just use _OTHERs
break
yield item
for other in kwargs:
if other.startswith('_OTHER'):
for item in kwargs[other]:
yield item
|
# pipeunion.py
#
from pipe2py import util
def pipe_union(context, _INPUT, conf, **kwargs):
"""This operator merges up to 5 source together.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- _OTHER1 - another source generator
_OTHER2 etc.
conf:
Yields (_OUTPUT):
union of all source items
"""
#TODO the multiple sources should be pulled in parallel
# check David Beazely for suggestions (co-routines with queues?)
# or maybe use multiprocessing and Queues (perhaps over multiple servers too)
#Single thread and sequential pulling will do for now...
for item in _INPUT:
if item == True: #i.e. this is being fed forever, i.e. not a real source so just use _OTHERs
break
yield item
for other in kwargs:
if other.startswith('_OTHER'):
for item in kwargs[other]:
yield item
|
mit
|
Python
|
3053c57a67c4dfb5e20bb93d6a586c7acf84275e
|
Prepare release v1.3.5.
|
bigbrozer/monitoring.nagios,bigbrozer/monitoring.nagios
|
monitoring/nagios/__init__.py
|
monitoring/nagios/__init__.py
|
import monitoring.nagios.logger
__version__ = '1.3.5'
|
import monitoring.nagios.logger
__version__ = '1.3.2'
|
mit
|
Python
|
cf07c34fe3a3d7b8767e50e77e609253dd177cff
|
Use isoformat date RFC 3339
|
YunoHost/moulinette
|
moulinette/utils/serialize.py
|
moulinette/utils/serialize.py
|
import logging
from json.encoder import JSONEncoder
import datetime
logger = logging.getLogger('moulinette.utils.serialize')
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder
Extend default JSON encoder to recognize more types and classes. It
will never raise if the object can't be encoded and return its repr
instead.
The following objects and types are supported:
- set: converted into list
"""
def default(self, o):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
return list(o)
# Convert compatible containers into list
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
return repr(o)
|
import logging
from json.encoder import JSONEncoder
import datetime
logger = logging.getLogger('moulinette.utils.serialize')
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder
Extend default JSON encoder to recognize more types and classes. It
will never raise if the object can't be encoded and return its repr
instead.
The following objects and types are supported:
- set: converted into list
"""
def default(self, o):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
return list(o)
# Convert compatible containers into list
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return str(o)
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
return repr(o)
|
agpl-3.0
|
Python
|
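What the one-call change buys, on a concrete value (the datetime is invented): str() uses a space separator, while isoformat() emits the RFC 3339 / ISO 8601 'T' form that JSON consumers parse unambiguously.
import datetime

dt = datetime.datetime(2016, 5, 1, 12, 30, 0)
assert str(dt) == '2016-05-01 12:30:00'          # space-separated
assert dt.isoformat() == '2016-05-01T12:30:00'   # RFC 3339 profile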
5d8b2224bf2864ad7e4bacb0624542dec8549b57
|
add mpf-mc entry points in machine test
|
missionpinball/mpf,missionpinball/mpf
|
mpf/tests/MpfMachineTestCase.py
|
mpf/tests/MpfMachineTestCase.py
|
import inspect
from mpf.core.machine import MachineController
from mpf.tests.MpfTestCase import MpfTestCase
class MpfMachineTestCase(MpfTestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
# only disable bcp. everything else should run
self.machine_config_patches = dict()
self.machine_config_patches['bcp'] = []
# increase test expected duration
self.expected_duration = 5.0
@staticmethod
def _load_mc_players(cls):
mc_players = {
"sound_player": "mpfmc.config_players.sound_player",
"widget_player": "mpfmc.config_players.widget_player",
"slide_player": "mpfmc.config_players.slide_player"
}
for name, module in mc_players.items():
imported_module = inspect.importlib.import_module(module)
setattr(cls, '{}_player'.format(name),
imported_module.player_cls(cls))
def setUp(self):
MachineController._register_plugin_config_players = self._load_mc_players
super().setUp()
def get_enable_plugins(self):
return True
def getConfigFile(self):
return "config.yaml"
def getMachinePath(self):
return ""
def getAbsoluteMachinePath(self):
# do not use path relative to MPF folder
return self.getMachinePath()
def get_platform(self):
return 'smart_virtual'
|
from mpf.tests.MpfTestCase import MpfTestCase
class MpfMachineTestCase(MpfTestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
# only disable bcp. everything else should run
self.machine_config_patches = dict()
self.machine_config_patches['bcp'] = []
# increase test expected duration
self.expected_duration = 5.0
def getConfigFile(self):
return "config.yaml"
def getMachinePath(self):
return ""
def getAbsoluteMachinePath(self):
# do not use path relative to MPF folder
return self.getMachinePath()
def get_platform(self):
return 'smart_virtual'
|
mit
|
Python
|
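One fragility in the new test case: inspect.importlib.import_module only resolves because inspect happens to import importlib internally, which is an implementation detail. The same dynamic-import-and-bind pattern with importlib imported directly (the mapping below points at a stdlib module so the sketch runs anywhere):
import importlib

players = {"json_player": "json"}   # hypothetical, in the shape of mc_players
for name, module_path in players.items():
    module = importlib.import_module(module_path)   # raises ImportError if absent
    print(name, module.__name__)                    # json_player json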
04745c9c4074ee44e2cfd7ef5fecae1eb796b109
|
Fix now_utc() to return aware datetime
|
Dark5ide/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core,linuxipho/mycroft-core
|
mycroft/util/time.py
|
mycroft/util/time.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from datetime import datetime
from dateutil.tz import gettz, tzlocal
def default_timezone():
""" Get the default timezone
Based on user location settings location.timezone.code or
the default system value if no setting exists.
Returns:
(datetime.tzinfo): Definition of the default timezone
"""
try:
# Obtain from user's configured settings
# location.timezone.code (e.g. "America/Chicago")
# location.timezone.name (e.g. "Central Standard Time")
# location.timezone.offset (e.g. -21600000)
from mycroft.configuration import Configuration
config = Configuration.get()
code = config["location"]["timezone"]["code"]
return gettz(code)
except Exception:
# Just go with system default timezone
return tzlocal()
def now_utc():
""" Retrieve the current time in UTC
Returns:
(datetime): The current time in Universal Time, aka GMT
"""
return to_utc(datetime.utcnow())
def now_local(tz=None):
""" Retrieve the current time
Args:
tz (datetime.tzinfo, optional): Timezone, default to user's settings
Returns:
(datetime): The current time
"""
if not tz:
tz = default_timezone()
return datetime.now(tz)
def to_utc(dt):
""" Convert a datetime with timezone info to a UTC datetime
Args:
dt (datetime): A datetime (presumably in some local zone)
Returns:
(datetime): time converted to UTC
"""
tzUTC = gettz("UTC")
if dt.tzinfo:
return dt.astimezone(tzUTC)
else:
return dt.replace(tzinfo=gettz("UTC")).astimezone(tzUTC)
def to_local(dt):
""" Convert a datetime to the user's local timezone
Args:
dt (datetime): A datetime (if no timezone, defaults to UTC)
Returns:
(datetime): time converted to the local timezone
"""
tz = default_timezone()
if dt.tzinfo:
return dt.astimezone(tz)
else:
return dt.replace(tzinfo=gettz("UTC")).astimezone(tz)
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from datetime import datetime
from dateutil.tz import gettz, tzlocal
def default_timezone():
""" Get the default timezone
Based on user location settings location.timezone.code or
the default system value if no setting exists.
Returns:
(datetime.tzinfo): Definition of the default timezone
"""
try:
# Obtain from user's configured settings
# location.timezone.code (e.g. "America/Chicago")
# location.timezone.name (e.g. "Central Standard Time")
# location.timezone.offset (e.g. -21600000)
from mycroft.configuration import Configuration
config = Configuration.get()
code = config["location"]["timezone"]["code"]
return gettz(code)
except Exception:
# Just go with system default timezone
return tzlocal()
def now_utc():
""" Retrieve the current time in UTC
Returns:
(datetime): The current time in Universal Time, aka GMT
"""
return datetime.utcnow()
def now_local(tz=None):
""" Retrieve the current time
Args:
tz (datetime.tzinfo, optional): Timezone, default to user's settings
Returns:
(datetime): The current time
"""
if not tz:
tz = default_timezone()
return datetime.now(tz)
def to_utc(dt):
""" Convert a datetime with timezone info to a UTC datetime
Args:
dt (datetime): A datetime (presumably in some local zone)
Returns:
(datetime): time converted to UTC
"""
tzUTC = gettz("UTC")
if dt.tzinfo:
return dt.astimezone(tzUTC)
else:
return dt.replace(tzinfo=gettz("UTC")).astimezone(tzUTC)
def to_local(dt):
""" Convert a datetime to the user's local timezone
Args:
dt (datetime): A datetime (if no timezone, defaults to UTC)
Returns:
(datetime): time converted to the local timezone
"""
tz = default_timezone()
if dt.tzinfo:
return dt.astimezone(tz)
else:
return dt.replace(tzinfo=gettz("UTC")).astimezone(tz)
|
apache-2.0
|
Python
|
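The essence of the fix, shown directly; dateutil is already a dependency of the file above. datetime.utcnow() is naive (tzinfo is None) and compares unsafely with aware values, so the commit routes it through to_utc() to attach the zone:
from datetime import datetime
from dateutil.tz import gettz

naive = datetime.utcnow()
assert naive.tzinfo is None                   # naive: no zone attached
aware = naive.replace(tzinfo=gettz("UTC"))    # what to_utc() does here
assert aware.tzinfo is not None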
e4ccfdb49951ed9c4073ba389421d89fea273288
|
make test more robust
|
missionpinball/mpf-mc,missionpinball/mpf-mc,missionpinball/mpf-mc
|
mpfmc/tests/MpfSlideTestCase.py
|
mpfmc/tests/MpfSlideTestCase.py
|
from mpf.tests.MpfTestCase import MpfTestCase
class MpfSlideTestCase(MpfTestCase):
def assertSlideOnTop(self, slide_name, target="default"):
self.assertEqual(slide_name, self.mc.targets[target].current_slide.name)
def assertTextOnTopSlide(self, text, target="default"):
self.assertTextInSlide(text, self.mc.targets[target].current_slide.name)
def assertTextNotOnTopSlide(self, text, target="default"):
self.assertTextNotInSlide(text, self.mc.targets[target].current_slide.name)
def assertSlideActive(self, slide_name):
self.assertIn(slide_name, self.mc.active_slides, "Slide {} is not active.".format(slide_name))
def assertSlideNotActive(self, slide_name):
self.assertNotIn(slide_name, self.mc.active_slides, "Slide {} is active but should not.".format(slide_name))
def _get_texts_from_slide(self, slide):
texts = []
for children in slide.children:
if children.children:
texts.extend(self._get_texts_from_slide(children))
if hasattr(children, "text"):
texts.append(children.text)
return texts
def assertTextInSlide(self, text, slide_name):
self.assertSlideActive(slide_name)
self.assertIn(text, self._get_texts_from_slide(self.mc.active_slides[slide_name]),
"Text {} not found in slide {}.".format(text, slide_name))
def assertTextNotInSlide(self, text, slide_name):
self.assertSlideActive(slide_name)
self.assertNotIn(text, self._get_texts_from_slide(self.mc.active_slides[slide_name]),
"Text {} found in slide {} but should not be there.".format(text, slide_name))
|
from mpf.tests.MpfTestCase import MpfTestCase
class MpfSlideTestCase(MpfTestCase):
def assertSlideOnTop(self, slide_name, target="default"):
self.assertEqual(slide_name, self.mc.targets[target].current_slide.name)
def assertTextOnTopSlide(self, text, target="default"):
self.assertTextInSlide(text, self.mc.targets[target].current_slide.name)
def assertTextNotOnTopSlide(self, text, target="default"):
self.assertTextNotInSlide(text, self.mc.targets[target].current_slide.name)
def assertSlideActive(self, slide_name):
self.assertIn(slide_name, self.mc.active_slides, "Slide {} is not active.".format(slide_name))
def assertSlideNotActive(self, slide_name):
self.assertNotIn(slide_name, self.mc.active_slides, "Slide {} is active but should not.".format(slide_name))
def assertTextInSlide(self, text, slide_name):
self.assertSlideActive(slide_name)
self.assertIn(text, [x.text for x in self.mc.active_slides[slide_name].children[0].children],
"Text {} not found in slide {}.".format(text, slide_name))
def assertTextNotInSlide(self, text, slide_name):
self.assertSlideActive(slide_name)
self.assertNotIn(text, [x.text for x in self.mc.active_slides[slide_name].children[0].children],
"Text {} found in slide {} but should not be there.".format(text, slide_name))
|
mit
|
Python
|
2d95b9a4b6d87e9f630c59995403988dee390c20
|
Fix simple typo: utilty -> utility (#5182)
|
dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost
|
doc/sphinx_util.py
|
doc/sphinx_util.py
|
# -*- coding: utf-8 -*-
"""Helper utility function for customization."""
import sys
import os
import docutils
import subprocess
READTHEDOCS_BUILD = (os.environ.get('READTHEDOCS', None) is not None)
if not os.path.exists('web-data'):
subprocess.call('rm -rf web-data;' +
'git clone https://github.com/dmlc/web-data', shell = True)
else:
subprocess.call('cd web-data; git pull', shell=True)
sys.stderr.write('READTHEDOCS=%s\n' % (READTHEDOCS_BUILD))
|
# -*- coding: utf-8 -*-
"""Helper utilty function for customization."""
import sys
import os
import docutils
import subprocess
READTHEDOCS_BUILD = (os.environ.get('READTHEDOCS', None) is not None)
if not os.path.exists('web-data'):
subprocess.call('rm -rf web-data;' +
'git clone https://github.com/dmlc/web-data', shell = True)
else:
subprocess.call('cd web-data; git pull', shell=True)
sys.stderr.write('READTHEDOCS=%s\n' % (READTHEDOCS_BUILD))
|
apache-2.0
|
Python
|
62f3a1ce0e2af511e897ac300e3ab32f4bf14463
|
Fix docs
|
pybel/pybel,pybel/pybel,pybel/pybel
|
src/pybel/struct/filters/node_predicates/modifications.py
|
src/pybel/struct/filters/node_predicates/modifications.py
|
# -*- coding: utf-8 -*-
"""Predicates for checking nodes' variants."""
from functools import wraps
from typing import Tuple, Type, Union
from .utils import node_predicate
from ..typing import NodePredicate
from ....dsl import BaseEntity, CentralDogma, Fragment, GeneModification, Hgvs, ProteinModification, Variant
__all__ = [
'has_variant',
'has_protein_modification',
'has_gene_modification',
'has_fragment',
'has_hgvs',
]
@node_predicate
def has_variant(node: BaseEntity) -> bool:
"""Return true if the node has any variants."""
return isinstance(node, CentralDogma) and node.variants
def _variant_checker(variant_cls: Union[Type[Variant], Tuple[Type[Variant], ...]]) -> NodePredicate:
@node_predicate
@wraps(node_has_variant)
def _rv(node: BaseEntity):
return node_has_variant(node, variant_cls)
return _rv
def node_has_variant(node: BaseEntity, variant_cls) -> bool:
"""Return true if the node has at least one of the given variant."""
return isinstance(node, CentralDogma) and node.variants and any(
isinstance(variant, variant_cls)
for variant in node.variants
)
has_protein_modification = _variant_checker(ProteinModification)
has_gene_modification = _variant_checker(GeneModification)
has_hgvs = _variant_checker(Hgvs)
has_fragment = _variant_checker(Fragment)
|
# -*- coding: utf-8 -*-
"""Predicates for checking nodes' variants."""
from typing import Tuple, Type, Union
from .utils import node_predicate
from ..typing import NodePredicate
from ....dsl import BaseEntity, CentralDogma, Fragment, GeneModification, Hgvs, ProteinModification, Variant
__all__ = [
'has_variant',
'has_protein_modification',
'has_gene_modification',
'has_fragment',
'has_hgvs',
]
@node_predicate
def has_variant(node: BaseEntity) -> bool:
"""Return true if the node has any variants."""
return isinstance(node, CentralDogma) and node.variants
def _variant_checker(variant_cls: Union[Type[Variant], Tuple[Type[Variant], ...]]) -> NodePredicate:
@node_predicate
def _node_has_variant(node: BaseEntity) -> bool:
"""Return true if the node has at least one of the given variant."""
return isinstance(node, CentralDogma) and node.variants and any(
isinstance(variant, variant_cls)
for variant in node.variants
)
return _node_has_variant
has_protein_modification = _variant_checker(ProteinModification)
has_gene_modification = _variant_checker(GeneModification)
has_hgvs = _variant_checker(Hgvs)
has_fragment = _variant_checker(Fragment)
|
mit
|
Python
|
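What @wraps contributes in the factory above, reduced to a minimal pair: without it every generated predicate reports the inner wrapper's name, with it they inherit the wrapped function's name and docstring, keeping introspection and Sphinx output readable.
from functools import wraps

def outer(inner):
    @wraps(inner)
    def _rv(*args, **kwargs):
        return inner(*args, **kwargs)
    return _rv

def documented():
    """Original docstring."""

assert outer(documented).__name__ == 'documented'
assert outer(documented).__doc__ == 'Original docstring.'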
65b658d9bb1b9220cfd15724692517c14f5e2cbc
|
Send more information
|
ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc
|
openprescribing/frontend/signals/handlers.py
|
openprescribing/frontend/signals/handlers.py
|
import logging
from allauth.account.signals import user_logged_in
from anymail.signals import tracking
from requests_futures.sessions import FuturesSession
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from common.utils import google_user_id
from frontend.models import Profile
logger = logging.getLogger(__name__)
@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(user_logged_in, sender=User)
def handle_user_logged_in(sender, request, user, **kwargs):
user.searchbookmark_set.update(approved=True)
user.orgbookmark_set.update(approved=True)
def send_ga_event(event):
user = User.objects.filter(email=event.recipient)
if user:
user = user[0]
session = FuturesSession()
payload = {
'v': 1,
'tid': settings.GOOGLE_TRACKING_ID,
'cid': google_user_id(user),
't': 'event',
'ec': 'email',
'ea': event.event_type,
'ua': event.user_agent,
'cm': 'email',
}
if event.esp_event:
payload['dt'] = event.esp_event['subject']
payload['cn'] = event.esp_event['campaign_name']
payload['cs'] = event.esp_event['campaign_source']
payload['dp'] = "/email/%s/%s/%s/%s" % (
event.esp_event['campaign_name'],
event.esp_event['campaign_source'],
event.esp_event['user_id'],
event.event_type
)
else:
logger.warn("No esp_event found for event: %s" % event.__dict__)
logger.info("Sending mail event data Analytics: %s" % payload)
session.post(
'https://www.google-analytics.com/collect', data=payload)
else:
logger.warn("Could not find receipient %s" % event.recipient)
@receiver(tracking)
def handle_anymail_webhook(sender, event, esp_name, **kwargs):
logger.debug("Received webhook from %s: %s" % (esp_name, event.__dict__))
send_ga_event(event)
|
import logging
from allauth.account.signals import user_logged_in
from anymail.signals import tracking
from requests_futures.sessions import FuturesSession
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from common.utils import google_user_id
from frontend.models import Profile
logger = logging.getLogger(__name__)
@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(user_logged_in, sender=User)
def handle_user_logged_in(sender, request, user, **kwargs):
user.searchbookmark_set.update(approved=True)
user.orgbookmark_set.update(approved=True)
def send_ga_event(event):
user = User.objects.filter(email=event.recipient)
if user:
user = user[0]
session = FuturesSession()
payload = {
'v': 1,
'tid': settings.GOOGLE_TRACKING_ID,
'cid': google_user_id(user),
't': 'event',
'ec': 'email',
'ea': event.event_type,
'ua': event.user_agent,
'cm': 'email',
}
if event.esp_event:
payload['dt'] = event.esp_event['subject']
payload['cn'] = event.esp_event['campaign_name']
payload['cs'] = event.esp_event['campaign_source']
payload['dp'] = "/email/%s/%s/%s/%s" % (
event.esp_event['campaign_name'],
event.esp_event['campaign_source'],
event.esp_event['user_id'],
event.event_type
)
else:
logger.warn("No esp_event found for event: %s" % event.__dict__)
logger.info("Sending mail event data Analytics: %s" % payload)
session.post(
'https://www.google-analytics.com/collect', data=payload)
else:
logger.warn("Could not find receipient %s" % event.recipient)
@receiver(tracking)
def handle_anymail_webhook(sender, event, esp_name, **kwargs):
logger.debug("Received webhook from %s: %s" % (esp_name))
send_ga_event(event)
|
mit
|
Python
|
09cb8a0fbb10f14d6622bbeed815e025e4eb1751
|
Update newServer.py
|
mrahman1122/Team4CS3240
|
Server/newServer.py
|
Server/newServer.py
|
__author__ = 'masudurrahman'
import sys
import os
from twisted.protocols import ftp
from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell
from twisted.cred.portal import Portal
from twisted.cred import checkers
from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB
from twisted.internet import reactor
from twisted.python import log
from twisted.internet.defer import succeed, failure
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
# def opsCall(obj):
# print "Processing", obj.fObj.name
# return "Completed"
# class MyFTPRealm(FTPRealm):
# def __init__(self, anonymousRoot):
# self.anonymousRoot = filepath.FilePath(anonymousRoot)
# def requestAvatar(self, avatarId, mind, *interfaces):
# for iface in interfaces:
# if iface is IFTPShell:
# if avatarId is checkers.ANONYMOUS:
# avatar = FTPAnonymousShell(self.anonymousRoot)
# else:
# avatar = FTPShell(filepath.FilePath("/home/") + avatarId)
# return (IFTPShell, avatar,
# getattr(avatar, 'logout', lambda: None))
# raise NotImplementedError("Only IFTPShell interface is supported by this realm")
if __name__ == "__main__":
# Try#1
# p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass.dat")])
# Try#2
# p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat", ":", 0, 0, True, None, False)])
# Try#3
checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
checker.addUser("guest", "password")
realm = FTPRealm('./', userHome='/Users')
p = Portal(realm, [checker])
f = ftp.FTPFactory(p)
f.welcomeMessage = "CS3240 Team 4 Project"
log.startLogging(sys.stdout)
reactor.listenTCP(21, f)
reactor.run()
# PASSWORD = ''
# users = {
# os.environ['USER']: PASSWORD
# }
# p = Portal(FTPRealm('./', userHome='/Users'),
# ( AllowAnonymousAccess(),
# InMemoryDB(**users),)
# )
# f = FTPFactory(p)
# reactor.listenTCP(21, f)
# reactor.run()
|
__author__ = 'masudurrahman'
import sys
import os
from twisted.protocols import ftp
from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell
from twisted.cred.portal import Portal
from twisted.cred import checkers
from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB
from twisted.internet import reactor
from twisted.python import log
from twisted.internet.defer import succeed, failure
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
# def opsCall(obj):
# print "Processing", obj.fObj.name
# return "Completed"
# class MyFTPRealm(FTPRealm):
# def __init__(self, anonymousRoot):
# self.anonymousRoot = filepath.FilePath(anonymousRoot)
# def requestAvatar(self, avatarId, mind, *interfaces):
# for iface in interfaces:
# if iface is IFTPShell:
# if avatarId is checkers.ANONYMOUS:
# avatar = FTPAnonymousShell(self.anonymousRoot)
# else:
# avatar = FTPShell(filepath.FilePath("/home/") + avatarId)
# return (IFTPShell, avatar,
# getattr(avatar, 'logout', lambda: None))
# raise NotImplementedError("Only IFTPShell interface is supported by this realm")
if __name__ == "__main__":
# Try#1
# p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass.dat")])
# Try#2
# p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat", ":", 0, 0, True, None, False)])
# Try#3
checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
check.addUser("guest", "password")
realm = FTPRealm()
p = portal.Portal(realm, [checker])
f = ftp.FTPFactory(p)
f.welcomeMessage = "CS3240 Team 4 Project"
log.startLogging(sys.stdout)
reactor.listenTCP(21, f)
reactor.run()
# PASSWORD = ''
# users = {
# os.environ['USER']: PASSWORD
# }
# p = Portal(FTPRealm('./', userHome='/Users'),
# ( AllowAnonymousAccess(),
# InMemoryDB(**users),)
# )
# f = FTPFactory(p)
# reactor.listenTCP(21, f)
# reactor.run()
|
apache-2.0
|
Python
|
25e71a56d48e5bdc4d73522333196d69d735707a
|
Update the PCA10056 example to use new pin naming
|
adafruit/micropython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython
|
ports/nrf/boards/pca10056/examples/buttons.py
|
ports/nrf/boards/pca10056/examples/buttons.py
|
import board
import digitalio
import gamepad
import time
pad = gamepad.GamePad(
digitalio.DigitalInOut(board.P0_11),
digitalio.DigitalInOut(board.P0_12),
digitalio.DigitalInOut(board.P0_24),
digitalio.DigitalInOut(board.P0_25),
)
prev_buttons = 0
while True:
buttons = pad.get_pressed()
if buttons != prev_buttons:
for i in range(0, 4):
bit = (1 << i)
if (buttons & bit) != (prev_buttons & bit):
print('Button %d %s' % (i + 1, 'pressed' if buttons & bit else 'released'))
prev_buttons = buttons
time.sleep(0.1)
|
import board
import digitalio
import gamepad
import time
pad = gamepad.GamePad(
digitalio.DigitalInOut(board.PA11),
digitalio.DigitalInOut(board.PA12),
digitalio.DigitalInOut(board.PA24),
digitalio.DigitalInOut(board.PA25),
)
prev_buttons = 0
while True:
buttons = pad.get_pressed()
if buttons != prev_buttons:
for i in range(0, 4):
bit = (1 << i)
if (buttons & bit) != (prev_buttons & bit):
print('Button %d %s' % (i + 1, 'pressed' if buttons & bit else 'released'))
prev_buttons = buttons
time.sleep(0.1)
|
mit
|
Python
|
3de29a3fdd17beece1fbe26c4f578cd854d16d0d
|
Fix bug introduced in update_from_old_problemformat.py
|
Kattis/problemtools,Kattis/problemtools,Kattis/problemtools,Kattis/problemtools
|
problemtools/update_from_old_problemformat.py
|
problemtools/update_from_old_problemformat.py
|
# -*- coding: utf-8 -*-
import argparse
import glob
import os.path
import yaml
def update(problemdir):
probyaml = os.path.join(problemdir, 'problem.yaml')
if not os.path.isfile(probyaml):
raise Exception('Could not find %s' % probyaml)
config = yaml.safe_load('%s' % open(probyaml, 'r').read())
stmts = glob.glob(os.path.join(problemdir, 'problem_statement/problem.tex'))
stmts.extend(glob.glob(os.path.join(problemdir, 'problem_statement/problem.[a-z][a-z].tex')))
yaml_changed = False
if 'name' in config:
print('Move problem name "%s" to these problem statement files: %s' % (config['name'], stmts))
for f in stmts:
stmt = open(f, 'r').read()
if stmt.find('\\problemname{') != -1:
print(' Statement %s already has a problemname, skipping' % f)
continue
newstmt = '\\problemname{%s}\n\n%s' % (config['name'], stmt)
open(f, 'w').write(newstmt)
del config['name']
yaml_changed = True
if 'validator' in config:
validator_flags = config['validator'].split()
validation = 'default'
if validator_flags[0] == 'custom':
validation = 'custom'
validator_flags = validator_flags[1:]
validator_flags = ' '.join(validator_flags)
print('Old validator option exists, moving to validation: %s, validator_flags: %s' % (validation, validator_flags))
config['validation'] = validation
if validator_flags != '':
config['validator_flags'] = validator_flags
del config['validator']
yaml_changed = True
if yaml_changed:
open(probyaml, 'w').write(yaml.dump(config, default_flow_style=False, allow_unicode=True))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('problemdir', nargs='+')
options = parser.parse_args()
for problemdir in options.problemdir:
try:
print('Updating %s' % problemdir)
update(problemdir)
except Exception as e:
print('Update FAILED: %s' % e)
|
# -*- coding: utf-8 -*-
import argparse
import glob
import os.path
import yaml
def update(problemdir):
probyaml = os.path.join(problemdir, 'problem.yaml')
if not os.path.isfile(probyaml):
raise Exception('Could not find %s' % probyaml)
config = yaml.safe_load('%s' % open(probyaml, 'r').read())
stmts = glob.glob(os.path.join(problemdir, 'problem_statement/problem.tex'))
stmts.extend(glob.glob(os.path.join(problemdir, 'problem_statement/problem.[a-z][a-z].tex')))
yaml_changed = False
if 'name' in config:
print('Move problem name "%s" to these problem statement files: %s' % (config['name'], stmts))
for f in stmts:
stmt = open(f, 'r').read()
if stmt.find('\\problemname{') != -1:
print(' Statement %s already has a problemname, skipping' % f)
continue
newstmt = '\\problemname{%s}\n\n%s' % (config['name'], stmt)
open(f, 'w').write(newstmt)
del config['name']
yaml_changed = True
if 'validator' in config:
validator_flags = config['validator'].split()
validation = 'default'
if validator_flags[0] == 'custom':
validation = 'custom'
validator_flags = validator_flags[1:]
validator_flags = ' '.join(validator_flags)
print('Old validator option exists, moving to validation: %s, validator_flags: %s' % (validation, validator_flags))
config['validation'] = validation
if validator_flags != '':
config['validator_flags'] = validator_flags
del config['validator']
yaml_changed = True
if yaml_changed:
open(probyaml, 'w').write(yaml.dump(config, default_flow_style=False, allow_unicode=True))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('problemdir', nargs='+')
options = parser.parse_args()
for problemdir in options:
try:
print('Updating %s' % problemdir)
update(problemdir)
except Exception as e:
print('Update FAILED: %s' % e)
|
mit
|
Python
|
a3bb1ff203789b6547e241f2ba0108e89bd1aefe
|
Remove mystery import
|
pavoljuhas/ipython_ophyd,NSLS-II-XPD/ipython_ophyd,pavoljuhas/ipython_ophyd,NSLS-II-XPD/ipython_ophyd
|
profile_collection/startup/80-areadetector.py
|
profile_collection/startup/80-areadetector.py
|
from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'G:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
|
from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'G:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
|
bsd-2-clause
|
Python
|
9bd5b66a50def87de2b8a37ba452ee4efc8a17b7
|
add docstring for update_average
|
xgi/aliendb,xgi/aliendb,xgi/aliendb,xgi/aliendb
|
web/aliendb/apps/analytics/helpers.py
|
web/aliendb/apps/analytics/helpers.py
|
def update_average(field, value, tracked) -> float:
"""Updates a previously calculated average with a new value.
Args:
field: the current average;
value: the new value to include in the average;
tracked: the number of elements used to form the _original_ average;
Returns:
float: the updated average
"""
return (value + field * tracked) / (1 + tracked)
|
def update_average(field, value, tracked):
return (value + field * tracked) / (1 + tracked)
|
bsd-3-clause
|
Python
|
aaac2228119bf965183d30ebf9d4b8cb13699fd8
|
fix tkinter for python 3
|
tdimiduk/groupeng
|
GroupEng.py
|
GroupEng.py
|
#!/usr/bin/python
# Copyright 2011, Thomas G. Dimiduk
#
# This file is part of GroupEng.
#
# GroupEng is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GroupEng is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with GroupEng. If not, see <http://www.gnu.org/licenses/>.
"""
External GroupEng Application. Handles user invocation and marshals things for
use by the rest of GroupEng
.. moduleauthor:: Thomas G. Dimiduk [email protected]
"""
import sys
import os.path
import os
from src import controller
if len(sys.argv) > 1:
try:
debug = os.environ['DEBUG'].lower() == 'true'
except KeyError:
debug = False
if debug:
status, outdir = controller.run(sys.argv[1])
if not status:
print('Could not completely meet all rules')
else:
try:
status, outdir = controller.run(sys.argv[1])
if not status:
print('Could not completely meet all rules')
except Exception as e:
print(e)
else:
# import gui stuff only if we are going to use it
try:
from tkinter import *
except ImportError:
from Tkinter import *
from tkFileDialog import askopenfilename
from tkMessageBox import showerror, showinfo
path = askopenfilename()
d, f = os.path.split(path)
os.chdir(d)
try:
status, outdir = controller.run(f)
except Exception as e:
showerror('GroupEng Error', '{0}'.format(e))
if status:
showinfo("GroupEng", "GroupEng Run Succesful\n Output in: {0}".format(outdir))
else:
showinfo("GroupEng", "GroupEng Ran Correctly but not all rules could be met\n"
"Output in: {0}".format(outdir))
|
#!/usr/bin/python
# Copyright 2011, Thomas G. Dimiduk
#
# This file is part of GroupEng.
#
# GroupEng is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GroupEng is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with GroupEng. If not, see <http://www.gnu.org/licenses/>.
"""
External GroupEng Application. Handles user invocation and marshals things for
use by the rest of GroupEng
.. moduleauthor:: Thomas G. Dimiduk [email protected]
"""
import sys
import os.path
import os
from src import controller
if len(sys.argv) > 1:
try:
debug = os.environ['DEBUG'].lower() == 'true'
except KeyError:
debug = False
if debug:
status, outdir = controller.run(sys.argv[1])
if not status:
print('Could not completely meet all rules')
else:
try:
status, outdir = controller.run(sys.argv[1])
if not status:
print('Could not completely meet all rules')
except Exception as e:
print(e)
else:
# import gui stuff only if we are going to use it
from Tkinter import *
from tkFileDialog import askopenfilename
from tkMessageBox import showerror, showinfo
path = askopenfilename()
d, f = os.path.split(path)
os.chdir(d)
try:
status, outdir = controller.run(f)
except Exception as e:
showerror('GroupEng Error', '{0}'.format(e))
if status:
showinfo("GroupEng", "GroupEng Run Succesful\n Output in: {0}".format(outdir))
else:
showinfo("GroupEng", "GroupEng Ran Correctly but not all rules could be met\n"
"Output in: {0}".format(outdir))
|
agpl-3.0
|
Python
|
3e7d433c193bd2e35b2c760297d81973f56b3eec
|
Fix test cases
|
muddyfish/PYKE,muddyfish/PYKE
|
node/floor_divide.py
|
node/floor_divide.py
|
#!/usr/bin/env python
from nodes import Node
import math
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
@Node.test_func(["123456789", 5], [['1', '2', '3', '4', '56789']])
@Node.test_func([[4,8,15,16,23,42], 7], [[[],[],[],[],[],[],[4,8,15,16,23,42]]])
def chunk(self, inp:Node.indexable, num:int):
"""Return inp seperated into num groups"""
rtn = []
size = len(inp)//num
try:
for i in range(0, num*size, size):
rtn.append(inp[i:i+size])
except ValueError:
for i in range(num): rtn.append([])
i = 0
if len(rtn) != num:
rtn.append(inp[i+size:])
else:
rtn[-1] += inp[i+size:]
return [rtn]
@Node.test_func([[4, 4, 2, 2, 9, 9], [0, -2, 0, 7, 0]], [[[4],[4,2],[2,9,9]]])
def split_at(self, inp:Node.sequence, splits:Node.sequence):
"""Split inp at truthy values in splits"""
rtn = [[]]
for i, do_split in zip(inp, splits+[0]):
if do_split: rtn.append([])
rtn[-1].append(i)
return [rtn]
|
#!/usr/bin/env python
from nodes import Node
import math
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
@Node.test_func(["123456789", 5], [['1', '2', '3', '4', '56789']])
@Node.test_func([[4,8,15,16,23,42], 7], [[[],[],[],[],[],[],[4,8,15,16,23,42]]])
def chunk(self, inp:Node.indexable, num:int):
"""Return inp seperated into num groups"""
rtn = []
size = len(inp)//num
try:
for i in range(0, num*size, size):
rtn.append(inp[i:i+size])
except ValueError:
for i in range(num): rtn.append([])
i = 0
if len(rtn) != num:
rtn.append(inp[i+size:])
else:
rtn[-1] += inp[i+size:]
return [rtn]
@Node.test_func([[4, 4, 2, 2, 9, 9], [0, -2, 0, 7, 0]], [[[4,4],[2,2],[9,9]]])
def split_at(self, inp:Node.sequence, splits:Node.sequence):
"""Split inp at truthy values in splits"""
rtn = [[]]
for i, do_split in zip(inp, splits+[0]):
if do_split: rtn.append([])
rtn[-1].append(i)
return [rtn]
|
mit
|
Python
|
a9c9cbac36568676be194024f6f660e4fc3f03b6
|
Add old list to applist migration
|
YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost
|
src/yunohost/data_migrations/0010_migrate_to_apps_json.py
|
src/yunohost/data_migrations/0010_migrate_to_apps_json.py
|
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_to_unified_list.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s" % (APPSLISTS_JSON, APPSLISTS_BACKUP))
# Remove all the deprecated lists
lists_to_remove = [
"http://app.yunohost.org/list.json", # Old list on old installs, alias to official.json
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s" % (APPSLISTS_BACKUP, APPSLISTS_JSON))
|
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_to_unified_list.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s" % (APPSLISTS_JSON, APPSLISTS_BACKUP))
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s" % (APPSLISTS_BACKUP, APPSLISTS_JSON))
|
agpl-3.0
|
Python
|
9316ec9f2246ac14176d9bf9d27287dfccedb3f3
|
Update to 0.3.0
|
Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,AutorestCI/azure-sdk-for-python,lmazuel/azure-sdk-for-python,Azure/azure-sdk-for-python,Azure/azure-sdk-for-python
|
azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py
|
azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
VERSION = "0.3.0"
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
VERSION = "0.2.0"
|
mit
|
Python
|
8c89a0d52c43f96d9673b8b84786a7185ddc3f6f
|
Bump WireCloud version
|
rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud
|
src/wirecloud/platform/__init__.py
|
src/wirecloud/platform/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2014 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
__version_info__ = (0, 7, 0)
__version__ = '.'.join(map(str, __version_info__)) + 'b2'
|
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2014 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
__version_info__ = (0, 7, 0)
__version__ = '.'.join(map(str, __version_info__)) + 'b1'
|
agpl-3.0
|
Python
|
36bfa8f556941848eb1a809d48aae1aa43f23c3f
|
Add option to choose if we keep the <none> images
|
aebm/docker-image-cleaner,aleasoluciones/docker-image-cleaner
|
di-cleaner.py
|
di-cleaner.py
|
#!/usr/bin/env python
import argparse
import atexit
import logging
import sys
from pprint import pformat
DEFAULT_DOCKER_BASE_URL = 'unix://var/run/docker.sock'
HELP_DOCKER_BASE_URL = ('Refers to the protocol+hostname+port where the '
'Docker server is hosted. Defaults to %s') % DEFAULT_DOCKER_BASE_URL
DEFAULT_DOCKER_API_VERSION = 'auto'
HELP_DOCKER_API_VERSION = ('The version of the API the client will use. '
'Defaults to use the API version provided by the server')
DEFAULT_DOCKER_HTTP_TIMEOUT = 5
HELP_DOCKER_HTTP_TIMEOUT = ('The HTTP request timeout, in seconds. '
'Defaults to %d secs') % DEFAULT_DOCKER_HTTP_TIMEOUT
DEFAULT_IMAGES_TO_KEEP = 2
HELP_IMAGES_TO_KEEP = ('How many docker images to keep. '
'Defaults to %d images') % DEFAULT_IMAGES_TO_KEEP
HELP_KEEP_NONE_IMAGES = 'Keep <none> images'
def _exit():
logging.shutdown()
def debug_var(name, var):
logging.debug('Var %s has: %s' % (name, pformat(var)))
def setup_parser(parser):
parser.add_argument('--debug', help='debug mode', action='store_true')
parser.add_argument('--base-url', help=HELP_DOCKER_BASE_URL, default=DEFAULT_DOCKER_BASE_URL)
parser.add_argument('--api-version', help=HELP_DOCKER_API_VERSION, default=DEFAULT_DOCKER_API_VERSION)
parser.add_argument('--http-timeout', help=HELP_DOCKER_HTTP_TIMEOUT, default=DEFAULT_DOCKER_HTTP_TIMEOUT, type=int)
parser.add_argument('--images-to-keep', help=HELP_IMAGES_TO_KEEP, default=DEFAULT_IMAGES_TO_KEEP, type=int)
parser.add_argument('--keep-none-images', help=HELP_KEEP_NONE_IMAGES, action='store_true')
return parser
def validate_args(args):
if args.http_timeout < 0:
sys.stderr.write('HTTP timeout should be 0 or bigger\n')
if args.images_to_keep < 0:
sys.stderr.write('Images to keep should be 0 or bigger\n')
sys.exit(1)
def main():
atexit.register(func=_exit)
parser = setup_parser(argparse.ArgumentParser(description='Clean old docker images'))
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
debug_var(name='args', var=args)
validate_args(args)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import argparse
import atexit
import logging
import sys
from pprint import pformat
DEFAULT_DOCKER_BASE_URL = 'unix://var/run/docker.sock'
HELP_DOCKER_BASE_URL = ('Refers to the protocol+hostname+port where the '
'Docker server is hosted. Defaults to %s') % DEFAULT_DOCKER_BASE_URL
DEFAULT_DOCKER_API_VERSION = 'auto'
HELP_DOCKER_API_VERSION = ('The version of the API the client will use. '
'Defaults to use the API version provided by the server')
DEFAULT_DOCKER_HTTP_TIMEOUT = 5
HELP_DOCKER_HTTP_TIMEOUT = ('The HTTP request timeout, in seconds. '
'Defaults to %d secs') % DEFAULT_DOCKER_HTTP_TIMEOUT
DEFAULT_IMAGES_TO_KEEP = 2
HELP_IMAGES_TO_KEEP = ('How many docker images to keep. '
'Defaults to %d images') % DEFAULT_IMAGES_TO_KEEP
def _exit():
logging.shutdown()
def debug_var(name, var):
logging.debug('Var %s has: %s' % (name, pformat(var)))
def setup_parser(parser):
parser.add_argument('--debug', help='debug mode', action='store_true')
parser.add_argument('--base-url', help=HELP_DOCKER_BASE_URL, default=DEFAULT_DOCKER_BASE_URL)
parser.add_argument('--api-version', help=HELP_DOCKER_API_VERSION, default=DEFAULT_DOCKER_API_VERSION)
parser.add_argument('--http-timeout', help=HELP_DOCKER_HTTP_TIMEOUT, default=DEFAULT_DOCKER_HTTP_TIMEOUT, type=int)
parser.add_argument('--images-to-keep', help=HELP_IMAGES_TO_KEEP, default=DEFAULT_IMAGES_TO_KEEP, type=int)
return parser
def validate_args(args):
if args.http_timeout < 0:
sys.stderr.write('HTTP timeout should be 0 or bigger\n')
if args.images_to_keep < 0:
sys.stderr.write('Images to keep should be 0 or bigger\n')
sys.exit(1)
def main():
atexit.register(func=_exit)
parser = setup_parser(argparse.ArgumentParser(description='Clean old docker images'))
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
debug_var(name='args', var=args)
validate_args(args)
if __name__ == '__main__':
main()
|
mit
|
Python
|
edf099ca644aae12daef65ff65744d99fcd3a634
|
Remove function we won't actually use.
|
StackStorm/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2
|
st2common/st2common/util/compat.py
|
st2common/st2common/util/compat.py
|
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
__all__ = [
'to_unicode',
'to_ascii',
]
def to_unicode(value):
"""
Ensure that the provided text value is represented as unicode.
:param value: Value to convert.
:type value: ``str`` or ``unicode``
:rtype: ``unicode``
"""
if not isinstance(value, six.string_types):
raise ValueError('Value "%s" must be a string.' % (value))
if not isinstance(value, six.text_type):
value = six.u(value)
return value
def to_ascii(value):
"""
Function which encodes the provided bytes / string to ASCII encoding ignoring any errors
which could come up when trying to encode a non-ascii value.
"""
return value.decode('ascii', errors='ignore')
|
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import six
__all__ = [
'to_unicode',
'to_ascii',
'add_st2actions_pythonrunner_to_sys_path'
]
def to_unicode(value):
"""
Ensure that the provided text value is represented as unicode.
:param value: Value to convert.
:type value: ``str`` or ``unicode``
:rtype: ``unicode``
"""
if not isinstance(value, six.string_types):
raise ValueError('Value "%s" must be a string.' % (value))
if not isinstance(value, six.text_type):
value = six.u(value)
return value
def to_ascii(value):
"""
Function which encodes the provided bytes / string to ASCII encoding ignoring any errors
which could come up when trying to encode a non-ascii value.
"""
return value.decode('ascii', errors='ignore')
def add_st2actions_pythonrunner_to_sys_path():
"""
Function which adds "st2common.runners.pythonrunner" to sys.path and redirects it to
"st2common.runners.base_action".
    First path was deprecated a long time ago, but some modules still rely on it. This
is to be used in places where "st2common" is used as a standalone package without access to
st2actions (e.g. serverless).
"""
import st2common.runners.base_action
sys.modules['st2actions'] = {}
sys.modules['st2actions.runners'] = {}
sys.modules['st2actions.runners.pythonrunner'] = st2common.runners.base_action
return sys.modules
|
apache-2.0
|
Python
|
a187bd1f89d40d4274f884bba567a2f6be160dcd
|
Remove unintended changes from reverthousekeeping command
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
cla_backend/apps/cla_butler/management/commands/reverthousekeeping.py
|
# coding=utf-8
import os
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_butler.qs_to_file import QuerysetToFile
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case,
EligibilityCheck,
CaseNotesHistory,
Person,
Income,
Savings,
Deductions,
PersonalDetails,
ThirdPartyDetails,
AdaptationDetails,
CaseKnowledgebaseAssignment,
EODDetails,
EODDetailsCategory,
Property,
)
from timer.models import Timer
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
class Command(BaseCommand):
help = "Attempts to re-load data that was deleted in the housekeeping"
def add_arguments(self, parser):
parser.add_argument("directory", nargs=1)
def handle(self, *args, **options):
path = os.path.join(settings.TEMP_DIR, args[0])
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
# coding=utf-8
import os
import logging
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from cla_butler.qs_to_file import QuerysetToFile
from cla_eventlog.models import Log
from cla_provider.models import Feedback
from complaints.models import Complaint
from diagnosis.models import DiagnosisTraversal
from legalaid.models import (
Case,
EligibilityCheck,
CaseNotesHistory,
Person,
Income,
Savings,
Deductions,
PersonalDetails,
ThirdPartyDetails,
AdaptationDetails,
CaseKnowledgebaseAssignment,
EODDetails,
EODDetailsCategory,
Property,
)
from timer.models import Timer
MODELS = [
Deductions,
Income,
Savings,
Person,
AdaptationDetails,
PersonalDetails,
ThirdPartyDetails,
EligibilityCheck,
Property,
DiagnosisTraversal,
Case,
EODDetails,
EODDetailsCategory,
Complaint,
CaseKnowledgebaseAssignment,
Timer,
Feedback,
CaseNotesHistory,
Log,
LogEntry,
]
logger = logging.getLogger("django")
class Command(BaseCommand):
help = "Attempts to re-load data that was deleted in the housekeeping"
def add_arguments(self, parser):
parser.add_argument("directory", nargs=1)
def handle(self, *args, **options):
logger.info("Running monitor_multiple_outcome_codes cron job")
path = os.path.join(settings.TEMP_DIR, args[0])
filewriter = QuerysetToFile(path)
for model in MODELS:
self.stdout.write(model.__name__)
filewriter.load(model)
|
mit
|
Python
|
123401cb6ed88b77d9a584eea8f2de75e518e5da
|
remove try except when hintsvm is not installed
|
ntucllab/libact,ntucllab/libact,ntucllab/libact
|
libact/query_strategies/__init__.py
|
libact/query_strategies/__init__.py
|
"""
Concrete query strategy classes.
"""
import logging
logger = logging.getLogger(__name__)
from .active_learning_by_learning import ActiveLearningByLearning
from .hintsvm import HintSVM
from .uncertainty_sampling import UncertaintySampling
from .query_by_committee import QueryByCommittee
from .quire import QUIRE
from .random_sampling import RandomSampling
from .variance_reduction import VarianceReduction
|
"""
Concrete query strategy classes.
"""
import logging
logger = logging.getLogger(__name__)
from .active_learning_by_learning import ActiveLearningByLearning
try:
from .hintsvm import HintSVM
except ImportError:
logger.warn('HintSVM library not found, not importing.')
from .uncertainty_sampling import UncertaintySampling
from .query_by_committee import QueryByCommittee
from .quire import QUIRE
from .random_sampling import RandomSampling
from .variance_reduction import VarianceReduction
|
bsd-2-clause
|
Python
|
0e56ed6234e1f28b0aac2e22063bb39faab1d54c
|
use '!XyZZy!' as value to be sustituted in metric name
|
librato/librato-python-web
|
librato_python_web/tools/compose.py
|
librato_python_web/tools/compose.py
|
# Copyright (c) 2015. Librato, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Librato, Inc. nor the names of project contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL LIBRATO, INC. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Helper methods to model Librato composite query dsl
METRIC_PREFIX = "!XyZZy!"
DUMMY_PREFIX = "DUMMY-PREFIX"
DEFAULT_PERIOD = 60
def s_(metric, source="{}-*".format(DUMMY_PREFIX), period=DEFAULT_PERIOD, function="mean"):
return 's("{}.{}", "{}", {{period: "{}", function: "{}"}})'.format(METRIC_PREFIX, metric, source, period, function)
def timeshift_(shift, series):
return 'timeshift("{}", {})'.format(shift, series)
def sum_(*args):
return 'sum([{}])'.format(', '.join(args))
def subtract_(series1, series2):
return 'subtract([{}, {}])'.format(series1, series2)
def multiply_(*args):
return 'multiply([{}])'.format(', '.join(args))
def divide_(series1, series2):
return 'divide([{}, {}])'.format(series1, series2)
def scale_(series, factor):
return 'scale({}, {{factor: "{}"}})'.format(series, factor)
def derive_(series, detect_reset="true"):
return 'derive({}, {{detect_reset: "{}"}})'.format(series, detect_reset)
|
# Copyright (c) 2015. Librato, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Librato, Inc. nor the names of project contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL LIBRATO, INC. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Helper methods to model Librato composite query dsl
METRIC_PREFIX = "XyZZy"
DUMMY_PREFIX = "DUMMY-PREFIX"
DEFAULT_PERIOD = 60
def s_(metric, source="{}-*".format(DUMMY_PREFIX), period=DEFAULT_PERIOD, function="mean"):
return 's("{}.{}", "{}", {{period: "{}", function: "{}"}})'.format(METRIC_PREFIX, metric, source, period, function)
def timeshift_(shift, series):
return 'timeshift("{}", {})'.format(shift, series)
def sum_(*args):
return 'sum([{}])'.format(', '.join(args))
def subtract_(series1, series2):
return 'subtract([{}, {}])'.format(series1, series2)
def multiply_(*args):
return 'multiply([{}])'.format(', '.join(args))
def divide_(series1, series2):
return 'divide([{}, {}])'.format(series1, series2)
def scale_(series, factor):
return 'scale({}, {{factor: "{}"}})'.format(series, factor)
def derive_(series, detect_reset="true"):
return 'derive({}, {{detect_reset: "{}"}})'.format(series, detect_reset)
|
bsd-3-clause
|
Python
|
f379d8ce256159a4fc7ce58abf87c609a4a0c3ab
|
rename present() to _present(), indicating private
|
alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl
|
AlphaTwirl/EventReader/ProgressMonitor.py
|
AlphaTwirl/EventReader/ProgressMonitor.py
|
# Tai Sakuma <[email protected]>
import multiprocessing
import time
from ProgressReport import ProgressReport
##____________________________________________________________________________||
class ProgressReporter(object):
def __init__(self, queue, pernevents = 1000):
self.queue = queue
self.pernevents = pernevents
self.lastReportTime = time.time()
def report(self, event, component):
if not self.needToReport(event, component): return
done = event.iEvent + 1
report = ProgressReport(name = component.name, done = done, total = event.nEvents)
self.queue.put(report)
self.lastReportTime = time.time()
def needToReport(self, event, component):
iEvent = event.iEvent + 1 # add 1 because event.iEvent starts from 0
if time.time() - self.lastReportTime > 0.02: return True
if iEvent % self.pernevents == 0: return True
if iEvent == event.nEvents: return True
return False
##____________________________________________________________________________||
class Queue(object):
def __init__(self, presentation):
self.presentation = presentation
def put(self, report):
self.presentation.present(report)
##____________________________________________________________________________||
class ProgressMonitor(object):
def __init__(self, presentation):
self.queue = Queue(presentation = presentation)
def monitor(self): pass
def createReporter(self):
reporter = ProgressReporter(self.queue)
return reporter
##____________________________________________________________________________||
class MPProgressMonitor(object):
def __init__(self, presentation):
self.queue = multiprocessing.Queue()
self._presentation = presentation
self.lastTime = time.time()
def monitor(self):
if time.time() - self.lastTime < 0.1: return
self.lastTime = time.time()
self._present()
def last(self):
self._present()
def _present(self):
while not self.queue.empty():
report = self.queue.get()
self._presentation.present(report)
def createReporter(self):
return ProgressReporter(self.queue)
##____________________________________________________________________________||
|
# Tai Sakuma <[email protected]>
import multiprocessing
import time
from ProgressReport import ProgressReport
##____________________________________________________________________________||
class ProgressReporter(object):
def __init__(self, queue, pernevents = 1000):
self.queue = queue
self.pernevents = pernevents
self.lastReportTime = time.time()
def report(self, event, component):
if not self.needToReport(event, component): return
done = event.iEvent + 1
report = ProgressReport(name = component.name, done = done, total = event.nEvents)
self.queue.put(report)
self.lastReportTime = time.time()
def needToReport(self, event, component):
iEvent = event.iEvent + 1 # add 1 because event.iEvent starts from 0
if time.time() - self.lastReportTime > 0.02: return True
if iEvent % self.pernevents == 0: return True
if iEvent == event.nEvents: return True
return False
##____________________________________________________________________________||
class Queue(object):
def __init__(self, presentation):
self.presentation = presentation
def put(self, report):
self.presentation.present(report)
##____________________________________________________________________________||
class ProgressMonitor(object):
def __init__(self, presentation):
self.queue = Queue(presentation = presentation)
def monitor(self): pass
def createReporter(self):
reporter = ProgressReporter(self.queue)
return reporter
##____________________________________________________________________________||
class MPProgressMonitor(object):
def __init__(self, presentation):
self.queue = multiprocessing.Queue()
self._presentation = presentation
self.lastTime = time.time()
def monitor(self):
if time.time() - self.lastTime < 0.1: return
self.lastTime = time.time()
self.present()
def last(self):
self.present()
def present(self):
while not self.queue.empty():
report = self.queue.get()
self._presentation.present(report)
def createReporter(self):
return ProgressReporter(self.queue)
##____________________________________________________________________________||
|
bsd-3-clause
|
Python
|
68fe7ecadeda267b5645fd804bb7bbf29afa3667
|
add docstring
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/apps/cleanup/management/commands/delete_es_docs_in_domain.py
|
corehq/apps/cleanup/management/commands/delete_es_docs_in_domain.py
|
from django.core.management import BaseCommand, CommandError
from corehq.apps.domain.models import Domain
from corehq.apps.es import AppES, CaseES, CaseSearchES, FormES, GroupES, UserES
from corehq.apps.es.registry import registry_entry
from corehq.apps.es.transient_util import doc_adapter_from_info
class Command(BaseCommand):
"""
Intended for use in the event that a domain has been deleted, but ES docs have not been fully cleaned up
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
domain_obj = Domain.get_by_name(domain)
if domain_obj and not domain_obj.doc_type.endswith('-Deleted'):
raise CommandError(
f"{domain} has not been deleted. This command is intended for use on deleted domains only."
)
for hqESQuery in [AppES, CaseES, CaseSearchES, FormES, GroupES, UserES]:
doc_ids = hqESQuery().domain(domain).source(['_id']).run().hits
doc_ids = [doc['_id'] for doc in doc_ids]
if not doc_ids:
continue
adapter = doc_adapter_from_info(registry_entry(hqESQuery.index))
adapter.bulk_delete(doc_ids)
|
from django.core.management import BaseCommand, CommandError
from corehq.apps.domain.models import Domain
from corehq.apps.es import AppES, CaseES, CaseSearchES, FormES, GroupES, UserES
from corehq.apps.es.registry import registry_entry
from corehq.apps.es.transient_util import doc_adapter_from_info
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
domain_obj = Domain.get_by_name(domain)
if domain_obj and not domain_obj.doc_type.endswith('-Deleted'):
raise CommandError(
f"{domain} has not been deleted. This command is intended for use on deleted domains only."
)
for hqESQuery in [AppES, CaseES, CaseSearchES, FormES, GroupES, UserES]:
doc_ids = hqESQuery().domain(domain).source(['_id']).run().hits
doc_ids = [doc['_id'] for doc in doc_ids]
if not doc_ids:
continue
adapter = doc_adapter_from_info(registry_entry(hqESQuery.index))
adapter.bulk_delete(doc_ids)
|
bsd-3-clause
|
Python
|
bff3a087ec70ab07fe163394826a41c33f6bc38f
|
Add extra version of py-jinja2 (#14989)
|
iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
var/spack/repos/builtin/packages/py-jinja2/package.py
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
url = "https://pypi.io/packages/source/J/Jinja2/Jinja2-2.10.3.tar.gz"
import_modules = ['jinja2']
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10.1', sha256='065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
url = "https://pypi.io/packages/source/J/Jinja2/Jinja2-2.10.3.tar.gz"
import_modules = ['jinja2']
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
|
lgpl-2.1
|
Python
|
2b8716f5a1f0e1f147b6bbda3e45e4abec59811d
|
fix TB in indexing debug toolbar
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
abilian/services/indexing/debug_toolbar.py
|
abilian/services/indexing/debug_toolbar.py
|
# coding=utf-8
"""
"""
from __future__ import absolute_import
from flask import current_app
from flask_debugtoolbar.panels import DebugPanel
from abilian.core.util import fqcn
from abilian.i18n import _
from abilian.web.action import actions
class IndexedTermsDebugPanel(DebugPanel):
"""
A panel to display term values found in index for "current" object
FIXME: this notion of "current" object should formalized in
abilian.app.Application
"""
name = 'IndexedTerms'
@property
def current_obj(self):
return actions.context.get('object')
@property
def has_content(self):
obj = self.current_obj
return (obj is not None
and hasattr(obj, 'object_type')
and hasattr(obj, 'id')
and obj.id is not None)
def nav_title(self):
return _('Indexed Terms')
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
obj = self.current_obj
if not obj:
return _(u'No current object')
try:
return u'{}(id={})'.format(obj.__class__.__name__, obj.id)
except:
return u''
def title(self):
return _('Indexed Terms')
def url(self):
return ''
def content(self):
obj = self.current_obj
svc = current_app.services['indexing']
index = svc.app_state.indexes['default']
schema = index.schema
context = self.context.copy()
context['schema'] = schema
context['sorted_fields'] = sorted(schema.names())
adapter = svc.adapted.get(fqcn(obj.__class__))
if adapter and adapter.indexable:
doc = context['current_document'] = svc.get_document(obj, adapter)
indexed = {}
for name, field in schema.items():
value = doc.get(name)
indexed[name] = None
if value and field.analyzer and field.format:
indexed[name] = list(field.process_text(value))
context['current_indexed'] = indexed
context['current_keys'] = sorted(set(doc) | set(indexed))
with index.searcher() as search:
document = search.document(object_key=obj.object_key)
sorted_keys = sorted(document) if document is not None else None
context.update({
'document': document,
'sorted_keys': sorted_keys,
})
jinja_env = current_app.jinja_env
jinja_env.filters.update(self.jinja_env.filters)
template = jinja_env.get_or_select_template(
'debug_panels/indexing_panel.html'
)
return template.render(context)
|
# coding=utf-8
"""
"""
from __future__ import absolute_import
from flask import current_app
from flask_debugtoolbar.panels import DebugPanel
from abilian.core.util import fqcn
from abilian.i18n import _
from abilian.web.action import actions
class IndexedTermsDebugPanel(DebugPanel):
"""
A panel to display term values found in index for "current" object
FIXME: this notion of "current" object should be formalized in
abilian.app.Application
"""
name = 'IndexedTerms'
@property
def current_obj(self):
return actions.context.get('object')
@property
def has_content(self):
obj = self.current_obj
return (obj is not None
and hasattr(obj, 'object_type')
and hasattr(obj, 'id')
and obj.id is not None)
def nav_title(self):
return _('Indexed Terms')
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
obj = self.current_obj
if not obj:
return _(u'No current object')
try:
return u'{}(id={})'.format(obj.__class__.__name__, obj.id)
except:
return u''
def title(self):
return _('Indexed Terms')
def url(self):
return ''
def content(self):
obj = self.current_obj
svc = current_app.services['indexing']
index = svc.app_state.indexes['default']
schema = index.schema
context = self.context.copy()
context['schema'] = schema
context['sorted_fields'] = sorted(schema.names())
adapter = svc.adapted.get(fqcn(obj.__class__))
if adapter and adapter.indexable:
doc = context['current_document'] = svc.get_document(obj, adapter)
indexed = {}
for name, field in schema.items():
value = doc.get(name)
indexed[name] = None
if value and field.format:
indexed[name] = list(field.process_text(value))
context['current_indexed'] = indexed
context['current_keys'] = sorted(set(doc) | set(indexed))
with index.searcher() as search:
document = search.document(object_key=obj.object_key)
sorted_keys = sorted(document) if document is not None else None
context.update({
'document': document,
'sorted_keys': sorted_keys,
})
jinja_env = current_app.jinja_env
jinja_env.filters.update(self.jinja_env.filters)
template = jinja_env.get_or_select_template(
'debug_panels/indexing_panel.html'
)
return template.render(context)
|
lgpl-2.1
|
Python
|
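A minimal sketch (assuming a plain whoosh Schema) of why the added field.analyzer guard matters: process_text() needs an analyzer, and stored-only fields have none, so the old guard let the call raise the traceback this commit fixes:
from whoosh.fields import Schema, STORED, TEXT

schema = Schema(body=TEXT(stored=True), raw=STORED())
value = u"some indexed text"
for name, field in schema.items():
    # the fixed guard: only analyze fields that actually have an analyzer
    if value and getattr(field, "analyzer", None) and getattr(field, "format", None):
        print(name, list(field.process_text(value)))
    else:
        print(name, "skipped: no analyzer, nothing to process")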
0c9f2f51778b26bb126eccfbef0b098da3db2877
|
normalize version numbers
|
mozaik-association/mozaik,mozaik-association/mozaik
|
asynchronous_batch_mailings/__openerp__.py
|
asynchronous_batch_mailings/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of asynchronous_batch_mailings, an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# asynchronous_batch_mailings is free software:
# you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# asynchronous_batch_mailings is distributed in the hope
# that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with asynchronous_batch_mailings.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Asynchronous Batch Mailings',
'version': '8.0.1.0.0',
'author': 'ACSONE SA/NV',
'maintainer': 'ACSONE SA/NV',
'website': 'http://www.acsone.eu',
'category': 'Marketing',
'depends': [
'mail',
'connector',
],
'description': """
Asynchronous Batch Mailings
===========================
This module allows emails to be sent asynchronously.
Moreover, it provides a way to split huge mailings.
Two parameters are available:
* the mailing size from which the mailing must become asynchronous
* the batch size
""",
'images': [
],
'data': [
'data/ir_config_parameter_data.xml',
],
'qweb': [
],
'demo': [
],
'test': [
],
'license': 'AGPL-3',
'installable': True,
'auto_install': False,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of asynchronous_batch_mailings, an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# asynchronous_batch_mailings is free software:
# you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# asynchronous_batch_mailings is distributed in the hope
# that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with asynchronous_batch_mailings.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Asynchronous Batch Mailings',
'version': '1.0',
'author': 'ACSONE SA/NV',
'maintainer': 'ACSONE SA/NV',
'website': 'http://www.acsone.eu',
'category': 'Marketing',
'depends': [
'mail',
'connector',
],
'description': """
Asynchronous Batch Mailings
===========================
This module allows emails to be sent asynchronously.
Moreover, it provides a way to split huge mailings.
Two parameters are available:
* the mailing size from which the mailing must become asynchronous
* the batch size
""",
'images': [
],
'data': [
'data/ir_config_parameter_data.xml',
],
'qweb': [
],
'demo': [
],
'test': [
],
'license': 'AGPL-3',
'installable': True,
'auto_install': False,
}
|
agpl-3.0
|
Python
|
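For context, OCA manifests spell versions as SERIES.X.Y.Z — the Odoo series plus the module's own version — which is what the change above restores. A hypothetical helper (not part of the module) for the same normalization might look like:
def normalize(version, series="8.0"):
    # hypothetical helper: prefix a bare module version with the Odoo series
    parts = version.split(".")
    if len(parts) >= 5:          # already normalized, e.g. "8.0.1.0.0"
        return version
    parts = (parts + ["0", "0", "0"])[:3]
    return ".".join([series] + parts)

print(normalize("1.0"))          # -> 8.0.1.0.0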
dafc54e782c5ee9bda3cf1817df92ae16ed26979
|
fix website url in manifest
|
ClearCorp/server-tools,ClearCorp/server-tools
|
attachment_base_synchronize/__openerp__.py
|
attachment_base_synchronize/__openerp__.py
|
# coding: utf-8
# @ 2015 Florian DA COSTA @ Akretion
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Attachment Base Synchronize',
'version': '9.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA)',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'base',
'mail',
],
'data': [
'views/attachment_view.xml',
'security/ir.model.access.csv',
'data/cron.xml',
],
'demo': [
'demo/attachment_metadata_demo.xml'
],
'installable': True,
'application': False,
'images': [],
}
|
# coding: utf-8
# @ 2015 Florian DA COSTA @ Akretion
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Attachment Base Synchronize',
'version': '9.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA)',
'website': 'www.akretion.com',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'base',
'mail',
],
'data': [
'views/attachment_view.xml',
'security/ir.model.access.csv',
'data/cron.xml',
],
'demo': [
'demo/attachment_metadata_demo.xml'
],
'installable': True,
'application': False,
'images': [],
}
|
agpl-3.0
|
Python
|
a007f80dc2182787eca521c84f37aeedc307645a
|
Remove base64 padding
|
amitu/django-encrypted-id
|
encrypted_id/__init__.py
|
encrypted_id/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
try:
basestring
except NameError:
basestring = str
from Crypto.Cipher import AES
import base64
import binascii
import struct
from django.conf import settings
from django.db.models import Model
from django.http import Http404
from django.shortcuts import get_object_or_404 as go4
__version__ = "0.1.2"
__license__ = "BSD"
__author__ = "Amit Upadhyay"
__email__ = "[email protected]"
__url__ = "http://amitu.com/encrypted-id/"
__source__ = "https://github.com/amitu/django-encrypted-id"
__docformat__ = "html"
def encode(the_id):
assert 0 <= the_id < 2 ** 64
crc = binascii.crc32(bytes(the_id)) & 0xffffffff
message = struct.pack(b"<IQxxxx", crc, the_id)
assert len(message) == 16
cypher = AES.new(
settings.SECRET_KEY[:24], AES.MODE_CBC,
settings.SECRET_KEY[-16:]
)
return base64.urlsafe_b64encode(cypher.encrypt(message)).replace(b"=", b"")
def decode(e):
if isinstance(e, basestring):
e = bytes(e.encode("ascii"))
try:
padding = b"=" * (-len(e) % 4)  # base64 input must be a multiple of four characters
e = base64.urlsafe_b64decode(e + padding)
except (TypeError, AttributeError):
raise ValueError("Failed to decrypt, invalid input.")
for skey in getattr(settings, "SECRET_KEYS", [settings.SECRET_KEY]):
cypher = AES.new(skey[:24], AES.MODE_CBC, skey[-16:])
msg = cypher.decrypt(e)
crc, the_id = struct.unpack("<IQxxxx", msg)
if crc != binascii.crc32(bytes(the_id)) & 0xffffffff:
continue
return the_id
raise ValueError("Failed to decrypt, CRC never matched.")
def get_object_or_404(m, ekey, *arg, **kw):
try:
pk = decode(ekey)
except ValueError:
raise Http404
return go4(m, id=pk, *arg, **kw)
def ekey(instance):
assert isinstance(instance, Model)
return encode(instance.id)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
try:
basestring
except NameError:
basestring = str
from Crypto.Cipher import AES
import base64
import binascii
import struct
from django.conf import settings
from django.db.models import Model
from django.http import Http404
from django.shortcuts import get_object_or_404 as go4
__version__ = "0.1.2"
__license__ = "BSD"
__author__ = "Amit Upadhyay"
__email__ = "[email protected]"
__url__ = "http://amitu.com/encrypted-id/"
__source__ = "https://github.com/amitu/django-encrypted-id"
__docformat__ = "html"
def encode(the_id):
assert 0 <= the_id < 2 ** 64
crc = binascii.crc32(bytes(the_id)) & 0xffffffff
message = struct.pack(b"<IQxxxx", crc, the_id)
assert len(message) == 16
cypher = AES.new(
settings.SECRET_KEY[:24], AES.MODE_CBC,
settings.SECRET_KEY[-16:]
)
return base64.urlsafe_b64encode(cypher.encrypt(message)).replace(b"=", b".")
def decode(e):
if isinstance(e, basestring):
e = bytes(e.encode("ascii"))
try:
e = base64.urlsafe_b64decode(e.replace(b".", b"="))
except (TypeError, AttributeError):
raise ValueError("Failed to decrypt, invalid input.")
for skey in getattr(settings, "SECRET_KEYS", [settings.SECRET_KEY]):
cypher = AES.new(skey[:24], AES.MODE_CBC, skey[-16:])
msg = cypher.decrypt(e)
crc, the_id = struct.unpack("<IQxxxx", msg)
if crc != binascii.crc32(bytes(the_id)) & 0xffffffff:
continue
return the_id
raise ValueError("Failed to decrypt, CRC never matched.")
def get_object_or_404(m, ekey, *arg, **kw):
try:
pk = decode(ekey)
except ValueError:
raise Http404
return go4(m, id=pk, *arg, **kw)
def ekey(instance):
assert isinstance(instance, Model)
return encode(instance.id)
|
bsd-2-clause
|
Python
|
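A minimal round trip of the padding scheme used above: encode() strips the trailing "=" characters, and decode() must restore enough of them to reach a multiple of four before base64 will accept the input.
import base64

raw = b"\x00" * 16                        # the AES-CBC output is always 16 bytes here
token = base64.urlsafe_b64encode(raw).rstrip(b"=")
padding = b"=" * (-len(token) % 4)        # 22 characters -> two "=" to restore
assert base64.urlsafe_b64decode(token + padding) == raw
print(token, padding)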
391d69f4ce485ff02a3844b4cf5a54f23125c477
|
test presab
|
ballesterus/PhyloUtensils
|
partBreaker.py
|
partBreaker.py
|
#!/usr/bin/env python
import argparse
import Get_fasta_from_Ref as GFR
import re
from sys import argv
import os
presab={}
def Subsetfromto(FastaDict, outFile, start,end):
"""Writes a subsect multifast file, boud at sequence indeces start and end, form sequence stored in a dictioanry"""
with open(outFile, 'w') as out:
for seqID in FastaDict.iterkeys():
presab.setdefault(seqID, [])  # accumulate across partitions instead of resetting
seq=FastaDict[seqID][start:end]
out.write(">%s\n%s\n" %(seqID,seq))
if set(seq) <= set('-?'):
    presab[seqID].append(0)
else:
    presab[seqID].append(1)
def main(matrix, partfile, outdir):
Smatrix=GFR.Fasta_to_Dict(matrix)
if not os.path.exists(outdir):
os.makedirs(outdir)
else:
print 'The output dir already exists!'
with open(partfile, 'r') as P:
for pline in P:
outN=pline.split(',')[0]
outf="%s/%s" %(outdir,outN)
start=int(pline.split(',')[1].split('-')[0]) -1
end=int(pline.split(',')[1].split('-')[1])
Subsetfromto(Smatrix, outf, start, end)
print presab
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='This is a simple script for breaking supermatrices into individual MSAs based on a partition file. The required partition file is a two-column comma-separated text file where the first column indicates the name of the partition, reused as the name of the output file, and the second column is an interval of the positions in the supermatrix, separated only by "-". This script deals only with consecutive data blocks. Codon partitioning is not implemented... yet.')
parser.add_argument('-in', dest = 'matrix', type = str, help = 'Input alignments in fasta format')
parser.add_argument('-p', dest = 'partitions', type =str, help = 'Input partition definition file: a comma separated text file with two columns, ')
parser.add_argument('-o', dest = 'outdir', help='Specify directory where to write partitions')
# parser.add_argument('-c', help="")
args = parser.parse_args()
main(args.matrix, args.partitions, args.outdir)
|
#!/usr/bin/env python
import argparse
import Get_fasta_from_Ref as GFR
import re
from sys import argv
import os
def Subsetfromto(FastaDict, outFile, start,end):
"""Writes a subsect multifast file, boud at sequence indeces start and end, form sequence stored in a dictioanry"""
with open(outFile, 'w') as out:
for seqID in FastaDict.iterkeys():
seq=FastaDict[seqID][start:end]
out.write(">%s\n%s\n" %(seqID,seq))
def main(matrix, partfile, outdir):
Smatrix=GFR.Fasta_to_Dict(matrix)
if not os.path.exists(outdir):
os.makedirs(outdir)
else:
print 'The output dir already exists!'
with open(partfile, 'r') as P:
for pline in P:
outN=pline.split(',')[0]
outf="%s/%s" %(outdir,outN)
start=int(pline.split(',')[1].split('-')[0]) -1
end=int(pline.split(',')[1].split('-')[1])
Subsetfromto(Smatrix, outf, start, end)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='This is a simple script for breaking supermatrices into individual MSAs based on a partition file. The required partition file is a two-column comma-separated text file where the first column indicates the name of the partition, reused as the name of the output file, and the second column is an interval of the positions in the supermatrix, separated only by "-". This script deals only with consecutive data blocks. Codon partitioning is not implemented... yet.')
parser.add_argument('-in', dest = 'matrix', type = str, help = 'Input alignments in fasta format')
parser.add_argument('-p', dest = 'partitions', type =str, help = 'Input partition definition file: a comma separated text file with two columns, ')
parser.add_argument('-o', dest = 'outdir', help='Specify directory where to write partitions')
# parser.add_argument('-c', help="")
args = parser.parse_args()
main(args.matrix, args.partitions, args.outdir)
|
agpl-3.0
|
Python
|
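The presence/absence bookkeeping added above boils down to one test per partition: a taxon is scored absent when its slice contains nothing but gap characters. A standalone sketch with hypothetical data:
alignment = {"taxonA": "ATGC----", "taxonB": "--------"}   # hypothetical sequences
presab = {}
for seq_id, seq in alignment.items():
    part = seq[0:8]                                        # one partition's slice
    presab.setdefault(seq_id, []).append(0 if set(part) <= set("-?") else 1)
print(presab)   # {'taxonA': [1], 'taxonB': [0]}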
7399dfa45c9b5a563798f504e9eb4054faf2aa30
|
print a more meaningful description of EventAct
|
openpolis/open_municipio,openpolis/open_municipio,openpolis/open_municipio,openpolis/open_municipio
|
open_municipio/events/models.py
|
open_municipio/events/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from open_municipio.acts.models import Act
from open_municipio.events.managers import EventManager
from open_municipio.people.models import Institution
from datetime import datetime, date
class Event(models.Model):
"""
This class allows the OpenMunicipio site to keep track of upcoming
events.
Fields:
* A datefield, no time is needed
* A foreign key to the ``Institution`` that will "host" the event;
eg: council or city government
* A foreign key to the involved ``Act``
* A textfield for some description
Since we will always be interested in future events (with regard
to current date), a custom model manager is provided that allows
``Event.future.all()``.
"""
date = models.DateField(_("Event date"), help_text=_("The day when the event is going to be held"))
event_time = models.TimeField(_("Event time"), blank=True, null=True, help_text=_("The time of the event"))
institution = models.ForeignKey(Institution, verbose_name=_("Institution"), help_text=_("The institution that's going to meet during the event"))
acts = models.ManyToManyField(Act, verbose_name=_("Acts"), blank=True, null=True, help_text=_("Acts the discussion is linked to, if any"),through="EventAct")
title = models.CharField(_("Title"), max_length=128, blank=True, null=True, help_text=_("A short title for this event"))
description = models.TextField(_("Description"), blank=True, null=True, help_text=_("A description, containing the list of things that will be discussed during this event"))
address = models.CharField(_("Address"), max_length=128, blank=True, null=True, help_text=_("The physical address where the meeting is going to be held") )
# The default manager
objects = models.Manager()
# Future events will be retrieved using ``Event.future.all()``
future = EventManager()
class Meta:
verbose_name = _('event')
verbose_name_plural = _('events')
def __unicode__(self):
uc = u'%s %s - %s' % (self.date, self.event_time, self.title)
return uc
@property
def is_past_due(self):
if date.today() > self.date:
return True
return False
class EventAct(models.Model):
"""
WRITEME
"""
act = models.ForeignKey(Act)
event = models.ForeignKey(Event)
order = models.IntegerField(blank=False,null=False)
class Meta:
ordering = ('order',)
# the constraint below would be helpful, but it makes the interface validation
# hard to manage -FS
# unique_together = ('order','event'),('act','event')
def __unicode__(self):
return "%s (%s)" % (self.act.title, self.event.date)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from open_municipio.acts.models import Act
from open_municipio.events.managers import EventManager
from open_municipio.people.models import Institution
from datetime import datetime, date
class Event(models.Model):
"""
This class allows the OpenMunicipio site to keep track of upcoming
events.
Fields:
* A datefield, no time is needed
* A foreign key to the ``Institution`` that will "host" the event;
eg: council or city government
* A foreign key to the involved ``Act``
* A textfield for some description
Since we will always be interested in future events (with regard
to current date), a custom model manager is provided that allows
``Event.future.all()``.
"""
date = models.DateField(_("Event date"), help_text=_("The day when the event is going to be held"))
event_time = models.TimeField(_("Event time"), blank=True, null=True, help_text=_("The time of the event"))
institution = models.ForeignKey(Institution, verbose_name=_("Institution"), help_text=_("The institution that's going to meet during the event"))
acts = models.ManyToManyField(Act, verbose_name=_("Acts"), blank=True, null=True, help_text=_("Acts the discussion is linked to, if any"),through="EventAct")
title = models.CharField(_("Title"), max_length=128, blank=True, null=True, help_text=_("A short title for this event"))
description = models.TextField(_("Description"), blank=True, null=True, help_text=_("A description, containing the list of things that will be discussed during this event"))
address = models.CharField(_("Address"), max_length=128, blank=True, null=True, help_text=_("The physical address where the meeting is going to be held") )
# The default manager
objects = models.Manager()
# Future events will be retrieved using ``Event.future.all()``
future = EventManager()
class Meta:
verbose_name = _('event')
verbose_name_plural = _('events')
def __unicode__(self):
uc = u'%s %s - %s' % (self.date, self.event_time, self.title)
return uc
@property
def is_past_due(self):
if date.today() > self.date:
return True
return False
class EventAct(models.Model):
"""
WRITEME
"""
act = models.ForeignKey(Act)
event = models.ForeignKey(Event)
order = models.IntegerField(blank=False,null=False)
class Meta:
ordering = ('order',)
# the constraint below would be helpful, but it makes the interface validation
# hard to manage -FS
# unique_together = ('order','event'),('act','event')
|
agpl-3.0
|
Python
|
a9373c3e4c65160bc04e56edbc356e086d2dae71
|
Tweak division display
|
mileswwatkins/python-opencivicdata-django,opencivicdata/python-opencivicdata,rshorey/python-opencivicdata-django,opencivicdata/python-opencivicdata,opencivicdata/python-opencivicdata-divisions,opencivicdata/python-opencivicdata-django,mileswwatkins/python-opencivicdata-django,opencivicdata/python-opencivicdata-django,rshorey/python-opencivicdata-django,influence-usa/python-opencivicdata-django,opencivicdata/python-opencivicdata-django,influence-usa/python-opencivicdata-django
|
opencivicdata/admin/division.py
|
opencivicdata/admin/division.py
|
from django.contrib import admin
from opencivicdata.models import division as models
@admin.register(models.Division)
class DivisionAdmin(admin.ModelAdmin):
list_display = ('display_name', 'id')
search_fields = list_display
|
from django.contrib import admin
from opencivicdata.models import division as models
@admin.register(models.Division)
class DivisionAdmin(admin.ModelAdmin):
pass
|
bsd-3-clause
|
Python
|
a39cbaf22401c466f02e5b12e3ebdd46fa8eef0c
|
Fix issue refs in test_numpy_piecewise_regression
|
skirpichev/omg,diofant/diofant
|
sympy/printing/tests/test_numpy.py
|
sympy/printing/tests/test_numpy.py
|
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See sympy/sympy#9747 and sympy/sympy#9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
bsd-3-clause
|
Python
|
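What the printed string evaluates to, as a quick check (assuming numpy is available); the point of the test is that the choicelist is a plain Python list, which numpy 1.8's select() required:
import numpy as np

x = np.array([-2.0, -1.0, 0.0, 1.0])
conditions = [x < 0, np.ones_like(x, dtype=bool)]   # the printed condlist form
result = np.select(conditions, [1, 0], default=np.nan)
print(result)                                       # [1. 1. 0. 0.]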
1addeefdf51713d562788018ebfb6549b215f55b
|
Fix C typo error in a test
|
timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons,timj/scons
|
test/option/tree-lib.py
|
test/option/tree-lib.py
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Make sure that --tree=derived output with a library dependency shows
the dependency on the library. (On earlier versions of the Microsoft
toolchain this wouldn't show up unless the library already existed
on disk.)
Issue 1363: http://scons.tigris.org/issues/show_bug.cgi?id=1363
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment(LIBPREFIX='',
LIBSUFFIX='.lib',
OBJSUFFIX='.obj',
EXESUFFIX='.exe')
env.AppendENVPath('PATH', '.')
l = env.Library( 'util.lib', 'util.c' )
p = env.Program( 'test_tree_lib.exe', 'main.c', LIBS=l )
env.Command( 'foo.h', p, '$SOURCE > $TARGET')
""")
test.write('main.c', """\
#include <stdlib.h>
#include <stdio.h>
int
main(int argc, char **argv)
{
printf("#define FOO_H \\"foo.h\\"\\n");
return (0);
}
""")
test.write('util.c', """\
void
util(void)
{
;
}
""")
expect = """
+-test_tree_lib.exe
+-main.obj
+-util.lib
+-util.obj
"""
test.run(arguments = '--tree=derived foo.h')
test.must_contain_all_lines(test.stdout(), [expect])
test.up_to_date(arguments = 'foo.h')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Make sure that --tree=derived output with a library dependency shows
the dependency on the library. (On earlier versions of the Microsoft
toolchain this wouldn't show up unless the library already existed
on disk.)
Issue 1363: http://scons.tigris.org/issues/show_bug.cgi?id=1363
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment(LIBPREFIX='',
LIBSUFFIX='.lib',
OBJSUFFIX='.obj',
EXESUFFIX='.exe')
env.AppendENVPath('PATH', '.')
l = env.Library( 'util.lib', 'util.c' )
p = env.Program( 'test_tree_lib.exe', 'main.c', LIBS=l )
env.Command( 'foo.h', p, '$SOURCE > $TARGET')
""")
test.write('main.c', """\
#include <stdlib.h>
#include <stdio.h>
int
main(int argc, char *argv)
{
printf("#define FOO_H \\"foo.h\\"\\n");
return (0);
}
""")
test.write('util.c', """\
void
util(void)
{
;
}
""")
expect = """
+-test_tree_lib.exe
+-main.obj
+-util.lib
+-util.obj
"""
test.run(arguments = '--tree=derived foo.h')
test.must_contain_all_lines(test.stdout(), [expect])
test.up_to_date(arguments = 'foo.h')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mit
|
Python
|
df9ff4f13fc7da111bc11cf5f390efe94352b6e6
|
Fix Setting class
|
ids1024/wikicurses
|
src/wikicurses/__init__.py
|
src/wikicurses/__init__.py
|
import os
import json
import pkgutil
from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
default_configdir = os.environ['HOME'] + '/.config'
configpath = os.environ.get('XDG_CONFIG_HOME', default_configdir) + '/wikicurses'
class Settings:
def __init__(self, name):
self.file = configpath + '/' + name
def __iter__(self):
if not os.path.exists(self.file):
yield from ()
return
with open(self.file) as file:
yield from json.load(file)
def _save(self, bookmarks):
if not os.path.exists(configpath):
os.mkdir(configpath)
with open(self.file, 'w') as file:
json.dump(bookmarks, file)
def add(self, bmark):
bookmarks = set(self)
bookmarks.add(bmark)
self._save(list(bookmarks))
def discard(self, bmark):
bookmarks = set(self)
bookmarks.discard(bmark)
self._save(list(bookmarks))
bmarks = Settings('bookmarks')
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
|
import os
import json
import pkgutil
from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
default_configdir = os.environ['HOME'] + '/.config'
configpath = os.environ.get('XDG_CONFIG_HOME', default_configdir) + '/wikicurses'
class Settings:
def __init__(self, name):
self.file = configpath + '/' + name
def __iter__(self):
if not os.path.exists(self.file):
yield from ()
with open(self.file) as file:
yield from json.load(file)
def _save(self, bookmarks):
if not os.path.exists(configpath):
os.mkdir(configpath)
with open(self.file, 'w') as file:
json.dump(bookmarks, file)
def add(self, bmark):
bookmarks = set(self)
bookmarks.add(bmark)
self._save(list(bookmarks))
def discard(self, bmark):
bookmarks = set(self)
bookmarks.discard(bmark)
self._save(list(bookmarks))
bmarks = Settings('bookmarks')
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
|
mit
|
Python
|
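Why the added return matters: yield from () produces nothing but does not end the generator, so the old code fell through and tried to open the missing file anyway. A self-contained demonstration:
def settings_iter(path_exists):
    if not path_exists:
        yield from ()            # yields nothing, but execution continues...
    yield "json.load(file)"      # ...so this stand-in for the file read still runs

print(list(settings_iter(False)))   # ['json.load(file)'] -- the old bug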
afedc41fd4e573f4db38f2fde38b2286d623b4c4
|
Remove obsolete property
|
ZeitOnline/zeit.campus
|
src/zeit/campus/article.py
|
src/zeit/campus/article.py
|
import zope.interface
import zeit.cms.content.reference
import zeit.campus.interfaces
class TopicpageLink(zeit.cms.related.related.RelatedBase):
zope.interface.implements(zeit.campus.interfaces.ITopicpageLink)
topicpagelink = zeit.cms.content.reference.SingleResource(
'.head.topicpagelink', 'related')
topicpagelink_label = zeit.cms.content.property.ObjectPathProperty(
'.head.topicpagelink.label',
zeit.campus.interfaces.ITopicpageLink['topicpagelink_label'])
|
import zope.interface
import zeit.cms.content.reference
import zeit.campus.interfaces
class TopicpageLink(zeit.cms.related.related.RelatedBase):
zope.interface.implements(zeit.campus.interfaces.ITopicpageLink)
topicpagelink = zeit.cms.content.reference.SingleResource(
'.head.topicpagelink', 'related')
topicpagelink_label = zeit.cms.content.dav.mapProperties(
zeit.campus.interfaces.ITopicpageLink,
zeit.cms.interfaces.DOCUMENT_SCHEMA_NS,
('topicpagelink_label',))
topicpagelink_label = zeit.cms.content.property.ObjectPathProperty(
'.head.topicpagelink.label',
zeit.campus.interfaces.ITopicpageLink['topicpagelink_label'])
|
bsd-3-clause
|
Python
|
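The deleted mapProperties block was dead on arrival: when a class body binds the same name twice, the second binding silently wins, which is all the cleanup above relies on. In miniature:
class Demo(object):
    label = "from mapProperties"        # immediately shadowed below
    label = "from ObjectPathProperty"

print(Demo.label)                       # from ObjectPathProperty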
e60a05886c52574227b1a73fe02575ede81ffa5e
|
mark out-of-date tests with a @skip
|
d120/pyophase,d120/pyophase,d120/pyophase,d120/pyophase
|
staff/tests/tests_views.py
|
staff/tests/tests_views.py
|
from unittest import skip
from django.core import mail
from django.test import Client, TestCase
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils.http import urlencode
class StaffAddView(TestCase):
fixtures = ['ophasebase.json', 'staff.json', 'students.json']
def test_redirect(self):
"""Test for Redirect to SSO Login page"""
c = Client()
suffix = urlencode({"next": reverse('staff:registration')})
redirect_url = "{}?{}".format(reverse('pyTUID:login'), suffix)
response = c.get(reverse('staff:registration'))
self.assertRedirects(response, redirect_url, target_status_code=302)
@skip
def test_send_email(self):
"""Sending an email after successfull register"""
# TODO Use fake SSO in test
c = Client()
register_view = reverse('staff:registration')
self.assertEqual(len(mail.outbox), 0)
testdata = {'prename': 'Leah',
'name': 'Bayer',
'email': '[email protected]',
'phone': '016031368212',
'matriculated_since': 'today',
'degree_course': 'Bachelor',
'experience_ophase': 'Nothing until now',
'is_helper': True,
'helper_jobs': 1,}
# sending an incomplete form should not send an email
response = c.post(register_view, testdata, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'why_participate', _('This field is required.'))
self.assertEqual(len(mail.outbox), 0)
# a complete form should send one email
testdata['why_participate'] = 'You need testdata'
response = c.post(register_view, testdata, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.redirect_chain, [(reverse('staff:registration_success'), 302)])
self.assertEqual(len(mail.outbox), 1)
smail = mail.outbox[0]
self.assertEqual(len(smail.to), 1)
self.assertEqual(smail.to[0], 'Leah Bayer <[email protected]>')
|
from django.core import mail
from django.test import Client, TestCase
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils.http import urlencode
class StaffAddView(TestCase):
fixtures = ['ophasebase.json', 'staff.json', 'students.json']
def test_redirect(self):
"""Test for Redirect to SSO Login page"""
c = Client()
suffix = urlencode({"next": reverse('staff:registration')})
redirect_url = "{}?{}".format(reverse('pyTUID:login'), suffix)
response = c.get(reverse('staff:registration'))
self.assertRedirects(response, redirect_url, target_status_code=302)
def test_send_email(self):
"""Sending an email after successfull register"""
pass
# TODO Use fake SSO in test
"""
c = Client()
register_view = reverse('staff:registration')
self.assertEqual(len(mail.outbox), 0)
testdata = {'prename': 'Leah',
'name': 'Bayer',
'email': '[email protected]',
'phone': '016031368212',
'matriculated_since': 'today',
'degree_course': 'Bachelor',
'experience_ophase': 'Nothing until now',
'is_helper': True,
'helper_jobs': 1,}
# sending an incomplete form should not send an email
response = c.post(register_view, testdata, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'form', 'why_participate', _('This field is required.'))
self.assertEqual(len(mail.outbox), 0)
# a complete form should send one email
testdata['why_participate'] = 'You need testdata'
response = c.post(register_view, testdata, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.redirect_chain, [(reverse('staff:registration_success'), 302)])
self.assertEqual(len(mail.outbox), 1)
smail = mail.outbox[0]
self.assertEqual(len(smail.to), 1)
self.assertEqual(smail.to[0], 'Leah Bayer <[email protected]>')
"""
|
agpl-3.0
|
Python
|
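The pattern used above keeps the disabled test visible: unittest's skip decorator reports it as skipped instead of burying the body in a string literal. A minimal sketch, here with a reason string (the reason text is hypothetical):
import unittest
from unittest import skip

class ExampleTests(unittest.TestCase):
    @skip("fake SSO backend not wired up yet")
    def test_not_ready(self):
        self.fail("would fail if it ever ran")

if __name__ == "__main__":
    unittest.main()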
42ec06aa5e2034266f817dc6465cd8bf4fea6ead
|
fix migration
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/apps/linked_domain/migrations/0005_migrate_linked_app_toggle.py
|
corehq/apps/linked_domain/migrations/0005_migrate_linked_app_toggle.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-01 15:00
from __future__ import unicode_literals
from couchdbkit import ResourceNotFound
from django.db import migrations
from corehq.toggles import LINKED_DOMAINS
from toggle.models import Toggle
def _migrate_linked_apps_toggle(apps, schema_editor):
try:
linked_apps_toggle = Toggle.get('linked_apps')
except ResourceNotFound:
pass
else:
try:
Toggle.get(LINKED_DOMAINS.slug)
except ResourceNotFound:
linked_domains_toggle = Toggle(
slug=LINKED_DOMAINS.slug, enabled_users=linked_apps_toggle.enabled_users
)
linked_domains_toggle.save()
def noop(*args, **kwargs):
pass
class Migration(migrations.Migration):
initial = True
dependencies = [
('linked_domain', '0004_domainlinkhistory'),
]
operations = [
migrations.RunPython(_migrate_linked_apps_toggle, noop)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-01 15:00
from __future__ import unicode_literals
from couchdbkit import ResourceNotFound
from django.db import migrations
from corehq.toggles import LINKED_DOMAINS
from toggle.models import Toggle
def _migrate_linked_apps_toggle(apps, schema_editor):
try:
linked_apps_toggle = Toggle.get('linked_apps')
except ResourceNotFound:
pass
else:
linked_domains_toggle = Toggle(
slug=LINKED_DOMAINS.slug, enabled_users=linked_apps_toggle.enabled_users
)
linked_domains_toggle.save()
def noop(*args, **kwargs):
pass
class Migration(migrations.Migration):
initial = True
dependencies = [
('linked_domain', '0004_domainlinkhistory'),
]
operations = [
migrations.RunPython(_migrate_linked_apps_toggle, noop)
]
|
bsd-3-clause
|
Python
|
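The shape of the fix is a second existence check so the data migration stays idempotent: create the new toggle only if it is not already there. Stripped of Couch specifics (a LookupError stands in for couchdbkit's ResourceNotFound, and the store is a plain dict):
store = {"linked_apps": {"enabled_users": ["alice", "bob"]}}

def get(slug):
    try:
        return store[slug]
    except KeyError:
        raise LookupError(slug)          # stand-in for ResourceNotFound

def migrate():
    try:
        old = get("linked_apps")
    except LookupError:
        return                           # nothing to migrate
    try:
        get("linked_domains")            # the added guard
    except LookupError:
        store["linked_domains"] = {"enabled_users": old["enabled_users"]}

migrate(); migrate()                     # safe to run twice
print(store["linked_domains"])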
d3d25e127592356d6b678dc8d013f83f53803f67
|
update mordred.tests to check hidden modules
|
mordred-descriptor/mordred,mordred-descriptor/mordred
|
mordred/tests/__main__.py
|
mordred/tests/__main__.py
|
import os
import nose
def main():
base = os.path.dirname(os.path.dirname(__file__))
hidden = [
os.path.join(base, n)
for n in os.listdir(base)
if n[:1] == "_" and os.path.splitext(n)[1] == ".py"
]
tests = [base, os.path.join(base, "_base")] + hidden
os.environ["NOSE_WITH_DOCTEST"] = "1"
nose.main(
defaultTest=",".join(tests),
)
if __name__ == "__main__":
main()
|
import os
import nose
def main():
base = os.path.dirname(os.path.dirname(__file__))
tests = [base, os.path.join(base, "_base")]
os.environ["NOSE_WITH_DOCTEST"] = "1"
nose.main(
defaultTest=",".join(tests),
)
if __name__ == "__main__":
main()
|
bsd-3-clause
|
Python
|
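The filename filter added above is easy to check in isolation: it picks up "_name.py" style hidden modules and skips packages and non-Python entries. With hypothetical directory contents:
import os

names = ["_base", "_descriptor.py", "tests", "error.py", "_util.py"]   # hypothetical listing
hidden = [n for n in names if n[:1] == "_" and os.path.splitext(n)[1] == ".py"]
print(hidden)   # ['_descriptor.py', '_util.py']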