commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
11cd074f67668135d606f68dddb66c465ec01756
|
Add db index on field tag name
|
jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,williamroot/opps
|
opps/core/tags/models.py
|
opps/core/tags/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from opps.core.models import Date, Slugged
class Tag(Date, Slugged):
name = models.CharField(_(u'Name'), max_length=255, unique=True,
db_index=True)
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(Tag, self).save(*args, **kwargs)
__unicode__ = lambda self: self.name
class Meta:
verbose_name = _(u'Tag')
verbose_name_plural = _(u'Tags')
unique_together = ['slug', 'name']
class Tagged(models.Model):
tags = models.CharField(_(u'Tags'), max_length=4000, db_index=True,
blank=True, null=True,
help_text=_(u'A comma-separated list of tags.'))
def save(self, *args, **kwargs):
if self.tags:
tags = set(self.tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
self.tags = ','.join(tags)
super(Tagged, self).save(*args, **kwargs)
def get_tags(self):
if self.tags:
tags = []
for tag in self.tags.aplit(','):
t, created = Tag.objects.get_or_create(name=tag)
tags.append(t)
return tags
class Meta:
abstract = True
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from opps.core.models import Date, Slugged
class Tag(Date, Slugged):
name = models.CharField(_(u'Name'), max_length=255, unique=True)
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(Tag, self).save(*args, **kwargs)
__unicode__ = lambda self: self.name
class Meta:
verbose_name = _(u'Tag')
verbose_name_plural = _(u'Tags')
class Tagged(models.Model):
tags = models.CharField(_(u'Tags'), max_length=4000, blank=True,
help_text=_(u'A comma-separated list of tags.'))
def save(self, *args, **kwargs):
if self.tags:
tags = set(self.tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
self.tags = ','.join(tags)
super(Tagged, self).save(*args, **kwargs)
def get_tags(self):
if self.tags:
tags = []
for tag in self.tags.aplit(','):
t, created = Tag.objects.get_or_create(name=tag)
tags.append(t)
return tags
class Meta:
abstract = True
|
mit
|
Python
|
706aa6ed7170709828258bca1b9a1dfe6e8fa77e
|
improve helper functions
|
gousaiyang/SoftwareList,gousaiyang/SoftwareList
|
SLHelper.py
|
SLHelper.py
|
# -*- coding: utf-8 -*-
import re
import tkinter
from tkinter import messagebox
def file_content(filename):
with open(filename, 'rb') as fin:
content = fin.read()
return content.decode('utf-8')
def write_file(filename, content):
with open(filename, 'wb') as fout:
fout.write(content.encode('utf-8'))
def date_sanitizer(datestring):
result = re.findall(r'(\d{4})[-./ ](\d{1,2})[-./ ](\d{1,2})', datestring)
return '%s-%02d-%02d' % (result[0][0], int(result[0][1]), int(result[0][2])) if result else datestring.strip()
def alert_messagebox(title, content):
root = tkinter.Tk()
root.withdraw()
messagebox.showinfo(title, content)
root.destroy()
|
# -*- coding: utf-8 -*-
import re
import tkinter
from tkinter import messagebox
def file_content(filename):
with open(filename, 'r') as fin:
content = fin.read()
return content
def write_file(filename, content):
with open(filename, 'wb') as fout:
fout.write(content.encode('utf-8'))
def date_sanitizer(datestring):
result = re.findall(r'(\d{4})[-./ ](\d{1,2})[-./ ](\d{1,2})', datestring)
return '%s-%02d-%02d' % (result[0][0], int(result[0][1]), int(result[0][2])) if result else datestring
def alert_messagebox(title, content):
root = tkinter.Tk()
root.withdraw()
messagebox.showinfo(title, content)
root.destroy()
|
mit
|
Python
|
9d65b613384b1d4781efd65588639ad68261e8d7
|
Remove unused import.
|
Lukasa/cryptography,kimvais/cryptography,skeuomorf/cryptography,sholsapp/cryptography,dstufft/cryptography,sholsapp/cryptography,skeuomorf/cryptography,sholsapp/cryptography,glyph/cryptography,skeuomorf/cryptography,bwhmather/cryptography,Ayrx/cryptography,Hasimir/cryptography,Hasimir/cryptography,Ayrx/cryptography,kimvais/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,dstufft/cryptography,dstufft/cryptography,Lukasa/cryptography,Hasimir/cryptography,kimvais/cryptography,Hasimir/cryptography,dstufft/cryptography,dstufft/cryptography,glyph/cryptography,bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,sholsapp/cryptography,kimvais/cryptography,bwhmather/cryptography
|
cryptography/hazmat/primitives/hmac.py
|
cryptography/hazmat/primitives/hmac.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import six
from cryptography.hazmat.primitives import interfaces
@interfaces.register(interfaces.HashContext)
class HMAC(object):
def __init__(self, key, algorithm, ctx=None, backend=None):
super(HMAC, self).__init__()
self.algorithm = algorithm
if backend is None:
from cryptography.hazmat.bindings import _default_backend
backend = _default_backend
self._backend = backend
self._key = key
if ctx is None:
self._ctx = self._backend.hmacs.create_ctx(key, self.algorithm)
else:
self._ctx = ctx
def update(self, msg):
if isinstance(msg, six.text_type):
raise TypeError("Unicode-objects must be encoded before hashing")
self._backend.hmacs.update_ctx(self._ctx, msg)
def copy(self):
return self.__class__(self._key, self.algorithm, backend=self._backend,
ctx=self._backend.hmacs.copy_ctx(self._ctx))
def finalize(self):
return self._backend.hmacs.finalize_ctx(self._ctx,
self.algorithm.digest_size)
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import binascii
import six
from cryptography.hazmat.primitives import interfaces
@interfaces.register(interfaces.HashContext)
class HMAC(object):
def __init__(self, key, algorithm, ctx=None, backend=None):
super(HMAC, self).__init__()
self.algorithm = algorithm
if backend is None:
from cryptography.hazmat.bindings import _default_backend
backend = _default_backend
self._backend = backend
self._key = key
if ctx is None:
self._ctx = self._backend.hmacs.create_ctx(key, self.algorithm)
else:
self._ctx = ctx
def update(self, msg):
if isinstance(msg, six.text_type):
raise TypeError("Unicode-objects must be encoded before hashing")
self._backend.hmacs.update_ctx(self._ctx, msg)
def copy(self):
return self.__class__(self._key, self.algorithm, backend=self._backend,
ctx=self._backend.hmacs.copy_ctx(self._ctx))
def finalize(self):
return self._backend.hmacs.finalize_ctx(self._ctx,
self.algorithm.digest_size)
|
bsd-3-clause
|
Python
|
38a2d86aed4ea1e94691993c5f49722f9a69ac8d
|
Remove Python < 3.6 version check
|
ARM-software/lisa,credp/lisa,credp/lisa,credp/lisa,credp/lisa,ARM-software/lisa,ARM-software/lisa,ARM-software/lisa
|
lisa/__init__.py
|
lisa/__init__.py
|
#! /usr/bin/env python3
import warnings
import os
import sys
from lisa.version import __version__
# Raise an exception when a deprecated API is used from within a lisa.*
# submodule. This ensures that we don't use any deprecated APIs internally, so
# they are only kept for external backward compatibility purposes.
warnings.filterwarnings(
action='error',
category=DeprecationWarning,
module=r'{}\..*'.format(__name__),
)
# When the deprecated APIs are used from __main__ (script or notebook), always
# show the warning
warnings.filterwarnings(
action='always',
category=DeprecationWarning,
module=r'__main__',
)
# Prevent matplotlib from trying to connect to X11 server, for headless testing.
# Must be done before importing matplotlib.pyplot or pylab
try:
import matplotlib
except ImportError:
pass
else:
if not os.getenv('DISPLAY'):
matplotlib.use('Agg')
# vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab
|
#! /usr/bin/env python3
import warnings
import os
import sys
from lisa.version import __version__
# Raise an exception when a deprecated API is used from within a lisa.*
# submodule. This ensures that we don't use any deprecated APIs internally, so
# they are only kept for external backward compatibility purposes.
warnings.filterwarnings(
action='error',
category=DeprecationWarning,
module=r'{}\..*'.format(__name__),
)
# When the deprecated APIs are used from __main__ (script or notebook), always
# show the warning
warnings.filterwarnings(
action='always',
category=DeprecationWarning,
module=r'__main__',
)
# Prevent matplotlib from trying to connect to X11 server, for headless testing.
# Must be done before importing matplotlib.pyplot or pylab
try:
import matplotlib
except ImportError:
pass
else:
if not os.getenv('DISPLAY'):
matplotlib.use('Agg')
if sys.version_info < (3, 6):
warnings.warn(
'Python 3.6 will soon be required to run LISA, please upgrade from {} to any version higher than 3.6'.format(
'.'.join(
map(str, tuple(sys.version_info)[:3])
),
),
DeprecationWarning,
)
# vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab
|
apache-2.0
|
Python
|
13298bd49d9c72b8db6650fb0f8b316998b302f0
|
Add permissions on TodoList model.
|
endthestart/safarido,endthestart/safarido,endthestart/safarido
|
safarido/todos/models.py
|
safarido/todos/models.py
|
from django.conf import settings
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
class TimestampedModel(models.Model):
created_on = models.DateTimeField(_('created on'), auto_now_add=True)
modified_on = models.DateTimeField(_('modified on'), auto_now=True)
class Meta:
abstract = True
class TodoList(TimestampedModel):
title = models.CharField(
_('title'),
max_length=50,
)
slug = models.SlugField()
parent = models.ForeignKey(
'self',
blank=True,
null=True,
related_name='child',
)
def get_ancestors(self):
ancestors = []
if self.parent:
parent = self.parent
while parent:
ancestors.append(parent)
parent = parent.parent
ancestors.reverse()
ancestors = ancestors + [self, ]
return ancestors
def save(self, **kwargs):
# Generate a slug if there is not one
if self.title and not self.slug:
self.slug = slugify(self.title)
super(TodoList, self).save(**kwargs)
def __unicode__(self):
return self.title
class Meta:
ordering = ('title', )
permissions = (
('is_owner', 'Is Owner'),
('view_todo_list', 'View Todo List'),
)
class Todo(TimestampedModel):
list = models.ForeignKey(
TodoList,
related_name='todos',
)
title = models.CharField(
_('title'),
max_length=200)
description = models.TextField(
_('description'),
)
assigned_to = models.ManyToManyField(
settings.AUTH_USER_MODEL,
verbose_name=_('users'),
related_name='todos',
)
due_date = models.DateField(
verbose_name=_('due date'),
blank=True,
null=True,
)
is_done = models.DateField(
verbose_name=_('is done'),
blank=True,
null=True,
)
class Meta:
ordering = ('modified_on', 'created_on', )
|
from django.conf import settings
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
class TimestampedModel(models.Model):
created_on = models.DateTimeField(_('created on'), auto_now_add=True)
modified_on = models.DateTimeField(_('modified on'), auto_now=True)
class Meta:
abstract = True
class TodoList(TimestampedModel):
# owner = models.ForeignKey(
# settings.AUTH_USER_MODEL
# )
# users = models.ManyToManyField(
# settings.AUTH_USER_MODEL,
# verbose_name=_('users'),
# related_name=_('todo_lists'),
# )
title = models.CharField(
_('title'),
max_length=50,
)
slug = models.SlugField()
parent = models.ForeignKey(
'self',
blank=True,
null=True,
related_name='child',
)
def get_ancestors(self):
ancestors = []
if self.parent:
parent = self.parent
while parent:
ancestors.append(parent)
parent = parent.parent
ancestors.reverse()
ancestors = ancestors + [self, ]
return ancestors
def save(self, **kwargs):
# Generate a slug if there is not one
if self.title and not self.slug:
self.slug = slugify(self.name)
super(TodoList, self).save(**kwargs)
def __unicode__(self):
return self.title
class Meta:
ordering = ('title', )
class Todo(TimestampedModel):
list = models.ForeignKey(
TodoList,
related_name='todos',
)
title = models.CharField(
_('title'),
max_length=200)
description = models.TextField(
_('description'),
)
assigned_to = models.ManyToManyField(
settings.AUTH_USER_MODEL,
verbose_name=_('users'),
related_name='todos',
)
due_date = models.DateField(
verbose_name=_('due date'),
blank=True,
null=True,
)
is_done = models.DateField(
verbose_name=_('is done'),
blank=True,
null=True,
)
class Meta:
ordering = ('modified_on', 'created_on', )
|
mit
|
Python
|
b16a1987075bd72cb7d31cf3dc7e529ce8d0e102
|
fix loop
|
TejasM/wisely,TejasM/wisely,TejasM/wisely
|
wisely_project/get_courses_file.py
|
wisely_project/get_courses_file.py
|
import sys
import os
import traceback
from django import db
sys.path.append('/root/wisely/wisely_project/')
os.environ['DJANGO_SETTINGS_MODULE'] = 'wisely_project.settings.production'
from django.db.models import F, Q
from django.utils import timezone
from users.tasks import get_coursera_courses, get_edx_courses, get_udemy_courses
__author__ = 'tmehta'
from users.models import CourseraProfile, EdxProfile, UdemyProfile
while True:
try:
for connection in db.connections.all():
if len(connection.queries) > 100:
db.reset_queries()
for user in CourseraProfile.objects.filter(last_updated__lt=F('user__last_login')).filter(~Q(email='')).filter(
incorrect_login=False):
print user.username
print "Start coursera"
get_coursera_courses(user)
user.last_updated = timezone.now()
user.save()
for user in EdxProfile.objects.filter(last_updated__lt=F('user__last_login')).filter(~Q(email='')).filter(
incorrect_login=False):
print user.email
print "Start edx"
get_edx_courses(user)
user.last_updated = timezone.now()
user.save()
for user in UdemyProfile.objects.filter(last_updated__lt=F('user__last_login')).filter(~Q(email='')).filter(
incorrect_login=False):
print user.email
print "Start udemy"
get_udemy_courses(user)
user.last_updated = timezone.now()
user.save()
except Exception as e:
print traceback.format_exc()
|
import sys
import os
import traceback
from django import db
sys.path.append('/root/wisely/wisely_project/')
os.environ['DJANGO_SETTINGS_MODULE'] = 'wisely_project.settings.production'
from django.db.models import F
from django.utils import timezone
from users.tasks import get_coursera_courses, get_edx_courses, get_udemy_courses
__author__ = 'tmehta'
from users.models import CourseraProfile, EdxProfile, UdemyProfile
while True:
try:
for connection in db.connections.all():
if len(connection.queries) > 100:
db.reset_queries()
for user in CourseraProfile.objects.filter(last_updated__lt=F('user__last_login')):
if user.username != '' and user.incorrect_login == False:
print user.username
print "Start coursera"
get_coursera_courses(user)
user.last_updated = timezone.now()
user.save()
for user in EdxProfile.objects.filter(last_updated__lt=F('user__last_login')):
if user.email != '' and user.incorrect_login == False:
print user.email
print "Start edx"
get_edx_courses(user)
user.last_updated = timezone.now()
user.save()
for user in UdemyProfile.objects.filter(last_updated__lt=F('user__last_login')):
if user.email != '' and user.incorrect_login == False:
print user.email
print "Start udemy"
get_udemy_courses(user)
user.last_updated = timezone.now()
user.save()
except Exception as e:
print traceback.format_exc()
|
mit
|
Python
|
a2c484afc3951a77a6684f9c7323672c6db691aa
|
Fix name of celery queue
|
rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet
|
genomic_neuralnet/common/celery_slave.py
|
genomic_neuralnet/common/celery_slave.py
|
from __future__ import print_function
import os
import sys
import time
import numpy as np
import redis
import pickle
from itertools import chain
from genomic_neuralnet.common.base_compare import try_predictor
from genomic_neuralnet.util.ec2_util import get_master_dns
from celery import Celery
import celery.app.control as ctrl
name = 'parallel_predictors'
_host = get_master_dns(public=True)
backend = 'redis://{}/0'.format(_host)
broker = 'redis://{}/0'.format(_host)
app = Celery(name, backend=backend, broker=broker)
celery_try_predictor = app.task(try_predictor)
os.environ['BROKER_TRANSPORT_OPTIONS'] = "{'visibility_timeout': 900}"
_cache_dir = os.path.expanduser('~/work_cache')
if not os.path.isdir(_cache_dir):
os.makedirs(_cache_dir)
def disk_cache(result, id_num):
file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
with open(file_path, 'wb') as f:
pickle.dump(result, f)
def load_and_clear_cache(id_nums):
for id_num in id_nums:
file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
os.unlink(file_path)
def get_num_workers():
stats_dict = ctrl.Control(app).inspect().stats()
if stats_dict is None:
return 0
else:
num_workers = 0
for instance, stats in stats_dict.iteritems():
num_workers += stats['pool']['max-concurrency']
return num_workers
def get_queue_length():
conn = redis.StrictRedis(_host)
return conn.llen('celery')
def main():
# Start the worker.
app.worker_main(['--loglevel=DEBUG'])
if __name__ == '__main__':
main()
|
from __future__ import print_function
import os
import sys
import time
import numpy as np
import redis
import pickle
from itertools import chain
from genomic_neuralnet.common.base_compare import try_predictor
from genomic_neuralnet.util.ec2_util import get_master_dns
from celery import Celery
import celery.app.control as ctrl
name = 'parallel_predictors'
_host = get_master_dns(public=True)
backend = 'redis://{}/0'.format(_host)
broker = 'redis://{}/0'.format(_host)
app = Celery(name, backend=backend, broker=broker)
celery_try_predictor = app.task(try_predictor)
os.environ['BROKER_TRANSPORT_OPTIONS'] = "{'visibility_timeout': 900}"
_cache_dir = os.path.expanduser('~/work_cache')
if not os.path.isdir(_cache_dir):
os.makedirs(_cache_dir)
def disk_cache(result, id_num):
file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
with open(file_path, 'wb') as f:
pickle.dump(result, f)
def load_and_clear_cache(id_nums):
for id_num in id_nums:
file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
os.unlink(file_path)
def get_num_workers():
stats_dict = ctrl.Control(app).inspect().stats()
if stats_dict is None:
return 0
else:
num_workers = 0
for instance, stats in stats_dict.iteritems():
num_workers += stats['pool']['max-concurrency']
return num_workers
def get_queue_length():
conn = redis.StrictRedis(_host)
return conn.llen('parallel_predictors')
def main():
# Start the worker.
app.worker_main(['--loglevel=DEBUG'])
if __name__ == '__main__':
main()
|
mit
|
Python
|
e8003dbb1e6a7efe60b02c65207c7202236b1adb
|
Update InputDialogCtrl.py
|
Relrin/Helenae,Relrin/Helenae,Relrin/Helenae
|
helenae/gui/widgets/InputDialogCtrl.py
|
helenae/gui/widgets/InputDialogCtrl.py
|
# -*- coding: utf-8 -*-
import wx
class InputDialog(wx.Dialog):
def __init__(self, parent, id, title, ico_folder, validator):
wx.Dialog.__init__(self, parent, id, title, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
self.label = wx.StaticText(self, label="Имя элемента:", pos=(15, 20))
self.field = wx.TextCtrl(self, value="", size=(150, 20), pos=(105, 15), validator=validator)
self.button_ok = wx.Button(self, label="Ок", id=wx.ID_OK, pos=(75, 45))
self.button_cancel = wx.Button(self, label="Отмена", id=wx.ID_CANCEL, pos=(167, 45))
self.Bind(wx.EVT_BUTTON, self.onOK, id=wx.ID_OK)
self.Bind(wx.EVT_BUTTON, self.onCancel, id=wx.ID_CANCEL)
self.icon = wx.Icon(ico_folder + '/icons/app.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(self.icon)
size = (275, 80)
self.SetSize(size)
self.result = None
def onOK(self, event):
if self.field.GetValidator().Validate(self.field):
self.result = self.field.GetValue()
self.Destroy()
def onCancel(self, event):
self.result = None
self.Destroy()
if __name__ =='__main__':
from validators.FileValidator import FileValidator
app = wx.App(0)
ico_folder = '..'
frame = InputDialog(None, -1, 'Ввод данных', ico_folder, FileValidator())
frame.Show()
app.MainLoop()
|
# -*- coding: utf-8 -*-
import wx
from validators.FileValidator import FileValidator
class InputDialog(wx.Dialog):
def __init__(self, parent, id, title, ico_folder, validator):
wx.Dialog.__init__(self, parent, id, title, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
self.label = wx.StaticText(self, label="Имя каталога:", pos=(15, 20))
self.field = wx.TextCtrl(self, value="", size=(150, 20), pos=(105, 15), validator=validator)
self.button_ok = wx.Button(self, label="Ок", id=wx.ID_OK, pos=(75, 45))
self.button_cancel = wx.Button(self, label="Отмена", id=wx.ID_CANCEL, pos=(167, 45))
self.Bind(wx.EVT_BUTTON, self.onOK, id=wx.ID_OK)
self.Bind(wx.EVT_BUTTON, self.onCancel, id=wx.ID_CANCEL)
self.icon = wx.Icon(ico_folder + '/icons/app.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(self.icon)
size = (275, 80)
self.SetSize(size)
self.result = None
def onOK(self, event):
if self.field.GetValidator().Validate(self.field):
self.result = self.field.GetValue()
self.Destroy()
def onCancel(self, event):
self.result = None
self.Destroy()
if __name__ =='__main__':
app = wx.App(0)
ico_folder = '..'
frame = InputDialog(None, -1, 'Ввод данных', ico_folder, FileValidator())
frame.Show()
app.MainLoop()
|
mit
|
Python
|
a705892cd7e32a540c5fee61a2bf4c4d67abf477
|
add get_all_required_node_names method
|
zdw/xos,cboling/xos,open-cloud/xos,zdw/xos,zdw/xos,opencord/xos,open-cloud/xos,cboling/xos,cboling/xos,zdw/xos,open-cloud/xos,cboling/xos,opencord/xos,opencord/xos,cboling/xos
|
xos/tosca/resources/xosresource.py
|
xos/tosca/resources/xosresource.py
|
class XOSResource(object):
xos_base_class = "XOSResource"
xos_model = None
provides = None
def __init__(self, user, nodetemplate):
self.dirty = False
self.user = user
self.nodetemplate = nodetemplate
def get_all_required_node_names(self):
results = []
for reqs in self.nodetemplate.requirements:
for (k,v) in reqs.items():
results.append(v["node"])
return results
def get_requirements(self, relationship_name, throw_exception=False):
""" helper to search the list of requirements for a particular relationship
type.
"""
results = []
for reqs in self.nodetemplate.requirements:
for (k,v) in reqs.items():
if (v["relationship"] == relationship_name):
results.append(v["node"])
if (not results) and throw_exception:
raise Exception("Failed to find requirement in %s using relationship %s" % (self.nodetemplate.name, relationship_name))
return results
def get_requirement(self, relationship_name, throw_exception=False):
reqs = self.get_requirements(relationship_name, throw_exception)
if not reqs:
return None
return reqs[0]
def get_xos_object(self, cls, **kwargs):
objs = cls.objects.filter(**kwargs)
if not objs:
raise Exception("Failed to find %s filtered by %s" % (cls.__name__, str(kwargs)))
return objs[0]
def get_existing_objs(self):
return self.xos_model.objects.filter(name = self.nodetemplate.name)
def get_xos_args(self):
return {}
def create_or_update(self):
existing_objs = self.get_existing_objs()
if existing_objs:
self.info("%s %s already exists" % (self.xos_model.__name__, self.nodetemplate.name))
self.update(existing_objs[0])
else:
self.create()
def create(self):
raise Exception("abstract method -- must override")
def update(self, obj):
pass
def info(self, s):
print s
|
class XOSResource(object):
xos_base_class = "XOSResource"
xos_model = None
provides = None
def __init__(self, user, nodetemplate):
self.dirty = False
self.user = user
self.nodetemplate = nodetemplate
def get_requirements(self, relationship_name, throw_exception=False):
""" helper to search the list of requirements for a particular relationship
type.
"""
results = []
for reqs in self.nodetemplate.requirements:
for (k,v) in reqs.items():
if (v["relationship"] == relationship_name):
results.append(v["node"])
if (not results) and throw_exception:
raise Exception("Failed to find requirement in %s using relationship %s" % (self.nodetemplate.name, relationship_name))
return results
def get_requirement(self, relationship_name, throw_exception=False):
reqs = self.get_requirements(relationship_name, throw_exception)
if not reqs:
return None
return reqs[0]
def get_xos_object(self, cls, **kwargs):
objs = cls.objects.filter(**kwargs)
if not objs:
raise Exception("Failed to find %s filtered by %s" % (cls.__name__, str(kwargs)))
return objs[0]
def get_existing_objs(self):
return self.xos_model.objects.filter(name = self.nodetemplate.name)
def get_xos_args(self):
return {}
def create_or_update(self):
existing_objs = self.get_existing_objs()
if existing_objs:
self.info("%s %s already exists" % (self.xos_model.__name__, self.nodetemplate.name))
self.update(existing_objs[0])
else:
self.create()
def create(self):
raise Exception("abstract method -- must override")
def update(self, obj):
pass
def info(self, s):
print s
|
apache-2.0
|
Python
|
cb3bb4d350d5b05c9f3f123dc71dc4cec9f70703
|
Convert to use Context.
|
pfalcon/picotui
|
example_widgets.py
|
example_widgets.py
|
from picotui.context import Context
from picotui.screen import Screen
from picotui.widgets import *
from picotui.defs import *
with Context():
Screen.attr_color(C_WHITE, C_BLUE)
Screen.cls()
Screen.attr_reset()
d = Dialog(5, 5, 50, 12)
# Can add a raw string to dialog, will be converted to WLabel
d.add(1, 1, "Label:")
d.add(11, 1, WLabel("it's me!"))
d.add(1, 2, "Entry:")
d.add(11, 2, WTextEntry(4, "foo"))
d.add(1, 3, "Dropdown:")
d.add(11, 3, WDropDown(10, ["Red", "Green", "Yellow"]))
d.add(1, 4, "Combo:")
d.add(11, 4, WComboBox(8, "fo", ["foo", "foobar", "bar"]))
d.add(1, 5, "Auto complete:")
d.add(15, 5, WAutoComplete(8, "fo", ["foo", "foobar", "bar", "car", "dar"]))
d.add(1, 8, "Multiline:")
d.add(1, 9, WMultiEntry(26, 3, ["Example", "Text"]))
d.add(30, 1, WFrame(18, 6, "Frame"))
d.add(31, 2, WCheckbox("State"))
d.add(31, 3, WRadioButton(["Red", "Green", "Yellow"]))
d.add(30, 8, "List:")
d.add(30, 9, WListBox(16, 4, ["choice%d" % i for i in range(10)]))
d.add(1, 13, "Button:")
b = WButton(9, "Kaboom!")
d.add(10, 13, b)
b.on("click", lambda w: 1/0)
d.add(1, 15, "Dialog buttons:")
b = WButton(8, "OK")
d.add(10, 16, b)
# Instead of having on_click handler, buttons can finish a dialog
# with a given result.
b.finish_dialog = ACTION_OK
b = WButton(8, "Cancel")
d.add(30, 16, b)
b.finish_dialog = ACTION_CANCEL
#d.redraw()
res = d.loop()
print("Result:", res)
|
from picotui.screen import Screen
from picotui.widgets import *
from picotui.defs import *
if __name__ == "__main__":
s = Screen()
try:
s.init_tty()
s.enable_mouse()
s.attr_color(C_WHITE, C_BLUE)
s.cls()
s.attr_reset()
d = Dialog(5, 5, 50, 12)
# Can add a raw string to dialog, will be converted to WLabel
d.add(1, 1, "Label:")
d.add(11, 1, WLabel("it's me!"))
d.add(1, 2, "Entry:")
d.add(11, 2, WTextEntry(4, "foo"))
d.add(1, 3, "Dropdown:")
d.add(11, 3, WDropDown(10, ["Red", "Green", "Yellow"]))
d.add(1, 4, "Combo:")
d.add(11, 4, WComboBox(8, "fo", ["foo", "foobar", "bar"]))
d.add(1, 5, "Auto complete:")
d.add(15, 5, WAutoComplete(8, "fo", ["foo", "foobar", "bar", "car", "dar"]))
d.add(1, 8, "Multiline:")
d.add(1, 9, WMultiEntry(26, 3, ["Example", "Text"]))
d.add(30, 1, WFrame(18, 6, "Frame"))
d.add(31, 2, WCheckbox("State"))
d.add(31, 3, WRadioButton(["Red", "Green", "Yellow"]))
d.add(30, 8, "List:")
d.add(30, 9, WListBox(16, 4, ["choice%d" % i for i in range(10)]))
d.add(1, 13, "Button:")
b = WButton(9, "Kaboom!")
d.add(10, 13, b)
b.on("click", lambda w: 1/0)
d.add(1, 15, "Dialog buttons:")
b = WButton(8, "OK")
d.add(10, 16, b)
# Instead of having on_click handler, buttons can finish a dialog
# with a given result.
b.finish_dialog = ACTION_OK
b = WButton(8, "Cancel")
d.add(30, 16, b)
b.finish_dialog = ACTION_CANCEL
#d.redraw()
res = d.loop()
finally:
s.goto(0, 50)
s.cursor(True)
s.disable_mouse()
s.deinit_tty()
print("Result:", res)
|
mit
|
Python
|
bc6772cb8990039479f6fe2b238304765aafab41
|
make 70_web.py better
|
moskytw/mosql,uranusjr/mosql
|
examples/70_web.py
|
examples/70_web.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run this script, then try the following urls:
#
# 1. http://127.0.0.1:5000/?person_id=mosky
# 2. http://127.0.0.1:5000/?name=Mosky Liu
# 3. http://127.0.0.1:5000/?name like=%Mosky%
#
import psycopg2
from flask import Flask, request, jsonify
from mosql.query import select, left_join
from mosql.db import Database
db = Database(psycopg2, host='127.0.0.1')
app = Flask(__name__)
@app.route('/')
def index():
with db as cur:
cur.execute(select(
'person',
request.args or None,
joins = left_join('detail', using=('person_id', )),
))
return jsonify(data=list(cur))
if __name__ == '__main__':
app.run(debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run this script, then try the following urls:
#
# 1. http://127.0.0.1:5000/?person_id=mosky
# 2. http://127.0.0.1:5000/?name=Mosky Liu
# 3. http://127.0.0.1:5000/?name like=%Mosky%
#
import psycopg2
from flask import Flask, request, jsonify
from mosql.query import select, left_join
app = Flask(__name__)
conn = psycopg2.connect(host='127.0.0.1')
@app.route('/')
def index():
cur = conn.cursor()
cur.execute(select(
'person',
request.args or None,
joins = left_join('detail', using=('person_id', )),
))
rows = cur.fetchall()
cur.close()
return jsonify(data=rows)
if __name__ == '__main__':
app.run(debug=True)
|
mit
|
Python
|
cb5aeedc651773d1c298b167b07aa535dfd7beca
|
Fix typos/spelling in serializer docstrings (#2420)
|
Parsl/parsl,Parsl/parsl,Parsl/parsl,Parsl/parsl
|
parsl/serialize/concretes.py
|
parsl/serialize/concretes.py
|
import dill
import pickle
import logging
logger = logging.getLogger(__name__)
from parsl.serialize.base import SerializerBase
class PickleSerializer(SerializerBase):
""" Pickle serialization covers most python objects, with some notable exceptions:
* functions defined in a interpreter/notebook
* classes defined in local context and not importable using a fully qualified name
* closures, generators and coroutines
* [sometimes] issues with wrapped/decorated functions
"""
_identifier = b'01\n'
_for_code = True
_for_data = True
def serialize(self, data):
x = pickle.dumps(data)
return self.identifier + x
def deserialize(self, payload):
chomped = self.chomp(payload)
data = pickle.loads(chomped)
return data
class DillSerializer(SerializerBase):
    """Serialize objects with ``dill``.

    Dill works on a superset of the objects that pickle covers, though pickle
    is usually faster; for callables the extra overhead can be amortized with
    an lru_cache.  Items dill handles that pickle does not:

    * functions defined in an interpreter/notebook
    * classes defined in local context and not importable using a fully qualified name
    * functions that are wrapped/decorated by other functions/classes
    * closures
    """
    _identifier = b'02\n'
    _for_code = True
    _for_data = True

    def serialize(self, data):
        # Same framing as PickleSerializer: identifier prefix + payload.
        return self.identifier + dill.dumps(data)

    def deserialize(self, payload):
        # chomp() is expected to strip the identifier prefix added above.
        return dill.loads(self.chomp(payload))
|
import dill
import pickle
import logging
logger = logging.getLogger(__name__)
from parsl.serialize.base import SerializerBase
class PickleSerializer(SerializerBase):
    """ Pickle serialization covers most python objects, with some notable exceptions:
    * functions defined in an interpreter/notebook
    * classes defined in local context and not importable using a fully qualified name
    * closures, generators and coroutines
    * [sometimes] issues with wrapped/decorated functions
    """
    _identifier = b'01\n'
    _for_code = True
    _for_data = True
    def serialize(self, data):
        # Prefix the pickled bytes with this serializer's identifier so the
        # matching deserializer can be selected from the payload alone.
        x = pickle.dumps(data)
        return self.identifier + x
    def deserialize(self, payload):
        # chomp() is expected to strip the identifier prefix added in serialize().
        chomped = self.chomp(payload)
        data = pickle.loads(chomped)
        return data
class DillSerializer(SerializerBase):
    """ Dill serialization works on a superset of object including the ones covered by pickle.
    However for most cases pickle is faster. For most callable objects the additional overhead
    of dill can be amortized with an lru_cache. Here's items that dill handles that pickle
    doesn't:
    * functions defined in an interpreter/notebook
    * classes defined in local context and not importable using a fully qualified name
    * functions that are wrapped/decorated by other functions/classes
    * closures
    """
    _identifier = b'02\n'
    _for_code = True
    _for_data = True
    def serialize(self, data):
        # Same framing as PickleSerializer: identifier prefix + payload.
        x = dill.dumps(data)
        return self.identifier + x
    def deserialize(self, payload):
        # chomp() is expected to strip the identifier prefix added in serialize().
        chomped = self.chomp(payload)
        data = dill.loads(chomped)
        return data
|
apache-2.0
|
Python
|
7fd76d87cfda8f02912985cb3cf650ee8ff2e11e
|
Remove py2 Ska.DBI assert in report test
|
sot/mica,sot/mica
|
mica/report/tests/test_write_report.py
|
mica/report/tests/test_write_report.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import tempfile
import os
import shutil
import pytest
from .. import report
# Probe for Sybase/OCAT access: any failure (missing Ska.DBI, no network
# route, auth error) just means the Sybase-dependent tests will be skipped.
try:
    import Ska.DBI
    with Ska.DBI.DBI(server='sqlsao', dbi='sybase', user='aca_ops', database='axafocat') as db:
        HAS_SYBASE_ACCESS = True
except Exception:
    # `except Exception` instead of a bare `except:` so KeyboardInterrupt /
    # SystemExit are not swallowed by the probe.
    HAS_SYBASE_ACCESS = False

HAS_SC_ARCHIVE = os.path.exists(report.starcheck.FILES['data_root'])
@pytest.mark.skipif('not HAS_SYBASE_ACCESS', reason='Report test requires Sybase/OCAT access')
@pytest.mark.skipif('not HAS_SC_ARCHIVE', reason='Report test requires mica starcheck archive')
def test_write_reports():
    """
    Make a report and database
    """
    tempdir = tempfile.mkdtemp()
    # Get a temporary file, but then delete it, because report.py will only
    # make a new table if the supplied file doesn't exist
    fh, fn = tempfile.mkstemp(dir=tempdir, suffix='.db3')
    os.close(fh)  # mkstemp returns an *open* fd; close it to avoid leaking it
    os.unlink(fn)
    report.REPORT_ROOT = tempdir
    report.REPORT_SERVER = fn
    for obsid in [20001, 15175, 54778]:
        report.main(obsid)
    os.unlink(fn)
    shutil.rmtree(tempdir)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import tempfile
import os
import shutil
import pytest
from .. import report
# Probe for Sybase/OCAT access: any failure just means the Sybase-dependent
# tests will be skipped.
try:
    import Ska.DBI
    # Opening the connection is a sufficient access check.  The former
    # `assert db.conn._is_connected == 1` poked a Python-2-only Ska.DBI
    # internal and wrongly disabled these tests under Python 3.
    with Ska.DBI.DBI(server='sqlsao', dbi='sybase', user='aca_ops', database='axafocat') as db:
        HAS_SYBASE_ACCESS = True
except Exception:
    HAS_SYBASE_ACCESS = False

HAS_SC_ARCHIVE = os.path.exists(report.starcheck.FILES['data_root'])
@pytest.mark.skipif('not HAS_SYBASE_ACCESS', reason='Report test requires Sybase/OCAT access')
@pytest.mark.skipif('not HAS_SC_ARCHIVE', reason='Report test requires mica starcheck archive')
def test_write_reports():
    """
    Make a report and database
    """
    tempdir = tempfile.mkdtemp()
    # Get a temporary file, but then delete it, because report.py will only
    # make a new table if the supplied file doesn't exist
    fh, fn = tempfile.mkstemp(dir=tempdir, suffix='.db3')
    os.close(fh)  # mkstemp returns an *open* fd; close it to avoid leaking it
    os.unlink(fn)
    report.REPORT_ROOT = tempdir
    report.REPORT_SERVER = fn
    for obsid in [20001, 15175, 54778]:
        report.main(obsid)
    os.unlink(fn)
    shutil.rmtree(tempdir)
|
bsd-3-clause
|
Python
|
319d457e1e6511f6c240f5f4f5479181647f7cf6
|
Fix bug with test
|
jkitchin/scopus,scopus-api/scopus
|
scopus/tests/test_AffiliationSearch.py
|
scopus/tests/test_AffiliationSearch.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""
from collections import namedtuple
from nose.tools import assert_equal, assert_true
import scopus
s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
def test_affiliations():
    """Check the affiliation search result for af-id 60021784 (NYU)."""
    received = s.affiliations
    assert_true(isinstance(received, list))
    order = 'eid name variant documents city country parent'
    Affiliation = namedtuple('Affiliation', order)
    expected = Affiliation(eid='10-s2.0-60021784', name='New York University',
        variant='', documents='0', city='New York', country='United States',
        parent='0')
    # The document count grows over time: check it as a lower bound, then
    # blank it out before the exact field-by-field comparison.
    assert_true(int(received[0].documents) >= 101148)
    assert_equal(received[0]._replace(documents="0"), expected)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""
from collections import namedtuple
from nose.tools import assert_equal, assert_true
import scopus
s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
def test_affiliations():
    """Check the affiliation search result for af-id 60021784 (NYU)."""
    received = s.affiliations
    assert_true(isinstance(received, list))
    order = 'eid name variant documents city country parent'
    Affiliation = namedtuple('Affiliation', order)
    # `received` is a *list* of Affiliation tuples, so compare against a bare
    # namedtuple and index the first element; the previous version called
    # `.documents` / `._replace` on the list itself (AttributeError) and
    # wrapped `expected` in a list.
    expected = Affiliation(eid='10-s2.0-60021784', name='New York University',
        variant='', documents='0', city='New York', country='United States',
        parent='0')
    # The document count grows over time: check a lower bound, then blank it
    # out before the exact comparison.
    assert_true(int(received[0].documents) >= 101148)
    assert_equal(received[0]._replace(documents="0"), expected)
|
mit
|
Python
|
ed0821bd41a10dd00727f09cf9ba82123bd2cf93
|
Fix output of permissions import script
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
scripts/import_permissions_and_roles.py
|
scripts/import_permissions_and_roles.py
|
#!/usr/bin/env python
"""Import permissions, roles, and their relations from a TOML file.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import click
from byceps.services.authorization import impex_service
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('data_file', type=click.File())
def execute(data_file):
    """Import permissions/roles from an open TOML file and report the counts."""
    permission_count, role_count = impex_service.import_from_file(data_file)
    click.secho(
        f'Imported {permission_count} permissions and {role_count} roles.',
        fg='green',
    )
if __name__ == '__main__':
    # Load the BYCEPS config from the environment and run inside an app context.
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()
|
#!/usr/bin/env python
"""Import permissions, roles, and their relations from a TOML file.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import click
from byceps.services.authorization import impex_service
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('data_file', type=click.File())
def execute(data_file):
    """Import permissions/roles from an open TOML file and report the counts."""
    permission_count, role_count = impex_service.import_from_file(data_file)
    click.secho(
        # The `f` prefix was missing, so the placeholders were printed
        # literally instead of being interpolated.
        f'Imported {permission_count} permissions and {role_count} roles.',
        fg='green',
    )
if __name__ == '__main__':
    # Load the BYCEPS config from the environment and run inside an app context.
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()
|
bsd-3-clause
|
Python
|
cf94a8a51d4e8eb3fd96ca3587af0f4c38e2deec
|
Fix kdtree example
|
larsmans/python-pcl,amitibo/python-pcl,amitibo/python-pcl,amitibo/python-pcl,larsmans/python-pcl,larsmans/python-pcl
|
examples/kdtree.py
|
examples/kdtree.py
|
from __future__ import print_function
import numpy as np
import pcl
# Two small example clouds of four 3-D points each.
points_1 = np.array([[0, 0, 0],
                     [1, 0, 0],
                     [0, 1, 0],
                     [1, 1, 0]], dtype=np.float32)
points_2 = np.array([[0, 0, 0.2],
                     [1, 0, 0],
                     [0, 1, 0],
                     [1.1, 1, 0.5]], dtype=np.float32)
# Build the clouds via the from_array API (these objects are replaced below).
pc_1 = pcl.PointCloud()
pc_1.from_array(points_1)
pc_2 = pcl.PointCloud()
pc_2.from_array(points_2)
kd = pcl.KdTreeFLANN(pc_1)
print('pc_1:')
print(points_1)
print('\npc_2:')
print(points_2)
print('\n')
# Same clouds built via the constructor API; the kd-tree created here is the
# one actually used for the search below.
pc_1 = pcl.PointCloud(points_1)
pc_2 = pcl.PointCloud(points_2)
kd = pc_1.make_kdtree_flann()
# find the single closest points to each point in point cloud 2
# (and the sqr distances)
indices, sqr_distances = kd.nearest_k_search_for_cloud(pc_2, 1)
for i in range(pc_1.size):
    print('index of the closest point in pc_1 to point %d in pc_2 is %d'
          % (i, indices[i, 0]))
    print('the squared distance between these two points is %f'
          % sqr_distances[i, 0])
|
from __future__ import print_function
import numpy as np
import pcl
pc_1 = pcl.PointCloud()
pc_1.from_array(points_1)
pc_2 = pcl.PointCloud()
pc_2.from_array(points_2)
kd = pcl.KdTreeFLANN(pc_1)
points_1 = np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 1, 0]], dtype=np.float32)
points_2 = np.array([[0, 0, 0.2],
[1, 0, 0],
[0, 1, 0],
[1.1, 1, 0.5]], dtype=np.float32)
print('pc_1:')
print(points_1)
print('\npc_2:')
print(points_2)
print('\n')
pc_1 = pcl.PointCloud(points_1)
pc_2 = pcl.PointCloud(points_2)
kd = pc_1.make_kdtree_flann()
# find the single closest points to each point in point cloud 2
# (and the sqr distances)
indices, sqr_distances = kd.nearest_k_search_for_cloud(pc_2, 1)
for i in range(pc_1.size):
print('index of the closest point in pc_1 to point %d in pc_2 is %d'
% (i, indices[i, 0]))
print('the squared distance between these two points is %f'
% sqr_distances[i, 0])
|
bsd-3-clause
|
Python
|
881290c3d29ad28fb0fddfdd895fe493d7909262
|
make the wsgi.py work with all providers
|
mrdakoki/ballin-avenger,natea/django-deployer,natea/django-deployer,mrdakoki/ballin-avenger
|
django_deployer/paas_templates/wsgi.py
|
django_deployer/paas_templates/wsgi.py
|
import os
import sys
# Make the project package importable; '{{ project_name }}' is filled in by
# the django-deployer template engine before this file is deployed.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'{{ project_name }}')))
# Point Django at the provider-specific settings module.
os.environ['DJANGO_SETTINGS_MODULE'] = '{{ django_settings }}_{{ provider }}'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
import os
import sys
# Make the project package importable; '{{ project_name }}' is filled in by
# the django-deployer template engine before this file is deployed.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'{{ project_name }}')))
# Use the '{{ provider }}' template variable (as done for project_name above)
# instead of hard-coding the '_stackato' settings suffix, so the same
# template works for every PaaS provider.
os.environ['DJANGO_SETTINGS_MODULE'] = '{{ django_settings }}_{{ provider }}'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
mit
|
Python
|
7b84c2bd59f455050a249da795d1a73021b12581
|
Add an import
|
thombashi/pathvalidate
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._error import NullNameError
from ._error import InvalidCharError
from ._error import InvalidCharWindowsError
from ._error import InvalidLengthError
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._error import NullNameError
from ._error import InvalidCharError
from ._error import InvalidLengthError
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
mit
|
Python
|
343ed4b5ce1685c00eb611982319e48047c46361
|
remove dead code
|
fifoforlifo/pyqplan
|
samples/test_a/test_a.py
|
samples/test_a/test_a.py
|
class project:
    # A plan node: class attributes are metadata, nested classes are tasks.
    title = "Main Project"
    estimate = 3
    class task_a:
        estimate = 8
    class task_b:
        title = "Task B"
        estimate = 4
        # deps() lists prerequisite tasks; note there is no `self` --
        # presumably qplan calls these as plain functions on the class
        # namespace (TODO confirm against qplan.get_tasks).
        def deps(): return [project.task_a, other.task_d]
    class task_c:
        estimate = 6
        def deps(): return [project.task_a]
class other:
    class task_d:
        class task_e:
            estimate = 1
if __name__ == "__main__":
    import sys
    sys.path.append('../../modules')
    import qplan
    tasks = qplan.get_tasks(project)
    schedule_items = qplan.schedule_naively(tasks, project)
    for item in schedule_items:
        print('{item.task.name}: {item.start_time} - {item.end_time}'.format(**locals()))
    qplan.plot_gantt(schedule_items)
|
class project:
    # A plan node: class attributes are metadata, nested classes are tasks.
    title = "Main Project"
    estimate = 3
    class task_a:
        estimate = 8
    class task_b:
        title = "Task B"
        estimate = 4
        # deps() lists prerequisite tasks; note there is no `self` --
        # presumably qplan calls these as plain functions on the class
        # namespace (TODO confirm against qplan.get_tasks).
        def deps(): return [project.task_a, other.task_d]
    class task_c:
        estimate = 6
        def deps(): return [project.task_a]
class other:
    class task_d:
        class task_e:
            estimate = 1
if __name__ == "__main__":
    import sys
    sys.path.append('../../modules')
    import qplan
    # The print_tasks() debugging helper that used to live here was defined
    # but never called, so it has been removed as dead code.
    tasks = qplan.get_tasks(project)
    schedule_items = qplan.schedule_naively(tasks, project)
    for item in schedule_items:
        print('{item.task.name}: {item.start_time} - {item.end_time}'.format(**locals()))
    qplan.plot_gantt(schedule_items)
|
apache-2.0
|
Python
|
ba0f68221ed0aa3d0fcf99efcb3180ddd9d89e0b
|
add imports to magenta/music/__init__.py for notebook functions (#246)
|
magenta/note-seq,magenta/note-seq,magenta/note-seq
|
__init__.py
|
__init__.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Imports objects from music modules into the top-level music namespace."""
from constants import * # pylint: disable=wildcard-import
from melodies_lib import BadNoteException
from melodies_lib import extract_melodies
from melodies_lib import MelodyEncoderDecoder
from melodies_lib import MonophonicMelody
from melodies_lib import OneHotMelodyEncoderDecoder
from melodies_lib import PolyphonicMelodyException
from midi_io import midi_file_to_sequence_proto
from midi_io import midi_to_sequence_proto
from midi_io import MIDIConversionError
from midi_io import sequence_proto_to_midi_file
from midi_synth import fluidsynth
from midi_synth import synthesize
from notebook_utils import play_sequence
from sequence_generator import BaseSequenceGenerator
from sequence_generator import SequenceGeneratorException
from sequence_generator_bundle import GeneratorBundleParseException
from sequence_generator_bundle import read_bundle_file
from sequences_lib import BadTimeSignatureException
from sequences_lib import MultipleTimeSignatureException
from sequences_lib import NegativeTimeException
from sequences_lib import QuantizedSequence
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Imports objects from music modules into the top-level music namespace."""
from constants import * # pylint: disable=wildcard-import
from melodies_lib import BadNoteException
from melodies_lib import extract_melodies
from melodies_lib import MelodyEncoderDecoder
from melodies_lib import MonophonicMelody
from melodies_lib import OneHotMelodyEncoderDecoder
from melodies_lib import PolyphonicMelodyException
from midi_io import midi_file_to_sequence_proto
from midi_io import midi_to_sequence_proto
from midi_io import MIDIConversionError
from midi_io import sequence_proto_to_midi_file
from sequence_generator import BaseSequenceGenerator
from sequence_generator import SequenceGeneratorException
from sequence_generator_bundle import GeneratorBundleParseException
from sequence_generator_bundle import read_bundle_file
from sequences_lib import BadTimeSignatureException
from sequences_lib import MultipleTimeSignatureException
from sequences_lib import NegativeTimeException
from sequences_lib import QuantizedSequence
|
apache-2.0
|
Python
|
1fd2623c1e718a1b6685c82f40d4bcb11dd8541d
|
Add a get_collection method.
|
materialsproject/pymatgen-db,migueldiascosta/pymatgen-db,migueldiascosta/pymatgen-db,migueldiascosta/pymatgen-db,materialsproject/pymatgen-db,migueldiascosta/pymatgen-db,migueldiascosta/pymatgen-db
|
matgendb/util.py
|
matgendb/util.py
|
"""
Utility functions used across scripts
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Dec 1, 2012"
import bson
import datetime
import json
import os
from pymongo import Connection
DEFAULT_PORT = 27017
DEFAULT_SETTINGS = [
("host", "localhost"),
("port", DEFAULT_PORT),
("database", "vasp"),
("admin_user", None),
("admin_password", None),
("readonly_user", None),
("readonly_password", None),
("collection", "tasks"),
("aliases_config", None),
("mapi_key", None)
]
def get_settings(config_file):
    """Load DB settings as a dict.

    Reads *config_file* when given, otherwise ./db.json when present,
    otherwise falls back to the built-in DEFAULT_SETTINGS.
    """
    if config_file:
        with open(config_file) as settings_fp:
            return json.load(settings_fp)
    if os.path.exists("db.json"):
        with open("db.json") as settings_fp:
            return json.load(settings_fp)
    return dict(DEFAULT_SETTINGS)
def get_collection(config_file, admin=False):
    """Connect to MongoDB per the settings file and return the collection.

    Authenticates as the admin user when *admin* is true, otherwise as the
    read-only user, then returns the collection named in the settings.
    """
    d = get_settings(config_file)
    conn = Connection(d["host"], d["port"])
    db = conn[d["database"]]
    user = d["admin_user"] if admin else d["readonly_user"]
    passwd = d["admin_password"] if admin else d["readonly_password"]
    db.authenticate(user, passwd)
    return db[d["collection"]]
class MongoJSONEncoder(json.JSONEncoder):
    """JSON encoder that also handles BSON ObjectIds and datetimes."""
    def default(self, o):
        # ObjectIds are not JSON-serializable; emit their string form.
        if isinstance(o, bson.objectid.ObjectId):
            return str(o)
        # Encode datetimes as ISO-8601 strings.
        if isinstance(o, datetime.datetime):
            return o.isoformat()
        return json.JSONEncoder.default(self, o)
|
"""
Utility functions used across scripts
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Dec 1, 2012"
import bson
import datetime
import json
import os
DEFAULT_PORT = 27017
DEFAULT_SETTINGS = [
("host", "localhost"),
("port", DEFAULT_PORT),
("database", "vasp"),
("admin_user", None),
("admin_password", None),
("readonly_user", None),
("readonly_password", None),
("collection", "tasks"),
("aliases_config", None),
("mapi_key", None)
]
def get_settings(config_file):
    """Load DB settings as a dict.

    Reads *config_file* when given, otherwise ./db.json when present,
    otherwise falls back to the built-in DEFAULT_SETTINGS.
    """
    if config_file:
        with open(config_file) as settings_fp:
            return json.load(settings_fp)
    if os.path.exists("db.json"):
        with open("db.json") as settings_fp:
            return json.load(settings_fp)
    return dict(DEFAULT_SETTINGS)
class MongoJSONEncoder(json.JSONEncoder):
    """JSON encoder that also handles BSON ObjectIds and datetimes."""
    def default(self, o):
        # ObjectIds are not JSON-serializable; emit their string form.
        if isinstance(o, bson.objectid.ObjectId):
            return str(o)
        # Encode datetimes as ISO-8601 strings.
        if isinstance(o, datetime.datetime):
            return o.isoformat()
        return json.JSONEncoder.default(self, o)
|
mit
|
Python
|
47077fd978866acefb127d3ca3b72182a468a020
|
Support prelim.csv files in sort_sam.py script.
|
cfe-lab/MiCall,cfe-lab/MiCall,cfe-lab/MiCall
|
micall/utils/sort_sam.py
|
micall/utils/sort_sam.py
|
#! /usr/bin/env python3
import csv
import json
import os
from argparse import ArgumentParser, FileType, ArgumentDefaultsHelpFormatter
import subprocess
from pathlib import Path
import typing
def parse_args():
    """Parse the command line: SAM/prelim.csv path plus --projects JSON."""
    # noinspection PyTypeChecker
    parser = ArgumentParser(description='Sort SAM file before viewing.',
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument('sam', help='SAM file to sort, or prelim.csv')
    # Default to the projects.json shipped two directories above this script.
    default_projects = Path(__file__).parent.parent / 'projects.json'
    parser.add_argument('--projects',
                        type=FileType(),
                        help='JSON file with project definitions',
                        default=str(default_projects))
    return parser.parse_args()
def convert_from_csv(csv_name: str,
                     sam_name: str,
                     projects_file: typing.TextIO):
    """Render the prelim.csv at *csv_name* as a SAM file at *sam_name*.

    Writes an @HD header, one @SQ line (with reference length) for every
    project region that appears in the CSV, then copies every CSV data row
    as a tab-separated alignment line.
    """
    with open(csv_name) as csv_file, open(sam_name, 'w') as sam_file:
        sam_writer = csv.writer(sam_file, delimiter='\t',
                                lineterminator=os.linesep)
        sam_writer.writerow(['@HD', 'VN:1.0', 'SO:unsorted'])
        csv_reader = csv.reader(csv_file)
        column_names = next(csv_reader)
        rname_column = 2
        assert column_names[rname_column] == 'rname'
        # First pass: collect the set of reference names used by any row.
        seen_regions = {row[rname_column] for row in csv_reader}
        project_config = json.load(projects_file)
        for region_name, region_info in project_config['regions'].items():
            if region_name not in seen_regions:
                continue
            reference_length = len(''.join(region_info['reference']))
            sam_writer.writerow(
                ['@SQ', f'SN:{region_name}', f'LN:{reference_length}'])
        # Second pass: rewind and copy the alignment rows themselves.
        csv_file.seek(0)
        body_reader = csv.reader(csv_file)
        next(body_reader)  # skip the header row
        sam_writer.writerows(body_reader)
def main():
    """Sort the given SAM (or prelim.csv) input with samtools for viewing."""
    args = parse_args()
    sam_name = args.sam
    sam_root, sam_ext = os.path.splitext(sam_name)
    if sam_ext == '.csv':
        # A prelim.csv input is first converted to a SAM file alongside it.
        csv_name = sam_name
        sam_name = sam_root + '.sam'
        convert_from_csv(csv_name, sam_name, args.projects)
    # samtools view -Sb example.sam -o example.bam
    subprocess.check_call(
        ['samtools', 'view', '-Sb', sam_name, '-o', sam_root + '.bam'])
    # samtools sort example.bam -o example.sorted.bam
    subprocess.check_call(
        ['samtools', 'sort', sam_root + '.bam', '-o', sam_root + '.sorted.bam'])
    # samtools view -h -o example.sorted.sam example.sorted.bam
    subprocess.check_call(['samtools',
                           'view',
                           '-h',
                           '-o',
                           sam_root + '.sorted.sam',
                           sam_root + '.sorted.bam'])
if __name__ == '__main__':
    main()
|
#! /usr/bin/env python3
import os
from argparse import ArgumentParser, FileType
import subprocess
def parse_args():
    """Parse the command line: the path of the SAM file to sort."""
    parser = ArgumentParser(description='Sort SAM file before viewing.')
    parser.add_argument('sam', help='SAM file to sort')
    return parser.parse_args()
def main():
    """Convert, sort, and re-export the given SAM file using samtools."""
    args = parse_args()
    # samtools view -Sb example.sam -o example.bam
    sam_name = args.sam
    sam_root, _ = os.path.splitext(sam_name)
    subprocess.check_call(
        ['samtools', 'view', '-Sb', sam_name, '-o', sam_root + '.bam'])
    # samtools sort example.bam -o example.sorted.bam
    subprocess.check_call(
        ['samtools', 'sort', sam_root + '.bam', '-o', sam_root + '.sorted.bam'])
    # samtools view -h -o example.sorted.sam example.sorted.bam
    subprocess.check_call(['samtools',
                           'view',
                           '-h',
                           '-o',
                           sam_root + '.sorted.sam',
                           sam_root + '.sorted.bam'])
if __name__ == '__main__':
    main()
|
agpl-3.0
|
Python
|
1b3b3edac1b01a59519690c86647c70a67c4d90b
|
Add support for relative paths in mac os gen_snapshot. (#35324)
|
rmacnak-google/engine,flutter/engine,flutter/engine,rmacnak-google/engine,chinmaygarde/flutter_engine,rmacnak-google/engine,flutter/engine,rmacnak-google/engine,flutter/engine,chinmaygarde/flutter_engine,chinmaygarde/flutter_engine,flutter/engine,rmacnak-google/engine,chinmaygarde/flutter_engine,chinmaygarde/flutter_engine,rmacnak-google/engine,flutter/engine,chinmaygarde/flutter_engine,rmacnak-google/engine,chinmaygarde/flutter_engine,flutter/engine,flutter/engine
|
sky/tools/create_macos_gen_snapshots.py
|
sky/tools/create_macos_gen_snapshots.py
|
#!/usr/bin/env python3
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
# Repository root, resolved relative to this script's real location
# (four levels up from sky/tools/create_macos_gen_snapshots.py).
buildroot_dir = os.path.abspath(
    os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
def main():
    """Copy per-architecture gen_snapshot binaries into --dst."""
    parser = argparse.ArgumentParser(
        description='Copies architecture-dependent gen_snapshot binaries to output dir'
    )
    parser.add_argument('--dst', type=str, required=True)
    parser.add_argument('--clang-dir', type=str, default='clang_x64')
    parser.add_argument('--x64-out-dir', type=str)
    parser.add_argument('--arm64-out-dir', type=str)
    args = parser.parse_args()
    # Relative paths are interpreted against the buildroot, not the cwd.
    dst = (
        args.dst
        if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst)
    )
    if args.x64_out_dir:
        x64_out_dir = (
            args.x64_out_dir if os.path.isabs(args.x64_out_dir) else
            os.path.join(buildroot_dir, args.x64_out_dir)
        )
        generate_gen_snapshot(x64_out_dir, os.path.join(dst, 'gen_snapshot_x64'))
    if args.arm64_out_dir:
        arm64_out_dir = (
            args.arm64_out_dir if os.path.isabs(args.arm64_out_dir) else
            os.path.join(buildroot_dir, args.arm64_out_dir)
        )
        # The arm64 binary lives under a clang subdirectory of the out dir.
        generate_gen_snapshot(
            os.path.join(arm64_out_dir, args.clang_dir),
            os.path.join(dst, 'gen_snapshot_arm64')
        )
def generate_gen_snapshot(directory, destination):
    """Copy *directory*/gen_snapshot to *destination* via bitcode_strip.

    Exits the process with status 1 when the source binary is missing.
    """
    gen_snapshot_binary = os.path.join(directory, 'gen_snapshot')
    if not os.path.isfile(gen_snapshot_binary):
        print('Cannot find gen_snapshot at %s' % gen_snapshot_binary)
        sys.exit(1)
    subprocess.check_call([
        'xcrun', 'bitcode_strip', '-r', gen_snapshot_binary, '-o', destination
    ])
if __name__ == '__main__':
    sys.exit(main())
|
#!/usr/bin/env python3
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
def main():
    """Copy per-architecture gen_snapshot binaries into --dst."""
    parser = argparse.ArgumentParser(
        description='Copies architecture-dependent gen_snapshot binaries to output dir'
    )
    parser.add_argument('--dst', type=str, required=True)
    parser.add_argument('--clang-dir', type=str, default='clang_x64')
    parser.add_argument('--x64-out-dir', type=str)
    parser.add_argument('--arm64-out-dir', type=str)
    parser.add_argument('--armv7-out-dir', type=str)
    args = parser.parse_args()
    if args.x64_out_dir:
        generate_gen_snapshot(
            args.x64_out_dir, os.path.join(args.dst, 'gen_snapshot_x64')
        )
    if args.arm64_out_dir:
        # The arm binaries live under a clang subdirectory of the out dir.
        generate_gen_snapshot(
            os.path.join(args.arm64_out_dir, args.clang_dir),
            os.path.join(args.dst, 'gen_snapshot_arm64')
        )
    if args.armv7_out_dir:
        generate_gen_snapshot(
            os.path.join(args.armv7_out_dir, args.clang_dir),
            os.path.join(args.dst, 'gen_snapshot_armv7')
        )
def generate_gen_snapshot(directory, destination):
    """Copy *directory*/gen_snapshot to *destination* via bitcode_strip.

    Exits the process with status 1 when the source binary is missing.
    """
    gen_snapshot_binary = os.path.join(directory, 'gen_snapshot')
    if not os.path.isfile(gen_snapshot_binary):
        print('Cannot find gen_snapshot at %s' % gen_snapshot_binary)
        sys.exit(1)
    subprocess.check_call([
        'xcrun', 'bitcode_strip', '-r', gen_snapshot_binary, '-o', destination
    ])
if __name__ == '__main__':
    sys.exit(main())
|
bsd-3-clause
|
Python
|
16b92aac9a7f82a3c15bd2e50d02e4af482a7cf0
|
Fix crestHandler auth callback.
|
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
|
mint/web/cresthandler.py
|
mint/web/cresthandler.py
|
from conary.repository.netrepos import proxy
import restlib.http.modpython
from restlib import response
import crest.root
import crest.webhooks
from mint.rest.db import database as restDatabase
from mint.db import database
from mint.rest.middleware import auth
def handleCrest(uri, cfg, db, repos, req):
    """Dispatch *uri* through a freshly built crest handler.

    When *repos* is a proxy filter, unwrap it so the repository callback
    talks to the underlying repository object directly.
    """
    handler, callback = getCrestHandler(cfg, db)
    if isinstance(repos, proxy.SimpleRepositoryFilter):
        callback.repos = repos.repos
    else:
        callback.repos = repos
    return handler.handle(req, uri)
def getCrestHandler(cfg, db):
    """Build the mod_python crest handler plus its repository callback."""
    assert(cfg)
    assert(db)
    crestController = crest.root.Controller(None, '/rest')
    crestHandler = restlib.http.modpython.ModPythonHttpHandler(crestController)
    crestCallback = CrestRepositoryCallback(db)
    crestHandler.addCallback(crestCallback)
    # Wrap the raw DB handle in the mint and REST database layers before
    # handing it to the authentication callback.
    db = database.Database(cfg, db)
    db = restDatabase.Database(cfg, db)
    crestHandler.addCallback(CrestAuthenticationCallback(cfg, db))
    return crestHandler, crestCallback
class CrestAuthenticationCallback(auth.AuthenticationCallback):
    # Subclass so every view method is screened for account disablement.
    def processMethod(self, request, viewMethod, args, kw):
        return self.checkDisablement(request, viewMethod)
class CrestRepositoryCallback(crest.webhooks.ReposCallback):
    def __init__(self, db):
        self.db = db
        crest.webhooks.ReposCallback.__init__(self, None)
    def makeUrl(self, request, *args, **kwargs):
        # With an explicit host: link externally unless the hostname matches
        # a local project, which gets a /repos/<host>/api base URL instead.
        if 'host' in kwargs:
            cu = self.db.cursor()
            fqdn = kwargs['host']
            hostname = fqdn.split('.', 1)[0]
            # NOTE(review): SELECT COUNT(*) always yields one row, so the
            # `if not cu.fetchall()` external-link branch looks unreachable
            # -- verify intent.
            cu.execute('''SELECT COUNT(*) FROM Projects
                WHERE hostname=?''', hostname)
            if not cu.fetchall():
                return 'http://%s/%s' % (kwargs['host'], '/'.join(args))
            baseUrl = request.getHostWithProtocol() + '/repos/%s/api' % hostname
            return request.url(baseUrl=baseUrl, *args)
        return request.url(*args)
|
from conary.repository.netrepos import proxy
import restlib.http.modpython
from restlib import response
import crest.root
import crest.webhooks
from mint.rest.db import database as restDatabase
from mint.db import database
from mint.rest.middleware import auth
def handleCrest(uri, cfg, db, repos, req):
    """Dispatch *uri* through a freshly built crest handler.

    When *repos* is a proxy filter, unwrap it so the repository callback
    talks to the underlying repository object directly.
    """
    handler, callback = getCrestHandler(cfg, db)
    if isinstance(repos, proxy.SimpleRepositoryFilter):
        callback.repos = repos.repos
    else:
        callback.repos = repos
    return handler.handle(req, uri)
def getCrestHandler(cfg, db):
    """Build the mod_python crest handler plus its repository callback."""
    assert(cfg)
    assert(db)
    crestController = crest.root.Controller(None, '/rest')
    crestHandler = restlib.http.modpython.ModPythonHttpHandler(crestController)
    crestCallback = CrestRepositoryCallback(db)
    crestHandler.addCallback(crestCallback)
    # Wrap the raw DB handle in the mint and REST database layers before
    # handing it to the authentication callback.
    db = database.Database(cfg, db)
    db = restDatabase.Database(cfg, db)
    crestHandler.addCallback(auth.AuthenticationCallback(cfg, db))
    return crestHandler, crestCallback
class AuthChecker(auth.AuthenticationCallback):
    # The previous definition -- `class AuthChecker(auth.AuthenticationCallback(cfg, db):`
    # -- was a SyntaxError (unbalanced paren) and tried to *instantiate* the
    # base class, referencing `cfg`/`db` that do not exist at module scope.
    def processMethod(self, request, viewMethod, args, kw):
        """Short-circuit the view method when the account is disabled."""
        return self.checkDisablement(request, viewMethod)
class CrestRepositoryCallback(crest.webhooks.ReposCallback):
    def __init__(self, db):
        self.db = db
        crest.webhooks.ReposCallback.__init__(self, None)
    def makeUrl(self, request, *args, **kwargs):
        # With an explicit host: link externally unless the hostname matches
        # a local project, which gets a /repos/<host>/api base URL instead.
        if 'host' in kwargs:
            cu = self.db.cursor()
            fqdn = kwargs['host']
            hostname = fqdn.split('.', 1)[0]
            # NOTE(review): SELECT COUNT(*) always yields one row, so the
            # `if not cu.fetchall()` external-link branch looks unreachable
            # -- verify intent.
            cu.execute('''SELECT COUNT(*) FROM Projects
                WHERE hostname=?''', hostname)
            if not cu.fetchall():
                return 'http://%s/%s' % (kwargs['host'], '/'.join(args))
            baseUrl = request.getHostWithProtocol() + '/repos/%s/api' % hostname
            return request.url(baseUrl=baseUrl, *args)
        return request.url(*args)
|
apache-2.0
|
Python
|
f20c911285cc83f2cfe2b4650ba85f4b82eae43c
|
Improve description about the api
|
KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,kived/plyer,KeyWeeUsr/plyer,kived/plyer,kivy/plyer,KeyWeeUsr/plyer
|
plyer/facades/temperature.py
|
plyer/facades/temperature.py
|
class Temperature(object):
    '''Temperature facade.

    The temperature sensor measures the ambient room temperature in
    degrees Celsius (°C).

    Call `enable` to turn the sensor on and `disable` to stop it; read the
    `temperature` property for the current ambient air temperature in
    degree C.
    '''

    @property
    def temperature(self):
        '''Current air temperature in degree C.'''
        return self._get_temperature()

    def enable(self):
        '''Enable temperature sensor.'''
        self._enable()

    def disable(self):
        '''Disable temperature sensor.'''
        self._disable()

    # private: platform-specific implementations override these hooks
    def _get_temperature(self, **kwargs):
        raise NotImplementedError()

    def _enable(self, **kwargs):
        raise NotImplementedError()

    def _disable(self, **kwargs):
        raise NotImplementedError()
|
class Temperature(object):
    '''Temperature facade.

    Temperature sensor is used to measure the ambient room temperature in
    degrees Celsius (°C).

    With method `enable` you can turn on temperature sensor and 'disable'
    method stops the sensor.

    Use property `temperature` to get ambient air temperature in degree C.
    '''
    @property
    def temperature(self):
        '''Current air temperature in degree C.'''
        return self._get_temperature()
    def enable(self):
        '''Enable temperature sensor.'''
        self._enable()
    def disable(self):
        '''Disable temperature sensor.'''
        self._disable()
    #private: platform-specific implementations override these hooks
    def _get_temperature(self, **kwargs):
        raise NotImplementedError()
    def _enable(self, **kwargs):
        raise NotImplementedError()
    def _disable(self, **kwargs):
        raise NotImplementedError()
|
mit
|
Python
|
bec24879cafaa4e17dd7cd56bcdaa3b04cb378b9
|
remove test_dpdk_vf.py from run_tests
|
stackforge/fuel-plugin-contrail,stackforge/fuel-plugin-contrail,stackforge/fuel-plugin-contrail,stackforge/fuel-plugin-contrail
|
plugin_test/run_tests.py
|
plugin_test/run_tests.py
|
"""Copyright 2015 Mirantis, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
import sys
import os
import re
from nose.plugins import Plugin
from paramiko.transport import _join_lingering_threads
class CloseSSHConnectionsPlugin(Plugin):
"""Closes all paramiko's ssh connections after each test case.
Plugin fixes proboscis disability to run cleanup of any kind.
'afterTest' calls _join_lingering_threads function from paramiko,
which stops all threads (set the state to inactive and joins for 10s)
"""
name = 'closesshconnections'
def options(self, parser, env=os.environ):
"""Options."""
super(CloseSSHConnectionsPlugin, self).options(parser, env=env)
def configure(self, options, conf):
"""Configure env."""
super(CloseSSHConnectionsPlugin, self).configure(options, conf)
self.enabled = True
def afterTest(self, *args, **kwargs):
"""After_Test.
After_Test calls _join_lingering_threads function from paramiko,
which stops all threads (set the state to inactive and joins for 10s).
"""
_join_lingering_threads()
def import_tests():
"""Import test suite of project."""
from tests import test_smoke_bvt # noqa
from tests import test_integration # noqa
from tests import test_functional # noqa
from tests import test_failover # noqa
from tests import test_system # noqa
from tests import test_dpdk # noqa
from tests import test_sriov # noqa
from tests import test_dpdk_on_vf # noqa
from tests import test_contrail_multiple_networks # noqa
def run_tests():
"""Run test cases."""
from proboscis import TestProgram # noqa
import_tests()
# Run Proboscis and exit.
TestProgram(
addplugins=[CloseSSHConnectionsPlugin()]
).run_and_exit()
if __name__ == '__main__':
sys.path.append(sys.path[0] + "/fuel-qa")
import_tests()
from fuelweb_test.helpers.patching import map_test
if any(re.search(r'--group=patching_master_tests', arg)
for arg in sys.argv):
map_test('master')
elif any(re.search(r'--group=patching.*', arg) for arg in sys.argv):
map_test('environment')
run_tests()
|
"""Copyright 2015 Mirantis, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
import sys
import os
import re
from nose.plugins import Plugin
from paramiko.transport import _join_lingering_threads
class CloseSSHConnectionsPlugin(Plugin):
"""Closes all paramiko's ssh connections after each test case.
Plugin fixes proboscis disability to run cleanup of any kind.
'afterTest' calls _join_lingering_threads function from paramiko,
which stops all threads (set the state to inactive and joins for 10s)
"""
name = 'closesshconnections'
def options(self, parser, env=os.environ):
"""Options."""
super(CloseSSHConnectionsPlugin, self).options(parser, env=env)
def configure(self, options, conf):
"""Configure env."""
super(CloseSSHConnectionsPlugin, self).configure(options, conf)
self.enabled = True
def afterTest(self, *args, **kwargs):
"""After_Test.
After_Test calls _join_lingering_threads function from paramiko,
which stops all threads (set the state to inactive and joins for 10s).
"""
_join_lingering_threads()
def import_tests():
"""Import test suite of project."""
from tests import test_smoke_bvt # noqa
from tests import test_integration # noqa
from tests import test_functional # noqa
from tests import test_failover # noqa
from tests import test_system # noqa
from tests import test_dpdk # noqa
from tests import test_sriov # noqa
from tests import test_dpdk_vf # noqa
from tests import test_dpdk_on_vf # noqa
from tests import test_contrail_multiple_networks # noqa
def run_tests():
"""Run test cases."""
from proboscis import TestProgram # noqa
import_tests()
# Run Proboscis and exit.
TestProgram(
addplugins=[CloseSSHConnectionsPlugin()]
).run_and_exit()
if __name__ == '__main__':
sys.path.append(sys.path[0] + "/fuel-qa")
import_tests()
from fuelweb_test.helpers.patching import map_test
if any(re.search(r'--group=patching_master_tests', arg)
for arg in sys.argv):
map_test('master')
elif any(re.search(r'--group=patching.*', arg) for arg in sys.argv):
map_test('environment')
run_tests()
|
apache-2.0
|
Python
|
f956159efe43f14618c1b2baed8abaddbab42488
|
Fix export_models.py to work with new structure
|
crossroads-education/eta,crossroads-education/eta
|
scripts/export_models.py
|
scripts/export_models.py
|
import json
import os
import utils
def process_file(filename):
handle = open(filename, "r")
lines = handle.read().replace("\r", "").split("\n")
handle.close()
real_lines = []
for line in lines:
line = line.replace(" default ", " ")
raw_line = line.strip()
if raw_line.startswith("@") or raw_line.startswith("import ") or raw_line == "":
continue
if raw_line.startswith("export ") and raw_line.endswith(";"):
continue
real_lines.append(line)
return real_lines
def process_dir(dirname):
real_files = []
contents = []
for root, _, files in os.walk(dirname):
for filename in files: # Ensure that "I..." classes are put first
if filename.startswith("I"):
real_files.insert(0, root + "/" + filename)
else:
real_files.append(root + "/" + filename)
for filename in real_files:
if os.path.basename(filename) == "index.ts" or not filename.endswith(".ts"):
continue
contents += process_file(filename)
return contents
def main():
content_dir = utils.get_server_dir() + "content/"
handle = open(content_dir + "config.json", "r")
content_config = json.loads(handle.read())
handle.close()
lines = ['import * as linq from "linq";']
model_dirs = content_config["modelDirs"]
model_dirs.insert(0, "enums")
for model_dir in model_dirs:
lines += process_dir(content_dir + model_dir + "/")
comment = "// This file is automatically generated by /scripts/export_models.py\n"
raw = comment + "\n".join(lines)
out_file = content_dir + "static/js/lib/Models.ts"
handle = open(out_file, "w")
handle.write(raw)
handle.close()
print("Wrote output to {}. Compiling client-side Typescript...".format(out_file))
os.chdir(content_dir + "static/js")
utils.compile_ts()
os.chdir(utils.get_server_dir())
if __name__ == "__main__":
main()
|
import os
import utils
def process_file(filename):
handle = open(filename, "r")
lines = handle.read().replace("\r", "").split("\n")
handle.close()
real_lines = []
for line in lines:
line = line.replace(" default ", " ")
raw_line = line.strip()
if raw_line.startswith("@") or raw_line.startswith("import ") or raw_line == "":
continue
if raw_line.startswith("export ") and raw_line.endswith(";"):
continue
real_lines.append(line)
return real_lines
def process_dir(dirname):
files = os.listdir(dirname)
contents = []
for filename in files: # Ensure that "I..." classes are put first
if filename.startswith("I"):
files.remove(filename)
files.insert(0, filename)
for filename in files:
if filename == "index.ts" or not filename.endswith(".ts"):
continue
contents += process_file(dirname + filename)
return contents
def main():
server_dir = utils.get_server_dir()
lines = ['import * as linq from "linq";']
lines += process_dir(server_dir + "db/interfaces/")
lines += process_dir(server_dir + "db/models/")
comment = "// This file is automatically generated by /scripts/export_models.py.\n"
raw = comment + "\n".join(lines)
out_file = server_dir + "static/js/lib/Models.ts"
handle = open(out_file, "w")
handle.write(raw)
handle.close()
print("Wrote output to {}. Compiling client-side Typescript...".format(out_file))
os.chdir(server_dir + "static/js")
utils.compile_ts()
os.chdir(server_dir)
if __name__ == "__main__":
main()
|
mit
|
Python
|
7f876881267da77efeb8c3f5bb585502e33e76fc
|
add imagefile funcs to namespace
|
dalejung/ts-charting
|
ts_charting/__init__.py
|
ts_charting/__init__.py
|
from ts_charting.figure import Figure, Grapher
from ts_charting.charting import *
import ts_charting.ohlc
import ts_charting.boxplot
import ts_charting.span
from ts_charting.styler import styler, marker_styler, level_styler
from ts_charting.ipython import figsize, IN_NOTEBOOK
from ts_charting.plot_3d import plot_wireframe
from ts_charting.imagefile import plot_pdf, save_images
|
from ts_charting.figure import Figure, Grapher
from ts_charting.charting import *
import ts_charting.ohlc
import ts_charting.boxplot
import ts_charting.span
from ts_charting.styler import styler, marker_styler, level_styler
from ts_charting.ipython import figsize, IN_NOTEBOOK
from ts_charting.plot_3d import plot_wireframe
|
mit
|
Python
|
cb47cd2fbd37b9fce12abbf1c0ccff38d863f838
|
Add some refactorings
|
timtroendle/pytus2000
|
scripts/generate_code.py
|
scripts/generate_code.py
|
from collections import namedtuple
from itertools import dropwhile, groupby
VARIABLE_SECTION_START = 'Pos. = '
VARIABLE_NAME_FIELD = 'Variable = '
VARIABLE_LABEL_FIELD = 'Variable label = '
VALUE_FIELD = 'Value = '
VALUE_LABEL_FIELD = 'Label = '
Variable = namedtuple('Variable', ['id', 'name', 'label', 'values'])
class DataDictionaryParser():
def __init__(self, path_to_file):
with path_to_file.open('r') as txt_file:
lines = txt_file.readlines()
lines = filter(lambda line: line is not '\n', lines)
lines = dropwhile(lambda line: not line.startswith(VARIABLE_SECTION_START), lines)
lines = (line.rstrip('\n') for line in lines)
variable_sections = self._variable_section_generator(lines)
self.variables = [self._parse_variable(variable_section)
for variable_section in variable_sections]
self.number_variables = len(list(variable_sections))
@classmethod
def _parse_variable(cls, variable_section):
variable_section = list(variable_section)
position, name, label = variable_section[0].split('\t')
value_lines = filter(lambda line: line.startswith(VALUE_FIELD), variable_section)
return Variable(
id=int(position.split(VARIABLE_SECTION_START)[1]),
name=name.split(VARIABLE_NAME_FIELD)[1],
label=label.split(VARIABLE_LABEL_FIELD)[1],
values=cls._parse_variable_values(value_lines)
)
@staticmethod
def _parse_variable_values(value_lines):
values = {
value.split(VALUE_FIELD)[1]: label.split(VALUE_LABEL_FIELD)[1]
for value, label in (line.split('\t') for line in value_lines)
}
return values if len(values) > 0 else None
@staticmethod
def _variable_section_generator(lines):
variable_section = []
for line in lines:
if line.startswith(VARIABLE_SECTION_START) and len(variable_section) > 0:
yield variable_section
variable_section = []
variable_section.append(line)
yield variable_section
|
from collections import namedtuple
from itertools import dropwhile, groupby
VARIABLE_SECTION_START = 'Pos. = '
VARIABLE_NAME_FIELD = 'Variable = '
VARIABLE_LABEL_FIELD = 'Variable label = '
VALUE_FIELD = 'Value = '
VALUE_LABEL_FIELD = 'Label = '
Variable = namedtuple('Variable', ['id', 'name', 'label', 'values'])
class DataDictionaryParser():
def __init__(self, path_to_file):
with path_to_file.open('r') as txt_file:
lines = txt_file.readlines()
lines = filter(lambda line: line is not '\n', lines)
lines = dropwhile(lambda line: not line.startswith(VARIABLE_SECTION_START), lines)
lines = (line.rstrip('\n') for line in lines)
variable_sections = self._variable_section_generator(lines)
self.variables = [self._parse_variable(variable_section)
for variable_section in variable_sections]
self.number_variables = len(list(variable_sections))
@classmethod
def _parse_variable(cls, variable_section):
variable_section = list(variable_section)
position, name, label = variable_section[0].split('\t')
value_lines = filter(lambda line: line.startswith(VALUE_FIELD), variable_section)
return Variable(
id=int(position.split(VARIABLE_SECTION_START)[1]),
name=name.split(VARIABLE_NAME_FIELD)[1],
label=label.split(VARIABLE_LABEL_FIELD)[1],
values=cls._parse_variable_values(value_lines)
)
@staticmethod
def _parse_variable_values(value_lines):
value_lines_split = (line.split('\t') for line in value_lines)
values = {
value.split(VALUE_FIELD)[1]: label.split(VALUE_LABEL_FIELD)[1] for value, label in value_lines_split
}
if len(values) == 0:
return None
else:
return values
@staticmethod
def _variable_section_generator(lines):
variable_section = []
for line in lines:
if line.startswith(VARIABLE_SECTION_START) and len(variable_section) > 0:
yield variable_section
variable_section = []
variable_section.append(line)
yield variable_section
|
mit
|
Python
|
90d42e80690a80a7099142b6b024c8d3b0f78075
|
Fix DelayedCall cancellation in remind plugin on reload
|
JohnMaguire/Cardinal
|
plugins/remind/plugin.py
|
plugins/remind/plugin.py
|
from twisted.internet import error, reactor
from cardinal.decorators import command, help
class RemindPlugin:
def __init__(self):
self.call_ids = []
@command('remind')
@help("Sends a reminder after a set time.")
@help("Syntax: .remind <minutes> <message>")
def remind(self, cardinal, user, channel, msg):
message = msg.split(None, 2)
if len(message) < 3:
cardinal.sendMsg(channel, "Syntax: .remind <minutes> <message>")
return
self.call_ids.append(reactor.callLater(60 * int(message[1]),
cardinal.sendMsg, user.nick, message[2]))
cardinal.sendMsg(channel,
"%s: You will be reminded in %d minutes." %
(user.nick, int(message[1])))
def close(self):
for call_id in self.call_ids:
try:
call_id.cancel()
except error.AlreadyCancelled:
pass
entrypoint = RemindPlugin
|
from twisted.internet import error, reactor
from cardinal.decorators import command, help
class RemindPlugin:
def __init__(self):
self.call_ids = []
@command('remind')
@help("Sends a reminder after a set time.")
@help("Syntax: .remind <minutes> <message>")
def remind(self, cardinal, user, channel, msg):
message = msg.split(None, 2)
if len(message) < 3:
cardinal.sendMsg(channel, "Syntax: .remind <minutes> <message>")
return
self.call_ids.append(reactor.callLater(60 * int(message[1]),
cardinal.sendMsg, user.nick, message[2]))
cardinal.sendMsg(channel,
"%s: You will be reminded in %d minutes." %
(user.nick, int(message[1])))
def close(self):
for call_id in call_ids:
try:
call_id.cancel()
except error.AlreadyCancelled:
pass
entrypoint = RemindPlugin
|
mit
|
Python
|
d6c81135077867283738bcf9cceb0ce8198808d6
|
Enable SSL verify for prod
|
amm0nite/unicornclient,amm0nite/unicornclient
|
unicornclient/config.py
|
unicornclient/config.py
|
import os
import logging
ENV = os.getenv('PYTHONENV', 'prod')
LOG_LEVEL = logging.DEBUG
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
HOST = 'localhost'
PORT = 8080
SSL_VERIFY = False
DEFAULT_ROUTINES = ['auth', 'ping', 'status', 'system']
if ENV == 'prod':
LOG_LEVEL = logging.INFO
HOST = 'unicorn.ahst.fr'
SSL_VERIFY = True
|
import os
import logging
ENV = os.getenv('PYTHONENV', 'prod')
LOG_LEVEL = logging.DEBUG
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
HOST = 'localhost'
PORT = 8080
SSL_VERIFY = False
DEFAULT_ROUTINES = ['auth', 'ping', 'status', 'system']
if ENV == 'prod':
LOG_LEVEL = logging.INFO
HOST = 'unicorn.ahst.fr'
#SSL_VERIFY = True
|
mit
|
Python
|
5741d373ce42e7fbf7f888e4c220b033d21567fb
|
move default iembot listen ports
|
akrherz/iembot,akrherz/iembot
|
iembot.tac
|
iembot.tac
|
# Twisted Bits
from twisted.application import service, internet
from twisted.web import server
from twisted.enterprise import adbapi
# Base Python
import json
# Local Import
import iemchatbot
dbconfig = json.load(open('settings.json'))
application = service.Application("Public IEMBOT")
serviceCollection = service.IServiceCollection(application)
# This provides DictCursors!
dbpool = adbapi.ConnectionPool("pyiem.twistedpg", cp_reconnect=True,
database=dbconfig.get('databaserw').get('openfire'),
host=dbconfig.get('databaserw').get('host'),
password=dbconfig.get('databaserw').get('password'),
user=dbconfig.get('databaserw').get('user') )
jabber = iemchatbot.JabberClient(dbpool)
defer = dbpool.runQuery("select propname, propvalue from properties")
defer.addCallback(jabber.fire_client_with_config, serviceCollection)
# 2. JSON channel requests
json = server.Site( iemchatbot.JSONResource(jabber), logPath='/dev/null' )
x = internet.TCPServer(9003, json)
x.setServiceParent(serviceCollection)
# 3. Answer requests for RSS feeds of the bot logs
rss = server.Site( iemchatbot.RootResource(), logPath="/dev/null" )
r = internet.TCPServer(9004, rss)
r.setServiceParent(serviceCollection)
# END
|
# Twisted Bits
from twisted.application import service, internet
from twisted.web import server
from twisted.enterprise import adbapi
# Base Python
import json
# Local Import
import iemchatbot
dbconfig = json.load(open('settings.json'))
application = service.Application("Public IEMBOT")
serviceCollection = service.IServiceCollection(application)
# This provides DictCursors!
dbpool = adbapi.ConnectionPool("pyiem.twistedpg", cp_reconnect=True,
database=dbconfig.get('databaserw').get('openfire'),
host=dbconfig.get('databaserw').get('host'),
password=dbconfig.get('databaserw').get('password'),
user=dbconfig.get('databaserw').get('user') )
jabber = iemchatbot.JabberClient(dbpool)
defer = dbpool.runQuery("select propname, propvalue from properties")
defer.addCallback(jabber.fire_client_with_config, serviceCollection)
# 2. JSON channel requests
json = server.Site( iemchatbot.JSONResource(jabber), logPath='/dev/null' )
x = internet.TCPServer(8003, json)
x.setServiceParent(serviceCollection)
# 3. Answer requests for RSS feeds of the bot logs
rss = server.Site( iemchatbot.RootResource(), logPath="/dev/null" )
r = internet.TCPServer(8004, rss)
r.setServiceParent(serviceCollection)
# END
|
mit
|
Python
|
4312dcee00eabe97040a7a1da58f25d714a9dfee
|
Remove debug statement and prevent nsfw for images
|
jakebasile/procbot,jakebasile/procbot,jakebasile/procbot,jakebasile/procbot,jakebasile/procbot
|
scripts/python/reddit.py
|
scripts/python/reddit.py
|
#!/usr/bin/env python3
# Copyright 2012-2013 Jake Basile and Kyle Varga
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Shows a random image from /r/aww'''
import urllib.request
import urllib.parse
import random
import sys
import json
import re
if len(sys.argv) < 2:
sys.exit(1)
subreddit = sys.argv[1]
timeframe = sys.argv[2] if len(sys.argv) > 2 else 'day'
sort = sys.argv[3] if len(sys.argv) > 3 else 'top'
filetypes = sys.argv[4] if len(sys.argv) > 4 else 'jpg|jpeg|gif|png'
regex = '^.*\.(%s)$' % filetypes
results = urllib.request.urlopen('http://reddit.com/r/%s.json?limit=100&t=%s&sort=%s' % (subreddit, timeframe, sort))
if results.status != 200:
sys.exit(1)
jsn = json.loads(results.read().decode('utf-8'))
images = [
str(c['data']['url'])
for c in jsn['data']['children']
if re.match(regex, c['data']['url']) and c['data']['over_18'] == False
]
print(random.choice(images))
|
#!/usr/bin/env python3
# Copyright 2012-2013 Jake Basile and Kyle Varga
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Shows a random image from /r/aww'''
import urllib.request
import urllib.parse
import random
import sys
import json
import re
if len(sys.argv) < 2:
sys.exit(1)
subreddit = sys.argv[1]
timeframe = sys.argv[2] if len(sys.argv) > 2 else 'day'
sort = sys.argv[3] if len(sys.argv) > 3 else 'top'
filetypes = sys.argv[4] if len(sys.argv) > 4 else 'jpg|jpeg|gif|png'
regex = '^.*\.(%s)$' % filetypes
results = urllib.request.urlopen('http://reddit.com/r/%s.json?limit=100&t=%s&sort=%s' % (subreddit, timeframe, sort))
if results.status != 200:
print(results.url)
sys.exit(0)
jsn = json.loads(results.read().decode('utf-8'))
images = [
str(c['data']['url'])
for c in jsn['data']['children']
if re.match(regex, c['data']['url'])
]
print(random.choice(images))
|
bsd-2-clause
|
Python
|
f6cf19966651e8c1e21fa3bde777c5bad6285c9f
|
add print
|
jluccisano/raspberry-scripts,jluccisano/raspberry-scripts
|
scripts/relay_control.py
|
scripts/relay_control.py
|
#!/usr/bin/python
import RPi.GPIO as GPIO
import argparse
GPIO.setmode(GPIO.BOARD)
# GPIO/BOARD | Relay IN | Rotors | Zone
# 22/15 | R2 IN2 | 1 | B
# 18/12 | R1 IN2 | 2 | A
# 24/18 | R1 IN3 | 3 | D
# 17/11 | R1 IN4 | 4 | C
# 27/13 | R2 IN1 | 5 | E
relayIO = { "1": 15, "2": 12, "3": 18, "4": 11, "5": 13}
def setState(relay, state):
print("Trying to set relay: " + int(relayIO[relay]) + " to state: " + bool(state))
GPIO.output(int(relayIO[relay]), bool(state))
if getState(relay) != state:
print("relay: " + relay + "is not set to " + state)
print("relay: " + relay + "is set to " + getState(relay))
def getState(relay):
return GPIO.input(int(relayIO[relay]))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--relay', help='Set relay 1/2/3/4/5', required=True)
parser.add_argument('--state',help='Set state high=1 or low=0', required=True)
args = parser.parse_args()
setState(args.relay, args.state)
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import RPi.GPIO as GPIO
import argparse
GPIO.setmode(GPIO.BOARD)
# GPIO/BOARD | Relay IN | Rotors | Zone
# 22/15 | R2 IN2 | 1 | B
# 18/12 | R1 IN2 | 2 | A
# 24/18 | R1 IN3 | 3 | D
# 17/11 | R1 IN4 | 4 | C
# 27/13 | R2 IN1 | 5 | E
relayIO = { "1": 15, "2": 12, "3": 18, "4": 11, "5": 13}
def setState(relay, state):
GPIO.output(int(relayIO[relay]), bool(state))
if getState(relay) != state:
print("relay: " + relay + "is not set to " + state)
print("relay: " + relay + "is set to " + getState(relay))
def getState(relay):
return GPIO.input(int(relayIO[relay]))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--relay', help='Set relay 1/2/3/4/5', required=True)
parser.add_argument('--state',help='Set state high=1 or low=0', required=True)
args = parser.parse_args()
setState(args.relay, args.state)
if __name__ == '__main__':
main()
|
mit
|
Python
|
2f6d64325e1d4aa23d892db33402f455bd75458d
|
Reduce QR code box size.
|
Steveice10/FBI,Steveice10/FBI,Steveice10/FBI
|
servefiles/servefiles.py
|
servefiles/servefiles.py
|
import atexit
import os
import sys
import tempfile
import threading
import urllib
import netifaces
import qrcode
from PIL import ImageTk
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import TCPServer
from Tkinter import Tk, Frame, Label, BitmapImage
from urlparse import urljoin
from urllib import pathname2url, quote
except ImportError:
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
from tkinter import Tk, Frame, Label, BitmapImage
from urllib.parse import urljoin, quote
from urllib.request import pathname2url
if len(sys.argv) < 2:
print("Please specify a file/directory.")
sys.exit(1)
directory = sys.argv[1]
if not os.path.exists(directory):
print(directory + ": No such file or directory.")
sys.exit(1)
print("Preparing data...")
baseUrl = netifaces.ifaddresses(netifaces.gateways()['default'][netifaces.AF_INET][1])[2][0]['addr'] + ":8080/"
qrData = ""
if os.path.isfile(directory):
if directory.endswith(('.cia', '.tik')):
qrData += baseUrl + quote(os.path.basename(directory))
directory = os.path.dirname(directory)
else:
for file in [ file for file in next(os.walk(directory))[2] if file.endswith(('.cia', '.tik')) ]:
qrData += baseUrl + quote(file) + "\n"
if len(qrData) == 0:
print("No files to serve.")
sys.exit(1)
if not directory == "":
os.chdir(directory)
print("")
print("URLS:")
print(qrData)
print("")
print("Opening HTTP server on port 8080...")
server = TCPServer(("", 8080), SimpleHTTPRequestHandler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
atexit.register(server.shutdown)
print("Generating QR code...")
qrImage = qrcode.make(qrData, box_size=5)
print("Displaying QR code...")
root = Tk()
root.title("QR Code")
frame = Frame(root)
frame.pack()
qrBitmap = ImageTk.PhotoImage(qrImage)
qrLabel = Label(frame, image=qrBitmap)
qrLabel.pack()
root.mainloop()
print("Shutting down HTTP server...")
server.shutdown()
|
import atexit
import os
import sys
import tempfile
import threading
import urllib
import netifaces
import qrcode
from PIL import ImageTk
try:
from SimpleHTTPServer import SimpleHTTPRequestHandler
from SocketServer import TCPServer
from Tkinter import Tk, Frame, Label, BitmapImage
from urlparse import urljoin
from urllib import pathname2url, quote
except ImportError:
from http.server import SimpleHTTPRequestHandler
from socketserver import TCPServer
from tkinter import Tk, Frame, Label, BitmapImage
from urllib.parse import urljoin, quote
from urllib.request import pathname2url
if len(sys.argv) < 2:
print("Please specify a file/directory.")
sys.exit(1)
directory = sys.argv[1]
if not os.path.exists(directory):
print(directory + ": No such file or directory.")
sys.exit(1)
print("Preparing data...")
baseUrl = netifaces.ifaddresses(netifaces.gateways()['default'][netifaces.AF_INET][1])[2][0]['addr'] + ":8080/"
qrData = ""
if os.path.isfile(directory):
if directory.endswith(('.cia', '.tik')):
qrData += baseUrl + quote(os.path.basename(directory))
directory = os.path.dirname(directory)
else:
for file in [ file for file in next(os.walk(directory))[2] if file.endswith(('.cia', '.tik')) ]:
qrData += baseUrl + quote(file) + "\n"
if len(qrData) == 0:
print("No files to serve.")
sys.exit(1)
if not directory == "":
os.chdir(directory)
print("")
print("URLS:")
print(qrData)
print("")
print("Opening HTTP server on port 8080...")
server = TCPServer(("", 8080), SimpleHTTPRequestHandler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
atexit.register(server.shutdown)
print("Generating QR code...")
qrImage = qrcode.make(qrData)
print("Displaying QR code...")
root = Tk()
root.title("QR Code")
frame = Frame(root)
frame.pack()
qrBitmap = ImageTk.PhotoImage(qrImage)
qrLabel = Label(frame, image=qrBitmap)
qrLabel.pack()
root.mainloop()
print("Shutting down HTTP server...")
server.shutdown()
|
mit
|
Python
|
8bc482db2e9cf98d3e3571f49a85ee7a287efaf7
|
Use DjangoJSONEncoder when serving jsonp requests
|
codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/citydash,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/cornerwise,cityofsomerville/cornerwise,codeforboston/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,codeforboston/cornerwise
|
server/shared/request.py
|
server/shared/request.py
|
from django.core.serializers.json import DjangoJSONEncoder
from django.http import JsonResponse, HttpResponse
from django.shortcuts import render_to_response
from django.template.loader import render_to_string
import logging, json, re
logger = logging.getLogger("logger")
class ErrorResponse(Exception):
def __init__(self, message, data=None, status=401, err=None):
super(Exception, self).__init__(self, message)
self.data = data or { "error": message }
self.status = status
self.exception = err
def make_response(template=None, error_template="error.html"):
"""
View decorator
Tailor the response to the requested data type, as specified
in the Accept header. Expects the wrapped view to return a
dict. If the request wants JSON, renders the dict as JSON data.
"""
def constructor_fn(view):
def wrapped_view(req, *args, **kwargs):
use_template = template
status = 200
try:
data = view(req, *args, **kwargs)
except ErrorResponse as err:
data = err.data
use_template = error_template
status = err.status
# render error template or return JSON with proper error
# code
jsonp_callback = req.GET.get("callback")
if jsonp_callback:
content = json.dumps(data, cls=DjangoJSONEncoder)
body = "{callback}({json})".format(callback=jsonp_callback,
json=content)
response = HttpResponse(body, status=status)
response["Content-Type"] = "application/javascript"
return response
accepts = req.META["HTTP_ACCEPT"]
typestring, _ = accepts.split(";", 1)
if not use_template \
or re.search(r"application/json", typestring):
response = JsonResponse(data, status=status)
# TODO: We may (or may not!) want to be more restrictive
# in the future:
response["Access-Control-Allow-Origin"] = "*"
return response
return render_to_response(use_template, data, status=status)
return wrapped_view
return constructor_fn
|
from django.http import JsonResponse, HttpResponse
from django.shortcuts import render_to_response
from django.template.loader import render_to_string
import logging
import json
import re
logger = logging.getLogger("logger")
class ErrorResponse(Exception):
def __init__(self, message, data=None, status=401, err=None):
super(Exception, self).__init__(self, message)
self.data = data or { "error": message }
self.status = status
self.exception = err
def make_response(template=None, error_template="error.html"):
"""
View decorator
Tailor the response to the requested data type, as specified
in the Accept header. Expects the wrapped view to return a
dict. If the request wants JSON, renders the dict as JSON data.
"""
def constructor_fn(view):
def wrapped_view(req, *args, **kwargs):
use_template = template
status = 200
try:
data = view(req, *args, **kwargs)
except ErrorResponse as err:
data = err.data
use_template = error_template
status = err.status
# render error template or return JSON with proper error
# code
jsonp_callback = req.GET.get("callback")
if jsonp_callback:
body = "{callback}({json})".format(callback=jsonp_callback,
json=json.dumps(data))
response = HttpResponse(body, status=status)
response["Content-Type"] = "application/javascript"
return response
accepts = req.META["HTTP_ACCEPT"]
typestring, _ = accepts.split(";", 1)
if not use_template \
or re.search(r"application/json", typestring):
response = JsonResponse(data, status=status)
# TODO: We may (or may not!) want to be more restrictive
# in the future:
response["Access-Control-Allow-Origin"] = "*"
return response
return render_to_response(use_template, data, status=status)
return wrapped_view
return constructor_fn
|
mit
|
Python
|
e1d119d743076b29cf19c584c337579903ab3875
|
fix templates path
|
criscv94/bdd_repo,criscv94/bdd_repo,criscv94/bdd_repo
|
flaskr/__init__.py
|
flaskr/__init__.py
|
#!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
    """Application factory: build and return a new Flask app instance."""
    return Flask(__name__)
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "my_db"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/flaskr/queries'
@app.route("/")
def home():
    # Render the query index page. QUERIES_FILENAME holds a JSON array of
    # query descriptors; each entry must provide name/database/description/
    # query keys.
    with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
        json_file = json.load(queries_file)
        pairs = [(x["name"],
                  x["database"],
                  x["description"],
                  x["query"]) for x in json_file]
        # NOTE(review): Flask's render_template already resolves names
        # relative to the templates/ folder, so the 'templates/' prefix here
        # implies a nested templates/templates/ layout -- confirm against the
        # deployment.
        return render_template('templates/file.html', results=pairs)
@app.route("/mongo")
def mongo():
    # SECURITY: the raw `query` request argument is passed straight to eval()
    # against the live pymongo client -- this executes arbitrary Python from
    # untrusted input. Do not expose this endpoint publicly; replace with a
    # whitelist of allowed operations.
    query = request.args.get("query")
    results = eval('mongodb.'+query)
    results = json_util.dumps(results, sort_keys=True, indent=4)
    if "find" in query:
        # Read queries render their results; writes just acknowledge.
        return render_template('templates/mongo.html', results=results)
    else:
        return "ok"
@app.route("/postgres")
def postgres():
    # NOTE(review): `postgresdb` is only defined inside the commented-out
    # connection block above, so this view raises NameError unless that code
    # is re-enabled.
    query = request.args.get("query")
    cursor = postgresdb.cursor()
    # SECURITY: executes raw SQL taken directly from the request.
    cursor.execute(query)
    results = [[a for a in result] for result in cursor]
    print(results)
    return render_template('templates/postgres.html', results=results)
@app.route("/example")
def example():
    # Static example page; no data is passed to the template.
    return render_template('templates/example.html')
if __name__ == "__main__":
app.run()
|
#!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
app = Flask(__name__)
return app
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "my_db"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/flaskr/queries'
@app.route("/")
def home():
with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
json_file = json.load(queries_file)
pairs = [(x["name"],
x["database"],
x["description"],
x["query"]) for x in json_file]
return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
query = request.args.get("query")
results = eval('mongodb.'+query)
results = json_util.dumps(results, sort_keys=True, indent=4)
if "find" in query:
return render_template('mongo.html', results=results)
else:
return "ok"
@app.route("/postgres")
def postgres():
query = request.args.get("query")
cursor = postgresdb.cursor()
cursor.execute(query)
results = [[a for a in result] for result in cursor]
print(results)
return render_template('postgres.html', results=results)
@app.route("/example")
def example():
return render_template('example.html')
if __name__ == "__main__":
app.run()
|
mit
|
Python
|
22571c096051fefc28b467ca29d93a4f0ea6cb9c
|
fix column pruning
|
dwa/mongoose_fdw,asya999/mongoose_fdw,laurenhecht/mongoose_fdw
|
mongoose_fdw/__init__.py
|
mongoose_fdw/__init__.py
|
###
### Author: David Wallin
### Time-stamp: <2015-03-02 08:56:11 dwa>
from multicorn import ForeignDataWrapper
from multicorn.utils import log_to_postgres as log2pg
from pymongo import MongoClient
class Mongoose_fdw (ForeignDataWrapper):
    """PostgreSQL foreign data wrapper (multicorn) backed by a MongoDB
    collection.

    Recognized options: host, port, user, password, db, collection, auth_db.
    """

    def __init__(self, options, columns):
        super(Mongoose_fdw, self).__init__(options, columns)
        self.host_name = options.get('host', 'localhost')
        self.port_nr = int(options.get('port', '27017'))
        self.user = options.get('user')
        self.password = options.get('password')
        self.db_name = options.get('db', 'test')
        self.collection_name = options.get('collection', 'test')
        self.c = MongoClient(host=self.host_name,
                             port=self.port_nr)
        self.auth_db = options.get('auth_db', self.db_name)
        # NOTE(review): authentication runs against the hard-coded
        # 'userprofile' database (with source=auth_db) -- confirm this is
        # intentional rather than a deployment-specific leftover.
        self.c.userprofile.authenticate(self.user,
                                        self.password,
                                        source=self.auth_db)
        self.db = getattr(self.c, self.db_name)
        self.coll = getattr(self.db, self.collection_name)

    def execute(self, quals, columns):
        """Yield one dict per MongoDB document for a foreign scan,
        restricted to the requested *columns*."""
        ## TODO: build spec based on quals:
        if quals:
            log2pg('quals: {}'.format(quals))
            log2pg('Quals are not implemented yet')
        ## Only request fields of interest:
        fields = {k: True for k in columns}
        if '_id' not in fields:
            # _id is returned by default; suppress it unless requested.
            fields['_id'] = False
        Q = {}
        cur = self.coll.find(spec=Q, fields=fields, snapshot=True)
        for doc in cur:
            yield doc
## Local Variables: ***
## mode:python ***
## coding: utf-8 ***
## End: ***
|
###
### Author: David Wallin
### Time-stamp: <2015-03-02 08:56:11 dwa>
from multicorn import ForeignDataWrapper
from multicorn.utils import log_to_postgres as log2pg
from pymongo import MongoClient
class Mongoose_fdw (ForeignDataWrapper):
def __init__(self, options, columns):
super(Mongoose_fdw, self).__init__(options, columns)
self.host_name = options.get('host', 'localhost')
self.port_nr = int(options.get('port', '27017'))
self.user = options.get('user')
self.password = options.get('password')
self.db_name = options.get('db', 'test')
self.collection_name = options.get('collection', 'test')
self.c = MongoClient(host=self.host_name,
port=self.port_nr)
self.auth_db = options.get('auth_db', self.db_name)
self.c.userprofile.authenticate(self.user,
self.password,
source=self.auth_db)
self.db = getattr(self.c, self.db_name)
self.coll = getattr(self.db, self.collection_name)
def execute(self, quals, columns):
## TODO: build spec based on quals:
if quals:
log2pg('quals: {}'.format(quals))
log2pg('Quals are not implemented yet')
## Only request fields of interest:
fields = {k: True for k in columns.keys()}
if '_id' not in fields:
fields['_id'] = False
Q = {}
cur = self.coll.find(spec=Q, fields=fields, snapshot=True)
for doc in cur:
yield doc
## Local Variables: ***
## mode:python ***
## coding: utf-8 ***
## End: ***
|
mit
|
Python
|
441cfadb97879d9ac76407145ba77185bbb292f8
|
fix regex n test
|
Fantomas42/mots-vides,Fantomas42/mots-vides
|
mots_vides/stop_words.py
|
mots_vides/stop_words.py
|
"""
StopWord Python container, managing collection of stop words.
"""
import re
class StopWord(object):
    """
    Object managing a collection of stop words for a given language.
    """

    def __init__(self, language, collection=()):
        """
        Initializes with a given language and an optional collection.

        Note: the default was changed from a mutable ``[]`` to ``()``;
        the argument is only read, so behavior is unchanged.
        """
        self.language = language
        self.collection = set(collection)
        self.regex = None

    def __add__(self, entry):
        """
        Adds an entry or a collection of entries to the instance.
        """
        if isinstance(entry, str):
            self.collection.add(entry)
        else:
            self.collection = self.collection.union(entry)
        return self

    def __sub__(self, entry):
        """
        Subtracts an entry or a collection of entries from the instance.
        """
        if isinstance(entry, str):
            self.collection.remove(entry)
        else:
            self.collection = self.collection.difference(entry)
        return self

    def __len__(self):
        """
        Returns the collection length.
        """
        return self.collection.__len__()

    def __contains__(self, entry):
        """
        Checks if an entry is in the collection.
        """
        return self.collection.__contains__(entry)

    def __iter__(self):
        """
        Iterates over the collection.
        """
        return self.collection.__iter__()

    def _compile_regex(self, word):
        """Compile (and cache on self.regex) a whole-word pattern for *word*."""
        # Bug fix: escape the word so regex metacharacters (e.g. 'c++', '?')
        # cannot crash re.compile or alter the match semantics.
        escaped = re.escape(word)
        self.regex = re.compile(
            r'((^| )((?<!\w){0}(?!\w))(| ))|(((?<!\w){0}(?!\w)) )|((?<!\w){0}(?!\w))'.format(escaped),
            flags=re.IGNORECASE)
        return self.regex

    def rebase(self, text):
        """Return *text* with every stop word removed (case-insensitive,
        whole words only) and surrounding whitespace collapsed."""
        for word in self.collection:
            current_regex = self._compile_regex(word)
            text = current_regex.sub('', text).strip()
        return text
|
"""
StopWord Python container, managing collection of stop words.
"""
import re
class StopWord(object):
"""
Object managing collection of stop words for a given language.
"""
def __init__(self, language, collection=[]):
"""
Initializes with a given language and an optional collection.
"""
self.language = language
self.collection = set(collection)
self.regex = None
def __add__(self, entry):
"""
Adds an entry or collection of entries to an instance.
"""
if isinstance(entry, str):
self.collection.add(entry)
else:
self.collection = self.collection.union(entry)
return self
def __sub__(self, entry):
"""
Substracts an entry or collection of entries to an instance.
"""
if isinstance(entry, str):
self.collection.remove(entry)
else:
self.collection = self.collection.difference(entry)
return self
def __len__(self):
"""
Returns the collection length.
"""
return self.collection.__len__()
def __contains__(self, entry):
"""
Checks if an entry is in collection.
"""
return self.collection.__contains__(entry)
def __iter__(self):
"""
Iterates over the collection.
"""
return self.collection.__iter__()
def _compile_regex(self, word):
self.regex = re.compile(r'((^| ){0}(| ))|({0} )|{0}'.format(word), flags=re.IGNORECASE)
return self.regex
def rebase(self, text):
for word in self.collection:
current_regex = self._compile_regex(word)
text = current_regex.sub('', text).strip()
return text
|
bsd-3-clause
|
Python
|
9c7d335780e219893f0976cda6a5388b51fa0a64
|
Update to v19.2.6
|
Dark5ide/mycroft-core,Dark5ide/mycroft-core
|
mycroft/version/__init__.py
|
mycroft/version/__init__.py
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from genericpath import exists, isfile
from os.path import join, expanduser
from mycroft.configuration import Configuration
from mycroft.util.log import LOG
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 19
CORE_VERSION_MINOR = 2
CORE_VERSION_BUILD = 6
# END_VERSION_BLOCK
CORE_VERSION_TUPLE = (CORE_VERSION_MAJOR,
CORE_VERSION_MINOR,
CORE_VERSION_BUILD)
CORE_VERSION_STR = '.'.join(map(str, CORE_VERSION_TUPLE))
class VersionManager:
    """Access the installed core/enclosure version recorded on disk."""

    @staticmethod
    def get():
        """Return the version dict from ``<data_dir>/version.json``.

        Falls back to ``{"coreVersion": None, "enclosureVersion": None}``
        when the file is missing or unreadable.
        """
        data_dir = expanduser(Configuration.get()['data_dir'])
        version_file = join(data_dir, 'version.json')
        if exists(version_file) and isfile(version_file):
            try:
                with open(version_file) as f:
                    return json.load(f)
            except Exception:
                # Corrupt/unreadable file: log and fall through to default.
                LOG.error("Failed to load version from '%s'" % version_file)
        return {"coreVersion": None, "enclosureVersion": None}
def check_version(version_string):
    """
    Return True when the running core version is equal to or newer than
    the given version string.

    Args:
        version_string (string): version string ('Major.Minor.Build')
    """
    required = tuple(int(part) for part in version_string.split('.'))
    return CORE_VERSION_TUPLE >= required
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from genericpath import exists, isfile
from os.path import join, expanduser
from mycroft.configuration import Configuration
from mycroft.util.log import LOG
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 19
CORE_VERSION_MINOR = 2
CORE_VERSION_BUILD = 5
# END_VERSION_BLOCK
CORE_VERSION_TUPLE = (CORE_VERSION_MAJOR,
CORE_VERSION_MINOR,
CORE_VERSION_BUILD)
CORE_VERSION_STR = '.'.join(map(str, CORE_VERSION_TUPLE))
class VersionManager:
@staticmethod
def get():
data_dir = expanduser(Configuration.get()['data_dir'])
version_file = join(data_dir, 'version.json')
if exists(version_file) and isfile(version_file):
try:
with open(version_file) as f:
return json.load(f)
except Exception:
LOG.error("Failed to load version from '%s'" % version_file)
return {"coreVersion": None, "enclosureVersion": None}
def check_version(version_string):
"""
Check if current version is equal or higher than the
version string provided to the function
Args:
version_string (string): version string ('Major.Minor.Build')
"""
version_tuple = tuple(map(int, version_string.split('.')))
return CORE_VERSION_TUPLE >= version_tuple
|
apache-2.0
|
Python
|
0d77cb02dfec448c1de8def96c9b73856b602759
|
Update models.py
|
Bouke/django-user-sessions,Bouke/django-user-sessions,jmp0xf/django-user-sessions,ivorbosloper/django-user-sessions,jmp0xf/django-user-sessions,ivorbosloper/django-user-sessions
|
user_sessions/models.py
|
user_sessions/models.py
|
import django
from django.conf import settings
from django.contrib.sessions.models import SessionManager
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Session(models.Model):
    """
    Session objects containing user session information.

    Django provides full support for anonymous sessions. The session
    framework lets you store and retrieve arbitrary data on a
    per-site-visitor basis. It stores data on the server side and
    abstracts the sending and receiving of cookies. Cookies contain a
    session ID -- not the data itself.

    Additionally this session object provides the following properties:
    ``user``, ``user_agent`` and ``ip``.
    """
    session_key = models.CharField(_('session key'), max_length=40,
                                   primary_key=True)
    session_data = models.TextField(_('session data'))
    expire_date = models.DateTimeField(_('expiry date'), db_index=True)
    objects = SessionManager()

    class Meta:
        verbose_name = _('session')
        verbose_name_plural = _('sessions')

    def get_decoded(self):
        """Decode and return the session payload."""
        return SessionStore(None, None).decode(self.session_data)

    # Extra fields on top of Django's stock session model:
    user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'),
                             null=True)
    user_agent = models.CharField(max_length=200)
    last_activity = models.DateTimeField(auto_now=True)
    # GenericIPAddressField only exists from Django 1.6 onwards.
    if django.VERSION[:2] >= (1, 6):
        ip = models.GenericIPAddressField(verbose_name='IP')
    else:
        ip = models.IPAddressField(verbose_name='IP')
# At bottom to avoid circular import
from .backends.db import SessionStore
|
import django
from django.conf import settings
from django.contrib.sessions.models import SessionManager
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Session(models.Model):
"""
Session objects containing user session information.
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
Additionally this session object providers the following properties:
``user``, ``user_agent`` and ``ip``.
"""
session_key = models.CharField(_('session key'), max_length=40,
primary_key=True)
session_data = models.TextField(_('session data'))
expire_date = models.DateTimeField(_('expiry date'), db_index=True)
objects = SessionManager()
class Meta:
verbose_name = _('session')
verbose_name_plural = _('sessions')
def get_decoded(self):
return SessionStore(None, None).decode(self.session_data)
user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'),
null=True)
user_agent = models.CharField(max_length=200)
last_activity = models.DateTimeField(auto_now=True)
if django.VERSION[:2] >= (1, 6):
ip = models.GenericIPAddressField()
else:
ip = models.IPAddressField()
# At bottom to avoid circular import
from .backends.db import SessionStore
|
mit
|
Python
|
b5814202bdcc5a15503d6c52c59aa2eb8736b7ec
|
Add whitelist to redpill plugin
|
alama/PSO2Proxy,cyberkitsune/PSO2Proxy,alama/PSO2Proxy,flyergo/PSO2Proxy,flyergo/PSO2Proxy,cyberkitsune/PSO2Proxy,alama/PSO2Proxy,cyberkitsune/PSO2Proxy
|
proxy/plugins/redpill.py
|
proxy/plugins/redpill.py
|
# redpill.py PSO2Proxy plugin
# For use with redpill.py flask webapp and website for packet logging and management
# Bug fix: the module was imported as `sqlite` but the code below uses
# `sqlite3` (sqlite3.connect, sqlite3.Row), which raised NameError.
import sqlite3
import plugins

# Whitelist database location and plugin master switch.
dbLocation = '/var/pso2-www/redpill/redpill.db'
enabled = False
if enabled:
    @plugins.onStartHook
    def redpillInit():
        # Announce which database backs the whitelist at proxy startup.
        print("[Redpill] Redpill initilizing with database %s." % dbLocation)

    @plugins.packetHook(0x11, 0x0)
    def loginPacketHook(context, packet):
        # Login packet: the username is a fixed-width UTF-8 field at
        # offsets 0x8..0x48, NUL-padded.
        username = packet[0x8:0x48].decode('utf-8')
        username = username.rstrip('\0')
        if not user_exists(username):
            # Not whitelisted: drop the connection before login proceeds.
            context.loseConnection()
            print("[Redpill] %s is not in the whitelist database. Hanging up." % username)

    @plugins.onConnectionHook
    def registerClient(client):
        # TODO: not implemented yet.
        pass

    @plugins.onConnectionLossHook
    def archivePackets(client):
        # TODO: not implemented yet.
        pass
def getConn():
    """Open a connection to the whitelist database with name-based row access."""
    connection = sqlite3.connect(dbLocation)
    connection.row_factory = sqlite3.Row
    return connection
def user_exists(username):
    """Return True if *username* is in the whitelist (case-insensitive)."""
    con = getConn()
    with con:
        cur = con.cursor()
        cur.execute("select * from users where username = ? COLLATE NOCASE",
                    (username, ))
        # Bug/idiom fix: compare against None with `is`, and return the
        # boolean directly instead of an if/else returning True/False.
        return cur.fetchone() is not None
|
# redpill.py PSO2Proxy plugin
# For use with redpill.py flask webapp and website for packet logging and management
import sqlite
dbLocation = '/var/pso2-www/redpill/redpill.db'
#TODO
|
agpl-3.0
|
Python
|
28f74edc5b2902ccb9026388db789807a5c2e1f1
|
Append layout and seat if in csv if exist in ticket.
|
wadobo/congressus,wadobo/congressus,wadobo/congressus,wadobo/congressus
|
congressus/invs/utils.py
|
congressus/invs/utils.py
|
from django.conf import settings
from django.http import HttpResponse
from .models import Invitation
from tickets.utils import concat_pdf
from tickets.utils import generate_pdf
def gen_csv_from_generator(ig, numbered=True, string=True):
    """Build CSV line(s) for every invitation of generator *ig*.

    Each line is "order, type-name" (optionally prefixed with a 1-based
    counter) plus ", gate, row, col" when the invitation has a seat.
    Returns a single newline-joined string, or the list of lines when
    *string* is False.
    """
    type_name = ig.type.name
    lines = []
    for index, inv in enumerate(ig.invitations.all()):
        entry = '%s, %s' % (inv.order, type_name)
        if numbered:
            entry = ('%d,' % (index + 1)) + entry
        if inv.seat_layout and inv.seat:
            # Seat is stored as "row-col"; columns are renumbered from the
            # layout's starting column.
            row, col = inv.seat.split('-')
            col = int(col) + inv.seat_layout.column_start_number - 1
            entry += ', %s, %s, %s' % (inv.seat_layout.gate, row, col)
        lines.append(entry)
    return '\n'.join(lines) if string else lines
def gen_csv_from_generators(igs):
    """Merge the CSV lines of several generators into one numbered listing."""
    merged = []
    for ig in igs:
        merged.extend(gen_csv_from_generator(ig, numbered=False, string=False))
    # Prefix each line with a global 1-based counter ("N line").
    return '\n'.join('%d %s' % (i + 1, line) for i, line in enumerate(merged))
def gen_pdf(igs):
    """Concatenate one PDF ticket per invitation of the given generators.

    Returns the concatenated PDF produced by ``concat_pdf``.
    """
    # Cleanup: removed a leftover debug `print(inv)` from the loop.
    files = [generate_pdf(inv, asbuf=True, inv=True)
             for inv in Invitation.objects.filter(generator__in=igs)]
    return concat_pdf(files)
def get_ticket_format(invs, pf):
    """With a list of invitation generators, build the ticket response.

    Args:
        invs: iterable of invitation generators.
        pf: output format -- 'csv', 'thermal' or 'A4'.

    Returns:
        HttpResponse containing the rendered tickets.

    Raises:
        ValueError: if *pf* is not a recognized format.
    """
    if pf == 'csv':
        response = HttpResponse(content_type='application/csv')
        response['Content-Disposition'] = 'filename="invs.csv"'
        response.write(gen_csv_from_generators(invs))
    elif pf == 'thermal':
        pdf = gen_pdf(invs)
        response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'filename="tickets.pdf"'
        response.write(pdf)
    elif pf == 'A4':
        pdf = gen_pdf(invs)
        response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'attachment; filename="tickets.pdf"'
        response.write(pdf)
    else:
        # Bug fix: `raise "..."` is itself a TypeError in Python 3 (string
        # exceptions were removed); raise a real exception instead.
        raise ValueError("Ticket format not found")
    return response
|
from django.conf import settings
from django.http import HttpResponse
from .models import Invitation
from tickets.utils import concat_pdf
from tickets.utils import generate_pdf
def gen_csv_from_generator(ig, numbered=True, string=True):
csv = []
name = ig.type.name
for i, inv in enumerate(ig.invitations.all()):
line = '%s, %s' % (inv.order, name)
if numbered:
line = ('%d,' % (i + 1)) + line
csv.append(line)
if string:
return '\n'.join(csv)
return csv
def gen_csv_from_generators(igs):
csv = []
for ig in igs:
csv += gen_csv_from_generator(ig, numbered=False, string=False)
out = []
for i, line in enumerate(csv):
out.append(('%d ' % (i + 1)) + line)
return '\n'.join(out)
def gen_pdf(igs):
files = []
for inv in Invitation.objects.filter(generator__in=igs):
print(inv)
files.append(generate_pdf(inv, asbuf=True, inv=True))
return concat_pdf(files)
def get_ticket_format(invs, pf):
""" With a list of invitations or invitations,generate ticket output """
if pf == 'csv':
response = HttpResponse(content_type='application/csv')
response['Content-Disposition'] = 'filename="invs.csv"'
response.write(gen_csv_from_generators(invs))
elif pf == 'thermal':
pdf = gen_pdf(invs)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'filename="tickets.pdf"'
response.write(pdf)
elif pf == 'A4':
pdf = gen_pdf(invs)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="tickets.pdf"'
response.write(pdf)
else:
raise "Ticket format not found"
return response
|
agpl-3.0
|
Python
|
d7284d82367a3f9b7a3db4de88d3c06e92542b23
|
fix bug in domain blacklist
|
MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock
|
muckrock/task/signals.py
|
muckrock/task/signals.py
|
"""Signals for the task application"""
from django.db.models.signals import post_save
from email.utils import parseaddr
import logging
from muckrock.task.models import OrphanTask, BlacklistDomain
logger = logging.getLogger(__name__)
def domain_blacklist(sender, instance, **kwargs):
    """Auto-resolve OrphanTasks whose sender's email domain is blacklisted."""
    # pylint: disable=unused-argument
    sender_addr = parseaddr(instance.communication.priv_from_who)[1]
    if '@' not in sender_addr:
        # No parseable address -> nothing to check against the blacklist.
        return
    domain = sender_addr.split('@')[1]
    logger.info('Checking domain %s against blacklist', domain)
    if BlacklistDomain.objects.filter(domain=domain).exists():
        instance.resolve()
post_save.connect(domain_blacklist, sender=OrphanTask,
dispatch_uid='muckrock.task.signals.domain_blacklist')
|
"""Signals for the task application"""
from django.db.models.signals import post_save
from email.utils import parseaddr
import logging
from muckrock.task.models import OrphanTask, BlacklistDomain
logger = logging.getLogger(__name__)
def domain_blacklist(sender, instance, **kwargs):
"""Blacklist certain domains - automatically reject tasks from them"""
# pylint: disable=unused-argument
_, email = parseaddr(instance.communication.priv_from_who)
domain = email.split('@')[1]
logger.info('Checking domain %s against blacklist', domain)
if BlacklistDomain.objects.filter(domain=domain).exists():
instance.resolve()
post_save.connect(domain_blacklist, sender=OrphanTask,
dispatch_uid='muckrock.task.signals.domain_blacklist')
|
agpl-3.0
|
Python
|
009f1ec1580653dfc600c505622b95d153be231d
|
fix the id column
|
gkralik/lightspeed
|
util/create_database.py
|
util/create_database.py
|
#!/usr/bin/env python
"""Create the lightspeed SQLite database with its measurements table.

Usage: create_database.py [db_path]; defaults to <repo>/db/lightspeed.db.
"""
import os
import sys
import sqlite3

base_dir = os.path.dirname(os.path.realpath(os.path.join(__file__, '..')))
db_path = os.path.join(base_dir, 'db/lightspeed.db')
if len(sys.argv) == 2:
    db_path = os.path.realpath(sys.argv[1])

# Bug fix: initialize `conn` before the try block -- if sqlite3.connect()
# raised, the finally clause referenced an unbound name (NameError).
conn = None
try:
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    c.execute('''
    CREATE TABLE IF NOT EXISTS measurements (
    id INTEGER PRIMARY KEY,
    pingMs UNSIGNED DECIMAL(10, 3),
    downloadMbit DECIMAL(5, 2),
    uploadMbit DECIMAL(5, 2),
    timestamp DATETIME,
    durationSecs UNSIGNED INTEGER,
    isError INTEGER DEFAULT 0
    );
    ''')
    conn.commit()
    print('Database created in', db_path)
except sqlite3.Error as e:
    print('Error:', e.args[0])
finally:
    if conn:
        conn.close()
|
#!/usr/bin/env python
import os
import sys
import sqlite3
base_dir = os.path.dirname(os.path.realpath(os.path.join(__file__, '..')))
db_path = os.path.join(base_dir, 'db/lightspeed.db')
if len(sys.argv) == 2:
db_path = os.path.realpath(sys.argv[1])
try:
conn = sqlite3.connect(db_path)
c = conn.cursor();
c.execute('''
CREATE TABLE IF NOT EXISTS measurements (
id UNSIGNED INT AUTO_INCREMENT,
pingMs UNSIGNED DECIMAL(10, 3),
downloadMbit DECIMAL(5, 2),
uploadMbit DECIMAL(5, 2),
timestamp DATETIME,
durationSecs UNSIGNED INT,
isError INT DEFAULT 0,
PRIMARY KEY (id)
);
''')
conn.commit()
print('Database created in', db_path)
except sqlite3.Error as e:
print('Error:', e.args[0])
finally:
if conn:
conn.close()
|
mit
|
Python
|
30c2463ea91a6ae5c43e3c31d8efae093e9708c3
|
fix attempt
|
viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct
|
viaduct/models/group.py
|
viaduct/models/group.py
|
#!/usr/bin/python
from viaduct import db
from viaduct.models.permission import GroupPermission
user_group = db.Table('user_group',
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('group_id', db.Integer, db.ForeignKey('group.id'))
)
class Group(db.Model):
    """Named user group; membership is stored in the user_group table."""
    __tablename__ = 'group'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(256), unique=True)
    # Dynamic lazy loading on both sides keeps membership access as queries.
    users = db.relationship('User', secondary=user_group,
                            backref=db.backref('groups', lazy='dynamic'),
                            lazy='dynamic')

    def __init__(self, name):
        self.name = name

    def has_user(self, user):
        """Return True when *user* (which may be None) is a member."""
        if not user:
            return False;
        else:
            return self.users.filter(user_group.c.user_id==user.id).count() > 0

    def add_user(self, user):
        """Add *user* to the group unless already present; returns self."""
        if not self.has_user(user):
            self.users.append(user)
        return self

    def delete_user(self, user):
        """Remove *user* from the group if present."""
        if self.has_user(user):
            self.users.remove(user)

    def get_users(self):
        # FIXME: backwards compatibility.
        return self.users
#def get_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if not permission:
# return 0
# if permission.allowed:
# return 1
# else:
# return -1
#def has_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if permission:
# return permission.allowed
# return False
#
#def add_permission(self, name, allowed=True):
# self.delete_permission(name)
# permission = Permission.query.filter(Permission.name==name).first()
# db.session.add(GroupPermission(self, permission, allowed))
# db.session.commit()
#def delete_permission(self, name):
# for permission in self.permissions.join(Permission).filter(Permission.name==name).all():
# db.session.delete(permission)
# db.session.commit()
|
#!/usr/bin/python
from viaduct import db
from viaduct.models.permission import GroupPermission
user_group = db.Table('user_group',
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('group_id', db.Integer, db.ForeignKey('group.id'))
)
class Group(db.Model):
__tablename__ = 'group'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(256), unique=True)
users = db.relationship('User', secondary=user_group,
backref=db.backref('groups', lazy='dynamic'), lazy='dynamic')
def __init__(self, name):
self.name = name
def has_user(self, user):
return self.users.filter(user_group.c.user_id==user.id).count() > 0
def add_user(self, user):
if not self.has_user(user):
self.users.append(user)
return self
def delete_user(self, user):
if self.has_user(user):
self.users.remove(user)
def get_users(self):
# FIXME: backwards compatibility.
return self.users
#def get_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if not permission:
# return 0
# if permission.allowed:
# return 1
# else:
# return -1
#def has_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if permission:
# return permission.allowed
# return False
#
#def add_permission(self, name, allowed=True):
# self.delete_permission(name)
# permission = Permission.query.filter(Permission.name==name).first()
# db.session.add(GroupPermission(self, permission, allowed))
# db.session.commit()
#def delete_permission(self, name):
# for permission in self.permissions.join(Permission).filter(Permission.name==name).all():
# db.session.delete(permission)
# db.session.commit()
|
mit
|
Python
|
980ea4be2fd6d05aa9ec64bfaa50d89161185ccd
|
rework httplib2.Http to be able to not verify certs if configuration tells the app not to verify them
|
ayan-usgs/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI
|
pubs_ui/metrics/views.py
|
pubs_ui/metrics/views.py
|
from flask import Blueprint, render_template
from flask_login import login_required
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
from .. import app
metrics = Blueprint('metrics', __name__,
template_folder='templates',
static_folder='static',
static_url_path='/metrics/static')
def get_access_token():
    """Obtain a Google Analytics OAuth2 access token for the metrics pages."""
    # verification_cert could be a boolean or a string representing a path to
    # a certificate bundle.
    verification_cert = app.config.get('VERIFY_CERT')
    keyfile_path = app.config.get('GA_KEY_FILE_PATH')
    ga_auth_scope = app.config.get('GA_OAUTH2_SCOPE')
    # if verification_cert is a str that means it's a cert bundle, use that in
    # an Http object
    if isinstance(verification_cert, str):
        http = Http(ca_certs=verification_cert)
    elif isinstance(verification_cert, bool):
        # if VERIFY_CERT is False, disable_ssl_certificate_validation should
        # be True and vice versa
        http = Http(disable_ssl_certificate_validation=(not verification_cert))
    else:
        # Unset/None: let the credentials library build its own Http with
        # default certificate verification.
        http = None
    credentials = ServiceAccountCredentials.from_json_keyfile_name(keyfile_path, ga_auth_scope)
    access_token = credentials.get_access_token(http=http).access_token
    return access_token
@metrics.context_processor
def add_ga_access_token():
    # Inject a fresh GA access token into every template rendered by this
    # blueprint.
    return {
        'ga_access_token': get_access_token(),
    }


@metrics.route('/publications/acquisitions/')
@login_required
def publications_aquisitions():
    """Render the publications-acquisitions metrics page (login required)."""
    # NOTE: the 'aquisitions' spelling is kept -- renaming the function would
    # break url_for() references elsewhere.
    return render_template('metrics/publications_aquisitions.html')


@metrics.route('/publications/')
@login_required
def publications():
    """Render the publications metrics page (login required)."""
    return render_template('metrics/publications.html')
|
from flask import Blueprint, render_template
from flask_login import login_required
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
from .. import app
metrics = Blueprint('metrics', __name__,
template_folder='templates',
static_folder='static',
static_url_path='/metrics/static')
def get_access_token():
# verification_cert could be a boolean or a string representing a path to a certificate bundle
verification_cert = app.config.get('VERIFY_CERT')
keyfile_path = app.config.get('GA_KEY_FILE_PATH')
ga_auth_scope = app.config.get('GA_OAUTH2_SCOPE')
# if verification_cert is a str that means it's a cert bundle, use that in an Http object
http = Http(ca_certs=verification_cert) if isinstance(verification_cert, str) else None
credentials = ServiceAccountCredentials.from_json_keyfile_name(keyfile_path, ga_auth_scope)
access_token = credentials.get_access_token(http=http).access_token
return access_token
@metrics.context_processor
def add_ga_access_token():
return {
'ga_access_token': get_access_token(),
}
@metrics.route('/publications/acquisitions/')
@login_required
def publications_aquisitions():
return render_template('metrics/publications_aquisitions.html')
@metrics.route('/publications/')
@login_required
def publications():
return render_template('metrics/publications.html')
|
unlicense
|
Python
|
290f864f1bb44300cec9bb9e28679c3d7ba70c7e
|
Test 1 done
|
mtb-za/fatiando,cmeessen/fatiando,drandykass/fatiando,cmeessen/fatiando,santis19/fatiando,victortxa/fatiando,drandykass/fatiando,fatiando/fatiando,mtb-za/fatiando,santis19/fatiando,victortxa/fatiando,rafaelmds/fatiando,fatiando/fatiando,rafaelmds/fatiando
|
cookbook/seismic_conv.py
|
cookbook/seismic_conv.py
|
"""
Synthetic convolutional seismogram for a simple two layer velocity model
"""
import numpy as np
from fatiando.seismic import conv
from fatiando.vis import mpl
#model parameters
n_samples, n_traces = [600, 20]
rock_grid = 1500.*np.ones((n_samples, n_traces))
rock_grid[300:, :] = 2500.
#synthetic calculation
[vel_l, rho_l] = conv.depth_2_time(n_samples, n_traces, rock_grid, dt=2.e-3)
synt = conv.seismic_convolutional_model(n_traces, vel_l, 30., conv.rickerwave)
# plot input model
mpl.figure()
mpl.subplot(3, 1, 1)
mpl.ylabel('Depth (m)')
mpl.title("Depth Vp model", fontsize=13, family='sans-serif', weight='bold')
mpl.imshow(rock_grid, extent=[0, n_traces, n_samples, 0],
cmap=mpl.pyplot.cm.bwr, aspect='auto', origin='upper')
# plot resulted seismogram using wiggle
mpl.subplot(3, 1, 2)
mpl.seismic_wiggle(synt, dt=2.e-3)
mpl.seismic_image(synt, dt=2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram", fontsize=13, family='sans-serif',
weight='bold')
# plot resulted seismogram using wiggle over Vp model
mpl.subplot(3, 1, 3)
mpl.seismic_image(vel_l, dt=2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.seismic_wiggle(synt, dt=2.e-3)
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram over Vp model", fontsize=13,
family='sans-serif', weight='bold')
mpl.show()
|
"""
Synthetic convolutional seismogram for a simple two layer velocity model
"""
import numpy as np
from fatiando.seismic import conv
from fatiando.vis import mpl
#model parameters
n_samples, n_traces = [600, 20]
rock_grid = 1500.*np.ones((n_samples, n_traces))
rock_grid[300:,:] = 2500.
#synthetic calculation
[vel_l, rho_l] = conv.depth_2_time(n_samples, n_traces, rock_grid, dt=2.e-3)
synt = conv.seismic_convolutional_model(n_traces, vel_l, 30., conv.rickerwave)
# plot input model
mpl.figure()
mpl.subplot(3,1,1)
mpl.ylabel('Depth (m)')
mpl.title("Depth Vp model", fontsize=13, family='sans-serif', weight='bold')
mpl.imshow(rock_grid, extent=[0,n_traces, n_samples, 0],cmap=mpl.pyplot.cm.bwr,
aspect='auto', origin='upper')
# plot resulted seismogram using wiggle
mpl.subplot(3, 1, 2)
mpl.seismic_wiggle(synt, dt = 2.e-3)
mpl.seismic_image(synt, dt = 2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram", fontsize=13, family='sans-serif',
weight='bold')
# plot resulted seismogram using wiggle over Vp model
mpl.subplot(3, 1, 3)
mpl.seismic_image(vel_l, dt= 2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.seismic_wiggle(synt, dt = 2.e-3)
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram over Vp model", fontsize=13, family='sans-serif',
weight='bold')
mpl.show()
|
bsd-3-clause
|
Python
|
b4623bcdcd0a35091030057edc52870045a17223
|
fix for Anaconda compatibility
|
wright-group/WrightTools,wright-group/WrightTools
|
__init__.py
|
__init__.py
|
'''
Import all subdirectories and modules.
'''
import os as _os
__all__ = []
# Walk this package's directory and import every subpackage and module,
# accumulating their names in __all__. Helpers are underscore-prefixed so
# they stay out of the package namespace.
for _path in _os.listdir(_os.path.dirname(__file__)):
    _full_path = _os.path.join(_os.path.dirname(__file__), _path)
    # Subdirectories become subpackages, except VCS/demo/widget folders.
    if _os.path.isdir(_full_path) and _path not in ['.git', 'examples', 'widgets']:
        __import__(_path, locals(), globals())
        __all__.append(_path)
    # Plain .py files become modules, except this file and the GUI entry.
    elif _path[-3:] == '.py' and _path not in ['__init__.py', 'gui.py']:
        __import__(_path[:-3], locals(), globals())
        __all__.append(_path[:-3])
|
'''
Import all subdirectories and modules.
'''
import os as _os
__all__ = []
for _path in _os.listdir(_os.path.dirname(__file__)):
_full_path = _os.path.join(_os.path.dirname(__file__), _path)
if _os.path.isdir(_full_path) and _path not in ['.git', 'examples']:
__import__(_path, locals(), globals())
__all__.append(_path)
elif _path[-3:] == '.py' and _path not in ['__init__.py', 'gui.py']:
__import__(_path[:-3], locals(), globals())
__all__.append(_path[:-3])
|
mit
|
Python
|
2ce8efa3bf227c9a769121a4d313963f0cfbde51
|
print sys args
|
loreguerra/bbt-chart
|
add_data.py
|
add_data.py
|
import psycopg2
import sys
from connect import connect_to_db
# add argparse for options via command line
# add new temperature and date
conn = connect_to_db()
cur = conn.cursor()
# Python 2 script: echo the raw CLI arguments until real argument handling
# (argparse, per the TODO above) is implemented.
print sys.argv
|
import psycopg2
import sys
from connect import connect_to_db
# add argparse for options via command line
# add new temperature and date
def add_temp(date, temp):
    # Placeholder (Python 2): just echoes the values; DB insertion TODO.
    print date, temp
# conn = connect_to_db()
|
mit
|
Python
|
85da4c8cb3d613882eb46fb398e361286d4b4286
|
fix add_page
|
j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy
|
add_page.py
|
add_page.py
|
from widgy.models import *
# One-off seed script: create a demo page with a two-column layout and a few
# text widgets in each column.
page = ContentPage.objects.create(
    title='widgy page'
)
page.root_widget = TwoColumnLayout.add_root().node
page.save()
# Note the extra ".data" hop: buckets are tree nodes, and their content
# object (which owns add_child) lives on .data.
for i in range(3):
    page.root_widget.data.left_bucket.data.add_child(TextContent,
        content='yay %s' % i
    )
for i in range(2):
    page.root_widget.data.right_bucket.data.add_child(TextContent,
        content='yay right bucket %s' % i
    )
|
from widgy.models import *
page = ContentPage.objects.create(
title='widgy page'
)
page.root_widget = TwoColumnLayout.add_root().node
page.save()
for i in range(3):
page.root_widget.data.left_bucket.add_child(TextContent,
content='yay %s' % i
)
for i in range(2):
page.root_widget.data.right_bucket.add_child(TextContent,
content='yay right bucket %s' % i
)
|
apache-2.0
|
Python
|
8034a3f237fad994444cbc7edfffb658ef00f908
|
Test commit
|
Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank,Signbank/Auslan-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank
|
__init__.py
|
__init__.py
|
# test
|
bsd-3-clause
|
Python
|
|
9cd440760ea789cf712491080e61205d03a027c8
|
Support verbose and bleeding config from file
|
EPITECH-2022/TwentyTwo
|
__main__.py
|
__main__.py
|
import json, os.path
import discord
from discord.ext import commands
from Fun import Fun
def main():
    """Load config.json, construct the Bot, and start it.

    Expects config keys: description, token, verbose, bleeding.
    """
    # variables
    config_file = 'config.json'
    # load config
    with open(config_file) as f:
        config = json.load(f)
    # split config
    description, token = config['description'], config['token']
    # BUG FIX: this line previously read
    #   verbose, token = config['verbose'], config['bleeding']
    # which clobbered the bot token with the bleeding flag and left
    # `bleeding` undefined (NameError on the next line).
    verbose, bleeding = config['verbose'], config['bleeding']
    # define bot
    bot = Bot(description=description, verbose=verbose, bleeding=bleeding)
    bot.add_cog(Fun(bot))
    # launch bot
    bot.run(token)
class Bot(commands.Bot):
    """Discord bot that only answers when mentioned.

    Keyword-only options:
        verbose  -- when True, log() prints diagnostics.
        bleeding -- when True, run the experimental member-verification flow.
    """
    def __init__(self, *args, verbose=False, bleeding=False, **kwargs):
        # FIX: the original forwarded verbose/bleeding into commands.Bot with
        # a missing comma (a SyntaxError) and then assigned from undefined
        # locals. Accept them here as keyword-only options instead.
        # Rewrite the command_prefix flag to force mention
        super().__init__(*args, command_prefix=commands.when_mentioned, **kwargs)
        self.admins = []
        self.verbose = verbose
        self.bleeding = bleeding
    def log(self, txt):
        """Print *txt* only when verbose mode is enabled."""
        if self.verbose:
            print(txt)
    async def on_ready(self):
        self.log('Logged as {}#{}'.format(self.user.name, self.user.id))
        self.log('My boty is ready')
    async def on_member_join(self, member):
        # Verification is gated behind the bleeding-edge flag.
        if self.bleeding:
            self.log('Initiating verification procedure for user "{}".'.format(member.name))
            await self.verify(member)
    async def verify(self, member):
        """DM the new member asking for their EPITECH e-mail address."""
        msg = 'Please send your EPITECH mail adress\n'
        msg += 'i.e.: ```[email protected]```\n'
        msg += 'It has to be an EPITECH adress, any other adress will not be accepted'
        await self.send_message(member, msg)
    def is_epitech(self, txt):
        """Return True when *txt* ends with the @epitech.eu domain."""
        if txt[-11:] != '@epitech.eu':
            return False
        # TODO : mail username (check there are no @)
        return True
if __name__ == '__main__':
main()
|
import json, os.path
import discord
from discord.ext import commands
from Fun import Fun
def main():
# variables
config_file = 'config.json'
# load config
with open(config_file) as f:
config = json.load(f)
# split config
description, token = config['description'], config['token']
# define bot
bot = Bot(description=description)
bot.add_cog(Fun(bot))
# launch bot
bot.run(token)
class Bot(commands.Bot):
def __init__(self, *args, **kwargs):
# Rewrite the command_prefix flag to force mention
super().__init__(*args, command_prefix=commands.when_mentioned, **kwargs)
self.admins = []
self.verbose = False
self.bleeding = False
def log(self, txt):
if self.verbose:
print(txt)
async def on_ready(self):
self.log('Logged as {}#{}'.format(self.user.name, self.user.id))
self.log('My boty is ready')
async def on_member_join(self, member):
if self.bleeding:
self.log('Initiating verification procedure for user "{}".'.format(member.name))
await self.verify(member)
async def verify(self, member):
msg = 'Please send your EPITECH mail adress\n'
msg += 'i.e.: ```[email protected]```\n'
msg += 'It has to be an EPITECH adress, any other adress will not be accepted'
await self.send_message(member, msg)
def is_epitech(self, txt):
if txt[-11:] != '@epitech.eu':
return False
# TODO : mail username (check there are no @)
return True
if __name__ == '__main__':
main()
|
mit
|
Python
|
6bb6f73b6dd5a497a670ec3dc4d85483253737d2
|
update dev version after 0.9.6 tag [skip ci]
|
desihub/desimodel,desihub/desimodel
|
py/desimodel/_version.py
|
py/desimodel/_version.py
|
__version__ = '0.9.6.dev431'
|
__version__ = '0.9.6'
|
bsd-3-clause
|
Python
|
615613a3213e7b4023135b2fc85ac725d5f12656
|
Add jvm_path argument to connect method
|
laughingman7743/PyAthenaJDBC,laughingman7743/PyAthenaJDBC
|
pyathenajdbc/__init__.py
|
pyathenajdbc/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
__version__ = '1.0.2'
__athena_driver_version__ = '1.0.0'
# Globals https://www.python.org/dev/peps/pep-0249/#globals
apilevel = '2.0'
threadsafety = 3
paramstyle = 'pyformat'
ATHENA_JAR = 'AthenaJDBC41-{0}.jar'.format(__athena_driver_version__)
ATHENA_DRIVER_DOWNLOAD_URL = 'https://s3.amazonaws.com/athena-downloads/drivers/{0}'.format(
ATHENA_JAR)
ATHENA_DRIVER_CLASS_NAME = 'com.amazonaws.athena.jdbc.AthenaDriver'
ATHENA_CONNECTION_STRING = 'jdbc:awsathena://athena.{region}.amazonaws.com:443/'
class DBAPITypeObject:
    """Type Objects and Constructors
    https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
    """
    def __init__(self, *values):
        # The JDBC type-name strings this DB-API type object stands for.
        self.values = values
    def __cmp__(self, other):
        # Python 2 comparison hook: equal (0) when `other` names one of our
        # type strings. NOTE(review): `other < self.values` compares a scalar
        # against a tuple -- only meaningful under Python 2 mixed-type
        # ordering; __cmp__ is ignored entirely on Python 3. Confirm whether
        # __eq__/__ne__ should be provided instead.
        if other in self.values:
            return 0
        if other < self.values:
            return 1
        else:
            return -1
STRING = DBAPITypeObject('CHAR', 'NCHAR',
'VARCHAR', 'NVARCHAR',
'LONGVARCHAR', 'LONGNVARCHAR')
BINARY = DBAPITypeObject('BINARY', 'VARBINARY', 'LONGVARBINARY')
NUMBER = DBAPITypeObject('BOOLEAN', 'TINYINT', 'SMALLINT', 'BIGINT', 'INTEGER',
'REAL', 'DOUBLE', 'FLOAT', 'DECIMAL', 'NUMERIC')
DATETIME = DBAPITypeObject('TIMESTAMP')
ROWID = DBAPITypeObject('')
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def connect(s3_staging_dir=None, access_key=None, secret_key=None,
            region_name=None, profile_name=None, credential_file=None,
            jvm_options=None, converter=None, formatter=None, jvm_path=None,
            **kwargs):
    """DB-API 2.0 ``connect()``: build a Connection to Amazon Athena via JDBC.

    ``jvm_path`` optionally selects a specific JVM to load; remaining keyword
    arguments are forwarded to ``Connection`` unchanged. The import is local
    to avoid a circular import with ``pyathenajdbc.connection``.
    """
    from pyathenajdbc.connection import Connection
    return Connection(s3_staging_dir, access_key, secret_key,
                      region_name, profile_name, credential_file,
                      jvm_options, converter, formatter, jvm_path,
                      **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
__version__ = '1.0.2'
__athena_driver_version__ = '1.0.0'
# Globals https://www.python.org/dev/peps/pep-0249/#globals
apilevel = '2.0'
threadsafety = 3
paramstyle = 'pyformat'
ATHENA_JAR = 'AthenaJDBC41-{0}.jar'.format(__athena_driver_version__)
ATHENA_DRIVER_DOWNLOAD_URL = 'https://s3.amazonaws.com/athena-downloads/drivers/{0}'.format(
ATHENA_JAR)
ATHENA_DRIVER_CLASS_NAME = 'com.amazonaws.athena.jdbc.AthenaDriver'
ATHENA_CONNECTION_STRING = 'jdbc:awsathena://athena.{region}.amazonaws.com:443/'
class DBAPITypeObject:
"""Type Objects and Constructors
https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
"""
def __init__(self, *values):
self.values = values
def __cmp__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
STRING = DBAPITypeObject('CHAR', 'NCHAR',
'VARCHAR', 'NVARCHAR',
'LONGVARCHAR', 'LONGNVARCHAR')
BINARY = DBAPITypeObject('BINARY', 'VARBINARY', 'LONGVARBINARY')
NUMBER = DBAPITypeObject('BOOLEAN', 'TINYINT', 'SMALLINT', 'BIGINT', 'INTEGER',
'REAL', 'DOUBLE', 'FLOAT', 'DECIMAL', 'NUMERIC')
DATETIME = DBAPITypeObject('TIMESTAMP')
ROWID = DBAPITypeObject('')
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def connect(s3_staging_dir=None, access_key=None, secret_key=None,
region_name=None, profile_name=None, credential_file=None,
jvm_options=None, converter=None, formatter=None,
**kwargs):
from pyathenajdbc.connection import Connection
return Connection(s3_staging_dir, access_key, secret_key,
region_name, profile_name, credential_file,
jvm_options, converter, formatter,
**kwargs)
|
mit
|
Python
|
462312c3acf2d6daf7d8cd27f251b8cb92647f5e
|
Fix a typo in the variable name
|
jean/pybossa,geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,stefanhahmann/pybossa,proyectos-analizo-info/pybossa-analizo-info,stefanhahmann/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,geotagx/geotagx-pybossa-archive,geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,harihpr/tweetclickers,geotagx/pybossa,proyectos-analizo-info/pybossa-analizo-info,CulturePlex/pybossa,CulturePlex/pybossa,geotagx/pybossa,geotagx/geotagx-pybossa-archive,CulturePlex/pybossa,harihpr/tweetclickers,geotagx/geotagx-pybossa-archive,PyBossa/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,PyBossa/pybossa,inteligencia-coletiva-lsd/pybossa
|
pybossa/auth/category.py
|
pybossa/auth/category.py
|
from flaskext.login import current_user
def create(category=None):
    """Only authenticated admins may create categories."""
    if not current_user.is_authenticated():
        return False
    return current_user.admin is True
def read(category=None):
    # Categories are world-readable.
    return True
def update(category):
    # Same policy as create(): authenticated admins only.
    return create(category)
def delete(category):
    # Same policy as create(): authenticated admins only.
    return create(category)
|
from flaskext.login import current_user
def create(app=None):
    # Only authenticated admins may create.
    if current_user.is_authenticated():
        if current_user.admin is True:
            return True
        else:
            return False
    else:
        return False
def read(app=None):
    # World-readable.
    return True
def update(app):
    # Same policy as create(): admins only.
    return create(app)
def delete(app):
    # Same policy as create(): admins only.
    return create(app)
|
agpl-3.0
|
Python
|
ce384e6eb3f762f611bfd70874766248169a7d15
|
indent fix
|
thesabbir/nginpro
|
nginpro/utils.py
|
nginpro/utils.py
|
"""
Utilities
"""
from string import Template
"""
Generate configuration blocks
"""
def make_block(name, content, pattern=""):
    # Render one nginx block: "<name> <pattern> { <content> }".
    # `pattern` is optional (e.g. a location match or server_name).
    # The template's literal whitespace is part of the output; callers are
    # expected to re-indent with make_indent().
    return Template(
        """
        ${name} ${pattern} {
        ${content}
        }
        """).safe_substitute(name=name, content=content, pattern=pattern)
"""
Takes a python dictionary and converts it to nginx compatible configuration block
"""
def to_nginx_template(config):
    """Flatten a (possibly one-level-nested) dict into nginx directive lines.

    {'a': '1'}          -> "a 1;\n"
    {'a': {'b': '2'}}   -> "a b 2;\n"
    """
    lines = []
    for directive, value in config.iteritems():
        if isinstance(value, dict):
            for inner_directive, inner_value in value.iteritems():
                lines.append("{} {} {};\n".format(directive, inner_directive, inner_value))
        else:
            lines.append("{} {};\n".format(directive, value))
    return "".join(lines)
"""
nginx configuration indentation
"""
def make_indent(contents):
    # Re-indent an nginx config: strip each line, then indent by the current
    # brace-nesting depth. Closing braces dedent before being emitted;
    # opening braces indent the following lines.
    indents = '    '
    # NOTE: relies on Python 2 map() returning a list -- lines is mutated by
    # index below.
    lines = map(str.strip, contents.splitlines())
    current_indent = 0
    for index, line in enumerate(lines):
        if line.endswith('}'):
            current_indent -= 1
        lines[index] = current_indent * indents + line
        if line.endswith('{'):
            current_indent += 1
    return '\n'.join(lines)
"""
Get nginx config args
"""
def get_nginx_config_args():
    """Return the ./configure options of the installed nginx as a dict.

    Flags without a value map to True; "--opt=value" flags map to the value.
    Returns an empty dict (after printing a warning) when nginx is missing.
    """
    # TODO: make this more pythonic
    import subprocess
    import re
    options = {}
    try:
        process = subprocess.Popen(['nginx', '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # communicate() returns (stdout, stderr); nginx -V writes its info to
        # stderr, so the second element ("out" here) is the one parsed.
        err, out = process.communicate()
        matches = re.findall('--([^\s]+)', out)
        for option in matches:
            if '=' in option:
                v = option.split('=')
                options[v[0]] = v[1]
            else:
                options[option] = True
    except OSError:
        print 'Nginx is not installed or not in $PATH'
    return options
|
"""
Utilities
"""
from string import Template
"""
Generate configuration blocks
"""
def make_block(name, content, pattern=""):
return Template("""
${name} ${pattern} {
${content}
}
""").safe_substitute(name=name, content=content, pattern=pattern)
"""
Takes a python dictionary and converts it to nginx compatible configuration block
"""
def to_nginx_template(config):
template = ""
for key, value in config.iteritems():
if isinstance(value, dict):
for key2, value2 in value.iteritems():
template += "{} {} {};\n".format(key, key2, value2)
else:
template += "{} {};\n".format(key, value)
return template
"""
nginx configuration indentation
"""
def make_indent(contents):
indents = ' '
lines = map(str.strip, contents.splitlines())
current_indent = 0
for index, line in enumerate(lines):
if line.endswith('}'):
current_indent -= 1
lines[index] = current_indent * indents + line
if line.endswith('{'):
current_indent += 1
return '\n'.join(lines)
"""
Get nginx config args
"""
def get_nginx_config_args():
# TODO: make this more pythonic
import subprocess
import re
options = {}
try:
process = subprocess.Popen(['nginx', '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err, out = process.communicate()
matches = re.findall('--([^\s]+)', out)
for option in matches:
if '=' in option:
v = option.split('=')
options[v[0]] = v[1]
else:
options[option] = True
except OSError:
print 'Nginx is not installed or not in $PATH'
return options
|
mit
|
Python
|
96ad539fdf0302dd0e2996f746ce1fd055c8e590
|
fix log size to 5mb
|
2gis/vmmaster,sh0ked/vmmaster,2gis/vmmaster,sh0ked/vmmaster,2gis/vmmaster
|
vmmaster/core/logger.py
|
vmmaster/core/logger.py
|
import logging
import logging.handlers
import graypy
import os
import sys
from .config import config
class StreamToLogger(object):
    """
    Fake file-like stream object that redirects writes to a logger instance.
    """
    def __init__(self, logger, log_level=logging.INFO):
        self.logger = logger
        self.log_level = log_level
        self.linebuf = ''  # kept for file-object compatibility; unused here
    def write(self, buf):
        # Emit each non-empty line of the written buffer as one log record
        # at the configured level.
        for line in buf.rstrip().splitlines():
            self.logger.log(self.log_level, line.rstrip())
def setup_logging(logdir=None, scrnlog=True, txtlog=True, loglevel=logging.DEBUG):
    """Configure root logging: optional Graylog, rotating file, and console.

    Also redirects sys.stdout/sys.stderr through the logging system.
    Returns the configured root logger.
    """
    logdir = os.path.abspath(logdir)
    if not os.path.exists(logdir):
        os.mkdir(logdir)
    log = logging.getLogger('')
    log.setLevel(loglevel)
    log_formatter = logging.Formatter("%(asctime)s - %(levelname)-7s :: %(name)-6s :: %(message)s")
    # Ship records to Graylog only when the config defines a GRAYLOG endpoint.
    if hasattr(config, 'GRAYLOG'):
        graylog_handler = graypy.GELFHandler(config.GRAYLOG)
        graylog_handler.setFormatter(log_formatter)
        log.addHandler(graylog_handler)
    if txtlog:
        # 5 MB per file (5242880 bytes), keeping 5 rotated backups.
        txt_handler = logging.handlers.RotatingFileHandler(
            os.path.join(logdir, "vmmaster.log"), maxBytes=5242880, backupCount=5
        )
        txt_handler.setFormatter(log_formatter)
        log.addHandler(txt_handler)
        log.info("Logger initialised.")
    if scrnlog:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(log_formatter)
        log.addHandler(console_handler)
    # Route bare print/traceback output through the logging system as well.
    stdout_logger = logging.getLogger('STDOUT')
    slout = StreamToLogger(stdout_logger, logging.INFO)
    sys.stdout = slout
    stderr_logger = logging.getLogger('STDERR')
    slerr = StreamToLogger(stderr_logger, logging.ERROR)
    sys.stderr = slerr
    return log
log = logging.getLogger('LOG')
|
import logging
import logging.handlers
import graypy
import os
import sys
from .config import config
class StreamToLogger(object):
"""
Fake file-like stream object that redirects writes to a logger instance.
"""
def __init__(self, logger, log_level=logging.INFO):
self.logger = logger
self.log_level = log_level
self.linebuf = ''
def write(self, buf):
for line in buf.rstrip().splitlines():
self.logger.log(self.log_level, line.rstrip())
def setup_logging(logdir=None, scrnlog=True, txtlog=True, loglevel=logging.DEBUG):
logdir = os.path.abspath(logdir)
if not os.path.exists(logdir):
os.mkdir(logdir)
log = logging.getLogger('')
log.setLevel(loglevel)
log_formatter = logging.Formatter("%(asctime)s - %(levelname)-7s :: %(name)-6s :: %(message)s")
if hasattr(config, 'GRAYLOG'):
graylog_handler = graypy.GELFHandler(config.GRAYLOG)
graylog_handler.setFormatter(log_formatter)
log.addHandler(graylog_handler)
if txtlog:
txt_handler = logging.handlers.RotatingFileHandler(
os.path.join(logdir, "vmmaster.log"), maxBytes=5120, backupCount=5
)
txt_handler.setFormatter(log_formatter)
log.addHandler(txt_handler)
log.info("Logger initialised.")
if scrnlog:
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log.addHandler(console_handler)
stdout_logger = logging.getLogger('STDOUT')
slout = StreamToLogger(stdout_logger, logging.INFO)
sys.stdout = slout
stderr_logger = logging.getLogger('STDERR')
slerr = StreamToLogger(stderr_logger, logging.ERROR)
sys.stderr = slerr
return log
log = logging.getLogger('LOG')
|
mit
|
Python
|
23ab8664d1ed16ea0339f9b94938e1c95b574132
|
Remove silly try/except blocks in button.py
|
AndyDeany/pygame-template
|
pygametemplate/button.py
|
pygametemplate/button.py
|
import time
from pygametemplate import log
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
self.pressed = 0
self.released = 0
def time_held(self):
if self.held:
return time.time() - self.press_time
else:
return 0.0
|
import time
from pygametemplate import log
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
try:
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
except Exception:
log("Failed to initialise button variable")
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
try:
self.pressed = 0
self.released = 0
except Exception:
log("Failed to reset button")
def time_held(self):
try:
if self.held:
return time.time() - self.press_time
else:
return 0.0
except Exception:
log("Failed to get button held time")
|
mit
|
Python
|
1ee2e880872c4744f4159df7fc64bb64b3f35632
|
Add docstring to Button.time_held() method
|
AndyDeany/pygame-template
|
pygametemplate/button.py
|
pygametemplate/button.py
|
import time
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
self.pressed = 0
self.released = 0
def time_held(self) -> float:
"""Return the amount of time this button has been held for in seconds."""
if self.held:
return time.time() - self.press_time
else:
return 0.0
|
import time
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
self.pressed = 0
self.released = 0
def time_held(self):
if self.held:
return time.time() - self.press_time
else:
return 0.0
|
mit
|
Python
|
d7249e710be3da451b4ca752780e5a86501f6198
|
update version number to 1.8.3
|
frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME
|
python/flame/__init__.py
|
python/flame/__init__.py
|
from collections import OrderedDict
from ._internal import (Machine as MachineBase,
GLPSPrinter, _GLPSParse,
_pyapi_version, _capi_version,
FLAME_ERROR, FLAME_WARN,
FLAME_INFO, FLAME_DEBUG,
setLogLevel, getLoggerName)
def _list2odict(L):
'Recursively turn list of tuples into OrderedDict'
for i in range(len(L)):
K,V = L[i]
if isinstance(V, list):
L[i] = (K,list(map(OrderedDict, V)))
return OrderedDict(L)
class GLPSParser(object):
    """GLPS parser context
    """
    def parse(self, *args, **kws):
        """parse(file_or_buf, path=None, extra=None)
        parse(file_or_buf, path="/dir/", extra={'VAR':'value'})
        Parse the provided buffer or file-like object.
        'path' is used to expand relative paths found while parsing.
        If 'path' is None then either PWD or the .name of
        the file-like object is used.
        'extra' may be used to provide additional variable definitions when parsing.
        Returns an OrderedDict.
        """
        # Thin wrapper over the C-extension parser.
        return _GLPSParse(*args, **kws)
class Machine(MachineBase):
    def conf(self, *args, **kws):
        # Present the C extension's list-of-tuples config as nested
        # OrderedDicts for friendlier Python-side access.
        return _list2odict(super(Machine, self).conf(*args, **kws))
# by default pass all but DEBUG to python logger.
# May set to FLAME_WARN for performance
setLogLevel(FLAME_WARN)
__all__ = ['Machine',
'GLPSPrinter',
'GLPSParser',
]
__version__ = '1.8.3'
|
from collections import OrderedDict
from ._internal import (Machine as MachineBase,
GLPSPrinter, _GLPSParse,
_pyapi_version, _capi_version,
FLAME_ERROR, FLAME_WARN,
FLAME_INFO, FLAME_DEBUG,
setLogLevel, getLoggerName)
def _list2odict(L):
'Recursively turn list of tuples into OrderedDict'
for i in range(len(L)):
K,V = L[i]
if isinstance(V, list):
L[i] = (K,list(map(OrderedDict, V)))
return OrderedDict(L)
class GLPSParser(object):
"""GLPS parser context
"""
def parse(self, *args, **kws):
"""parse(file_or_buf, path=None, extra=None)
parse(file_or_buf, path="/dir/", extra={'VAR':'value'})
Parse the provided buffer or file-like object.
'path' is used to expand relative paths found while parsing.
If not 'path' is None then either PWD or the .name of
the file-like object is used.
'extra' may be used to provide additional variable definitions when parsing.
Returns an OrderedDict.
"""
return _GLPSParse(*args, **kws)
class Machine(MachineBase):
def conf(self, *args, **kws):
return _list2odict(super(Machine, self).conf(*args, **kws))
# by default pass all but DEBUG to python logger.
# May set to FLAME_WARN for performance
setLogLevel(FLAME_WARN)
__all__ = ['Machine',
'GLPSPrinter',
'GLPSParser',
]
__version__ = '1.8.1'
|
mit
|
Python
|
6708830ab2bde841bbc3da2befbbe5ab9f3d21aa
|
Put test stuff inside `if __name__ == '__main__'`
|
msabramo/ansi_str
|
ansi_str.py
|
ansi_str.py
|
import re
# Matches one ANSI CSI escape sequence, e.g. "\x1b[32m" or "\x1b[1;31m".
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')


def strip_ansi(value):
    """Return *value* with every ANSI escape sequence removed."""
    return _ansi_re.sub('', value)


def len_exclude_ansi(value):
    """Length of *value*, not counting ANSI escape sequences."""
    return len(_ansi_re.sub('', value))
class ansi_str(str):
    """A str subclass, specialized for strings containing ANSI escapes.
    When you call the ``len`` method, it discounts ANSI color escape codes.
    This is beneficial, because ANSI color escape codes won't mess up code
    that tries to do alignment, padding, printing in columns, etc.
    """
    # Lazily-computed copy of the string with escapes removed.
    _stripped = None
    def __len__(self, exclude_ansi=True):
        # Explicitly calling s.__len__(exclude_ansi=False) yields the raw
        # length including escape bytes; len(s) always excludes them.
        if exclude_ansi is False:
            return len(self[:])
        # Cache the stripped form on first use (self[:] yields a plain str).
        if self._stripped is None:
            self._stripped = strip_ansi(self[:])
        return len(self._stripped)
if __name__ == '__main__':
# s = ansi_str('abc')
# print s
# print len(s)
s = ansi_str(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m')
print s
print len(s)
print s.__len__()
print s.__len__(exclude_ansi=False)
print(len_exclude_ansi(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m'))
|
import re
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
def strip_ansi(value):
return _ansi_re.sub('', value)
def len_exclude_ansi(value):
return len(strip_ansi(value))
class ansi_str(str):
"""A str subclass, specialized for strings containing ANSI escapes.
When you call the ``len`` method, it discounts ANSI color escape codes.
This is beneficial, because ANSI color escape codes won't mess up code
that tries to do alignment, padding, printing in columns, etc.
"""
_stripped = None
def __len__(self, exclude_ansi=True):
if exclude_ansi is False:
return len(self[:])
if self._stripped is None:
self._stripped = strip_ansi(self[:])
return len(self._stripped)
# s = ansi_str('abc')
# print s
# print len(s)
s = ansi_str(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m')
print s
print len(s)
print s.__len__()
print s.__len__(exclude_ansi=False)
print(len_exclude_ansi(u'\x1b[32m\x1b[1mSUCCESS\x1b[0m'))
|
mit
|
Python
|
b9e12e6bb1d4d4cdb337cbf3d3cd7a41f57b4d24
|
Use a more standard RPM query format
|
pombredanne/jsonstats,RHInception/jsonstats,pombredanne/jsonstats,RHInception/jsonstats
|
JsonStats/FetchStats/Plugins/RPM.py
|
JsonStats/FetchStats/Plugins/RPM.py
|
import datetime
from JsonStats.FetchStats import Fetcher
class RPM(Fetcher):
    def __init__(self):
        """
        Return an rpm manifest (all rpms installed on the system).
        **Note**: This takes more than a few seconds!!
        """
        self.context = 'rpm'
        self._load_data()
    def _load_data(self):
        # Snapshot time used by the poor-man's cache in dump().
        self._refresh_time = datetime.datetime.utcnow()
        self._rpms = {}
        # Standard NVRA-style query: "name version-release.arch" per line.
        cmd = 'rpm -qa --queryformat "%{NAME} %{VERSION}-%{RELEASE}.%{ARCH}\n"'
        try:
            # The trailing split('\n') element is an empty string; skip it.
            for line in self._exec(cmd).split('\n')[:-1]:
                (rpm_name, rpm_version) = line.split()
                self._rpms[rpm_name] = rpm_version
            self._loaded(True)
        except Exception, e:
            self._loaded(False, str(e))
    def dump(self):
        # poor mans cache, refresh cache in an hour
        # NOTE(review): the timedelta is minutes=1, so this actually
        # refreshes after one minute, not an hour -- confirm intent.
        if (datetime.datetime.utcnow() -
                datetime.timedelta(minutes=1)) > self._refresh_time:
            self._load_data()
        return self._rpms
    def dump_json(self):
        return self.json.dumps(self.dump())
|
import datetime
from JsonStats.FetchStats import Fetcher
class RPM(Fetcher):
def __init__(self):
"""
Returns an rpm manifest (all rpms installed on the system.
**Note**: This takes more than a few seconds!!
"""
self.context = 'rpm'
self._load_data()
def _load_data(self):
self._refresh_time = datetime.datetime.utcnow()
self._rpms = {}
cmd = 'rpm -qa --queryformat "%{NAME} %{VERSION}\n"'
try:
for line in self._exec(cmd).split('\n')[:-1]:
(rpm_name, rpm_version) = line.split()
self._rpms[rpm_name] = rpm_version
self._loaded(True)
except Exception, e:
self._loaded(False, str(e))
def dump(self):
# poor mans cache, refresh cache in an hour
if (datetime.datetime.utcnow() -
datetime.timedelta(minutes=1)) > self._refresh_time:
self._load_data()
return self._rpms
def dump_json(self):
return self.json.dumps(self.dump())
|
mit
|
Python
|
9ec8aa9fbb9b8c6656e5fe8920787f2c03a93683
|
create Cell class and method add_neighbor that returns a list of neighbor positions
|
BradleyMoore/Game_of_Life
|
app/life.py
|
app/life.py
|
class Cell(object):
    """A single Game of Life cell, identified by its (x, y) grid position."""
    def __init__(self, pos):
        # Live-neighbor count, maintained by external game logic.
        self.neighbors = 0
        # Cached list of the 8 adjacent positions; filled by add_neighbors().
        self.neighbor_list = []
        self.pos = pos
        self.posx = pos[0]
        self.posy = pos[1]
    def add_neighbors(self):
        """Build and return the list of the 8 positions adjacent to this cell.

        BUG FIX: range(a, b) excludes b, so the original xrange(pos-1, pos+1)
        only covered pos-1 and pos, producing 3 "neighbors" instead of 8.
        The upper bound must be pos+2. (range works identically on Py2/Py3.)
        """
        self.neighbor_list = []
        for x in range(self.posx - 1, self.posx + 2):
            for y in range(self.posy - 1, self.posy + 2):
                self.neighbor_list.append((x, y))
        # The cell itself is not its own neighbor.
        self.neighbor_list.remove(self.pos)
        return self.neighbor_list
|
mit
|
Python
|
|
777eaf01586b330b976c2691bf73b9a2053ff978
|
Store real non-stemmed texts
|
matnel/hs-comments-visu,matnel/hs-comments-visu
|
app/main.py
|
app/main.py
|
from flask import *
import collect_hs
import collections
import nltk
import numpy
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import LatentDirichletAllocation
app = Flask(__name__)
## common constructs
stem = nltk.stem.snowball.SnowballStemmer('finnish')
@app.route("/")
def index():
return send_from_directory( 'static' , 'index.html' )
@app.route('/topicmodel', methods=['POST'] )
def analyze():
    """Fit LDA topic models (k = 2..20) over the comments of the requested
    HS article and return {topic_index: [comment texts]} as JSON."""
    path1 = request.form['url'].split('/')[-1]
    path = '1296808743968/' + path1
    comments = collect_hs.comment( path )
    _texts = []
    texts = []
    for c in comments:
        _texts.append( c['text'] )
        # NOTE(review): the stemmed strings built below are never used --
        # `texts` is overwritten by the vectorizer output, which is fit on
        # the raw _texts. Either dead work, or the stemmed list was meant to
        # be vectorized; confirm intent.
        text = nltk.word_tokenize( c['text'] )
        text = map( lambda x: stem.stem( x ) , text )
        texts.append( ' '.join( text ) )
    tf_vectorizer = CountVectorizer(
        max_df=0.95,
        min_df=2,
        max_features= 10000 )
    texts = tf_vectorizer.fit_transform( _texts )
    ## test between 2 and 20 topics
    topics = {}
    for k in range(2, 21):
        model = LatentDirichletAllocation(
            n_topics= k ,
            max_iter=5,
            learning_method='online',
            learning_offset=50.,
            random_state=0
        )
        fit = model.fit( texts )
        ll = model.score( texts )
        # Keyed by log-likelihood; the best-scoring fit is selected below.
        topics[ ll ] = fit
    topic = max( topics.keys() )
    ret = collections.defaultdict( list )
    ## ugly, rewrite some day
    new_topics = topics[ topic ].transform( texts )
    for i, topic in enumerate( new_topics ):
        # Assign each comment to its most probable topic.
        topic = numpy.argmax( topic )
        text = _texts[ i ].encode('utf8')
        print text
        ret[ topic ].append( text )
    return jsonify( ret )
if __name__ == "__main__":
app.run( debug = True)
|
from flask import *
import collect_hs
import collections
import nltk
import numpy
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.decomposition import LatentDirichletAllocation
app = Flask(__name__)
## common constructs
stem = nltk.stem.snowball.SnowballStemmer('finnish')
@app.route("/")
def index():
return send_from_directory( 'static' , 'index.html' )
@app.route('/topicmodel', methods=['POST'] )
def analyze():
path1 = request.form['url'].split('/')[-1]
path = '1296808743968/' + path1
comments = collect_hs.comment( path )
_texts = []
for c in comments:
text = nltk.word_tokenize( c['text'] )
text = map( lambda x: stem.stem( x ) , text )
_texts.append( ' '.join( text ) )
tf_vectorizer = CountVectorizer(
max_df=0.95,
min_df=2,
max_features= 10000 )
texts = tf_vectorizer.fit_transform( _texts )
## test between 2 and 20 topics
topics = {}
for k in range(2, 21):
model = LatentDirichletAllocation(
n_topics= k ,
max_iter=5,
learning_method='online',
learning_offset=50.,
random_state=0
)
fit = model.fit( texts )
ll = model.score( texts )
topics[ ll ] = fit
topic = max( topics.keys() )
ret = collections.defaultdict( list )
## ugly, rewrite some day
new_topics = topics[ topic ].transform( texts )
for i, topic in enumerate( new_topics ):
topic = numpy.argmax( topic )
text = _texts[ i ].encode('utf8')
print text
ret[ topic ].append( text )
return jsonify( ret )
if __name__ == "__main__":
app.run( debug = True)
|
mit
|
Python
|
1056c3f489b162d77b6c117fad2b45bfa06beee1
|
Revert "Added a post view"
|
yourbuddyconner/cs399-social,yourbuddyconner/cs399-social
|
app/urls.py
|
app/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'),
url(r'^admin/', include(admin.site.urls))
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from . import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'app.views.splash', name='splash'),
url(r'^feed', 'app.views.feed', name='feed'),
url(r'^about', 'app.views.about', name='about'),
url(r'^explore', 'app.views.explore', name='explore'),
url(r'^profile_picture', 'app.views.profile_picture', name='profile_picture'),
url(r'^dashboard', 'app.views.dashboard', name='dashboard'),
url(r'^login', 'app.views.login', name='login'),
url(r'^logout', 'app.views.logout', name='logout'),
url(r'^temp', 'app.views.temp', name='temp'), #delete eventually
url(r'^posts', 'app.views.posts', name='posts'),
url(r'^admin/', include(admin.site.urls))
)
|
unlicense
|
Python
|
1165c923145be18d40fda1fc4303cac3e1613078
|
Update cached_function wrapper to set qualname instead of name
|
albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com
|
app/util.py
|
app/util.py
|
# Various utility functions
import os
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func):
data = {}
def wrapper(*args):
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
|
# Various utility functions
import os
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func):
data = {}
def wrapper(*args):
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__name__ = func.__name__
return wrapper
|
mit
|
Python
|
d39eb13f555daa429838b76de2f4088a46f36237
|
tweak `do`
|
tek/amino
|
amino/do.py
|
amino/do.py
|
from types import GeneratorType
from typing import TypeVar, Callable, Any, Generator, cast, Type
import functools
from amino.tc.base import F
from amino.tc.monad import Monad
A = TypeVar('A')
B = TypeVar('B')
G = TypeVar('G', bound=F)
Do = Generator
def untyped_do(f: Callable[..., Generator[G, B, None]]) -> Callable[..., G]:
@functools.wraps(f)
def do_loop(*a: Any, **kw: Any) -> F[B]:
itr = f(*a, **kw)
if not isinstance(itr, GeneratorType):
raise Exception(f'function `{f.__qualname__}` decorated with `do` does not produce a generator')
init = itr.send(None)
m = Monad.fatal_for(init)
def send(val: B) -> F[B]:
try:
return itr.send(val).flat_map(send)
except StopIteration:
nonlocal m
return m.pure(val)
return init.flat_map(send)
return do_loop
def do(tpe: Type[A]) -> Callable[[Callable[..., Generator]], Callable[..., A]]:
def deco(f: Callable[..., Generator]) -> Callable[..., A]:
return cast(Callable[[Callable[..., Generator]], Callable[..., A]], untyped_do)(f)
return deco
tdo = do
__all__ = ('do', 'F', 'tdo', 'untyped_do', 'Do')
|
from types import GeneratorType
from typing import TypeVar, Callable, Any, Generator, cast, Optional, Type
import functools
from amino.tc.base import F
from amino.tc.monad import Monad
A = TypeVar('A')
B = TypeVar('B')
G = TypeVar('G', bound=F)
Do = Generator
def untyped_do(f: Callable[..., Generator[G, B, None]]) -> Callable[..., G]:
@functools.wraps(f)
def do_loop(*a: Any, **kw: Any) -> F[B]:
itr = f(*a, **kw)
if not isinstance(itr, GeneratorType):
raise Exception(f'function `{f.__qualname__}` decorated with `do` does not produce a generator')
c: Optional[F] = None
m: Optional[Monad[F]] = None
def send(val: B) -> F[B]:
nonlocal c, m
try:
c = itr.send(val)
if m is None:
m = Monad.fatal_for(c)
return c.flat_map(send)
except StopIteration:
return m.pure(val)
return send(cast(B, None))
return do_loop
def tdo(tpe: Type[A]) -> Callable[[Callable[..., Generator]], Callable[..., A]]:
def deco(f: Callable[..., Generator]) -> Callable[..., A]:
return cast(Callable[[Callable[..., Generator]], Callable[..., A]], untyped_do)(f)
return deco
do = tdo
__all__ = ('do', 'F', 'tdo', 'untyped_do', 'Do')
|
mit
|
Python
|
98a82f084c6693dbd7cd44774f52e1bbdd835d05
|
Fix urls.py
|
rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug
|
rdmo/projects/urls/v1.py
|
rdmo/projects/urls/v1.py
|
from django.urls import include, path
from rest_framework_extensions.routers import ExtendedDefaultRouter
from ..viewsets import (CatalogViewSet, MembershipViewSet,
ProjectMembershipViewSet, ProjectQuestionSetViewSet,
ProjectSnapshotViewSet, ProjectValueViewSet,
ProjectViewSet, QuestionSetViewSet, SnapshotViewSet,
ValueViewSet)
app_name = 'v1-projects'
router = ExtendedDefaultRouter()
project_route = router.register(r'projects', ProjectViewSet, basename='project')
project_route.register(r'memberships', ProjectMembershipViewSet, basename='project-membership',
parents_query_lookups=['project'])
project_route.register(r'snapshots', ProjectSnapshotViewSet, basename='project-snapshot',
parents_query_lookups=['project'])
project_route.register(r'values', ProjectValueViewSet, basename='project-value',
parents_query_lookups=['project'])
project_route.register(r'questionsets', ProjectQuestionSetViewSet, basename='project-questionset',
parents_query_lookups=['project'])
router.register(r'memberships', MembershipViewSet, basename='membership')
router.register(r'snapshots', SnapshotViewSet, basename='snapshot')
router.register(r'values', ValueViewSet, basename='value')
router.register(r'questionsets', QuestionSetViewSet, basename='questionset')
router.register(r'catalogs', CatalogViewSet, basename='catalog')
urlpatterns = [
path('', include(router.urls)),
]
|
from django.urls import include, path
from rest_framework_extensions.routers import ExtendedDefaultRouter
from ..viewsets import (ProjectQuestionSetViewSet, ProjectSnapshotViewSet, ProjectMembershipViewSet,
ProjectValueViewSet, ProjectViewSet, MembershipViewSet, SnapshotViewSet,
ValueViewSet)
app_name = 'v1-projects'
router = ExtendedDefaultRouter()
project_route = router.register(r'projects', ProjectViewSet, basename='project')
project_route.register(r'memberships', ProjectMembershipViewSet, basename='project-membership',
parents_query_lookups=['project'])
project_route.register(r'snapshots', ProjectSnapshotViewSet, basename='project-snapshot',
parents_query_lookups=['project'])
project_route.register(r'values', ProjectValueViewSet, basename='project-value',
parents_query_lookups=['project'])
project_route.register(r'questionsets', ProjectQuestionSetViewSet, basename='project-questionset',
parents_query_lookups=['project'])
router.register(r'memberships', MembershipViewSet, basename='membership')
router.register(r'snapshots', SnapshotViewSet, basename='snapshot')
router.register(r'values', ValueViewSet, basename='value')
urlpatterns = [
path('', include(router.urls)),
]
|
apache-2.0
|
Python
|
8b7ef1066abefae83876607fd1a9153662463185
|
add try for obnl version loading in init
|
IntegrCiTy/obnl
|
obnl/__init__.py
|
obnl/__init__.py
|
import pkg_resources # part of setuptools
try:
__version__ = pkg_resources.require("obnl")[0].version
except:
pass
|
import pkg_resources # part of setuptools
__version__ = pkg_resources.require("obnl")[0].version
|
apache-2.0
|
Python
|
32d49946279cab868b493aae432b431fa9d5e2bc
|
Add wrap at wrap_width unless it's 0.
|
randy3k/AutoWrap
|
autowrap.py
|
autowrap.py
|
import sublime, sublime_plugin, re, sys
if sys.version >= '3':
long = int
class AutoWrapListener(sublime_plugin.EventListener):
saved_sel = 0
def on_modified(self, view):
if view.is_scratch() or view.settings().get('is_widget'): return
if not view.settings().get('auto_wrap', False): return
sel = view.sel()
if not sel or len(sel)>1 or sel[0].begin()!=sel[0].end(): return
wrap_width = view.settings().get('wrap_width')
if not wrap_width or wrap_width == 0:
rulers = view.settings().get('rulers')
if rulers:
wrap_width = rulers[0]
else:
wrap_width = 80
pt = sel[0].end()
if pt<=self.saved_sel or pt-self.saved_sel>1 or view.rowcol(pt)[1]<=wrap_width \
or view.substr(pt-1)==" ":
activate = False
else: activate = True
self.saved_sel = sel[0].end()
if not activate: return
# to obtain the insert point
line = view.substr(view.line(pt))
m = re.match('.*\s(\S*\s*)$',line)
if not m: return
insertpt = view.line(pt).end()-len(m.group(1))
if pt<insertpt: return
if view.settings().get("wrap_style") != "classic" and view.rowcol(insertpt)[1]<=wrap_width:
return
# insert enter
view.run_command('auto_wrap_insert', {'insertpt': insertpt})
if view.settings().get('auto_indent'):
view.run_command('reindent', {'force_indent': False})
class AutoWrapInsertCommand(sublime_plugin.TextCommand):
def run(self, edit, insertpt):
self.view.insert(edit, long(insertpt), "\n")
class ToggleAutoWrap(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
view.settings().set("auto_wrap", not view.settings().get("auto_wrap", False))
onoff = "on" if view.settings().get("auto_wrap") else "off"
sublime.status_message("Auto (Hard) Wrap %s" % onoff)
|
import sublime, sublime_plugin, re, sys
if sys.version >= '3':
long = int
class AutoWrapListener(sublime_plugin.EventListener):
saved_sel = 0
def on_modified(self, view):
if view.is_scratch() or view.settings().get('is_widget'): return
if not view.settings().get('auto_wrap', False): return
sel = view.sel()
if not sel or len(sel)>1 or sel[0].begin()!=sel[0].end(): return
rulers = view.settings().get('rulers')
if not rulers: rulers = [80]
pt = sel[0].end()
if pt<=self.saved_sel or pt-self.saved_sel>1 or view.rowcol(pt)[1]<=rulers[0] \
or view.substr(pt-1)==" ":
activate = False
else: activate = True
self.saved_sel = sel[0].end()
if not activate: return
# to obtain the insert point
line = view.substr(view.line(pt))
m = re.match('.*\s(\S*\s*)$',line)
if not m: return
insertpt = view.line(pt).end()-len(m.group(1))
if pt<insertpt: return
if view.settings().get("wrap_style") != "classic" and view.rowcol(insertpt)[1]<=rulers[0]:
return
# insert enter
view.run_command('auto_wrap_insert', {'insertpt': insertpt})
if view.settings().get('auto_indent'):
view.run_command('reindent', {'force_indent': False})
class AutoWrapInsertCommand(sublime_plugin.TextCommand):
def run(self, edit, insertpt):
self.view.insert(edit, long(insertpt), "\n")
class ToggleAutoWrap(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
view.settings().set("auto_wrap", not view.settings().get("auto_wrap", False))
onoff = "on" if view.settings().get("auto_wrap") else "off"
sublime.status_message("Auto (Hard) Wrap %s" % onoff)
|
mit
|
Python
|
0f85b39fcca84b60815c54201f5f52eb9a2840c7
|
Split the normalize function into two.
|
eliteraspberries/avena
|
avena/np.py
|
avena/np.py
|
#!/usr/bin/env python2
from numpy import around, empty as _empty, mean, std
from numpy import int8, int16, int32, int64
from numpy import uint8, uint16, uint32, uint64
from numpy import float32, float64
from sys import float_info as _float_info
_eps = 10.0 * _float_info.epsilon
# Map of NumPy array type strings to types
_np_dtypes = {
'int8': int8,
'int16': int16,
'int32': int32,
'int64': int64,
'uint8': uint8,
'uint16': uint16,
'uint32': uint32,
'uint64': uint64,
'float32': float32,
'float64': float64,
}
def from_uint8(array, dtype):
new_array = array.astype(dtype)
return new_array
def to_uint8(array):
uint8_array = _empty(array.shape, dtype=uint8)
around(array * 255, out=uint8_array)
return uint8_array
def clip(array, (min, max)):
'''Clip the values of an array to the given interval.'''
x = array < min + _eps
y = array > max - _eps
array[x] = min
array[y] = max
return
def normalize(array):
'''Normalize an array to the interval [0,1].'''
mu = mean(array)
rho2 = std(array)
min = mu - 3.0 * rho2
max = mu + 3.0 * rho2
array -= min
array /= max - min
return
if __name__ == '__main__':
pass
|
#!/usr/bin/env python2
from numpy import around, empty as _empty, mean, std
from numpy import int8, int16, int32, int64
from numpy import uint8, uint16, uint32, uint64
from numpy import float32, float64
from sys import float_info as _float_info
_eps = 10.0 * _float_info.epsilon
# Map of NumPy array type strings to types
_np_dtypes = {
'int8': int8,
'int16': int16,
'int32': int32,
'int64': int64,
'uint8': uint8,
'uint16': uint16,
'uint32': uint32,
'uint64': uint64,
'float32': float32,
'float64': float64,
}
def from_uint8(array, dtype):
new_array = array.astype(dtype)
return new_array
def to_uint8(array):
uint8_array = _empty(array.shape, dtype=uint8)
around(array * 255, out=uint8_array)
return uint8_array
def normalize(array):
'''Normalize an array to the interval [0,1].'''
mu = mean(array)
rho2 = std(array)
min = mu - 3.0 * rho2
max = mu + 3.0 * rho2
array -= min
array /= max - min
negs = array < 0.0 + _eps
array[negs] = 0.0
bigs = array > 1.0 - _eps
array[bigs] = 1.0
return
if __name__ == '__main__':
pass
|
isc
|
Python
|
bdcafd0c5af46e88ae06e6bbb853d415a30f8d26
|
test algo affine
|
neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox,neuropoly/spinalcordtoolbox
|
testing/test_sct_register_multimodal.py
|
testing/test_sct_register_multimodal.py
|
#!/usr/bin/env python
#########################################################################################
#
# Test function for sct_register_multimodal script
#
# replace the shell test script in sct 1.0
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Augustin Roux
# modified: 2014/09/28
#
# About the license: see the file LICENSE.TXT
#########################################################################################
#import sct_utils as sct
import commands
def test(path_data):
folder_data = 'mt/'
file_data = ['mt0.nii.gz', 'mt1.nii.gz']
output = ''
status = 0
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=syn,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
# check other method
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=slicereg,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
# check other method
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=affine,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
return status, output
if __name__ == "__main__":
# call main function
test()
|
#!/usr/bin/env python
#########################################################################################
#
# Test function for sct_register_multimodal script
#
# replace the shell test script in sct 1.0
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Augustin Roux
# modified: 2014/09/28
#
# About the license: see the file LICENSE.TXT
#########################################################################################
#import sct_utils as sct
import commands
def test(path_data):
folder_data = 'mt/'
file_data = ['mt0.nii.gz', 'mt1.nii.gz']
output = ''
status = 0
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=syn,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
# check other method
cmd = 'sct_register_multimodal -i ' + path_data + folder_data + file_data[0] \
+ ' -d ' + path_data + folder_data + file_data[1] \
+ ' -o data_reg.nii.gz' \
+ ' -p step=1,algo=slicereg,iter=1,smooth=0,shrink=4,metric=MeanSquares' \
+ ' -x linear' \
+ ' -r 0' \
+ ' -v 1'
output += cmd+'\n' # copy command
s, o = commands.getstatusoutput(cmd)
status += s
output += o
return status, output
if __name__ == "__main__":
# call main function
test()
|
mit
|
Python
|
1e704b4ac648d06a05d8c97e3ca38b64ea931c0a
|
Fix version number
|
lordappsec/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe
|
ooni/__init__.py
|
ooni/__init__.py
|
# -*- encoding: utf-8 -*-
__author__ = "Arturo Filastò"
__version__ = "1.0.0-rc5"
__all__ = ['config', 'inputunit', 'kit',
'lib', 'nettest', 'oonicli', 'reporter',
'templates', 'utils']
|
# -*- encoding: utf-8 -*-
__author__ = "Arturo Filastò"
__version__ = "1.0.0-rc3"
__all__ = ['config', 'inputunit', 'kit',
'lib', 'nettest', 'oonicli', 'reporter',
'templates', 'utils']
|
bsd-2-clause
|
Python
|
0c244f0b295785378c85dfdf7a70c238d0a4f20b
|
Add a warning to prevent people from running nipy from the source directory.
|
alexis-roche/nipy,alexis-roche/nireg,alexis-roche/nipy,nipy/nipy-labs,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/register,arokem/nipy,arokem/nipy,nipy/nireg,alexis-roche/niseg,bthirion/nipy,alexis-roche/register,arokem/nipy,alexis-roche/register,arokem/nipy,nipy/nireg,nipy/nipy-labs,bthirion/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,bthirion/nipy
|
neuroimaging/__init__.py
|
neuroimaging/__init__.py
|
# -*- coding: utf-8 -*-
"""
Neuroimaging tools for Python (NIPY).
The aim of NIPY is to produce a platform-independent Python environment for
the analysis of brain imaging data using an open development model.
While
the project is still in its initial stages, packages for file I/O, script
support as well as single subject fMRI and random effects group comparisons
model are currently available.
Specifically, we aim to:
1. Provide an open source, mixed language scientific programming
environment suitable for rapid development.
2. Create sofware components in this environment to make it easy
to develop tools for MRI, EEG, PET and other modalities.
3. Create and maintain a wide base of developers to contribute to
this platform.
4. To maintain and develop this framework as a single, easily
installable bundle.
Package Organization
====================
The neuroimaging package contains the following subpackages and modules:
.. packagetree::
:style: UML
"""
__docformat__ = 'restructuredtext en'
from version import version as __version__
# FIXME
#__revision__ = int("$Rev$".split()[-2])
__status__ = 'alpha'
__date__ = "$LastChangedDate$"
__url__ = 'http://neuroimaging.scipy.org'
packages = (
'neuroimaging',
'neuroimaging.algorithms',
'neuroimaging.algorithms.tests',
'neuroimaging.algorithms.statistics',
'neuroimaging.algorithms.statistics.tests',
'neuroimaging.core',
'neuroimaging.core.image',
'neuroimaging.core.image.tests',
'neuroimaging.core.reference',
'neuroimaging.core.reference.tests',
'neuroimaging.io',
'neuroimaging.io.tests',
'neuroimaging.modalities',
'neuroimaging.modalities.fmri',
'neuroimaging.modalities.fmri.tests',
'neuroimaging.modalities.fmri.fmristat',
'neuroimaging.modalities.fmri.fmristat.tests',
'neuroimaging.utils',
'neuroimaging.utils.tests',
'neuroimaging.utils.tests.data',
'neuroimaging.testing')
def import_from(modulename, objectname):
"""Import and return objectname from modulename."""
module = __import__(modulename, {}, {}, (objectname,))
try:
return getattr(module, objectname)
except AttributeError:
return None
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
def _test_local_install():
""" Warn the user that running with neuroimaging being
imported locally is a bad idea.
"""
import os
if os.getcwd() == os.sep.join(
os.path.abspath(__file__).split(os.sep)[:-2]):
import warnings
warnings.warn('Running the tests from the install directory may '
'trigger some failures')
_test_local_install()
|
# -*- coding: utf-8 -*-
"""
Neuroimaging tools for Python (NIPY).
The aim of NIPY is to produce a platform-independent Python environment for
the analysis of brain imaging data using an open development model.
While
the project is still in its initial stages, packages for file I/O, script
support as well as single subject fMRI and random effects group comparisons
model are currently available.
Specifically, we aim to:
1. Provide an open source, mixed language scientific programming
environment suitable for rapid development.
2. Create sofware components in this environment to make it easy
to develop tools for MRI, EEG, PET and other modalities.
3. Create and maintain a wide base of developers to contribute to
this platform.
4. To maintain and develop this framework as a single, easily
installable bundle.
Package Organization
====================
The neuroimaging package contains the following subpackages and modules:
.. packagetree::
:style: UML
"""
__docformat__ = 'restructuredtext en'
from version import version as __version__
# FIXME
#__revision__ = int("$Rev$".split()[-2])
__status__ = 'alpha'
__date__ = "$LastChangedDate$"
__url__ = 'http://neuroimaging.scipy.org'
packages = (
'neuroimaging',
'neuroimaging.algorithms',
'neuroimaging.algorithms.tests',
'neuroimaging.algorithms.statistics',
'neuroimaging.algorithms.statistics.tests',
'neuroimaging.core',
'neuroimaging.core.image',
'neuroimaging.core.image.tests',
'neuroimaging.core.reference',
'neuroimaging.core.reference.tests',
'neuroimaging.io',
'neuroimaging.io.tests',
'neuroimaging.modalities',
'neuroimaging.modalities.fmri',
'neuroimaging.modalities.fmri.tests',
'neuroimaging.modalities.fmri.fmristat',
'neuroimaging.modalities.fmri.fmristat.tests',
'neuroimaging.utils',
'neuroimaging.utils.tests',
'neuroimaging.utils.tests.data',
'neuroimaging.testing')
def import_from(modulename, objectname):
"""Import and return objectname from modulename."""
module = __import__(modulename, {}, {}, (objectname,))
try:
return getattr(module, objectname)
except AttributeError:
return None
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
bsd-3-clause
|
Python
|
0b6ced2e048d4538db68abe356b8a4719a830fa0
|
Check the needed env vars are provided to the backfill script
|
AppliedTrust/traildash,AppliedTrust/traildash,AppliedTrust/traildash,AppliedTrust/traildash
|
backfill.py
|
backfill.py
|
#!/usr/bin/env python
import json
from os import environ
import boto3
if not all([environ.get('AWS_S3_BUCKET'), environ.get('AWS_SQS_URL')]):
print('You have to specify the AWS_S3_BUCKET and AWS_SQS_URL environment variables.')
print('Check the "Backfilling data" section of the README file for more info.')
exit(1)
bucket = boto3.resource('s3').Bucket(environ.get('AWS_S3_BUCKET'))
queue = boto3.resource('sqs').Queue(environ.get('AWS_SQS_URL'))
items_queued = 0
for item in bucket.objects.all():
if not item.key.endswith('.json.gz'):
continue
queue.send_message(
MessageBody=json.dumps({
'Message': json.dumps({
's3Bucket': environ.get('AWS_S3_BUCKET'),
's3ObjectKey': [item.key]
})
})
)
items_queued += 1
print('Done! {} items were backfilled'.format(items_queued))
|
#!/usr/bin/env python
import json
from os import environ
import boto3
bucket = boto3.resource('s3').Bucket(environ.get('AWS_S3_BUCKET'))
queue = boto3.resource('sqs').Queue(environ.get('AWS_SQS_URL'))
items_queued = 0
for item in bucket.objects.all():
if not item.key.endswith('.json.gz'):
continue
queue.send_message(
MessageBody=json.dumps({
'Message': json.dumps({
's3Bucket': environ.get('AWS_S3_BUCKET'),
's3ObjectKey': [item.key]
})
})
)
items_queued += 1
print('Done! {} items were backfilled'.format(items_queued))
|
bsd-2-clause
|
Python
|
d16cdad0fd12dcab26d670e83a746fede085d085
|
fix the test .tac to use new createService arguments
|
jeroenh/OpenNSA,jab1982/opennsa,NORDUnet/opennsa,jeroenh/OpenNSA,NORDUnet/opennsa,jeroenh/OpenNSA,NORDUnet/opennsa,jab1982/opennsa
|
opennsa-test.tac
|
opennsa-test.tac
|
#!/usr/bin/env python # syntax highlightning
import os, sys
from twisted.python import log
from twisted.python.log import ILogObserver
from twisted.application import internet, service
from opennsa import setup, registry, logging
from opennsa.backends import dud
from opennsa.topology import gole
DEBUG = False
PROFILE = False
TOPOLOGY = 'test-topology.owl'
MAPPING = 'test-mapping.nrm'
HOST = 'localhost'
SERVICES = [ ('Aruba', 9080), ('Bonaire', 9081), ('Curacao',9082) ]
WSDL_DIR = os.path.join(os.getcwd(), 'wsdl')
## Log messages before "real" logging infrastructure comes up
#earlyObserver = logging.EarlyObserver()
#log.startLoggingWithObserver(earlyObserver.emit, setStdout=0)
#log.defaultObserver = earlyObserver # This will make the log system plug it out when the real logging starts
logObserver = logging.DebugLogObserver(sys.stdout, DEBUG, PROFILE)
application = service.Application("OpenNSA")
application.setComponent(ILogObserver, logObserver.emit)
topo, _ = gole.parseTopology( [ open(TOPOLOGY) ], open(MAPPING))
for network, port in SERVICES:
backend = dud.DUDNSIBackend(network)
factory = setup.createService(network, backend, topo, HOST, port, WSDL_DIR)
internet.TCPServer(port, factory, interface='localhost').setServiceParent(application)
|
#!/usr/bin/env python # syntax highlightning
import os, sys
from twisted.python import log
from twisted.python.log import ILogObserver
from twisted.application import internet, service
from opennsa import setup, registry, logging
from opennsa.backends import dud
from opennsa.topology import gole
DEBUG = False
PROFILE = False
TOPOLOGY = 'test-topology.owl'
MAPPING = 'test-mapping.nrm'
HOST = 'localhost'
SERVICES = [ ('Aruba', 9080), ('Bonaire', 9081), ('Curacao',9082) ]
WSDL_DIR = os.path.join(os.getcwd(), 'wsdl')
## Log messages before "real" logging infrastructure comes up
#earlyObserver = logging.EarlyObserver()
#log.startLoggingWithObserver(earlyObserver.emit, setStdout=0)
#log.defaultObserver = earlyObserver # This will make the log system plug it out when the real logging starts
logObserver = logging.DebugLogObserver(sys.stdout, DEBUG, PROFILE)
application = service.Application("OpenNSA")
application.setComponent(ILogObserver, logObserver.emit)
topo, _ = gole.parseTopology( [ open(TOPOLOGY) ], open(MAPPING))
for network, port in SERVICES:
backend = dud.DUDNSIBackend(network)
es = registry.ServiceRegistry()
factory = setup.createService(network, topo, backend, es, HOST, port, WSDL_DIR)
internet.TCPServer(port, factory, interface='localhost').setServiceParent(application)
|
bsd-3-clause
|
Python
|
f662fafd2f69d64306ab89a1360a3cadda072b59
|
clean up pylint ignores to be more specific
|
lucidfrontier45/ScalaFunctional,EntilZha/PyFunctional,EntilZha/ScalaFunctional,ChuyuHsu/ScalaFunctional,EntilZha/ScalaFunctional,lucidfrontier45/ScalaFunctional,ChuyuHsu/ScalaFunctional,EntilZha/PyFunctional
|
functional/util.py
|
functional/util.py
|
# pylint: disable=no-name-in-module,unused-import
import collections
import six
import builtins
if six.PY2:
from itertools import ifilterfalse as filterfalse
def dict_item_iter(dictionary):
return dictionary.viewitems()
else:
from itertools import filterfalse
def dict_item_iter(dictionary):
return dictionary.items()
def is_primitive(val):
"""
Checks if the passed value is a primitive type.
>>> is_primitive(1)
True
>>> is_primitive("abc")
True
>>> is_primitive(True)
True
>>> is_primitive({})
False
>>> is_primitive([])
False
>>> is_primitive(set([]))
:param val: value to check
:return: True if value is a primitive, else False
"""
return isinstance(val, str) \
or isinstance(val, bool) \
or isinstance(val, six.string_types + (six.text_type,)) \
or isinstance(val, six.integer_types) \
or isinstance(val, float) \
or isinstance(val, complex) \
or isinstance(val, bytes)
def is_iterable(val):
if isinstance(val, list):
return False
return isinstance(val, collections.Iterable)
class LazyFile(object):
# pylint: disable=too-few-public-methods,too-many-instance-attributes
def __init__(self, path, delimiter=None, mode='r', buffering=-1, encoding=None,
errors=None, newline=None):
# pylint: disable=too-many-arguments
self.path = path
self.delimiter = delimiter
self.mode = mode
self.buffering = buffering
self.encoding = encoding
self.errors = errors
self.newline = newline
self.file = None
def __iter__(self):
if self.file is not None:
self.file.close()
self.file = builtins.open(self.path, mode=self.mode, buffering=self.buffering,
encoding=self.encoding, errors=self.errors, newline=self.newline)
return self
def next(self):
line = self.file.readline()
if line:
return line
else:
self.file.close()
raise StopIteration
def __next__(self):
return self.next()
|
# pylint: disable=no-name-in-module,unused-import,too-many-instance-attributes,too-many-arguments, too-few-public-methods
import collections
import six
import builtins
if six.PY2:
from itertools import ifilterfalse as filterfalse
def dict_item_iter(dictionary):
return dictionary.viewitems()
else:
from itertools import filterfalse
def dict_item_iter(dictionary):
return dictionary.items()
def is_primitive(val):
"""
Checks if the passed value is a primitive type.
>>> is_primitive(1)
True
>>> is_primitive("abc")
True
>>> is_primitive(True)
True
>>> is_primitive({})
False
>>> is_primitive([])
False
>>> is_primitive(set([]))
:param val: value to check
:return: True if value is a primitive, else False
"""
return isinstance(val, str) \
or isinstance(val, bool) \
or isinstance(val, six.string_types + (six.text_type,)) \
or isinstance(val, six.integer_types) \
or isinstance(val, float) \
or isinstance(val, complex) \
or isinstance(val, bytes)
def is_iterable(val):
if isinstance(val, list):
return False
return isinstance(val, collections.Iterable)
class LazyFile(object):
def __init__(self, path, delimiter=None, mode='r', buffering=-1, encoding=None,
errors=None, newline=None):
self.path = path
self.delimiter = delimiter
self.mode = mode
self.buffering = buffering
self.encoding = encoding
self.errors = errors
self.newline = newline
self.file = None
def __iter__(self):
if self.file is not None:
self.file.close()
self.file = builtins.open(self.path, mode=self.mode, buffering=self.buffering,
encoding=self.encoding, errors=self.errors, newline=self.newline)
return self
def next(self):
line = self.file.readline()
if line:
return line
else:
self.file.close()
raise StopIteration
def __next__(self):
return self.next()
|
mit
|
Python
|
4e74723aac53956fb0316ae0d438da623de133d5
|
Add and update tests for video renderer
|
felliott/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,CenterForOpenScience/modular-file-renderer,CenterForOpenScience/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,felliott/modular-file-renderer
|
tests/extensions/video/test_renderer.py
|
tests/extensions/video/test_renderer.py
|
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234',
'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url(url):
    """Export endpoint URL for the rendered file.

    Depends on the ``url`` fixture via parameter injection instead of
    calling the fixture function directly -- calling a fixture as a
    plain function is an error in modern pytest.
    """
    return 'http://mfr.osf.io/export?url=' + url
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
    """Unit tests for VideoRenderer's rendered markup and flags."""

    def test_render_video(self, renderer, metadata):
        # Use the injected ``metadata`` fixture; calling the fixture
        # function directly (``metadata()``) is an error in modern pytest.
        body = renderer.render()
        assert '<video controls' in body
        assert 'src="{}"'.format(metadata.download_url) in body
        # Whitespace-insensitive check for the zero-margin style block.
        assert '<style>body{margin:0;padding:0;}</style>' in ''.join(body.split())

    def test_render_video_file_required(self, renderer):
        # The renderer streams from a URL, so no local file is needed.
        assert renderer.file_required is False

    def test_render_video_cache_result(self, renderer):
        assert renderer.cache_result is False
|
import pytest
from mfr.core.provider import ProviderMetadata
from mfr.extensions.video import VideoRenderer
@pytest.fixture
def metadata():
return ProviderMetadata('test', '.mp4', 'text/plain', '1234', 'http://wb.osf.io/file/test.mp4?token=1234')
@pytest.fixture
def file_path():
return '/tmp/test.mp4'
@pytest.fixture
def url():
return 'http://osf.io/file/test.mp4'
@pytest.fixture
def assets_url():
return 'http://mfr.osf.io/assets'
@pytest.fixture
def export_url():
return 'http://mfr.osf.io/export?url=' + url()
@pytest.fixture
def renderer(metadata, file_path, url, assets_url, export_url):
return VideoRenderer(metadata, file_path, url, assets_url, export_url)
class TestVideoRenderer:
def test_render_video(self, renderer, url):
body = renderer.render()
assert '<video controls' in body
assert 'src="{}"'.format(metadata().download_url) in body
def test_render_video_file_required(self, renderer):
assert renderer.file_required is False
def test_render_video_cache_result(self, renderer):
assert renderer.cache_result is False
|
apache-2.0
|
Python
|
e1c359fab8c351c77556e34731cd677b4c0cc99b
|
Update mono to 4.0.1
|
BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
packages/mono.py
|
packages/mono.py
|
class MonoPackage (Package):
    """Build definition for the Mono 4.0.1 runtime package."""

    def __init__ (self):
        source_urls = [
            'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2'
        ]
        flags = [
            '--with-jit=yes',
            '--with-ikvm=no',
            '--with-mcs-docs=no',
            '--with-moonlight=no',
            '--enable-nls=no',
            '--enable-quiet-build'
        ]
        Package.__init__ (self, 'mono', '4.0.1',
            sources = source_urls,
            configure_flags = flags
        )
        if Package.profile.name == 'darwin' and not Package.profile.m64:
            self.configure_flags.extend ([
                # fix build on lion, it uses 64-bit host even with -m32
                '--build=i386-apple-darwin11.2.0',
            ])
        # Mono (in libgc) likes to fail to build randomly, so retry
        # make up to ten times before giving up.
        self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'

    def install (self):
        """Run the base install, then patch the cairo soname on OS X."""
        Package.install (self)
        if Package.profile.name == 'darwin':
            self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')

MonoPackage ()
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '4.0.0',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-nls=no',
'--enable-quiet-build'
]
)
if Package.profile.name == 'darwin' and not Package.profile.m64:
self.configure_flags.extend ([
# fix build on lion, it uses 64-bit host even with -m32
#'--build=i386-apple-darwin11.2.0',
])
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
mit
|
Python
|
7e627a16c85a9ffa88833176201351908a5458c2
|
Fix (#795)
|
stripe/stripe-python
|
stripe/api_resources/terminal/reader.py
|
stripe/api_resources/terminal/reader.py
|
# File generated from our OpenAPI spec
from __future__ import absolute_import, division, print_function
from stripe import util
from stripe.api_resources.abstract import APIResourceTestHelpers
from stripe.api_resources.abstract import CreateableAPIResource
from stripe.api_resources.abstract import DeletableAPIResource
from stripe.api_resources.abstract import ListableAPIResource
from stripe.api_resources.abstract import UpdateableAPIResource
from stripe.api_resources.abstract import custom_method
from stripe.api_resources.abstract import test_helpers
@test_helpers
@custom_method("cancel_action", http_verb="post")
@custom_method("process_payment_intent", http_verb="post")
@custom_method("process_setup_intent", http_verb="post")
@custom_method("set_reader_display", http_verb="post")
class Reader(
CreateableAPIResource,
DeletableAPIResource,
ListableAPIResource,
UpdateableAPIResource,
):
OBJECT_NAME = "terminal.reader"
def cancel_action(self, idempotency_key=None, **params):
url = self.instance_url() + "/cancel_action"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_payment_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_payment_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_setup_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_setup_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def set_reader_display(self, idempotency_key=None, **params):
url = self.instance_url() + "/set_reader_display"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
@custom_method("present_payment_method", http_verb="post")
class TestHelpers(APIResourceTestHelpers):
def present_payment_method(self, idempotency_key=None, **params):
url = self.instance_url() + "/present_payment_method"
headers = util.populate_headers(idempotency_key)
self.resource.refresh_from(
self.resource.request("post", url, params, headers)
)
return self.resource
|
# File generated from our OpenAPI spec
from __future__ import absolute_import, division, print_function
from stripe import util
from stripe.api_resources.abstract import APIResourceTestHelpers
from stripe.api_resources.abstract import CreateableAPIResource
from stripe.api_resources.abstract import DeletableAPIResource
from stripe.api_resources.abstract import ListableAPIResource
from stripe.api_resources.abstract import UpdateableAPIResource
from stripe.api_resources.abstract import custom_method
from stripe.api_resources.abstract import test_helpers
@test_helpers
@custom_method("cancel_action", http_verb="post")
@custom_method("process_payment_intent", http_verb="post")
@custom_method("process_setup_intent", http_verb="post")
@custom_method("set_reader_display", http_verb="post")
class Reader(
CreateableAPIResource,
DeletableAPIResource,
ListableAPIResource,
UpdateableAPIResource,
):
OBJECT_NAME = "terminal.reader"
def cancel_action(self, idempotency_key=None, **params):
url = self.instance_url() + "/cancel_action"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_payment_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_payment_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def process_setup_intent(self, idempotency_key=None, **params):
url = self.instance_url() + "/process_setup_intent"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
def set_reader_display(self, idempotency_key=None, **params):
url = self.instance_url() + "/set_reader_display"
headers = util.populate_headers(idempotency_key)
self.refresh_from(self.request("post", url, params, headers))
return self
@custom_method("present_payment_method", http_verb="post")
class TestHelpers(APIResourceTestHelpers):
def present_payment_method(self, idempotency_key=None, **params):
url = self.instance_url() + "/present_payment_method"
headers = util.populate_headers(idempotency_key)
resp = self.resource.request("post", url, params, headers)
stripe_object = util.convert_to_stripe_object(resp)
return stripe_object
|
mit
|
Python
|
38de1280ff97d468dcb0214e6c1037ee12d9676b
|
Add another action
|
entering/suoreach,entering/suoreach,entering/suoreach
|
dashboard/controllers.py
|
dashboard/controllers.py
|
import cherrypy
class Dashboard:
    """CherryPy controller for the dashboard section of the site."""

    @cherrypy.expose
    def index(self):
        """Dashboard landing page."""
        return "Dashboard!"

    @cherrypy.expose
    def edit(self, number):
        """Edit page for a single item.

        ``number`` arrives as a URL path segment, so CherryPy passes it
        as a str; the concatenation below relies on that.
        """
        return "Dashboard edit " + number
|
import cherrypy
class Dashboard:
@cherrypy.expose
def index(self):
return "Dashboard!"
|
mit
|
Python
|
cd3f94c7574825812d4e0fea6fda20f9e4432495
|
Test GetApplication
|
GNOME/at-spi2-core,GNOME/at-spi2-core,GNOME/at-spi2-core
|
tests/registryd/test_root_accessible.py
|
tests/registryd/test_root_accessible.py
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
import pytest
import dbus
from utils import get_property, check_unknown_property_yields_error
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
ATSPI_ROLE_DESKTOP_FRAME = 14 # see atspi-constants.h
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_unknown_property_yields_error(registry_root, session_manager):
check_unknown_property_yields_error(registry_root, ACCESSIBLE_IFACE)
def test_root_get_interfaces(registry_root, session_manager):
ifaces = registry_root.GetInterfaces(dbus_interface=ACCESSIBLE_IFACE)
assert ifaces.signature == 's'
assert 'org.a11y.atspi.Accessible' in ifaces
assert 'org.a11y.atspi.Application' in ifaces
assert 'org.a11y.atspi.Component' in ifaces
assert 'org.a11y.atspi.Socket' in ifaces
def test_root_get_index_in_parent(registry_root, session_manager):
# The registry root is always index 0
assert registry_root.GetIndexInParent(dbus_interface=ACCESSIBLE_IFACE) == 0
def test_root_get_relation_set(registry_root, session_manager):
# The registry root has an empty relation set
assert len(registry_root.GetRelationSet(dbus_interface=ACCESSIBLE_IFACE)) == 0
def test_root_get_role(registry_root, session_manager):
# Hardcoded to ATSPI_ROLE_DESKTOP_FRAME
assert registry_root.GetRole(dbus_interface=ACCESSIBLE_IFACE) == ATSPI_ROLE_DESKTOP_FRAME
def test_root_get_role_name(registry_root, session_manager):
assert registry_root.GetRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_localized_role_name(registry_root, session_manager):
# FIXME: see the corresponding FIXME in registry.c, to actually localize this
assert registry_root.GetLocalizedRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_state(registry_root, session_manager):
assert registry_root.GetState(dbus_interface=ACCESSIBLE_IFACE) == [0, 0]
def test_root_get_attributes(registry_root, session_manager):
assert len(registry_root.GetAttributes(dbus_interface=ACCESSIBLE_IFACE)) == 0
def test_root_get_application(registry_root, session_manager):
(name, path) = registry_root.GetApplication(dbus_interface=ACCESSIBLE_IFACE)
assert path == '/org/a11y/atspi/null'
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
import pytest
import dbus
from utils import get_property, check_unknown_property_yields_error
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
ATSPI_ROLE_DESKTOP_FRAME = 14 # see atspi-constants.h
def test_accessible_iface_properties(registry_root, session_manager):
values = [
('Name', 'main'),
('Description', ''),
('Parent', ('', '/org/a11y/atspi/null')),
('ChildCount', 0),
]
for prop_name, expected in values:
assert get_property(registry_root, ACCESSIBLE_IFACE, prop_name) == expected
def test_unknown_property_yields_error(registry_root, session_manager):
check_unknown_property_yields_error(registry_root, ACCESSIBLE_IFACE)
def test_root_get_interfaces(registry_root, session_manager):
ifaces = registry_root.GetInterfaces(dbus_interface=ACCESSIBLE_IFACE)
assert ifaces.signature == 's'
assert 'org.a11y.atspi.Accessible' in ifaces
assert 'org.a11y.atspi.Application' in ifaces
assert 'org.a11y.atspi.Component' in ifaces
assert 'org.a11y.atspi.Socket' in ifaces
def test_root_get_index_in_parent(registry_root, session_manager):
# The registry root is always index 0
assert registry_root.GetIndexInParent(dbus_interface=ACCESSIBLE_IFACE) == 0
def test_root_get_relation_set(registry_root, session_manager):
# The registry root has an empty relation set
assert len(registry_root.GetRelationSet(dbus_interface=ACCESSIBLE_IFACE)) == 0
def test_root_get_role(registry_root, session_manager):
# Hardcoded to ATSPI_ROLE_DESKTOP_FRAME
assert registry_root.GetRole(dbus_interface=ACCESSIBLE_IFACE) == ATSPI_ROLE_DESKTOP_FRAME
def test_root_get_role_name(registry_root, session_manager):
assert registry_root.GetRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_localized_role_name(registry_root, session_manager):
# FIXME: see the corresponding FIXME in registry.c, to actually localize this
assert registry_root.GetLocalizedRoleName(dbus_interface=ACCESSIBLE_IFACE) == "desktop frame"
def test_root_get_state(registry_root, session_manager):
assert registry_root.GetState(dbus_interface=ACCESSIBLE_IFACE) == [0, 0]
def test_root_get_attributes(registry_root, session_manager):
assert len(registry_root.GetAttributes(dbus_interface=ACCESSIBLE_IFACE)) == 0
|
lgpl-2.1
|
Python
|
2ee9e4200c90eae9739a44cb56270d0e873907e9
|
Add more example
|
pk-python/basics
|
pandas/pandas.py
|
pandas/pandas.py
|
import pandas as pd
# Reading csv without header
inp = pd.read_csv('data.txt', header=None)
# Reading csv and set name of columns
inp = pd.read_csv('data.txt', names=['column1', 'column2'])
# Reading csv and set index
inp = pd.read_csv('data.txt', index_col=['column1'])
inp = pd.read_csv('data.txt', index_col=0)
# Reset index
inp = inp.reset_index()
inp.reset_index(inplace = True)
# Show top 5 row
inp.head(5)
# Show last 5 row
inp.tail(5)
# Retrieving particular columns by indexes, since column headers are not there
X_df = inp[inp.columns[0:2]]
# Converting dataframe to numpy ndarray
X_nd = X_df.values
|
import pandas as pd
# Reading csv without header
inp = pd.read_csv('data.txt', header=None)
# Retrieving particular columns by indexes, since column headers are not there
X_df = inp[inp.columns[0:2]]
# Converting dataframe to numpy ndarray
X_nd = X_df.values
|
mit
|
Python
|
54fab13f466d17acfa4f9b3d67d777de8d34f67f
|
Remove interdependence from get_path_category()
|
pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016
|
src/core/templatetags/pycontw_tools.py
|
src/core/templatetags/pycontw_tools.py
|
import re
from django.template import Library
register = Library()
@register.filter
def message_bootstrap_class_str(message):
    """Map a Django message's space-separated tags to Bootstrap
    ``alert-*`` CSS class names, joined with single spaces."""
    classes = ['alert-%s' % tag for tag in message.tags.split(' ')]
    return ' '.join(classes)
# Compiled once at import time: the filter runs on every template
# render, so avoid recompiling the pattern per call.
_PATH_CATEGORY_RE = re.compile(
    r'/(?P<lang>zh\-hant|en\-us)/(?P<category>[0-9a-z-]*)/')


@register.filter
def get_path_category(url):
    """Extract the category segment from a language-prefixed URL path.

    :param url: a path such as ``/en-us/speaking/cfp/``
    :return: the ``<category>`` segment, or ``'unmatched'`` when the
        path does not start with ``/<lang>/<category>/``
    """
    result = _PATH_CATEGORY_RE.match(url)
    if not result:
        return 'unmatched'
    # NOTE: the named group is always present in groupdict() after a
    # successful match; the default is kept as a defensive fallback.
    return result.groupdict().get('category', 'uncategorized')
|
import re
from django.template import Library
register = Library()
@register.filter
def message_bootstrap_class_str(message):
return ' '.join('alert-' + tag for tag in message.tags.split(' '))
@register.filter
def get_path_category(url):
lang = '\/(zh\-hant|en\-us)'
category_pattern_mapping = {
'about': '\/about/pycontw',
'sponsor': '\/sponsor/sponsor',
'speaking': '\/speaking\/(cfp|talk|tutorial|recording)',
'conference': '\/(events\/(overview|schedule)|portal)',
'event': '\/events\/(keynotes|talks|open-spaces)',
'registration': '\/registration/(financial-aid|ticket-info|registration)',
'venue': '\/venue'
}
end = '\/?$'
for category, pattern in category_pattern_mapping.items():
if re.match(lang + pattern + end, url):
return category
return 'uncategorized'
|
mit
|
Python
|
320a96337c55d770ed032520ecb75155e2d124e5
|
Update version
|
maxmind/GeoIP2-python,maxmind/GeoIP2-python,simudream/GeoIP2-python
|
geoip2/__init__.py
|
geoip2/__init__.py
|
#pylint:disable=C0111
__title__ = 'geoip2'
__version__ = '0.1.1'
__author__ = 'Gregory Oschwald'
__license__ = 'LGPLv2+'
__copyright__ = 'Copyright 2013 Maxmind, Inc.'
|
#pylint:disable=C0111
__title__ = 'geoip2'
__version__ = '0.1.0'
__author__ = 'Gregory Oschwald'
__license__ = 'LGPLv2+'
__copyright__ = 'Copyright 2013 Maxmind, Inc.'
|
apache-2.0
|
Python
|
01dd6198cba28623e3d2a72bc9b1f720a70112f0
|
Bump version to 0.2.1
|
geomet/geomet,larsbutler/geomet,larsbutler/geomet,geomet/geomet
|
geomet/__init__.py
|
geomet/__init__.py
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.2.1'
class InvalidGeoJSONException(Exception):
    """Raised when input that is expected to be GeoJSON turns out to be
    invalid."""
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.2.0-2'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
apache-2.0
|
Python
|
ab8d6fc2163e7170e8d184f1321119bbcd469709
|
Update ipc_lista1.9.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.9.py
|
lista1/ipc_lista1.9.py
|
#ipc_lista1.9
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça a temperatura em graus Fahrenheit, transforme e mostre a temperatura em graus Celsius.
|
#ipc_lista1.9
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça a temperatura em graus Fahrenheit, transforme e mostre a temperatura em graus Celsius.
|
apache-2.0
|
Python
|
c9ba6d141e356b48caf1820a309e554f21e016c4
|
Transpose guards against None result
|
saurabhjn76/sympy,postvakje/sympy,kaichogami/sympy,Titan-C/sympy,bukzor/sympy,MridulS/sympy,toolforger/sympy,atsao72/sympy,iamutkarshtiwari/sympy,maniteja123/sympy,shikil/sympy,lindsayad/sympy,grevutiu-gabriel/sympy,Sumith1896/sympy,bukzor/sympy,yukoba/sympy,asm666/sympy,yashsharan/sympy,toolforger/sympy,jamesblunt/sympy,moble/sympy,mafiya69/sympy,beni55/sympy,cccfran/sympy,oliverlee/sympy,oliverlee/sympy,yashsharan/sympy,Curious72/sympy,sunny94/temp,saurabhjn76/sympy,hrashk/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,jaimahajan1997/sympy,Mitchkoens/sympy,liangjiaxing/sympy,wyom/sympy,Curious72/sympy,vipulroxx/sympy,jbbskinny/sympy,garvitr/sympy,aktech/sympy,emon10005/sympy,madan96/sympy,lindsayad/sympy,kaichogami/sympy,aktech/sympy,atreyv/sympy,debugger22/sympy,beni55/sympy,dqnykamp/sympy,grevutiu-gabriel/sympy,kaushik94/sympy,bukzor/sympy,cswiercz/sympy,ga7g08/sympy,Davidjohnwilson/sympy,lidavidm/sympy,jbbskinny/sympy,hargup/sympy,vipulroxx/sympy,lindsayad/sympy,skirpichev/omg,shikil/sympy,Vishluck/sympy,drufat/sympy,Arafatk/sympy,farhaanbukhsh/sympy,liangjiaxing/sympy,kumarkrishna/sympy,Gadal/sympy,kumarkrishna/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,shipci/sympy,Vishluck/sympy,rahuldan/sympy,moble/sympy,kmacinnis/sympy,sahilshekhawat/sympy,liangjiaxing/sympy,kevalds51/sympy,mcdaniel67/sympy,MechCoder/sympy,abhiii5459/sympy,meghana1995/sympy,ChristinaZografou/sympy,yashsharan/sympy,pandeyadarsh/sympy,AkademieOlympia/sympy,Titan-C/sympy,diofant/diofant,atreyv/sympy,VaibhavAgarwalVA/sympy,lidavidm/sympy,skidzo/sympy,abloomston/sympy,hrashk/sympy,jamesblunt/sympy,Titan-C/sympy,ga7g08/sympy,meghana1995/sympy,ga7g08/sympy,jerli/sympy,mafiya69/sympy,jaimahajan1997/sympy,garvitr/sympy,atsao72/sympy,sampadsaha5/sympy,Davidjohnwilson/sympy,chaffra/sympy,cccfran/sympy,Designist/sympy,abhiii5459/sympy,emon10005/sympy,AunShiLord/sympy,jbbskinny/sympy,toolforger/sympy,sampadsaha5/sympy,drufat/sympy,mafiya69/sympy,chaffra/sympy,Gadal/sympy,maniteja123/sympy,jerli/sympy,p
andeyadarsh/sympy,asm666/sympy,meghana1995/sympy,Shaswat27/sympy,garvitr/sympy,MechCoder/sympy,Sumith1896/sympy,sahmed95/sympy,abloomston/sympy,beni55/sympy,kaichogami/sympy,Davidjohnwilson/sympy,kmacinnis/sympy,emon10005/sympy,farhaanbukhsh/sympy,vipulroxx/sympy,mcdaniel67/sympy,sunny94/temp,debugger22/sympy,kaushik94/sympy,yukoba/sympy,abloomston/sympy,Arafatk/sympy,Arafatk/sympy,Shaswat27/sympy,cswiercz/sympy,ahhda/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,AunShiLord/sympy,MridulS/sympy,wyom/sympy,kaushik94/sympy,Vishluck/sympy,moble/sympy,postvakje/sympy,cswiercz/sympy,rahuldan/sympy,sahmed95/sympy,rahuldan/sympy,skidzo/sympy,wanglongqi/sympy,Curious72/sympy,postvakje/sympy,sahmed95/sympy,MridulS/sympy,sahilshekhawat/sympy,kevalds51/sympy,MechCoder/sympy,pbrady/sympy,kmacinnis/sympy,saurabhjn76/sympy,hargup/sympy,asm666/sympy,drufat/sympy,maniteja123/sympy,abhiii5459/sympy,AkademieOlympia/sympy,dqnykamp/sympy,dqnykamp/sympy,sunny94/temp,yukoba/sympy,sampadsaha5/sympy,ChristinaZografou/sympy,wanglongqi/sympy,shipci/sympy,shikil/sympy,ahhda/sympy,hargup/sympy,souravsingh/sympy,jaimahajan1997/sympy,cccfran/sympy,Designist/sympy,Mitchkoens/sympy,AkademieOlympia/sympy,skidzo/sympy,jerli/sympy,sahilshekhawat/sympy,iamutkarshtiwari/sympy,wyom/sympy,Designist/sympy,souravsingh/sympy,Sumith1896/sympy,ahhda/sympy,kevalds51/sympy,Gadal/sympy,pandeyadarsh/sympy,oliverlee/sympy,AunShiLord/sympy,chaffra/sympy,lidavidm/sympy,atsao72/sympy,shipci/sympy,pbrady/sympy,wanglongqi/sympy,debugger22/sympy,pbrady/sympy,hrashk/sympy,kumarkrishna/sympy,grevutiu-gabriel/sympy,iamutkarshtiwari/sympy,atreyv/sympy,Shaswat27/sympy,aktech/sympy,jamesblunt/sympy,farhaanbukhsh/sympy,souravsingh/sympy,Mitchkoens/sympy
|
sympy/matrices/expressions/transpose.py
|
sympy/matrices/expressions/transpose.py
|
from sympy import Basic, Q
from sympy.functions import adjoint, conjugate
from sympy.matrices.expressions.matexpr import MatrixExpr
from sympy.matrices import MatrixBase
class Transpose(MatrixExpr):
    """
    The transpose of a matrix expression.

    This is a symbolic object that simply stores its argument without
    evaluating it. To actually compute the transpose, use the ``transpose()``
    function, or the ``.T`` attribute of matrices.

    Examples
    ========

    >>> from sympy.matrices import MatrixSymbol, Transpose
    >>> from sympy.functions import transpose
    >>> A = MatrixSymbol('A', 3, 5)
    >>> B = MatrixSymbol('B', 5, 3)
    >>> Transpose(A)
    A'
    >>> A.T == transpose(A) == Transpose(A)
    True
    >>> Transpose(A*B)
    (A*B)'
    >>> transpose(A*B)
    B'*A'
    """
    is_Transpose = True

    def doit(self, **hints):
        """Evaluate the transpose, delegating to the argument's
        ``_eval_transpose()`` and guarding against a ``None`` result."""
        arg = self.arg
        if hints.get('deep', True) and isinstance(arg, Basic):
            arg = arg.doit(**hints)
        try:
            result = arg._eval_transpose()
            # _eval_transpose() may return None to mean "cannot
            # simplify"; fall back to the unevaluated form then.
            return result if result is not None else Transpose(arg)
        except AttributeError:
            return Transpose(arg)

    @property
    def arg(self):
        # The wrapped matrix expression.
        return self.args[0]

    @property
    def shape(self):
        # Transposition swaps the row and column counts.
        return self.arg.shape[::-1]

    def _entry(self, i, j):
        # Entry (i, j) of X' is entry (j, i) of X.
        return self.arg._entry(j, i)

    def _eval_adjoint(self):
        # adjoint(X') == conjugate(X)
        return conjugate(self.arg)

    def _eval_conjugate(self):
        # conjugate(X') == adjoint(X)
        return adjoint(self.arg)

    def _eval_transpose(self):
        # (X')' == X
        return self.arg

    def _eval_trace(self):
        # Absolute import: the original Python 2 implicit relative form
        # ``from trace import Trace`` resolves to the stdlib ``trace``
        # module on Python 3.
        from sympy.matrices.expressions.trace import Trace
        return Trace(self.arg)  # Trace(X.T) => Trace(X)

    def _eval_determinant(self):
        from sympy.matrices.expressions.determinant import det
        return det(self.arg)


def transpose(expr):
    """Matrix transpose: build a Transpose node and evaluate it."""
    return Transpose(expr).doit()
|
from sympy import Basic, Q
from sympy.functions import adjoint, conjugate
from sympy.matrices.expressions.matexpr import MatrixExpr
from sympy.matrices import MatrixBase
class Transpose(MatrixExpr):
"""
The transpose of a matrix expression.
This is a symbolic object that simply stores its argument without
evaluating it. To actually compute the transpose, use the ``transpose()``
function, or the ``.T`` attribute of matrices.
Examples
========
>>> from sympy.matrices import MatrixSymbol, Transpose
>>> from sympy.functions import transpose
>>> A = MatrixSymbol('A', 3, 5)
>>> B = MatrixSymbol('B', 5, 3)
>>> Transpose(A)
A'
>>> A.T == transpose(A) == Transpose(A)
True
>>> Transpose(A*B)
(A*B)'
>>> transpose(A*B)
B'*A'
"""
is_Transpose = True
def doit(self, **hints):
arg = self.arg
if hints.get('deep', True) and isinstance(arg, Basic):
arg = arg.doit(**hints)
try:
return arg._eval_transpose()
except AttributeError:
return Transpose(arg)
@property
def arg(self):
return self.args[0]
@property
def shape(self):
return self.arg.shape[::-1]
def _entry(self, i, j):
return self.arg._entry(j, i)
def _eval_adjoint(self):
return conjugate(self.arg)
def _eval_conjugate(self):
return adjoint(self.arg)
def _eval_transpose(self):
return self.arg
def _eval_trace(self):
from trace import Trace
return Trace(self.arg) # Trace(X.T) => Trace(X)
def _eval_determinant(self):
from sympy.matrices.expressions.determinant import det
return det(self.arg)
def transpose(expr):
""" Matrix transpose """
return Transpose(expr).doit()
|
bsd-3-clause
|
Python
|
df4a47a1111908e7120cd9ef296322a41c8cc5aa
|
Update windows-1251 file scanner
|
FarGroup/FarManager,FarGroup/FarManager,johnd0e/FarManager,FarGroup/FarManager,FarGroup/FarManager,johnd0e/FarManager,johnd0e/FarManager,johnd0e/FarManager,johnd0e/FarManager,FarGroup/FarManager,FarGroup/FarManager,johnd0e/FarManager,FarGroup/FarManager,johnd0e/FarManager,FarGroup/FarManager,johnd0e/FarManager
|
enc/tools/contrib/techtonik/rucheck.py
|
enc/tools/contrib/techtonik/rucheck.py
|
#!/usr/bin/env python2
# -*- coding:windows-1251 -*-
"""Find files with letters in russian windows-1251 encoding.
windows-1251 is a single byte encoding with a range 0xC0-0xFF
and 0xA8,0xB8 for symbols and respectfully. Unfortunately,
russian symbols in windows-1251 clash with russian symbols in
utf-8, where they take two bytes in the ranges:
- 0xD081
- 0xD090-0xD0BF -
- 0xD180-0xD18F -
- 0xD191
This code ignores lines with <!-- NLC --> marker.
"""
# pythonized by techtonik // gmail.com
import codecs
import fnmatch
import os
import re
import sys
#sys.stdout.write("".decode("windows-1251"))
if len(sys.argv) < 2:
print(__doc__)
print("Usage: program <dir>")
sys.exit()
# set encoding for console output
#print sys.stdout.encoding
#sys.stdout = codecs.lookup("windows-1251")[-1](sys.stdout)
#print sys.stdout.encoding
#reload(sys)
#sys.setdefaultencoding("cp866")
#print "".decode("cp1251")
#sw = codecs.lookup("cp866")[-1](sys.stdout)
#sw.write("sdf")
dirs_exclude = ['.svn', '.git']
files_exclude = ['*.gif', '*.png', '*.jpg', '*.exe', '*.ico', '*.msi', '*.rar']
# https://en.wikipedia.org/wiki/Windows-1251
cp1251 = re.compile(r"[\xA8\xB8\xC0-\xFF]+")
# https://en.wikipedia.org/wiki/UTF-8#Encoding
utf8ru = re.compile(r"(\xD0[\x81\x90-\xBF]|\xD1[\x91\x80-\x8F])+")
utf8rest = re.compile(r"(\xC2[\xAB\xBB]|" # angle quotes
r"\xE2\x80\x94|" # long dash
r"\xC2\xA6)+") # unicode |
skip_mark = "<!-- NLC -->"
for root,dirs,files in os.walk(sys.argv[1]):
# exclude dirs by modifying dirs in place
dirs[:] = [d for d in dirs if not d in dirs_exclude]
for f in files:
# exclude files by skipping them
skip = False
for pattern in files_exclude:
if fnmatch.fnmatch(f.lower(), pattern):
skip = True
if skip:
continue
rucount = 0
for i,l in enumerate(open(os.path.join(root, f), "r")):
if l.find(skip_mark) != -1:
continue
noutf = utf8ru.sub('', l) # remove russian utf-8 to avoid false positives
noutf = utf8rest.sub('', noutf) # remove other clashing utf-8 symbols
rutext = "".join(cp1251.findall(noutf))
rucount += len(rutext)
if rutext:
sys.stdout.write("line %3d: " % (i+1))
print(noutf.encode('hex'))
sys.stdout.write(rutext.decode("windows-1251").encode('utf-8'))
#print(rutext.decode("cp1251"))
sys.stdout.write("\n")
if rucount:
print("%s - %d russian (cp1251) symbols" % (os.path.join(root, f), rucount))
#print root,dirs,files
|
#!/usr/bin/env python2
# -*- coding:windows-1251 -*-
"""Find files with letters in russian windows-1251 encoding.

If English encyclopedia files contain Russian letters they are considered
untranslated unless <!-- NLC --> marker is present at the same line in
HTML code.
"""
# pythonized by techtonik // gmail.com
# NOTE(review): Python 2 only (print statements, str/bytes semantics).
import codecs
import fnmatch
import os
import re
import sys

#sys.stdout.write("".decode("windows-1251"))

# Require the directory to scan as the sole argument.
if len(sys.argv) < 2:
    print __doc__
    print "Usage: program <dir>"
    sys.exit()

# set encoding for console output
#print sys.stdout.encoding
#sys.stdout = codecs.lookup("windows-1251")[-1](sys.stdout)
#print sys.stdout.encoding
#reload(sys)
#sys.setdefaultencoding("cp866")
#print "".decode("cp1251")
#sw = codecs.lookup("cp866")[-1](sys.stdout)
#sw.write("sdf")

dirs_exclude = ['.svn', '.git']
files_exclude = ['*.gif', '*.png', '*.jpg', '*.exe', '*.ico', '*.msi', '*.rar']
# https://en.wikipedia.org/wiki/Windows-1251
cp1251 = re.compile(r"[\xA8\xB8\xC0-\xFF]+")
# by coincidence cp1251 codes match with lower
# byte of utf-8
skip_mark = "<!-- NLC -->"
for root, dirs, files in os.walk(sys.argv[1]):
    # exclude dirs by modifying dirs in place
    dirs[:] = [d for d in dirs if not d in dirs_exclude]
    for f in files:
        # exclude files by skipping them
        skip = False
        for pattern in files_exclude:
            if fnmatch.fnmatch(f.lower(), pattern):
                skip = True
        if skip:
            continue
        rucount = 0
        for l in open(os.path.join(root, f), "r"):
            if l.find(skip_mark) != -1:
                continue
            rutext = "".join(cp1251.findall(l))
            rucount += len(rutext)
            #if rutext: print rutext #.decode("cp1251")
        if rucount:
            print "%s - %d russian (cp1251) symbols" % (os.path.join(root, f), rucount)
#print root,dirs,files
|
bsd-3-clause
|
Python
|
4e53b01f2024e320e4c31ded5d6ad7187aa6868b
|
Make Wordclient a class
|
anirudhagar13/SS_Graph,anirudhagar13/SS_Graph,anirudhagar13/SS_Graph,anirudhagar13/SS_Graph
|
Wordclient.py
|
Wordclient.py
|
from __future__ import print_function
from Commons import *
from Spider import *
from Edge import *
class Wordclient:
    """Client around a crawled word web.

    ``self.web`` maps a reachable word to a list of paths; each path is a
    list of edge objects exposing a numeric ``weight``.
    """

    def __init__(self, word):
        '''
        Constructor to crawl web for a word
        '''
        self.word = word
        sp = Spider(word)
        self.web = sp.crawl()  # Crawled web: {target_word: [path, ...]}

    def printweb(self):
        '''
        To Print entire web of mentioned word
        '''
        # Bug fix: this was declared ``def printweb(word, web)`` inside the
        # class (no ``self``), so it could never be called as a method.
        print('FROM : ', self.word)
        for word, paths in self.web.items():
            print('TO : ', word)
            for i, path in enumerate(paths):
                print('PATH', i + 1, ' :', end='')
                score = 1
                for edge in path:
                    score *= edge.weight
                    print(' |', edge, end='')
                print()
                # Score of a path is the product of its edge weights.
                print('PathScore : ', score)

    def printpaths(self, dest):
        '''
        To print paths to a specific word in web
        '''
        if dest in self.web:
            paths = self.web[dest]
            print('TO : ', dest)
            for i, path in enumerate(paths):
                print('PATH', i + 1, ' :', end='')
                score = 1
                for edge in path:
                    score *= edge.weight
                    print(' |', edge, end='')
                print()
                print('PathScore : ', score)
        else:
            # Bug fix: referenced undefined name ``word`` (NameError).
            print('Word', dest, 'is not reachable from Source')

    def score(self, dest):
        '''
        To Compute score of word in web
        '''
        # Total score is the sum over all paths of the product of weights.
        if dest in self.web:
            paths = self.web[dest]
            print('TO : ', dest)
            score = 0
            for i, path in enumerate(paths):
                path_score = 1
                for edge in path:
                    path_score *= edge.weight
                score += path_score
            return score
        else:
            # Bug fix: referenced undefined name ``word`` (NameError).
            print('Word', dest, 'is not reachable from Source')
            return 0
if __name__ == '__main__':
    # Smoke test: crawl from 'dog' and report paths/score for 'puppy'.
    word = 'dog'
    client = 'puppy'
    try:
        wc = Wordclient(word)
        wc.printpaths(client)
        print('Final Score : ', wc.score(client))
    except Exception as e:
        # Broad catch keeps the demo from crashing on crawl/network errors.
        print('Error Wordclient- ', e)
|
from __future__ import print_function
from Commons import *
from Spider import *
from Edge import *
def Printpaths(word, web):
    '''
    Print every crawled path that reaches *word*, with its score.
    '''
    if word not in web:
        print('Word', word, 'is not reachable from Source')
        return
    print('TO : ', word)
    for number, path in enumerate(web[word], 1):
        print('PATH', number, ' :', end='')
        product = 1
        for edge in path:
            product *= edge.weight
            print(' |', edge, end='')
        print()
        # A path's score is the product of its edge weights.
        print('PathScore : ', product)
def Score(word, web):
    '''
    Sum, over every path reaching *word*, the product of edge weights.
    Returns 0 when *word* is absent from the web.
    '''
    if word not in web:
        print('Word', word, 'is not reachable from Source')
        return 0
    print('TO : ', word)
    total = 0
    for path in web[word]:
        product = 1
        for edge in path:
            product *= edge.weight
        total += product
    return total
def Printweb(word, web):
    '''
    Dump the whole crawled web rooted at *word*: every destination,
    every path, and each path's score.
    '''
    print('FROM : ', word)
    for target, paths in web.items():
        print('TO : ', target)
        for number, path in enumerate(paths, 1):
            print('PATH', number, ' :', end='')
            product = 1
            for edge in path:
                product *= edge.weight
                print(' |', edge, end='')
            print()
            print('PathScore : ', product)
if __name__ == '__main__':
    # Demo: crawl around 'lion' and report reachability/score of 'tiger'.
    word = 'lion'
    client = 'tiger'
    try:
        sp = Spider(word)
        web = sp.crawl()  # Web obtained back around mentioned word
        # Printweb(word, web)
        Printpaths(client, web)
        print('Final Score : ', Score(client, web))
    except Exception as e:
        # Broad catch keeps the demo from crashing on crawl/network errors.
        print('Error Wordclient- ', e)
|
apache-2.0
|
Python
|
92699fa0ac8c97a5a54da2a4155b08145c524d5d
|
revert the previous change: regression found
|
tinyerp/openerpweb_seven,tinyerp/openerpweb_seven
|
web_seven/openerpweb.py
|
web_seven/openerpweb.py
|
# -*- coding: utf-8 -*-
def patch_web7():
    """Monkey-patch the OpenERP 7.0 web client so it runs on a 6.1 server.

    Replaces ``WebClient.translations`` with a version that reads .po files
    the way the 6.1 server lays them out. No-op on OpenERP Web 6.1.
    """
    # Bug fix: a bare ``import babel`` does not guarantee that the
    # ``babel.messages.pofile`` submodule is loaded; the AttributeError it
    # caused at read time was swallowed by the broad ``except Exception``
    # below, silently yielding empty translations.
    import babel.messages.pofile
    import os.path
    import openerp.addons.web
    try:
        from openerp.addons.web import http as openerpweb
    except ImportError:
        # OpenERP Web 6.1 — nothing to patch.
        return

    # Adapt the OpenERP Web 7.0 method for OpenERP 6.1 server
    @openerpweb.jsonrequest
    def translations(self, req, mods, lang):
        """Return web-tagged translations and locale parameters for *lang*."""
        res_lang = req.session.model('res.lang')
        ids = res_lang.search([("code", "=", lang)])
        lang_params = None
        if ids:
            lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
                                                 "grouping", "decimal_point", "thousands_sep"])

        # Expand e.g. "fr_BE" to ["fr", "fr_BE"] so base translations load first.
        separator = '_' if '_' in lang else '@'
        langs = lang.split(separator)
        langs = [separator.join(langs[:x]) for x in range(1, len(langs) + 1)]

        translations_per_module = {}
        for addon_name in mods:
            translations_per_module[addon_name] = transl = {"messages": []}
            addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
            for l in langs:
                f_name = os.path.join(addons_path, addon_name, "i18n", l + ".po")
                try:
                    with open(f_name) as t_file:
                        po = babel.messages.pofile.read_po(t_file)
                except Exception:
                    # Best effort: missing or unparsable .po files are skipped.
                    continue
                for x in po:
                    # Only entries flagged for the web client are exported.
                    if x.id and x.string and "openerp-web" in x.auto_comments:
                        transl["messages"].append({'id': x.id, 'string': x.string})
        return {"modules": translations_per_module,
                "lang_parameters": lang_params}

    openerp.addons.web.controllers.main.WebClient.translations = translations
|
# -*- coding: utf-8 -*-
def patch_web7():
    """Monkey-patch the OpenERP 7.0 web client so it runs on a 6.1 server."""
    import babel
    import os.path
    import sys
    import openerp.addons.web
    try:
        from openerp.addons.web import http as openerpweb
    except ImportError:
        # OpenERP Web 6.1 — nothing to patch.
        return

    # Self-reference for 6.1 modules which import 'web.common.http'
    # NOTE(review): per the commit message this aliasing caused a regression
    # and was reverted in a later revision.
    openerp.addons.web.common = openerp.addons.web
    sys.modules['openerp.addons.web.common'] = openerp.addons.web

    # Adapt the OpenERP Web 7.0 method for OpenERP 6.1 server
    @openerpweb.jsonrequest
    def translations(self, req, mods, lang):
        # Returns web-tagged translations plus locale formatting parameters.
        res_lang = req.session.model('res.lang')
        ids = res_lang.search([("code", "=", lang)])
        lang_params = None
        if ids:
            lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
                                                 "grouping", "decimal_point", "thousands_sep"])

        # Expand e.g. "fr_BE" to ["fr", "fr_BE"] so base translations load first.
        separator = '_' if '_' in lang else '@'
        langs = lang.split(separator)
        langs = [separator.join(langs[:x]) for x in range(1, len(langs) + 1)]

        translations_per_module = {}
        for addon_name in mods:
            translations_per_module[addon_name] = transl = {"messages": []}
            addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
            for l in langs:
                f_name = os.path.join(addons_path, addon_name, "i18n", l + ".po")
                try:
                    with open(f_name) as t_file:
                        # NOTE(review): ``import babel`` may not load the
                        # ``babel.messages.pofile`` submodule; a resulting
                        # AttributeError is swallowed below — confirm.
                        po = babel.messages.pofile.read_po(t_file)
                except Exception:
                    # Best effort: missing or unparsable .po files are skipped.
                    continue
                for x in po:
                    if x.id and x.string and "openerp-web" in x.auto_comments:
                        transl["messages"].append({'id': x.id, 'string': x.string})
        return {"modules": translations_per_module,
                "lang_parameters": lang_params}

    openerp.addons.web.controllers.main.WebClient.translations = translations
|
bsd-3-clause
|
Python
|
f03b47e987ed2259b10e21123ed8bca711b8bf15
|
Add -D_REENTRANT to cflags as per Gemfire docs
|
gemfire/node-gemfire,gemfire/node-gemfire,mross-pivotal/node-gemfire,mross-pivotal/node-gemfire,gemfire/node-gemfire,mross-pivotal/node-gemfire,gemfire/node-gemfire,gemfire/node-gemfire,mross-pivotal/node-gemfire,gemfire/node-gemfire,mross-pivotal/node-gemfire,mross-pivotal/node-gemfire
|
binding.gyp
|
binding.gyp
|
# vim: set ft=javascript
{
# NOTE: 'module_name' and 'module_path' come from the 'binary' property in package.json
# node-pre-gyp handles passing them down to node-gyp when you build from source
"targets": [
{
"target_name": "<(module_name)",
"include_dirs": [ "include" ],
"sources": [ "src/binding.cpp" ],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES',
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'
}
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags_cc!': ['-fno-rtti', '-fno-exceptions'],
'cflags_cc+': ['-frtti', '-D_REENTRANT'],
"libraries": [ "<(module_root_dir)/lib/libgfcppcache.so" ]
}]
]
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
]
}
|
# vim: set ft=javascript
{
# NOTE: 'module_name' and 'module_path' come from the 'binary' property in package.json
# node-pre-gyp handles passing them down to node-gyp when you build from source
"targets": [
{
"target_name": "<(module_name)",
"include_dirs": [ "include" ],
"sources": [ "src/binding.cpp" ],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES',
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'
}
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'cflags_cc!': ['-fno-rtti', '-fno-exceptions'],
'cflags_cc+': ['-frtti'],
"libraries": [ "<(module_root_dir)/lib/libgfcppcache.so" ]
}]
]
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
]
}
|
bsd-2-clause
|
Python
|
726e70910e72f68085ecc7cdcc2d474c2ba99c6a
|
add source dir to include_dirs
|
embeddedfactor/node-mifare,embeddedfactor/node-mifare,embeddedfactor/node-mifare,embeddedfactor/node-mifare
|
binding.gyp
|
binding.gyp
|
{
"variables": {
"source_dir": "src",
},
"targets": [
{
"target_name": "node_mifare",
"dependencies": ["node_modules/libfreefare-pcsc/binding.gyp:freefare_pcsc"],
"conditions": [
['OS=="linux"', {
"defines": [
"USE_LIBNFC",
],
}]
],
"include_dirs": ["<(source_dir)"],
"sources": [
"src/mifare.cc",
"src/reader.cc",
"src/desfire.cc",
"src/utils.cc"
],
"cflags": [
"-Wall",
"-Wextra",
"-Wno-unused-parameter",
"-fPIC",
"-fno-strict-aliasing",
"-fno-exceptions",
"-pedantic"
],
}
]
}
|
{
"targets": [
{
"target_name": "node_mifare",
"dependencies": ["node_modules/libfreefare-pcsc/binding.gyp:freefare_pcsc"],
"conditions": [
['OS=="linux"', {
"defines": [
"USE_LIBNFC",
],
}]
],
"sources": [
"src/mifare.cc",
"src/reader.cc",
"src/desfire.cc",
"src/utils.cc"
],
"cflags": [
"-Wall",
"-Wextra",
"-Wno-unused-parameter",
"-fPIC",
"-fno-strict-aliasing",
"-fno-exceptions",
"-pedantic"
],
}
]
}
|
mit
|
Python
|
0b7eca03c652b5afefb7eabd48011310b122acbc
|
Fix #47 contect handling
|
zsiciarz/django-envelope,zsiciarz/django-envelope
|
envelope/templatetags/envelope_tags.py
|
envelope/templatetags/envelope_tags.py
|
# -*- coding: utf-8 -*-
"""
Template tags related to the contact form.
"""
from __future__ import unicode_literals
from django import template
register = template.Library()
# Optional honeypot integration: if django-honeypot is installed, the
# {% antispam_fields %} tag renders its hidden field; otherwise the tag
# degrades to an empty string so templates work either way.
try:
    import honeypot
    # Register antispam_fields as an inclusion tag
    t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
    register.inclusion_tag(t, name='antispam_fields')(lambda: {})
except ImportError:  # pragma: no cover
    # Register antispam_fields as an empty tag
    register.simple_tag(name='antispam_fields')(lambda: '')
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
    """
    Render the contact form that must already be in the template context.

    Typically called from the template rendered by
    :class:`~envelope.views.ContactView`; the markup itself comes from the
    ``envelope/contact_form.html`` sub-template, which receives the full
    current context.
    """
    if 'form' in context:
        return context
    raise template.TemplateSyntaxError(
        "There is no 'form' variable in the template context."
    )
|
# -*- coding: utf-8 -*-
"""
Template tags related to the contact form.
"""
from __future__ import unicode_literals
from django import template
register = template.Library()
# Optional honeypot integration: render django-honeypot's hidden field when
# the package is installed, otherwise an empty tag.
try:
    import honeypot
    # Register antispam_fields as an inclusion tag
    t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
    register.inclusion_tag(t, name='antispam_fields')(lambda: {})
except ImportError:  # pragma: no cover
    # Register antispam_fields as an empty tag
    register.simple_tag(name='antispam_fields')(lambda: '')
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
    """
    Renders the contact form which must be in the template context.

    The most common use case for this template tag is to call it in the
    template rendered by :class:`~envelope.views.ContactView`. The template
    tag will then render a sub-template ``envelope/contact_form.html``.
    """
    # Bug fix: returning only {'form': form} dropped every other variable
    # (csrf_token, request, ...) from the sub-template's context. Verify the
    # form is present, then hand the full context through.
    if 'form' not in context:
        raise template.TemplateSyntaxError(
            "There is no 'form' variable in the template context."
        )
    return context
|
mit
|
Python
|
444baf986cf90a952f5d2406b5aba60113494349
|
Add FloatingIP object implementation
|
openstack/oslo.versionedobjects,citrix-openstack-build/oslo.versionedobjects
|
nova/objects/__init__.py
|
nova/objects/__init__.py
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
    """Import every nova.objects module so its classes register themselves."""
    # NOTE(danms): You must make sure your object gets imported in this
    # function in order for it to be registered by services that may
    # need to receive it via RPC.
    module_names = (
        'dns_domain',
        'instance',
        'instance_info_cache',
        'security_group',
        'migration',
        'quotas',
        'virtual_interface',
        'network',
        'block_device',
        'fixed_ip',
        'floating_ip',
    )
    for name in module_names:
        __import__('nova.objects.%s' % name)
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
    """Import every nova.objects module so its classes register themselves."""
    # NOTE(danms): You must make sure your object gets imported in this
    # function in order for it to be registered by services that may
    # need to receive it via RPC.
    __import__('nova.objects.dns_domain')
    __import__('nova.objects.instance')
    __import__('nova.objects.instance_info_cache')
    __import__('nova.objects.security_group')
    __import__('nova.objects.migration')
    __import__('nova.objects.quotas')
    __import__('nova.objects.virtual_interface')
    __import__('nova.objects.network')
    __import__('nova.objects.block_device')
    __import__('nova.objects.fixed_ip')
|
apache-2.0
|
Python
|
4f7590ea19036ceb358c323d796be0046f33327e
|
move config.json to correct location
|
SqueezeStudioAnimation/omtk,SqueezeStudioAnimation/omtk
|
omtk/core/preferences.py
|
omtk/core/preferences.py
|
"""
Provide a Preference class to store the user preferences of the local installation.
"""
import os
import inspect
import json
import logging
log = logging.getLogger('omtk')
CONFIG_FILENAME = 'config.json'  # file name of the persisted preferences


def get_path_preferences():
    """
    :return: The search path of the configuration file.
    """
    # Resolve relative to this module: the config lives two directory
    # levels above (the package root).
    module_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
    root_dir = os.path.abspath(os.path.join(module_dir, '..', '..'))
    return os.path.join(root_dir, CONFIG_FILENAME)
class Preferences(object):
    """User preferences of the local installation, persisted as JSON."""

    def __init__(self):
        # Class name of the preferred rig plugin; None means "use default".
        self.default_rig = None

    def save(self, path=None):
        """Serialize all preference attributes to *path* (default location if None)."""
        target = get_path_preferences() if path is None else path
        with open(target, 'w') as handle:
            json.dump(self.__dict__, handle)

    def load(self, path=None):
        """Restore preference attributes from *path*; keep defaults if missing."""
        target = get_path_preferences() if path is None else path
        if not target or not os.path.exists(target):
            log.warning("Can't find config file. Using default config.")
            return
        with open(target, 'r') as handle:
            self.__dict__.update(json.load(handle))

    def get_default_rig_class(self):
        """Return the configured rig plugin class, or the base Rig fallback."""
        from omtk.core import plugin_manager
        if self.default_rig:
            for plugin in plugin_manager.plugin_manager.iter_loaded_plugins_by_type('rigs'):
                if plugin.cls.__name__ == self.default_rig:
                    return plugin.cls
            # NOTE(review): warning assumed to fire only when a default was
            # configured but no plugin matched — confirm against upstream.
            log.warning("Can't find default rig type {0}.".format(self.default_rig))
        # If no match is found, return the base implementation
        from omtk.core import classRig
        return classRig.Rig
# Module-level singleton, populated from disk at import time.
preferences = Preferences()
preferences.load()
|
"""
Provide a Preference class to store the user preferences of the local installation.
"""
import os
import inspect
import json
import logging
log = logging.getLogger('omtk')
CONFIG_FILENAME = 'config.json'  # file name of the persisted preferences


def get_path_preferences():
    """
    :return: The search path of the configuration file.
    """
    # Resolved three directory levels above this module.
    current_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
    config_dir = os.path.abspath(os.path.join(current_dir, '..', '..', '..'))
    config_path = os.path.join(config_dir, CONFIG_FILENAME)
    return config_path
class Preferences(object):
    """User preferences of the local installation, persisted as JSON."""

    def __init__(self):
        # Class name of the preferred rig plugin; None means "use default".
        self.default_rig = None

    def save(self, path=None):
        # Serialize all preference attributes to *path* (default if None).
        if path is None:
            path = get_path_preferences()
        data = self.__dict__
        with open(path, 'w') as fp:
            json.dump(data, fp)

    def load(self, path=None):
        # Restore attributes from *path*; keep defaults if the file is absent.
        if path is None:
            path = get_path_preferences()
        if not path or not os.path.exists(path):
            log.warning("Can't find config file. Using default config.")
            return
        with open(path, 'r') as fp:
            data = json.load(fp)
        self.__dict__.update(data)

    def get_default_rig_class(self):
        # Return the configured rig plugin class, or the base Rig fallback.
        from omtk.core import plugin_manager
        if self.default_rig:
            for plugin in plugin_manager.plugin_manager.iter_loaded_plugins_by_type('rigs'):
                if plugin.cls.__name__ == self.default_rig:
                    return plugin.cls
            # NOTE(review): warning assumed to fire only when a default was
            # configured but not matched — indentation ambiguous; confirm.
            log.warning("Can't find default rig type {0}.".format(self.default_rig))
        # If no match is found, return the base implementation
        from omtk.core import classRig
        return classRig.Rig
# Module-level singleton, populated from disk at import time.
preferences = Preferences()
preferences.load()
|
mit
|
Python
|
cae6e403efdef67af23a3b8a6c80082fa9efe4bd
|
Fix test
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
bluebottle/files/views.py
|
bluebottle/files/views.py
|
import mimetypes
import magic
from django.conf import settings
from django.http import HttpResponse
from rest_framework.exceptions import ValidationError
from rest_framework.parsers import FileUploadParser
from rest_framework.permissions import IsAuthenticated
from rest_framework_json_api.views import AutoPrefetchMixin
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from sorl.thumbnail.shortcuts import get_thumbnail
from bluebottle.bluebottle_drf2.renderers import BluebottleJSONAPIRenderer
from bluebottle.files.serializers import DocumentSerializer, ImageSerializer
from bluebottle.initiatives.models import Initiative
from bluebottle.utils.views import CreateAPIView, RetrieveAPIView
mime = magic.Magic(mime=True)
class FileList(AutoPrefetchMixin, CreateAPIView):
    """JSON-API endpoint for uploading document files (authenticated only)."""
    # NOTE(review): queryset points at Initiative — presumably required by the
    # generic-view machinery rather than used for the upload itself; confirm.
    queryset = Initiative.objects.all()
    serializer_class = DocumentSerializer
    renderer_classes = (BluebottleJSONAPIRenderer, )
    # FileUploadParser: the raw request body is the file content.
    parser_classes = (FileUploadParser,)
    permission_classes = (IsAuthenticated, )
    authentication_classes = (
        JSONWebTokenAuthentication,
    )

    prefetch_for_includes = {
        'owner': ['owner'],
    }

    def perform_create(self, serializer):
        # Stamp the uploading user as the owner of the stored file.
        serializer.save(owner=self.request.user)
class FileContentView(RetrieveAPIView):
    """Serve a resized thumbnail of the object's file field named by ``self.field``."""

    def retrieve(self, *args, **kwargs):
        instance = self.get_object()
        source = getattr(instance, self.field).file
        thumbnail = get_thumbnail(source, self.kwargs['size'])
        guessed_type = mimetypes.guess_type(source.name)[0]
        if settings.DEBUG:
            # Development: stream the thumbnail bytes directly.
            response = HttpResponse(content=thumbnail.read())
        else:
            # Production: delegate delivery to the front-end server
            # (nginx) via the X-Accel-Redirect header.
            response = HttpResponse()
            response['X-Accel-Redirect'] = thumbnail.url
        response['Content-Type'] = guessed_type
        return response
class ImageList(FileList):
    """Image upload endpoint; rejects uploads whose magic bytes disagree
    with the declared Content-Type."""
    queryset = Initiative.objects.all()
    serializer_class = ImageSerializer

    def perform_create(self, serializer):
        upload = self.request.FILES['file']
        detected = mime.from_buffer(upload.read())
        if detected != upload.content_type:
            raise ValidationError('Mime-type does not match Content-Type')
        serializer.save(owner=self.request.user)
|
import mimetypes
import magic
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from rest_framework.parsers import FileUploadParser
from rest_framework.permissions import IsAuthenticated
from rest_framework_json_api.views import AutoPrefetchMixin
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from sorl.thumbnail.shortcuts import get_thumbnail
from bluebottle.bluebottle_drf2.renderers import BluebottleJSONAPIRenderer
from bluebottle.files.serializers import DocumentSerializer, ImageSerializer
from bluebottle.initiatives.models import Initiative
from bluebottle.utils.views import CreateAPIView, RetrieveAPIView
mime = magic.Magic(mime=True)
class FileList(AutoPrefetchMixin, CreateAPIView):
    """JSON-API endpoint for uploading document files (authenticated only)."""
    queryset = Initiative.objects.all()
    serializer_class = DocumentSerializer
    renderer_classes = (BluebottleJSONAPIRenderer, )
    # FileUploadParser: the raw request body is the file content.
    parser_classes = (FileUploadParser,)
    permission_classes = (IsAuthenticated, )
    authentication_classes = (
        JSONWebTokenAuthentication,
    )

    prefetch_for_includes = {
        'owner': ['owner'],
    }

    def perform_create(self, serializer):
        # Stamp the uploading user as the owner of the stored file.
        serializer.save(owner=self.request.user)
class FileContentView(RetrieveAPIView):
    """Serve a resized thumbnail of the object's file field named by ``self.field``."""
    def retrieve(self, *args, **kwargs):
        instance = self.get_object()
        file = getattr(instance, self.field).file
        thumbnail = get_thumbnail(file, self.kwargs['size'])
        content_type = mimetypes.guess_type(file.name)[0]
        if settings.DEBUG:
            # Development: stream the thumbnail bytes directly.
            response = HttpResponse(content=thumbnail.read())
        else:
            # Production: delegate delivery to nginx via X-Accel-Redirect.
            response = HttpResponse()
            response['X-Accel-Redirect'] = thumbnail.url
        response['Content-Type'] = content_type
        return response
class ImageList(FileList):
    """Image upload endpoint; verifies declared Content-Type against magic bytes."""
    queryset = Initiative.objects.all()
    serializer_class = ImageSerializer

    def perform_create(self, serializer):
        uploaded_file = self.request.FILES['file']
        # NOTE(review): in this revision ValidationError comes from
        # django.core.exceptions (see module imports), not DRF — it will
        # surface as a 500 rather than a 400 response; prefer
        # rest_framework.exceptions.ValidationError.
        if not mime.from_buffer(uploaded_file.read()) == uploaded_file.content_type:
            raise ValidationError('Mime-type does not match Content-Type')
        serializer.save(owner=self.request.user)
bsd-3-clause
|
Python
|
ab4351afae1f1a16206cce6801d114b047babf76
|
Update main to use tag and reuse_mesage
|
alvarogzp/telegram-games,alvarogzp/telegram-games,alvarogzp/telegram-games,alvarogzp/telegram-games
|
bot/main.py
|
bot/main.py
|
#!/usr/bin/env python3
import logging
from telegram.ext import Updater, CommandHandler, CallbackQueryHandler, InlineQueryHandler
from game.api.server import start_api_server
from game.chooser import inline_query_game_chooser_handler
from game.launch import callback_query_game_launcher_handler
from tools import config, commands
from tools.logger import Logger
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

updater = Updater(token=config.Key.AUTH_TOKEN.read())
bot = updater.bot
bot.updater = updater  # back-reference so handlers can reach the updater
dispatcher = updater.dispatcher

# Tagged Telegram logger; reuse_message edits one message instead of spamming.
logger = Logger(bot, "START", reuse_message=True)
logger.debug("Starting bot...")

dispatcher.add_handler(CommandHandler("config", commands.config_editor_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CommandHandler("restart", commands.restart_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CallbackQueryHandler(callback_query_game_launcher_handler))
dispatcher.add_handler(InlineQueryHandler(inline_query_game_chooser_handler))

updater.start_polling()
logger.debug("Starting api server...")
start_api_server()
logger.info("Running!")
|
#!/usr/bin/env python3
import logging
from telegram.ext import Updater, CommandHandler, CallbackQueryHandler, InlineQueryHandler
from game.api.server import start_api_server
from game.chooser import inline_query_game_chooser_handler
from game.launch import callback_query_game_launcher_handler
from tools import config, commands
from tools.logger import Logger
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

updater = Updater(token=config.Key.AUTH_TOKEN.read())
bot = updater.bot
bot.updater = updater  # back-reference so handlers can reach the updater
dispatcher = updater.dispatcher

logger = Logger(bot)
logger.debug("Starting bot...")

dispatcher.add_handler(CommandHandler("config", commands.config_editor_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CommandHandler("restart", commands.restart_command, pass_args=True, allow_edited=True))
dispatcher.add_handler(CallbackQueryHandler(callback_query_game_launcher_handler))
dispatcher.add_handler(InlineQueryHandler(inline_query_game_chooser_handler))

updater.start_polling()
logger.debug("Starting api server...")
start_api_server()
logger.info("Running!")
|
apache-2.0
|
Python
|
23a1922afac917b06dbd6772fefc3b8a7c53c5ff
|
fix linespacing in footnotes (part of #3)
|
alerque/greek-reader,jtauber/greek-reader
|
backends.py
|
backends.py
|
class LaTeX:
    """Render a glossed reader as LaTeX: verse markers in the body, with
    per-word headword/parse/gloss information pushed into footnotes."""

    def preamble(self, typeface):
        """Return the document preamble configured for *typeface*."""
        return """
\\documentclass[a4paper,12pt]{{article}}
\\usepackage{{setspace}}
\\usepackage{{fontspec}}
\\usepackage{{dblfnote}}
\\usepackage{{pfnote}}
\\setromanfont{{{typeface}}}
\\linespread{{1.5}}
\\onehalfspacing
\\begin{{document}}
""".format(typeface=typeface)

    def chapter_verse(self, chapter, verse):
        """Bold, large 'chapter.verse' marker."""
        # Bug fix: the source previously contained the invalid escape
        # sequence "\L" in a non-raw string (SyntaxWarning on modern
        # Python); "\\Large" produces the identical output safely.
        return "\\textbf{{\\Large {}.{}}}".format(chapter, verse)

    def verse(self, verse):
        """Bold verse-number marker."""
        return "\\textbf{{{}}}".format(verse)

    def word(self, text, headword=None, parse=None, gloss=None):
        """Return *text*, footnoted with whichever of headword/parse/gloss
        are given; plain *text* when none are."""
        if headword is None and parse is None and gloss is None:
            return text
        else:
            footnote = []
            if headword:
                footnote.append(headword)
            if parse:
                footnote.append("\\textendash\\ {}".format(parse))
            if gloss:
                footnote.append("\\textendash\\ \\textit{{{}}}".format(gloss))
            return "{}\\footnote{{{}}}".format(text, " ".join(footnote))

    def comment(self, text):
        """Emit *text* as a LaTeX comment line."""
        return "% {}".format(text)

    def postamble(self):
        """Close the document environment."""
        return "\\end{document}"
|
class LaTeX:
    """Render a glossed reader as LaTeX: verse markers in the body, with
    per-word headword/parse/gloss information pushed into footnotes."""

    def preamble(self, typeface):
        """Return the document preamble configured for *typeface*."""
        return """
\\documentclass[a4paper,12pt]{{article}}
\\usepackage{{fontspec}}
\\usepackage{{dblfnote}}
\\usepackage{{pfnote}}
\\setromanfont{{{typeface}}}
\\linespread{{1.5}}
\\spaceskip 0.5em
\\begin{{document}}
""".format(typeface=typeface)

    def chapter_verse(self, chapter, verse):
        """Bold, large 'chapter.verse' marker."""
        # Bug fix: the source previously contained the invalid escape
        # sequence "\L" in a non-raw string (SyntaxWarning on modern
        # Python); "\\Large" produces the identical output safely.
        return "\\textbf{{\\Large {}.{}}}".format(chapter, verse)

    def verse(self, verse):
        """Bold verse-number marker."""
        return "\\textbf{{{}}}".format(verse)

    def word(self, text, headword=None, parse=None, gloss=None):
        """Return *text*, footnoted with whichever of headword/parse/gloss
        are given; plain *text* when none are."""
        if headword is None and parse is None and gloss is None:
            return text
        else:
            footnote = []
            if headword:
                footnote.append(headword)
            if parse:
                footnote.append("\\textendash\\ {}".format(parse))
            if gloss:
                footnote.append("\\textendash\\ \\textit{{{}}}".format(gloss))
            return "{}\\footnote{{{}}}".format(text, " ".join(footnote))

    def comment(self, text):
        """Emit *text* as a LaTeX comment line."""
        return "% {}".format(text)

    def postamble(self):
        """Close the document environment."""
        return "\\end{document}"
|
mit
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.