commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
7252af9442d82c42a881d5dfbb4db470b28fcd3a
|
congress/datasources/settings.py
|
congress/datasources/settings.py
|
#!/usr/bin/env python
# Copyright (c) 2014 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
OS_USERNAME = "demo"
OS_PASSWORD = "password"
# Change this to keystone URL specific to your installation
OS_AUTH_URL = "http://10.37.2.84:5000/v2.0"
# 35357
OS_TENANT_NAME = "demo"
|
#!/usr/bin/env python
# Copyright (c) 2014 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
OS_USERNAME = "demo"
OS_PASSWORD = "password"
# Change this to keystone URL specific to your installation
OS_AUTH_URL = "http://127.0.0.1:5000/v2.0"
# 35357
OS_TENANT_NAME = "demo"
|
Change hardcoded ip to localhost
|
Change hardcoded ip to localhost
This patch just changes the hardcoded ip to local host. We should really
make this configurable through congress.conf but this will probably be
fine for now.
Change-Id: If4980beb73e32e458d9b7dfafecb016f9c2f740c
|
Python
|
apache-2.0
|
ramineni/my_congress,openstack/congress,ekcs/congress,openstack/congress,ramineni/my_congress,ekcs/congress,ekcs/congress,ekcs/congress,ramineni/my_congress,ramineni/my_congress
|
7b4fad8b4f08b986b184799817cf4b46958b9e85
|
tracker/src/main/freebayes-regenotype-workflow.py
|
tracker/src/main/freebayes-regenotype-workflow.py
|
from airflow import DAG
from airflow.operators import BashOperator, PythonOperator
from datetime import datetime, timedelta
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2015, 6, 1),
'email': ['[email protected]'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
donor_index = "23"
sample_id = "f82d213f-bc96-5b1d-e040-11ac0c486880"
dag = DAG("freebayes-regenotype", default_args=default_args)
t1 = BashOperator(
task_id = "reserve_sample",
bash_command = "su - postgres -c \"python /tmp/update-sample-status.py " + donor_index + " " + sample_id + "1\"",
dag = dag)
|
from airflow import DAG
from airflow.operators import BashOperator, PythonOperator
from datetime import datetime, timedelta
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2015, 6, 1),
'email': ['[email protected]'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
donor_index = "23"
sample_id = "f82d213f-bc96-5b1d-e040-11ac0c486880"
dag = DAG("freebayes-regenotype", default_args=default_args)
t1 = BashOperator(
task_id = "reserve_sample",
bash_command = "su - postgres -c \"python /tmp/update-sample-status.py " + donor_index + " " + sample_id + " 1\"",
dag = dag)
|
Fix parameter spacing in statup update script invocation from workflow.
|
Fix parameter spacing in statup update script invocation from workflow.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
fc9296957122fe7499711c0b0801c96e286b6725
|
turnstile/utils.py
|
turnstile/utils.py
|
import sys
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition(':')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ImportError, ValueError, AttributeError) as exc:
# Convert it into an import error
raise ImportError("Failed to import %s: %s" % (import_str, exc))
|
import sys
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition(':')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ImportError, ValueError, AttributeError) as exc:
# Convert it into an import error
raise ImportError("Failed to import %s: %s" % (import_str, exc))
class ignore_except(object):
"""Context manager to ignore all exceptions."""
def __enter__(self):
"""Entry does nothing."""
pass
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Return True to mark the exception as handled."""
return True
|
Add a context manager to ignore exceptions
|
Add a context manager to ignore exceptions
|
Python
|
apache-2.0
|
klmitch/turnstile
|
d18d4f72ed24177143a9dbcf74fc1c001235ded5
|
batch_effect.py
|
batch_effect.py
|
#!/usr/bin/env python
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="id attribute of object to manipulate")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
#!/usr/bin/env python
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="ID attributes of objects to manipulate. Passed to all extensions.")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
Clarify --id parameter help text
|
Clarify --id parameter help text
|
Python
|
mit
|
jturner314/inkscape-batch-effect
|
3672d178ac4f9a3f9308acf1e43e9eea663fe30a
|
OnlineParticipationDataset/pipelines.py
|
OnlineParticipationDataset/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from datetime import datetime
from scrapy.exporters import JsonLinesItemExporter
class OnlineparticipationdatasetPipeline(object):
def process_item(self, item, spider):
return item
class JsonWriterPipeline(object):
def open_spider(self, spider):
self.file = open("downloads/items_"+spider.name+".json", 'wb')
self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json,os
from datetime import datetime
from scrapy.exporters import JsonLinesItemExporter
path = "downloads"
class OnlineparticipationdatasetPipeline(object):
def process_item(self, item, spider):
return item
class JsonWriterPipeline(object):
def open_spider(self, spider):
if not os.path.isdir(path):
os.makedirs(path)
self.file = open("downloads/items_"+spider.name+".json", 'wb')
self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
|
Create path if it doesnt exists
|
Create path if it doesnt exists
|
Python
|
mit
|
Liebeck/OnlineParticipationDatasets
|
7169f578892f9a72c2c14baa9bfd1ce2b7f9b9ec
|
fastats/core/decorator.py
|
fastats/core/decorator.py
|
from contextlib import contextmanager
from functools import wraps
from fastats.core.ast_transforms.convert_to_jit import convert_to_jit
from fastats.core.ast_transforms.processor import AstProcessor
@contextmanager
def code_transform(func, replaced):
try:
yield func
finally:
for k, v in replaced.items():
func.__globals__[k] = v
replaced.clear()
def fs(func):
# The initial function *must* be jittable,
# else we can't do anything.
_func = func
replaced = {}
@wraps(func)
def fs_wrapper(*args, **kwargs):
return_callable = kwargs.pop('return_callable', None)
if not kwargs:
return _func(*args)
with code_transform(_func, replaced) as _f:
# TODO : remove fastats keywords such as 'debug'
# before passing into AstProcessor
new_funcs = {}
for v in kwargs.values():
if v.__name__ in kwargs:
continue
new_funcs[v.__name__] = convert_to_jit(v)
kwargs = {k: convert_to_jit(v) for k, v in kwargs.items()}
processor = AstProcessor(_f, kwargs, replaced, new_funcs)
proc = processor.process()
if return_callable:
return convert_to_jit(proc)
return convert_to_jit(proc)(*args)
return fs_wrapper
|
from contextlib import contextmanager
from functools import wraps
from fastats.core.ast_transforms.convert_to_jit import convert_to_jit
from fastats.core.ast_transforms.processor import AstProcessor
@contextmanager
def code_transform(func, replaced):
try:
yield func
finally:
for k, v in replaced.items():
func.__globals__[k] = v
replaced.clear()
def fs(func):
# The initial function *must* be jittable,
# else we can't do anything.
_func = func
replaced = {}
@wraps(func)
def fs_wrapper(*args, **kwargs):
return_callable = kwargs.pop('return_callable', None)
if not kwargs:
return _func(*args)
with code_transform(_func, replaced) as _f:
# TODO : remove fastats keywords such as 'debug'
# before passing into AstProcessor
new_funcs = {v.__name__: convert_to_jit(v) for v in kwargs.values()
if v.__name__ not in kwargs}
kwargs = {k: convert_to_jit(v) for k, v in kwargs.items()}
processor = AstProcessor(_f, kwargs, replaced, new_funcs)
proc = processor.process()
if return_callable:
return convert_to_jit(proc)
return convert_to_jit(proc)(*args)
return fs_wrapper
|
Remove a for loop in favour of a dict comprehension
|
Remove a for loop in favour of a dict comprehension
|
Python
|
mit
|
fastats/fastats,dwillmer/fastats
|
170fa50af3dc5809713ea274b2b6bb1537c22941
|
bm/app/admin.py
|
bm/app/admin.py
|
from django.contrib import admin
from app.models import Category, Bookmark, Trash
class CategoryAdmin(admin.ModelAdmin):
list_display = ['name', 'user', 'row_number', 'column_number', 'progress_bar_color']
list_filter = ['user']
class BookmarkAdmin(admin.ModelAdmin):
list_display = ['category', 'name', 'row_number', 'glyphicon', 'id']
list_filter = ['category']
class TrashAdmin(admin.ModelAdmin):
list_display = ['category', 'name', 'glyphicon', 'id']
list_filter = ['category']
admin.site.register(Category, CategoryAdmin)
admin.site.register(Bookmark, BookmarkAdmin)
admin.site.register(Trash, TrashAdmin)
|
from django.contrib import admin
from app.models import Category, Bookmark, Trash
class CategoryAdmin(admin.ModelAdmin):
list_display = ['name', 'user', 'row_number', 'column_number', 'progress_bar_color', 'id']
list_filter = ['user']
class BookmarkAdmin(admin.ModelAdmin):
list_display = ['category', 'name', 'row_number', 'glyphicon', 'id']
list_filter = ['category']
class TrashAdmin(admin.ModelAdmin):
list_display = ['category', 'name', 'glyphicon', 'id']
list_filter = ['category']
admin.site.register(Category, CategoryAdmin)
admin.site.register(Bookmark, BookmarkAdmin)
admin.site.register(Trash, TrashAdmin)
|
Include id in CategoryAdmin for easier debugging
|
Include id in CategoryAdmin for easier debugging
|
Python
|
mit
|
GSC-RNSIT/bookmark-manager,rohithpr/bookmark-manager,rohithpr/bookmark-manager,GSC-RNSIT/bookmark-manager
|
4fd6a98a887a59dabcc41361a6ba2791393d875e
|
test/tests/python-pip-requests-ssl/container.py
|
test/tests/python-pip-requests-ssl/container.py
|
import pip
pip.main(['install', '-q', 'requests'])
import requests
r = requests.get('https://google.com')
assert(r.status_code == 200)
|
import subprocess, sys
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests'])
import requests
r = requests.get('https://google.com')
assert(r.status_code == 200)
|
Fix "python-pip-requests-ssl" test to no longer "import pip"
|
Fix "python-pip-requests-ssl" test to no longer "import pip"
(https://blog.python.org/2018/04/pip-10-has-been-released.html)
> In addition, the previously announced reorganisation of pip's internals has now taken place. Unless you are the author of code that imports the pip module (or a user of such code), this change will not affect you. If you are affected, please report the issue to the author of the offending code (refer them to https://mail.python.org/pipermail/distutils-sig/2017-October/031642.html for the details of the announcement).
|
Python
|
apache-2.0
|
docker-library/official-images,docker-flink/official-images,docker-library/official-images,31z4/official-images,davidl-zend/official-images,jperrin/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,31z4/official-images,dinogun/official-images,docker-flink/official-images,neo-technology/docker-official-images,docker-solr/official-images,31z4/official-images,chorrell/official-images,dinogun/official-images,docker-solr/official-images,docker-library/official-images,infosiftr/stackbrew,davidl-zend/official-images,docker-library/official-images,neo-technology/docker-official-images,docker-flink/official-images,infosiftr/stackbrew,docker-solr/official-images,robfrank/official-images,docker-flink/official-images,thresheek/official-images,31z4/official-images,robfrank/official-images,robfrank/official-images,infosiftr/stackbrew,infosiftr/stackbrew,docker-flink/official-images,chorrell/official-images,chorrell/official-images,docker-solr/official-images,docker-flink/official-images,neo-technology/docker-official-images,thresheek/official-images,docker-library/official-images,docker-solr/official-images,jperrin/official-images,dinogun/official-images,thresheek/official-images,chorrell/official-images,davidl-zend/official-images,neo-technology/docker-official-images,31z4/official-images,docker-flink/official-images,davidl-zend/official-images,jperrin/official-images,thresheek/official-images,thresheek/official-images,neo-technology/docker-official-images,docker-library/official-images,chorrell/official-images,robfrank/official-images,31z4/official-images,chorrell/official-images,thresheek/official-images,docker-solr/official-images,neo-technology/docker-official-images,31z4/official-images,robfrank/official-images,robfrank/official-images,davidl-zend/official-images,neo-technology/docker-official-images,infosiftr/stackbrew,docker-library/official-images,thresheek/official-images,neo-technology/docker-official-images,jperrin/official-images,neo-technology/docker-official-images,docker-library/official-images,docker-library/official-images,docker-library/official-images,infosiftr/stackbrew,chorrell/official-images,dinogun/official-images,31z4/official-images,chorrell/official-images,robfrank/official-images,docker-flink/official-images,jperrin/official-images,thresheek/official-images,docker-solr/official-images,davidl-zend/official-images,docker-flink/official-images,docker-library/official-images,dinogun/official-images,31z4/official-images,davidl-zend/official-images,davidl-zend/official-images,docker-solr/official-images,jperrin/official-images,jperrin/official-images,davidl-zend/official-images,jperrin/official-images,robfrank/official-images,chorrell/official-images,docker-flink/official-images,thresheek/official-images,31z4/official-images,docker-flink/official-images,jperrin/official-images,neo-technology/docker-official-images,docker-library/official-images,jperrin/official-images,infosiftr/stackbrew,dinogun/official-images,robfrank/official-images,jperrin/official-images,robfrank/official-images,robfrank/official-images,dinogun/official-images,31z4/official-images,31z4/official-images,docker-flink/official-images,chorrell/official-images,docker-library/official-images,davidl-zend/official-images,dinogun/official-images,infosiftr/stackbrew,docker-solr/official-images,dinogun/official-images,thresheek/official-images,docker-flink/official-images,jperrin/official-images,robfrank/official-images,docker-solr/official-images,thresheek/official-images,docker-solr/official-images,neo-technology/docker-official-images,thresheek/official-images,docker-solr/official-images,docker-solr/official-images,docker-library/official-images,robfrank/official-images,dinogun/official-images,jperrin/official-images,chorrell/official-images,infosiftr/stackbrew,infosiftr/stackbrew,dinogun/official-images,davidl-zend/official-images,31z4/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,31z4/official-images,dinogun/official-images,chorrell/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,davidl-zend/official-images,docker-solr/official-images,thresheek/official-images,davidl-zend/official-images,dinogun/official-images,chorrell/official-images,thresheek/official-images,infosiftr/stackbrew
|
e75201bb34f7e930c04e393ff630fdbc1549fd77
|
core/admin/migrations/versions/8f9ea78776f4_.py
|
core/admin/migrations/versions/8f9ea78776f4_.py
|
"""empty message
Revision ID: 8f9ea78776f4
Revises: 3b7eee912b41
Create Date: 2022-03-11 13:53:08.996055
"""
# revision identifiers, used by Alembic.
revision = '8f9ea78776f4'
down_revision = '3b7eee912b41'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('spam_mark_as_read', sa.Boolean(), nullable=False))
def downgrade():
op.drop_column('user', 'spam_mark_as_read')
|
"""empty message
Revision ID: 8f9ea78776f4
Revises: 3b7eee912b41
Create Date: 2022-03-11 13:53:08.996055
"""
# revision identifiers, used by Alembic.
revision = '8f9ea78776f4'
down_revision = '3b7eee912b41'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('spam_mark_as_read', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
op.drop_column('user', 'spam_mark_as_read')
|
Add default to column spam_mark_as_read
|
Add default to column spam_mark_as_read
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
9c44a1c97b1ba60fd7340c15dd82ebe1ceb6d8c3
|
icekit/project/settings/_production.py
|
icekit/project/settings/_production.py
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
MIDDLEWARE_CLASSES = (
('django.middleware.cache.UpdateCacheMiddleware', ) +
MIDDLEWARE_CLASSES +
('django.middleware.cache.FetchFromCacheMiddleware', )
)
TEMPLATES_DJANGO['OPTIONS']['loaders'] = [
(
'django.template.loaders.cached.Loader',
TEMPLATES_DJANGO['OPTIONS']['loaders'],
),
]
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
AWS_STORAGE_BUCKET_NAME = SITE_SLUG
ENABLE_S3_MEDIA = True
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
AWS_STORAGE_BUCKET_NAME = SITE_SLUG
ENABLE_S3_MEDIA = True
|
Disable cache middleware and template loader by default. Avoid premature optimisation.
|
Disable cache middleware and template loader by default. Avoid premature optimisation.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
345ac9264fbfab5860835b544d2c8951821fe522
|
mptt_urls/__init__.py
|
mptt_urls/__init__.py
|
# coding: utf-8
from django.utils.module_loading import import_string
def _load(module):
return import_string(module) if isinstance(module, str) else module
class view():
def __init__(self, model, view, slug_field):
self.model = _load(model)
self.view = _load(view)
self.slug_field = slug_field
# define 'get_path' method for model
self.model.get_path = lambda instance: '/'.join([getattr(item, slug_field) for item in instance.get_ancestors(include_self=True)])
def __call__(self, *args, **kwargs):
if 'path' not in kwargs:
raise ValueError('Path was not captured! Please capture it in your urlconf. Example: url(r\'^gallery/(?P<path>.*)\', mptt_urls.view(...), ...)')
instance = None # actual instance the path is pointing to (None by default)
path = kwargs['path']
instance_slug = path.split('/')[-1] # slug of the instance
if instance_slug:
candidates = self.model.objects.filter(**{self.slug_field: instance_slug}) # candidates to be the instance
for candidate in candidates:
# here we compare each candidate's path to the path passed to this view
if candidate.get_path() == path:
instance = candidate
break
kwargs['instance'] = instance
return self.view(*args, **kwargs)
|
# coding: utf-8
from django.utils.module_loading import import_string
def _load(module):
return import_string(module) if isinstance(module, str) else module
class view():
def __init__(self, model, view, slug_field):
self.model = _load(model)
self.view = _load(view)
self.slug_field = slug_field
# define 'get_path' method for model
self.model.get_path = lambda instance: '/'.join([getattr(item, slug_field) for item in instance.get_ancestors(include_self=True)]) + '/'
def __call__(self, *args, **kwargs):
if 'path' not in kwargs:
raise ValueError('Path was not captured! Please capture it in your urlconf. Example: url(r\'^gallery/(?P<path>.*)\', mptt_urls.view(...), ...)')
instance = None # actual instance the path is pointing to (None by default)
path = kwargs['path']
instance_slug = path.split('/')[-2] # slug of the instance
if instance_slug:
candidates = self.model.objects.filter(**{self.slug_field: instance_slug}) # candidates to be the instance
for candidate in candidates:
# here we compare each candidate's path to the path passed to this view
if candidate.get_path() == path:
instance = candidate
break
kwargs['instance'] = instance
return self.view(*args, **kwargs)
|
Add trailing slash to forward and reverse url resolutions
|
Add trailing slash to forward and reverse url resolutions
|
Python
|
mit
|
c0ntribut0r/django-mptt-urls,c0ntribut0r/django-mptt-urls
|
a244623642cdf26bd6615cdc7ff2540c9361d10d
|
tmapi/models/typed.py
|
tmapi/models/typed.py
|
from django.db import models
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
self.type = construct_type
self.save()
|
from django.db import models
from tmapi.exceptions import ModelConstraintException
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
if construct_type is None:
raise ModelConstraintException
self.type = construct_type
self.save()
|
Raise an exception when setting a construct's type to None.
|
Raise an exception when setting a construct's type to None.
|
Python
|
apache-2.0
|
ajenhl/django-tmapi
|
a4931218fbb00d646dbc8de6f8861e4647ef7ab4
|
lib/rapidsms/tests/test_backend_irc.py
|
lib/rapidsms/tests/test_backend_irc.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
|
Rename test class (sloppy cut n' paste job)
|
Rename test class (sloppy cut n' paste job)
|
Python
|
bsd-3-clause
|
catalpainternational/rapidsms,caktus/rapidsms,ken-muturi/rapidsms,rapidsms/rapidsms-core-dev,catalpainternational/rapidsms,caktus/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,rapidsms/rapidsms-core-dev,lsgunth/rapidsms,dimagi/rapidsms,ken-muturi/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,peterayeni/rapidsms,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,dimagi/rapidsms-core-dev,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,ken-muturi/rapidsms,unicefuganda/edtrac,dimagi/rapidsms-core-dev,eHealthAfrica/rapidsms,dimagi/rapidsms,catalpainternational/rapidsms,caktus/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,lsgunth/rapidsms
|
865651b0d23274d0dcbd9e3123ea9497a06172cf
|
docker_scripts/lib/common.py
|
docker_scripts/lib/common.py
|
# -*- coding: utf-8 -*-
import docker
import os
import sys
import requests
DEFAULT_TIMEOUT_SECONDS = 600
def docker_client():
# Default timeout 10 minutes
try:
timeout = int(os.getenv('DOCKER_TIMEOUT', 600))
except ValueError as e:
print("Provided timeout value: %s cannot be parsed as integer, exiting." %
os.getenv('DOCKER_TIMEOUT'))
sys.exit(1)
if not timeout > 0:
print(
"Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout)
sys.exit(1)
# Default base url for the connection
base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock')
try:
client = docker.Client(base_url=base_url, timeout=timeout)
except docker.errors.DockerException as e:
print("Error while creating the Docker client: %s" % e)
print(
"Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
if client and valid_docker_connection(client):
return client
else:
print(
"Could not connect to the Docker daemon, please make sure the Docker daemon is running.")
if os.environ.get('DOCKER_CONNECTION'):
print(
"If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
def valid_docker_connection(client):
try:
return client.ping()
except requests.exceptions.ConnectionError:
return False
|
# -*- coding: utf-8 -*-
import docker
import os
import sys
import requests
DEFAULT_TIMEOUT_SECONDS = 600
def docker_client():
# Default timeout 10 minutes
try:
timeout = int(os.getenv('DOCKER_TIMEOUT', 600))
except ValueError as e:
print("Provided timeout value: %s cannot be parsed as integer, exiting." %
os.getenv('DOCKER_TIMEOUT'))
sys.exit(1)
if not timeout > 0:
print(
"Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout)
sys.exit(1)
# Default base url for the connection
base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock')
try:
client = docker.AutoVersionClient(base_url=base_url, timeout=timeout)
except docker.errors.DockerException as e:
print("Error while creating the Docker client: %s" % e)
print(
"Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
if client and valid_docker_connection(client):
return client
else:
print(
"Could not connect to the Docker daemon, please make sure the Docker daemon is running.")
if os.environ.get('DOCKER_CONNECTION'):
print(
"If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
def valid_docker_connection(client):
try:
return client.ping()
except requests.exceptions.ConnectionError:
return False
|
Use AutoVersionClient to fix client incompatibity issues
|
Use AutoVersionClient to fix client incompatibity issues
Fixes #35
|
Python
|
mit
|
lichia/docker-scripts,jpopelka/docker-scripts,goldmann/docker-scripts,goldmann/docker-squash,TomasTomecek/docker-scripts
|
b888e6e6fac1a8dca0c8b64134de0380f9c5096b
|
mopidy_beets/__init__.py
|
mopidy_beets/__init__.py
|
from __future__ import unicode_literals
import os
from mopidy import ext, config
from mopidy.exceptions import ExtensionError
__version__ = '1.0.4'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def validate_environment(self):
try:
import requests # noqa
except ImportError as e:
raise ExtensionError('Library requests not found', e)
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
|
from __future__ import unicode_literals
import os
from mopidy import ext, config
__version__ = '1.0.4'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
|
Remove env check as Mopidy checks deps automatically
|
ext: Remove env check as Mopidy checks deps automatically
|
Python
|
mit
|
mopidy/mopidy-beets
|
9d7b39708a2abd953aedf6c211fe242d86caed78
|
pymacaroons/field_encryptors/secret_box_encryptor.py
|
pymacaroons/field_encryptors/secret_box_encryptor.py
|
from base64 import standard_b64encode, standard_b64decode
import nacl.bindings
import nacl.utils
from nacl.secret import SecretBox
from pymacaroons.field_encryptors.base_field_encryptor import (
BaseFieldEncryptor
)
from pymacaroons.utils import (
truncate_or_pad, convert_to_bytes, convert_to_string
)
class SecretBoxEncryptor(BaseFieldEncryptor):
def __init__(self, signifier=None, nonce=None):
super(SecretBoxEncryptor, self).__init__(
signifier=signifier or 'sbe::'
)
self.nonce = (nonce or
nacl.utils.random(nacl.bindings.crypto_secretbox_NONCEBYTES))
def encrypt(self, signature, field_data):
encrypt_key = truncate_or_pad(signature)
box = SecretBox(key=encrypt_key)
encrypted = box.encrypt(convert_to_bytes(field_data), nonce=self.nonce)
return self._signifier + standard_b64encode(encrypted)
def decrypt(self, signature, field_data):
key = truncate_or_pad(signature)
box = SecretBox(key=key)
encoded = convert_to_bytes(field_data[len(self.signifier):])
decrypted = box.decrypt(standard_b64decode(encoded))
return convert_to_string(decrypted)
|
from base64 import standard_b64encode, standard_b64decode
import nacl.bindings
import nacl.utils
from nacl.secret import SecretBox
from pymacaroons.field_encryptors.base_field_encryptor import (
BaseFieldEncryptor
)
from pymacaroons.utils import (
truncate_or_pad, convert_to_bytes, convert_to_string
)
class SecretBoxEncryptor(BaseFieldEncryptor):
def __init__(self, signifier=None, nonce=None):
super(SecretBoxEncryptor, self).__init__(
signifier=signifier or 'sbe::'
)
self.nonce = nonce or nacl.utils.random(
nacl.bindings.crypto_secretbox_NONCEBYTES
)
def encrypt(self, signature, field_data):
encrypt_key = truncate_or_pad(signature)
box = SecretBox(key=encrypt_key)
encrypted = box.encrypt(convert_to_bytes(field_data), nonce=self.nonce)
return self._signifier + standard_b64encode(encrypted)
def decrypt(self, signature, field_data):
key = truncate_or_pad(signature)
box = SecretBox(key=key)
encoded = convert_to_bytes(field_data[len(self.signifier):])
decrypted = box.decrypt(standard_b64decode(encoded))
return convert_to_string(decrypted)
|
Break how flake8 wants me to break
|
Break how flake8 wants me to break
|
Python
|
mit
|
matrix-org/pymacaroons,matrix-org/pymacaroons
|
158987eebbcd2d58270cf55cc42aa6e2e5738390
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._error import NullNameError
from ._error import InvalidCharError
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
Add imports for error classes
|
Add imports for error classes
|
Python
|
mit
|
thombashi/pathvalidate
|
b916f1e3ad294c780c782e93222f018fa57ee981
|
apps/bluebottle_utils/models.py
|
apps/bluebottle_utils/models.py
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField()
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
Allow country to be empty in Address model.
|
Allow country to be empty in Address model.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
bc8e064e41d43a4579c8111f1480b55e660ca186
|
pep8ify/fixes/fix_tabs.py
|
pep8ify/fixes/fix_tabs.py
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
Use globally define number of spaces.
|
Clean-up: Use globally define number of spaces.
|
Python
|
apache-2.0
|
spulec/pep8ify
|
64533459ffa6c33d4708bbc6ff0de9f295ef771d
|
permissions/tests/base.py
|
permissions/tests/base.py
|
from django.test import TestCase as BaseTestCase
from django.test import RequestFactory
from permissions import PermissionsRegistry as BasePermissionsRegistry
class PermissionsRegistry(BasePermissionsRegistry):
def _get_model_instance(self, model, **kwargs):
return model(**kwargs)
class Model:
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TestCase(BaseTestCase):
def setUp(self):
self.registry = PermissionsRegistry()
self.request_factory = RequestFactory()
|
from django.test import TestCase as BaseTestCase
from django.test import RequestFactory
from permissions import PermissionsRegistry as BasePermissionsRegistry
class PermissionsRegistry(BasePermissionsRegistry):
def _get_model_instance(self, model, **kwargs):
return model(**kwargs)
class Model:
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class User(Model):
def __init__(self, **kwargs):
kwargs.setdefault('permissions', [])
super(User, self).__init__(**kwargs)
def is_anonymous(self):
return False
class AnonymousUser(User):
def is_anonymous(self):
return True
class TestCase(BaseTestCase):
def setUp(self):
self.registry = PermissionsRegistry()
self.request_factory = RequestFactory()
|
Add mock User and Anonymous user classes for testing
|
Add mock User and Anonymous user classes for testing
|
Python
|
mit
|
PSU-OIT-ARC/django-perms,wylee/django-perms
|
80264289fa7fc4085069df011cd8499c64767dc5
|
elang/tools/pre-commit.py
|
elang/tools/pre-commit.py
|
#
# Pre-commit check
#
import pipes
import sys
def main():
git_pipe = pipes.Template()
git_pipe.prepend('git diff --cached --name-status', '.-')
diff_output = git_pipe.open('files', 'r')
lines = diff_output.readlines()
exit_code = 0
for line in lines:
line = line.rstrip();
if len(line) == 0:
continue
words = line.split()
if words[0] == 'D':
continue
cpplint_pipe = pipes.Template()
command_line = 'cpplint %(name)s 2>&1' % {'name': words[1]}
cpplint_pipe.prepend(command_line, '.-');
outputs = cpplint_pipe.open('files', 'r').readlines()
if outputs[len(outputs) - 1] == 'Total errors found: 0\n':
continue
exit_code = 1
for output in outputs:
output = output.rstrip()
print output
diff_output.close()
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
#
# Pre-commit check
#
import pipes
import sys
EXCLUDE_DIRS = [
'base/',
'build/',
'testing/',
'third_party/',
]
def shouldCheck(path):
for dir in EXCLUDE_DIRS:
if path.startswith(dir):
return False
return True
def main():
git_pipe = pipes.Template()
git_pipe.prepend('git diff --cached --name-status', '.-')
diff_output = git_pipe.open('files', 'r')
lines = diff_output.readlines()
exit_code = 0
for line in lines:
line = line.rstrip();
if len(line) == 0:
continue
words = line.split()
if words[0] == 'D':
continue
cpplint_pipe = pipes.Template()
if not shouldCheck(words[1]):
continue
command_line = 'cpplint %(name)s 2>&1' % {'name': words[1]}
cpplint_pipe.prepend(command_line, '.-');
outputs = cpplint_pipe.open('files', 'r').readlines()
if outputs[len(outputs) - 1] == 'Total errors found: 0\n':
continue
exit_code = 1
for output in outputs:
output = output.rstrip()
print output
diff_output.close()
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
Exclude base/, build/, third_party/ and tools/ from cpplint.
|
tools: Exclude base/, build/, third_party/ and tools/ from cpplint.
|
Python
|
apache-2.0
|
eval1749/elang,eval1749/elang,eval1749/elang,eval1749/elang,eval1749/elang
|
cefaa6c8f0fd3c26be2bf6fba75d01b2f5095a34
|
strapmin/widgets.py
|
strapmin/widgets.py
|
from django import forms
from django.forms.util import flatatt
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
class RichTextEditorWidget(forms.Textarea):
class Media:
js = ('admin/js/ckeditor/ckeditor.js',
'admin/js/ckeditor/jquery-ckeditor.js')
def render(self, name, value, attrs={}):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('ckeditor/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': force_text(value),
'id': final_attrs['id'],
}))
|
from django import forms
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
try:
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt
class RichTextEditorWidget(forms.Textarea):
class Media:
js = ('admin/js/ckeditor/ckeditor.js',
'admin/js/ckeditor/jquery-ckeditor.js')
def render(self, name, value, attrs={}):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('ckeditor/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': force_text(value),
'id': final_attrs['id'],
}))
|
Fix flatatt import path for Django 1.9
|
Fix flatatt import path for Django 1.9
|
Python
|
bsd-2-clause
|
knyghty/strapmin,knyghty/strapmin,knyghty/strapmin
|
61909686132143db127deb31d582e44f5b198729
|
project/gym_app/admin.py
|
project/gym_app/admin.py
|
from django.contrib import admin
from gym_app.models import Task, Athlete, PersonalTrainer, BodyScreening, WorkoutPlan, Tracker, MailBox, Message
from django.contrib.auth.models import Permission
# Register your models here.
admin.site.register(Task)
admin.site.register(Permission)
admin.site.register(Athlete)
admin.site.register(PersonalTrainer)
admin.site.register(BodyScreening)
admin.site.register(WorkoutPlan)
admin.site.register(Tracker)
admin.site.register(MailBox)
admin.site.register(Message)
|
from django.contrib import admin
from gym_app.models import Task
from django.contrib.auth.models import Permission
# Register your models here.
admin.site.register(Task)
admin.site.register(Permission)
|
Remove permission to edit tables
|
Remove permission to edit tables
|
Python
|
mit
|
brunoliveira8/managyment,brunoliveira8/managyment,brunoliveira8/managyment
|
752cc60f8c41d098ff8772ed400ac3ae209d9e0f
|
lib/exp/featx/__init__.py
|
lib/exp/featx/__init__.py
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.prepare import Prepare
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
pp = Prepare(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(pp.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load("s_{:03d}_kps".format(sid))
sd = self.load("s_{:03d}_des".format(sid))
fk = self.load("f_{:03d}_kps".format(fid))
fd = self.load("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
def __init__(self, root, name):
Feats.__init__(self, root, name)
def get_slide_feats(self):
ss = Slider(self.root, self.name)
imgl = ss.get_slides(None, gray=True, resize=True)
self.feats(imgl, prefix="s")
def get_frame_feats(self):
rr = Reducer(self.root, self.name)
vv = Video(self.root, self.name)
imgl = vv.get_frames(rr.frame_ids(), gray=True)
self.feats(imgl, prefix="f")
def get_feats_pair(self, sid, fid):
"""
Get features by given `slide`, `frame` pairs
"""
sk = self.load("s_{:03d}_kps".format(sid))
sd = self.load("s_{:03d}_des".format(sid))
fk = self.load("f_{:03d}_kps".format(fid))
fd = self.load("f_{:03d}_des".format(fid))
return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
Use `reducer` to replace `prepare`
|
Use `reducer` to replace `prepare`
|
Python
|
agpl-3.0
|
speed-of-light/pyslider
|
159e1e210480c0037b3a550e70b77dbfce34bbca
|
ptyme/ptyme.py
|
ptyme/ptyme.py
|
#!/bin/env python3
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
print(time)
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
house = 0
time = time[0].split('m')
print(time)
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
time = time[0].split('s')
if time:
seconds = time[0]
else:
print("commands go here")
if __name__ == "__main__":
main()
|
#!/bin/env python3
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
# go through via characters
def parseArgsChar():
pass()
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
else:
print("commands go here")
if __name__ == "__main__":
main()
|
Update current parse, add alt parse frame
|
Update current parse, add alt parse frame
|
Python
|
mit
|
jabocg/ptyme
|
dff5a8650c5d7ed5b5bab12b36ac5d61541dbb4e
|
python/day3.py
|
python/day3.py
|
import sys
def read_sides(line):
return map(int, line.split())
def valid_triangle((a, b, c)):
return a + b > c and b + c > a and a + c > b
if __name__ == '__main__':
print len(filter(valid_triangle, map(read_sides, sys.stdin)))
|
import sys
import itertools
def grouper(iterable, n, fillvalue=None):
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def transpose(xs):
return zip(*xs)
def read_sides(line):
return map(int, line.split())
def valid_triangle((a, b, c)):
return a + b > c and b + c > a and a + c > b
if __name__ == '__main__':
print len(filter(valid_triangle,
grouper(
itertools.chain.from_iterable(
transpose(map(read_sides, sys.stdin))), 3)))
|
Implement part 2 of day 3 Python solution.
|
Implement part 2 of day 3 Python solution.
|
Python
|
mit
|
jonathanj/advent2016
|
65f0ef0ae523059db6ecf3856c9e2695cad81d38
|
montage/__init__.py
|
montage/__init__.py
|
from commands import *
from wrappers import *
__version__ = '0.9.1'
|
from commands import *
from wrappers import *
__version__ = '0.9.1'
# Check whether Montage is installed
installed = False
for dir in os.environ['PATH'].split(':'):
if os.path.exists(dir + '/mProject'):
installed = True
break
if not installed:
raise Exception("Montage commands are not in your PATH")
|
Check whether Montage commands are available
|
Check whether Montage commands are available
|
Python
|
bsd-3-clause
|
astrofrog/montage-wrapper,astrofrog/python-montage,astrofrog/montage-wrapper,jat255/montage-wrapper,vterron/montage-wrapper,astropy/montage-wrapper
|
a2d77c167ea8ae3a62183a56b10cd121dc476481
|
openfisca_france/conf/cache_blacklist.py
|
openfisca_france/conf/cache_blacklist.py
|
# When using openfisca for a large population, having too many variables in cache make openfisca performances drop.
# The following variables are intermadiate results and do not need to be cached in those usecases.
cache_blacklist = set([
'aide_logement_loyer_retenu',
'aide_logement_charges',
'aide_logement_R0',
'aide_logement_taux_famille',
'aide_logement_taux_loyer',
'aide_logement_participation_personelle'
])
|
# When using openfisca for a large population, having too many variables in cache make openfisca performances drop.
# The following variables are intermadiate results and do not need to be cached in those usecases.
cache_blacklist = set([
'aide_logement_loyer_retenu',
'aide_logement_charges',
'aide_logement_R0',
'aide_logement_taux_famille',
'aide_logement_taux_loyer',
'aide_logement_participation_personelle',
'aide_logement_loyer_seuil_degressivite',
'aide_logement_loyer_seuil_suppression',
'aide_logement_montant_brut_avant_degressivite',
])
|
Add intermediary variables in cache blacklist
|
Add intermediary variables in cache blacklist
|
Python
|
agpl-3.0
|
antoinearnoud/openfisca-france,antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france
|
7a453a7c289bf72a84e69d5f69389afa2f492588
|
winthrop/__init__.py
|
winthrop/__init__.py
|
__version_info__ = (0, 6, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (0, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
Bump version number on dev to 0.7
|
Bump version number on dev to 0.7
|
Python
|
apache-2.0
|
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
|
f34330817414f72494aec359c169e5d6d9d1568f
|
examples/quotes/quotes.py
|
examples/quotes/quotes.py
|
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
"Humpty Dumpty sat on a wall",
"Hickory Dickory Dock",
"Baa Baa Black Sheep",
"Old King Cole was a merry old sould",
]
my_name = input("Name: ")
nw0.advertise(my_name)
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(address, "quote", quote)
time.sleep(0.5)
|
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
"Humpty Dumpty sat on a wall",
"Hickory Dickory Dock",
"Baa Baa Black Sheep",
"Old King Cole was a merry old sould",
]
def main(address_pattern=None):
my_name = input("Name: ")
nw0.advertise(my_name, address_pattern)
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(address, "quote", quote)
time.sleep(0.5)
if __name__ == '__main__':
main(*sys.argv[1:])
|
Allow for a wildcard address
|
Allow for a wildcard address
|
Python
|
mit
|
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
|
6fe391b2e2f9b88a6835a6636a5d58810852ab5e
|
pyhole/tests/test_log.py
|
pyhole/tests/test_log.py
|
# Copyright 2011-2016 Josh Kearney
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger(name="test")
except SystemExit:
logger.setup_logger(name="test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
# Copyright 2011-2016 Josh Kearney
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
def test_logger(self):
test_log_dir = utils.get_home_directory() + "logs/"
try:
# NOTE(jk0): If the configuration file doesn't exist, the config
# class will generate it and raise a SystemExit.
logger.setup_logger("test")
except SystemExit:
logger.setup_logger("test")
test_log = logger.get_logger("TEST")
self.assertEqual("TEST", test_log.name)
self.assertEqual(test_log.level, 0)
os.unlink(test_log_dir + "test.log")
|
Use setup_logger properly in tests.
|
Use setup_logger properly in tests.
|
Python
|
apache-2.0
|
jk0/pyhole,jk0/pyhole,jk0/pyhole
|
f868a9181d659c2440a50e6e325ad2ae5b99f5c8
|
project_recalculate/models/resource_calendar.py
|
project_recalculate/models/resource_calendar.py
|
# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
|
# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
_inherit = 'resource.calendar'
@api.v7
def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
leaves=None, compute_leaves=False,
resource_id=None, default_interval=None,
context=None):
context = context or {}
context['tz'] = 'UTC'
if start_dt is None:
start_dt = datetime.now().replace(hour=0, minute=0, second=0)
if end_dt is None:
end_dt = datetime.now().replace(hour=23, minute=59, second=59)
days = 0
current = start_dt
while current <= end_dt:
if id is None:
days += 1
else:
end_day = current.replace(hour=23, minute=59, second=59)
end = end_dt if end_day > end_dt else end_day
working_intervals = self.get_working_intervals_of_day(
cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
compute_leaves=compute_leaves, resource_id=resource_id,
default_interval=default_interval, context=context)
if working_intervals:
days += 1
next = current + timedelta(days=1)
current = next
return days
|
Define UTC as tz in get_working_days_of_date method
|
[FIX] Define UTC as tz in get_working_days_of_date method
|
Python
|
agpl-3.0
|
Endika/project,NeovaHealth/project-service,OCA/project-service,Antiun/project,eezee-it/project-service,Antiun/project-service,acsone/project-service,dreispt/project-service,xpansa/project-service,ddico/project,acsone/project,akretion/project-service,sergiocorato/project-service,dreispt/project
|
1a830d0581f2baed76cb48eeee5f32d465737657
|
src/artgraph/plugins/infobox.py
|
src/artgraph/plugins/infobox.py
|
from artgraph.node import NodeTypes
from artgraph.plugins import Plugin
class InfoboxPlugin(Plugin):
def __init__(self, node):
self._node = node
@staticmethod
def get_target_node_type():
return NodeTypes.ARTIST
def get_nodes(self):
from artgraph.node import Node, NodeTypes
from artgraph.relationship import AssociatedActRelationship
relationships = []
wikicode = self.get_wikicode(self._node.get_dbtitle())
if wikicode:
templates = wikicode.filter_templates()
for t in templates:
if t.name.matches('Infobox musical artist'):
# Fill in current node info
if t.has('birth_name'):
name = str(t.get('birth_name').value)
db = self.get_artistgraph_connection()
cursor = db.cursor()
cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id()))
db.commit()
db.close()
associated_acts = t.get('associated_acts')
for w in associated_acts.value.filter_wikilinks():
relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST)))
return relationships
|
from artgraph.node import NodeTypes
from artgraph.plugins import Plugin
class InfoboxPlugin(Plugin):
def __init__(self, node):
self._node = node
@staticmethod
def get_target_node_type():
return NodeTypes.ARTIST
def get_nodes(self):
from artgraph.node import Node, NodeTypes
from artgraph.relationship import AssociatedActRelationship
relationships = []
wikicode = self.get_wikicode(self._node.get_dbtitle())
if wikicode:
templates = wikicode.filter_templates()
for t in templates:
if t.name.matches('Infobox musical artist'):
# Fill in current node info
if t.has('birth_name'):
name = str(t.get('birth_name').value)
db = self.get_artistgraph_connection()
cursor = db.cursor()
cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id()))
db.commit()
db.close()
if not t.has('associated_acts'):
continue
associated_acts = t.get('associated_acts')
for w in associated_acts.value.filter_wikilinks():
relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST)))
return relationships
|
Check if there are associated acts before querying for them
|
Check if there are associated acts before querying for them
|
Python
|
mit
|
dMaggot/ArtistGraph
|
a5441719e8f12cc16189b2ca110c878decfed120
|
airflow/operators/mysql_operator.py
|
airflow/operators/mysql_operator.py
|
import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
"""
Executes sql code in a specific MySQL database
:param mysql_conn_id: reference to a specific mysql database
:type mysql_conn_id: string
:param sql: the sql code to be executed
:type sql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql, mysql_conn_id='mysql_default', parameters=None,
*args, **kwargs):
super(MySqlOperator, self).__init__(*args, **kwargs)
self.mysql_conn_id = mysql_conn_id
self.sql = sql
self.parameters = parameters
def execute(self, context):
logging.info('Executing: ' + str(self.sql))
hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
hook.run(self.sql, parameters=self.parameters)
|
import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
"""
Executes sql code in a specific MySQL database
:param mysql_conn_id: reference to a specific mysql database
:type mysql_conn_id: string
:param sql: the sql code to be executed
:type sql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql, mysql_conn_id='mysql_default', parameters=None,
autocommit=False, *args, **kwargs):
super(MySqlOperator, self).__init__(*args, **kwargs)
self.mysql_conn_id = mysql_conn_id
self.sql = sql
self.autocommit = autocommit
self.parameters = parameters
def execute(self, context):
logging.info('Executing: ' + str(self.sql))
hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
hook.run(
self.sql,
autocommit=self.autocommit,
parameters=self.parameters)
|
Allow auto-commit option for Mysql Operator
|
Allow auto-commit option for Mysql Operator
|
Python
|
apache-2.0
|
jlowin/airflow,lxneng/incubator-airflow,cjqian/incubator-airflow,dhuang/incubator-airflow,gritlogic/incubator-airflow,forevernull/incubator-airflow,modsy/incubator-airflow,jesusfcr/airflow,ronfung/incubator-airflow,nathanielvarona/airflow,holygits/incubator-airflow,akosel/incubator-airflow,wxiang7/airflow,fenglu-g/incubator-airflow,NielsZeilemaker/incubator-airflow,mtagle/airflow,gtoonstra/airflow,holygits/incubator-airflow,ronfung/incubator-airflow,sekikn/incubator-airflow,subodhchhabra/airflow,wxiang7/airflow,malmiron/incubator-airflow,Twistbioscience/incubator-airflow,jbhsieh/incubator-airflow,NielsZeilemaker/incubator-airflow,zack3241/incubator-airflow,Tagar/incubator-airflow,jfantom/incubator-airflow,modsy/incubator-airflow,juvoinc/airflow,saguziel/incubator-airflow,cademarkegard/airflow,d-lee/airflow,danielvdende/incubator-airflow,Acehaidrey/incubator-airflow,artwr/airflow,artwr/airflow,vijaysbhat/incubator-airflow,sid88in/incubator-airflow,lxneng/incubator-airflow,Twistbioscience/incubator-airflow,wndhydrnt/airflow,lxneng/incubator-airflow,wileeam/airflow,zack3241/incubator-airflow,holygits/incubator-airflow,jwi078/incubator-airflow,aminghadersohi/airflow,gilt/incubator-airflow,ronfung/incubator-airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,Twistbioscience/incubator-airflow,cademarkegard/airflow,airbnb/airflow,mattuuh7/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,alexvanboxel/airflow,gilt/incubator-airflow,hamedhsn/incubator-airflow,mtdewulf/incubator-airflow,KL-WLCR/incubator-airflow,plypaul/airflow,skudriashev/incubator-airflow,wolfier/incubator-airflow,mylons/incubator-airflow,mattuuh7/incubator-airflow,DEVELByte/incubator-airflow,dud225/incubator-airflow,DEVELByte/incubator-airflow,dhuang/incubator-airflow,griffinqiu/airflow,stverhae/incubator-airflow,zodiac/incubator-airflow,dmitry-r/incubator-airflow,andyxhadji/incubator-airflow,nathanielvarona/airflow,r39132/airflow,andrewmchen/incubator-airflow,andyxhadji/incubator-airflow,Fokko/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,DinoCow/airflow,bolkedebruin/airflow,easytaxibr/airflow,hamedhsn/incubator-airflow,vijaysbhat/incubator-airflow,jhsenjaliya/incubator-airflow,MortalViews/incubator-airflow,owlabs/incubator-airflow,dud225/incubator-airflow,dud225/incubator-airflow,caseyching/incubator-airflow,N3da/incubator-airflow,aminghadersohi/airflow,asnir/airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,jfantom/incubator-airflow,yiqingj/airflow,mrares/incubator-airflow,kerzhner/airflow,mrkm4ntr/incubator-airflow,jiwang576/incubator-airflow,bolkedebruin/airflow,wndhydrnt/airflow,dud225/incubator-airflow,ronfung/incubator-airflow,spektom/incubator-airflow,ty707/airflow,AllisonWang/incubator-airflow,gilt/incubator-airflow,caseyching/incubator-airflow,ty707/airflow,Chedi/airflow,yiqingj/airflow,wolfier/incubator-airflow,vineet-rh/incubator-airflow,hgrif/incubator-airflow,aminghadersohi/airflow,zoyahav/incubator-airflow,criccomini/airflow,jesusfcr/airflow,mylons/incubator-airflow,mrares/incubator-airflow,preete-dixit-ck/incubator-airflow,biln/airflow,OpringaoDoTurno/airflow,DinoCow/airflow,dgies/incubator-airflow,plypaul/airflow,jlowin/airflow,neovintage/airflow,edgarRd/incubator-airflow,skudriashev/incubator-airflow,nathanielvarona/airflow,lyft/incubator-airflow,sdiazb/airflow,DEVELByte/incubator-airflow,CloverHealth/airflow,moritzpein/airflow,saguziel/incubator-airflow,modsy/incubator-airflow,andrewmchen/incubator-airflow,wileeam/airflow,owlabs/incubator-airflow,rishibarve/incubator-airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,yiqingj/airflow,wooga/airflow,forevernull/incubator-airflow,sdiazb/airflow,wndhydrnt/airflow,MetrodataTeam/incubator-airflow,artwr/airflow,ledsusop/airflow,sergiohgz/incubator-airflow,mtustin-handy/airflow,sid88in/incubator-airflow,cfei18/incubator-airflow,zodiac/incubator-airflow,cfei18/incubator-airflow,Acehaidrey/incubator-airflow,edgarRd/incubator-airflow,zoyahav/incubator-airflow,skudriashev/incubator-airflow,dmitry-r/incubator-airflow,malmiron/incubator-airflow,Fokko/incubator-airflow,mrares/incubator-airflow,sekikn/incubator-airflow,btallman/incubator-airflow,N3da/incubator-airflow,dgies/incubator-airflow,easytaxibr/airflow,RealImpactAnalytics/airflow,andyxhadji/incubator-airflow,lyft/incubator-airflow,cfei18/incubator-airflow,nathanielvarona/airflow,sdiazb/airflow,CloverHealth/airflow,mtdewulf/incubator-airflow,criccomini/airflow,saguziel/incubator-airflow,fenglu-g/incubator-airflow,asnir/airflow,apache/incubator-airflow,jbhsieh/incubator-airflow,Acehaidrey/incubator-airflow,yati-sagade/incubator-airflow,vineet-rh/incubator-airflow,yati-sagade/incubator-airflow,mrkm4ntr/incubator-airflow,adamhaney/airflow,brandsoulmates/incubator-airflow,kerzhner/airflow,mtagle/airflow,mistercrunch/airflow,adrpar/incubator-airflow,apache/incubator-airflow,zodiac/incubator-airflow,ProstoMaxim/incubator-airflow,NielsZeilemaker/incubator-airflow,stverhae/incubator-airflow,plypaul/airflow,DinoCow/airflow,yoziru-desu/airflow,Tagar/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,jwi078/incubator-airflow,janczak10/incubator-airflow,Acehaidrey/incubator-airflow,aminghadersohi/airflow,hgrif/incubator-airflow,danielvdende/incubator-airflow,Twistbioscience/incubator-airflow,andyxhadji/incubator-airflow,moritzpein/airflow,mistercrunch/airflow,asnir/airflow,plypaul/airflow,d-lee/airflow,sergiohgz/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,yk5/incubator-airflow,sdiazb/airflow,dhuang/incubator-airflow,ledsusop/airflow,mtdewulf/incubator-airflow,malmiron/incubator-airflow,N3da/incubator-airflow,rishibarve/incubator-airflow,gritlogic/incubator-airflow,brandsoulmates/incubator-airflow,yiqingj/airflow,criccomini/airflow,vineet-rh/incubator-airflow,stverhae/incubator-airflow,jfantom/incubator-airflow,CloverHealth/airflow,subodhchhabra/airflow,wndhydrnt/airflow,wileeam/airflow,ProstoMaxim/incubator-airflow,btallman/incubator-airflow,MortalViews/incubator-airflow,mtustin-handy/airflow,biln/airflow,MortalViews/incubator-airflow,btallman/incubator-airflow,apache/incubator-airflow,wileeam/airflow,adrpar/incubator-airflow,hgrif/incubator-airflow,airbnb/airflow,r39132/airflow,brandsoulmates/incubator-airflow,neovintage/airflow,opensignal/airflow,gritlogic/incubator-airflow,apache/airflow,biln/airflow,gilt/incubator-airflow,N3da/incubator-airflow,biln/airflow,zack3241/incubator-airflow,danielvdende/incubator-airflow,cademarkegard/airflow,Acehaidrey/incubator-airflow,Tagar/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,yati-sagade/incubator-airflow,juvoinc/airflow,bolkedebruin/airflow,asnir/airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,KL-WLCR/incubator-airflow,danielvdende/incubator-airflow,fenglu-g/incubator-airflow,cfei18/incubator-airflow,mrkm4ntr/incubator-airflow,juvoinc/airflow,griffinqiu/airflow,wooga/airflow,OpringaoDoTurno/airflow,apache/airflow,yoziru-desu/airflow,mattuuh7/incubator-airflow,spektom/incubator-airflow,modsy/incubator-airflow,saguziel/incubator-airflow,NielsZeilemaker/incubator-airflow,AllisonWang/incubator-airflow,gtoonstra/airflow,mylons/incubator-airflow,Tagar/incubator-airflow,vijaysbhat/incubator-airflow,d-lee/airflow,jlowin/airflow,jbhsieh/incubator-airflow,nathanielvarona/airflow,andrewmchen/incubator-airflow,DinoCow/airflow,holygits/incubator-airflow,KL-WLCR/incubator-airflow,cjqian/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,criccomini/airflow,akosel/incubator-airflow,mattuuh7/incubator-airflow,MetrodataTeam/incubator-airflow,bolkedebruin/airflow,r39132/airflow,lyft/incubator-airflow,d-lee/airflow,apache/airflow,brandsoulmates/incubator-airflow,Fokko/incubator-airflow,mtustin-handy/airflow,sergiohgz/incubator-airflow,wxiang7/airflow,neovintage/airflow,jesusfcr/airflow,caseyching/incubator-airflow,edgarRd/incubator-airflow,lyft/incubator-airflow,griffinqiu/airflow,adrpar/incubator-airflow,ProstoMaxim/incubator-airflow,sekikn/incubator-airflow,gritlogic/incubator-airflow,sergiohgz/incubator-airflow,ProstoMaxim/incubator-airflow,adamhaney/airflow,hamedhsn/incubator-airflow,yoziru-desu/airflow,subodhchhabra/airflow,airbnb/airflow,gtoonstra/airflow,Chedi/airflow,sid88in/incubator-airflow,AllisonWang/incubator-airflow,vijaysbhat/incubator-airflow,jwi078/incubator-airflow,zoyahav/incubator-airflow,caseyching/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,cademarkegard/airflow,griffinqiu/airflow,preete-dixit-ck/incubator-airflow,danielvdende/incubator-airflow,CloverHealth/airflow,adamhaney/airflow,rishibarve/incubator-airflow,mtagle/airflow,juvoinc/airflow,skudriashev/incubator-airflow,OpringaoDoTurno/airflow,stverhae/incubator-airflow,jgao54/airflow,forevernull/incubator-airflow,RealImpactAnalytics/airflow,janczak10/incubator-airflow,preete-dixit-ck/incubator-airflow,btallman/incubator-airflow,andrewmchen/incubator-airflow,mtustin-handy/airflow,janczak10/incubator-airflow,jfantom/incubator-airflow,wooga/airflow,hamedhsn/incubator-airflow,cfei18/incubator-airflow,malmiron/incubator-airflow,dhuang/incubator-airflow,mtdewulf/incubator-airflow,yk5/incubator-airflow,jhsenjaliya/incubator-airflow,ty707/airflow,ty707/airflow,jiwang576/incubator-airflow,Chedi/airflow,danielvdende/incubator-airflow,nathanielvarona/airflow,hgrif/incubator-airflow,gtoonstra/airflow,wooga/airflow,DEVELByte/incubator-airflow,jwi078/incubator-airflow,bolkedebruin/airflow,wolfier/incubator-airflow,wolfier/incubator-airflow,dgies/incubator-airflow,yati-sagade/incubator-airflow,vineet-rh/incubator-airflow,sekikn/incubator-airflow,kerzhner/airflow,zack3241/incubator-airflow,RealImpactAnalytics/airflow,artwr/airflow,AllisonWang/incubator-airflow,jgao54/airflow,sid88in/incubator-airflow,jgao54/airflow,jbhsieh/incubator-airflow,dmitry-r/incubator-airflow,mrkm4ntr/incubator-airflow,alexvanboxel/airflow,jlowin/airflow,akosel/incubator-airflow,MortalViews/incubator-airflow,subodhchhabra/airflow,moritzpein/airflow,KL-WLCR/incubator-airflow,apache/airflow,apache/airflow,mtagle/airflow,easytaxibr/airflow,owlabs/incubator-airflow,mistercrunch/airflow,easytaxibr/airflow,ledsusop/airflow,janczak10/incubator-airflow,owlabs/incubator-airflow,apache/airflow,ledsusop/airflow,MetrodataTeam/incubator-airflow,zodiac/incubator-airflow,kerzhner/airflow,lxneng/incubator-airflow,adrpar/incubator-airflow,Chedi/airflow,opensignal/airflow,rishibarve/incubator-airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,wxiang7/airflow,yoziru-desu/airflow,apache/incubator-airflow,r39132/airflow,fenglu-g/incubator-airflow,mylons/incubator-airflow,spektom/incubator-airflow,akosel/incubator-airflow,zoyahav/incubator-airflow,jgao54/airflow,yk5/incubator-airflow,edgarRd/incubator-airflow,neovintage/airflow,spektom/incubator-airflow,moritzpein/airflow,preete-dixit-ck/incubator-airflow,forevernull/incubator-airflow
|
9f5f59783fa29cf376f975864af1b3a9325cadd0
|
tests/test_utils.py
|
tests/test_utils.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import types
from os.path import abspath, dirname, join
from epubcheck import utils, samples
TEST_DIR = abspath(dirname(samples.__file__))
def test_utils_java_version():
assert utils.java_version().startswith('java version')
def test_epubcheck_help():
assert 'listChecks' in utils.epubcheck_help()
def test_epubcheck_version():
assert utils.epubcheck_version().startswith('EpubCheck v4.0.1')
def test_iter_files_simple():
gen = utils.iter_files(TEST_DIR, ['py'])
assert isinstance(gen, types.GeneratorType)
assert len(list(gen)) == 1
def test_iter_files_no_matches():
gen = utils.iter_files(TEST_DIR, ['noext'])
assert len(list(gen)) == 0
def test_iter_files_flat():
gen = utils.iter_files(TEST_DIR, ['epub'])
assert len(list(gen)) == 2
gen = utils.iter_files(TEST_DIR, ['EPUB'])
assert len(list(gen)) == 2
def test_iter_files_recursive():
gen = utils.iter_files(join('../', TEST_DIR), ['epub'], recursive=True)
assert len(list(gen)) == 2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import types
from os.path import abspath, dirname, join
from epubcheck import utils, samples
TEST_DIR = abspath(dirname(samples.__file__))
def test_utils_java_version():
assert utils.java_version().startswith('java version')
def test_epubcheck_help():
assert 'listChecks' in utils.epubcheck_help()
def test_epubcheck_version():
assert utils.epubcheck_version().startswith('EPUBCheck v4.2.1')
def test_iter_files_simple():
gen = utils.iter_files(TEST_DIR, ['py'])
assert isinstance(gen, types.GeneratorType)
assert len(list(gen)) == 1
def test_iter_files_no_matches():
gen = utils.iter_files(TEST_DIR, ['noext'])
assert len(list(gen)) == 0
def test_iter_files_flat():
gen = utils.iter_files(TEST_DIR, ['epub'])
assert len(list(gen)) == 2
gen = utils.iter_files(TEST_DIR, ['EPUB'])
assert len(list(gen)) == 2
def test_iter_files_recursive():
gen = utils.iter_files(join('../', TEST_DIR), ['epub'], recursive=True)
assert len(list(gen)) == 2
|
Fix unit test failing on case sensitivity
|
Fix unit test failing on case sensitivity
|
Python
|
bsd-2-clause
|
titusz/epubcheck
|
21d9b2f89a7eb9a6801a48c2586cc360e6be47c3
|
LTA_to_UVFITS.py
|
LTA_to_UVFITS.py
|
def lta_to_uvfits():
lta_files = glob.glob('*.lta*')
#flag_files = glob.glob('*.FLAGS*')
for i in range(len(lta_files)):
lta_file_name = lta_files[i]
uvfits_file_name = lta_file_name +'.UVFITS'
spam.convert_lta_to_uvfits( lta_file_name, uvfits_file_name )
|
def lta_to_uvfits():
lta_files = glob.glob('*.lta*')
#flag_files = glob.glob('*.FLAGS*')
for i in range(len(lta_files)):
lta_file_name = lta_files[i]
uvfits_file_name = lta_file_name +'.UVFITS'
spam.convert_lta_to_uvfits( lta_file_name, uvfits_file_name )
return lta_files
|
Return LTA files to use as argument in main thread code
|
Return LTA files to use as argument in main thread code
|
Python
|
mit
|
NCRA-TIFR/gadpu,NCRA-TIFR/gadpu
|
dee7b02d0cdd6969b4228086ab9af77ad1da60ef
|
asymmetric_jwt_auth/models.py
|
asymmetric_jwt_auth/models.py
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_pem_public_key, load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
is_valid = False
exc = None
for load in (load_pem_public_key, load_ssh_public_key):
if not is_valid:
try:
load(value.encode('utf-8'), default_backend())
is_valid = True
except Exception as e:
exc = e
if not is_valid:
raise ValidationError('Public key is invalid: %s' % exc)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
Allow PEM format keys through validation
|
Allow PEM format keys through validation
|
Python
|
isc
|
crgwbr/asymmetric_jwt_auth,crgwbr/asymmetric_jwt_auth
|
f03a8fa100aeb5944402ae3b2761c0373fb6a29d
|
como/settings.py
|
como/settings.py
|
# -*- coding: utf-8 -*-
"""
como.settings - some global variables
"""
import os
from paxo.util import DEBUG_MODE
LOCATION_CODES = [
'1C', '2Z', '4H', '5K', '8H', '5D', '7J', 'CK', 'E', 'EE',
'F', 'FC', 'G8', 'GQ', 'PT', 'CY', 'QT', 'QP', 'RN', 'RM',
'SG', 'UV', 'U2', 'V7', 'VM', 'W8', 'WQ', 'XA', 'XB', 'YM'
]
DEV_URL = 'http://127.0.0.1:5000'
REAL_URL = 'http://como.cwoebker.com'
COMO_BATTERY_FILE = os.path.expanduser('~/.como')
if DEBUG_MODE:
SERVER_URL = DEV_URL
else:
SERVER_URL = REAL_URL
|
# -*- coding: utf-8 -*-
"""
como.settings - some global variables
"""
import os
from paxo.util import DEBUG_MODE
LOCATION_CODES = [
'1C', '2Z', '4H', '5K', '8H', '5D', '7J', 'CK', 'E', 'EE',
'F', 'FC', 'G8', 'GQ', 'PT', 'CY', 'QT', 'QP', 'RN', 'RM',
'SG', 'UV', 'U2', 'V7', 'VM', 'W8', 'WQ', 'XA', 'XB', 'YM'
]
DEV_URL = 'http://127.0.0.1:5000'
REAL_URL = 'https://como.cwoebker.com'
COMO_BATTERY_FILE = os.path.expanduser('~/.como')
if DEBUG_MODE:
SERVER_URL = DEV_URL
else:
SERVER_URL = REAL_URL
|
Update server url with https
|
Update server url with https
|
Python
|
bsd-3-clause
|
cwoebker/como
|
5aef0b64477248f6cdadfd864a6d05cbc6939f09
|
trex/serializers.py
|
trex/serializers.py
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <[email protected]>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import HyperlinkedModelSerializer
from trex.models.project import Project, Entry
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("name", "description", "active", "created", "entries")
class EntryDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Entry
fields = ("date", "duration", "description", "state", "user", "created")
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <[email protected]>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("name", "description", "active", "created", "entries")
class EntryDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Entry
fields = ("date", "duration", "description", "state", "user", "created")
|
Use HyperlinkedIdentityField for entries in ProjectDetailSerializer
|
Use HyperlinkedIdentityField for entries in ProjectDetailSerializer
|
Python
|
mit
|
bjoernricks/trex,bjoernricks/trex
|
90ab0bfbac851a52f0e48f5186a727692e699a6f
|
geodj/youtube.py
|
geodj/youtube.py
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'viewCount'
query.racy = 'exclude'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'duration': int(entry.media.duration.seconds),
})
return results
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
if entry.rating is not None and float(entry.rating.average) < 3:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in entry.media.title.text.lower():
return False
return True
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'viewCount'
query.racy = 'exclude'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': smart_str(artist), 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
if entry.rating is not None and float(entry.rating.average) < 3:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if smart_str(artist).lower() not in smart_str(entry.media.title.text).lower():
return False
return True
|
Use smart_str and include artist in results
|
Use smart_str and include artist in results
|
Python
|
mit
|
6/GeoDJ,6/GeoDJ
|
8815507f8e334238d269468e08c22f4415e58528
|
spacy/lang/es/__init__.py
|
spacy/lang/es/__init__.py
|
# coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class Spanish(Language):
lang = 'es'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
__all__ = ['Spanish']
|
# coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class SpanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
class Spanish(Language):
lang = 'es'
Defaults = SpanishDefaults
__all__ = ['Spanish']
|
Move SpanishDefaults out of Language class, for pickle
|
Move SpanishDefaults out of Language class, for pickle
|
Python
|
mit
|
recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy
|
13da95de0f2fb17ec6cbf005be33db3b3d348831
|
motivation_text/models.py
|
motivation_text/models.py
|
from django.db import models
from patient.models import Patient
from django.utils.encoding import smart_unicode
class MotivationText(models.Model):
patient = models.ForeignKey(Patient, null=False)
text = models.TextField(default='', blank=False)
time_created = models.DateTimeField(null=False, auto_now_add=True, auto_now=False)
def __unicode__(self):
return smart_unicode(
"Motivational text for "
+ self.patient.user.first_name + " " + self.patient.user.last_name
+ " created at " + str(self.time_created)
)
class Meta():
ordering = ['-id']
TEXT_INFORMATION = 'I'
TEXT_MOTIVATION = 'M'
TYPES = [
(TEXT_INFORMATION, 'InformationText'),
(TEXT_MOTIVATION, 'MotivationText'),
]
type = models.CharField(max_length=1, choices=TYPES, null=False, default='M')
|
from django.db import models
from patient.models import Patient
from django.utils.encoding import smart_unicode
class MotivationText(models.Model):
patient = models.ForeignKey(Patient, null=False)
text = models.TextField(default='', blank=False)
time_created = models.DateTimeField(null=False, auto_now_add=True, auto_now=False)
def __unicode__(self):
return smart_unicode(
("InformationText" if self.type == 'I' else 'MotivationText')
+ " for " + self.patient.user.get_full_name()
+ " created at " + str(self.time_created)
)
class Meta():
ordering = ['-id']
TEXT_INFORMATION = 'I'
TEXT_MOTIVATION = 'M'
TYPES = [
(TEXT_INFORMATION, 'InformationText'),
(TEXT_MOTIVATION, 'MotivationText'),
]
type = models.CharField(max_length=1, choices=TYPES, null=False, default='M')
|
Improve unicode for motivational texts
|
Improve unicode for motivational texts
|
Python
|
mit
|
sigurdsa/angelika-api
|
f42ba1bebb0e7f92222d8a66f94e2550b4dde9e1
|
helpers/custom_filters.py
|
helpers/custom_filters.py
|
import json
def strslice(s, length):
if not isinstance(s, basestring):
s = str(s)
return s[:length]
def urlencode(s):
if isinstance(s, unicode):
s = s.encode('utf-8')
import urllib
return urllib.quote(s)
def json_filter(data):
return json.dumps(data)
def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
filters = {
'strslice': strslice,
'urlencode': urlencode,
'json': json_filter,
'datetime': datetimeformat,
}
|
import json
def strslice(s, length):
if not isinstance(s, basestring):
s = str(s)
return s[:length]
def urlencode(s):
if isinstance(s, unicode):
s = s.encode('utf-8')
import urllib
return urllib.quote(s)
def json_filter(data):
return json.dumps(data)
def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
def neat_time(dt):
"""Return the time in dt as a neat string.
Examples:
>>> neat_time(time(7, 30))
7:30AM
>>> neat_time(time(14, 00))
2PM
"""
if dt.minute:
timestring = dt.strftime('%I:%M%p')
else:
timestring = dt.strftime('%I%p')
if timestring[0] == '0':
timestring = timestring[1:]
return timestring
filters = {
'strslice': strslice,
'urlencode': urlencode,
'json': json_filter,
'datetime': datetimeformat,
'neattime': neat_time,
}
|
Add a neattime custom filter to give me pretty times.
|
Add a neattime custom filter to give me pretty times.
|
Python
|
agpl-3.0
|
watchcat/cbu-rotterdam,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,localprojects/Change-By-Us,codeforamerica/Change-By-Us
|
3af22fd5583ee110f731b9e1ebecba67ebee2bd4
|
sendwithus/exceptions.py
|
sendwithus/exceptions.py
|
class SendwithusError(Exception):
"""Base class for Sendwithus API errors"""
class AuthenticationError(SendwithusError):
"""API Authentication Failed"""
class APIError(SendwithusError):
"""4xx - Invalid Request (Client error)"""
class ServerError(SendwithusError):
"""5xx - Failed Request (Server error)"""
|
class SendwithusError(Exception):
"""Base class for Sendwithus API errors"""
def __init__(self, content=None):
self.content = content
class AuthenticationError(SendwithusError):
"""API Authentication Failed"""
class APIError(SendwithusError):
"""4xx - Invalid Request (Client error)"""
class ServerError(SendwithusError):
"""5xx - Failed Request (Server error)"""
|
Add a constructor to SendwithusError that stores content
|
Add a constructor to SendwithusError that stores content
|
Python
|
apache-2.0
|
sendwithus/sendwithus_python
|
d13db6f3466629df7d3aa8d2dc7eb31a9ec1f8b4
|
app/settings.py
|
app/settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
# To generate: import os; os.urandom(24)
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = SERVER_NAME
API_TOKEN = 'some-api-token'
# Flat pages
FLATPAGES_ROOT = 'pages/flat'
FLATPAGES_EXTENSION = '.md'
FLATPAGES_MARKDOWN_EXTENSIONS = []
DATA_DIR = 'static/data'
class Dev(Default):
APP_NAME = project_name + ' (dev)'
DEBUG = True
SERVER_NAME = '0.0.0.0:5000'
API_SERVER = SERVER_NAME
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
# To generate: import os; os.urandom(24)
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = SERVER_NAME
API_TOKEN = 'some-api-token'
# Flat pages
FLATPAGES_ROOT = 'pages/flat'
FLATPAGES_EXTENSION = '.md'
FLATPAGES_MARKDOWN_EXTENSIONS = []
DATA_DIR = 'static/data'
class Dev(Default):
APP_NAME = project_name + ' (dev)'
DEBUG = True
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
Use localhost:5000 for the dev server by default.
|
Use localhost:5000 for the dev server by default.
|
Python
|
mit
|
peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag
|
93512f1837a4e72752d4ffa07ac49e1f2cd5a7f6
|
opensimplex_test.py
|
opensimplex_test.py
|
import random
import time
from PIL import Image # Depends on the Pillow lib
from opensimplex import OpenSimplexNoise
WIDTH = 512
HEIGHT = 512
FEATURE_SIZE = 24
def main():
random.seed(time.time())
seed = random.randint(0, 100000)
simplex = OpenSimplexNoise(seed)
im = Image.new('L', (WIDTH, HEIGHT))
for y in range(0, HEIGHT):
for x in range(0, WIDTH):
#value = simplex.noise2d(x / FEATURE_SIZE, y / FEATURE_SIZE)
value = simplex.noise2d(x * 0.05, y * 0.05)
color = int((value + 1) * 128)
im.putpixel((x, y), color)
im.show()
if __name__ == '__main__':
main()
|
from PIL import Image # Depends on the Pillow lib
from opensimplex import OpenSimplexNoise
WIDTH = 512
HEIGHT = 512
FEATURE_SIZE = 24
def main():
simplex = OpenSimplexNoise()
im = Image.new('L', (WIDTH, HEIGHT))
for y in range(0, HEIGHT):
for x in range(0, WIDTH):
#value = simplex.noise2d(x / FEATURE_SIZE, y / FEATURE_SIZE)
value = simplex.noise2d(x * 0.05, y * 0.05)
color = int((value + 1) * 128)
im.putpixel((x, y), color)
im.show()
if __name__ == '__main__':
main()
|
Use default seed for the test.
|
Use default seed for the test.
|
Python
|
mit
|
lmas/opensimplex,antiface/opensimplex
|
9249dc161e9fdd64e15a42f644232c43cb6875b2
|
src/dependenpy/plugins.py
|
src/dependenpy/plugins.py
|
# -*- coding: utf-8 -*-
"""dependenpy plugins module."""
try:
from archan import Provider, Argument, DSM as ArchanDSM
from .dsm import DSM as DependenpyDSM
from .helpers import guess_depth
class InternalDependencies(Provider):
"""Dependenpy provider for Archan."""
identifier = 'dependenpy.InternalDependencies'
name = 'Internal Dependencies'
description = 'Provide matrix data about internal dependencies ' \
'in a set of packages.'
arguments = (
Argument('packages', list, 'The list of packages to check for.'),
Argument('enforce_init', bool, default=True,
description='Whether to assert presence of '
'__init__.py files in directories.'),
Argument('depth', int, 'The depth of the matrix to generate.'),
)
def get_dsm(self, packages, enforce_init=True, depth=None):
"""
Provide matrix data for internal dependencies in a set of packages.
Args:
*packages (list): the list of packages to check for.
enforce_init (bool):
whether to assert presence of __init__.py files
in directories.
depth (int): the depth of the matrix to generate.
Returns:
archan.DSM: instance of archan DSM.
"""
dsm = DependenpyDSM(*packages, enforce_init=enforce_init)
if depth is None:
depth = guess_depth(packages)
matrix = dsm.as_matrix(depth=depth)
return ArchanDSM(data=matrix.data, entities=matrix.keys)
except ImportError:
class InternalDependencies(object):
"""Empty dependenpy provider."""
|
# -*- coding: utf-8 -*-
"""dependenpy plugins module."""
try:
from archan import Provider, Argument, DesignStructureMatrix as ArchanDSM
from .dsm import DSM as DependenpyDSM
from .helpers import guess_depth
class InternalDependencies(Provider):
"""Dependenpy provider for Archan."""
identifier = 'dependenpy.InternalDependencies'
name = 'Internal Dependencies'
description = 'Provide matrix data about internal dependencies ' \
'in a set of packages.'
argument_list = (
Argument('packages', list, 'The list of packages to check for.'),
Argument('enforce_init', bool, default=True,
description='Whether to assert presence of '
'__init__.py files in directories.'),
Argument('depth', int, 'The depth of the matrix to generate.'),
)
def get_data(self, packages, enforce_init=True, depth=None):
"""
Provide matrix data for internal dependencies in a set of packages.
Args:
*packages (list): the list of packages to check for.
enforce_init (bool):
whether to assert presence of __init__.py files
in directories.
depth (int): the depth of the matrix to generate.
Returns:
archan.DSM: instance of archan DSM.
"""
dsm = DependenpyDSM(*packages, enforce_init=enforce_init)
if depth is None:
depth = guess_depth(packages)
matrix = dsm.as_matrix(depth=depth)
return ArchanDSM(data=matrix.data, entities=matrix.keys)
except ImportError:
class InternalDependencies(object):
"""Empty dependenpy provider."""
|
Update archan provider for archan 3.0
|
Update archan provider for archan 3.0
|
Python
|
isc
|
Pawamoy/dependenpy,Pawamoy/dependenpy
|
5da820b85f9e55a54639856bdd698c35b866833c
|
fireplace/cards/gvg/neutral_epic.py
|
fireplace/cards/gvg/neutral_epic.py
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
def OWN_MINION_SUMMON(self, minion):
if minion.atk == 1:
return [Buff(minion, "GVG_104a")]
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
def OWN_CARD_PLAYED(self, card):
if card.type == CardType.MINION and card.atk == 1:
return [Buff(card, "GVG_104a")]
|
Fix Hobgoblin to trigger only on cards played
|
Fix Hobgoblin to trigger only on cards played
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,jleclanche/fireplace,liujimj/fireplace,Meerkov/fireplace,amw2104/fireplace,butozerca/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,NightKev/fireplace,smallnamespace/fireplace,Meerkov/fireplace,liujimj/fireplace,butozerca/fireplace,Ragowit/fireplace,beheh/fireplace,oftc-ftw/fireplace,amw2104/fireplace
|
48b2460c718af88e8140b108d4a9acd9258ade8c
|
gargoyle/__init__.py
|
gargoyle/__init__.py
|
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
__import__('pkg_resources').declare_namespace(__name__)
|
Change to vanilla namespace package
|
Change to vanilla namespace package
|
Python
|
apache-2.0
|
disqus/gutter,disqus/gutter,kalail/gutter,kalail/gutter,kalail/gutter
|
c083481eed1578551daab7ece2e34b3ff4aece82
|
accelerator/migrations/0044_add_sitetree_sidenav_toggle.py
|
accelerator/migrations/0044_add_sitetree_sidenav_toggle.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-03-20 18:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0043_remove_exclude_fields'),
]
operations = [
migrations.RemoveField(
model_name='programfamily',
name='side_navigation',
),
migrations.AddField(
model_name='programfamily',
name='use_site_tree_side_nav',
field=models.BooleanField(default=False, help_text='Show the new-style side navigation'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-03-20 18:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0043_remove_exclude_fields'),
]
help_text = 'Show the new-style side navigation'
operations = [
migrations.RemoveField(
model_name='programfamily',
name='side_navigation',
),
migrations.AddField(
model_name='programfamily',
name='use_site_tree_side_nav',
field=models.BooleanField(default=False,
help_text=help_text),
),
]
|
Fix style on migration - waste of time, but whatever
|
Fix style on migration - waste of time, but whatever
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
9cc4ce152ba8d683db1371a963dae2621f1b8fc6
|
dbaas/dbaas/celeryconfig.py
|
dbaas/dbaas/celeryconfig.py
|
import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
|
import os
from django.conf import settings
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://{}:{}/0'.format(settings.REDIS_HOST, REDIS_PORT))
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
|
Fix celery config to respect the host configured on settings
|
Fix celery config to respect the host configured on settings
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
a8ec60daaee52603a1c3bab879a5eee9f0fd931b
|
ddd/dataobjects/datatype.py
|
ddd/dataobjects/datatype.py
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Python
|
mit
|
toesus/ddd,Sauci/ddd,toesus/ddd,Sauci/ddd,Sauci/ddd
|
6bd59ef149ec32f7d0a64ea2ad50a2729aceb6f5
|
fluent_contents/models/mixins.py
|
fluent_contents/models/mixins.py
|
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
collector_result = super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
return collector_result
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Python
|
apache-2.0
|
edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents
|
a88f0ab8be669f573f2869617717653bb008e11b
|
sheldon/bot.py
|
sheldon/bot.py
|
# -*- coding: utf-8 -*-
"""
@author: Lises team
@contact: [email protected]
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Lises team'
__version__ = '0.1'
__email__ = '[email protected]'
class Sheldon():
pass
class Plugin():
pass
class Adapter():
pass
|
# -*- coding: utf-8 -*-
"""
@author: Lises team
@contact: [email protected]
@license: The MIT license
Copyright (C) 2015
"""
# Python 2 compatibility
if sys.version_info[0] == 2:
# Exceptions for bot
from exceptions import *
# Tool for loading plugins and adapters
from modules import *
# Tool for loading config from project folder
from config import *
else:
# Exceptions for bot
from .exceptions import *
# Tool for loading plugins and adapters
from .modules import *
# Tool for loading config from project folder
from .config import *
class Sheldon():
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self):
"""
Function for loading bot.
:return:
"""
# Creating empty lists for plugins and adapters
self.plugins = []
self.adapters = []
def load_plugins(self):
"""
Function for collecting and loading plugins from plugins folder.
:return:
"""
class Plugin():
pass
class Adapter():
pass
|
Update structure of Sheldon class
|
Update structure of Sheldon class
|
Python
|
mit
|
lises/sheldon
|
3b47c9ec6950819b99588dcb29978d5cb8761cad
|
midi/examples/printmidiin.py
|
midi/examples/printmidiin.py
|
import pyb
from mid.midiin import MidiIn
def midi_printer(msg):
print(tuple(msg))
def loop(midiin):
while True:
midiin.poll()
pyb.udelay(500)
uart = py.UART(2, 31250)
midiin = MidiIn(uart, callback=midi_printer)
loop(midiin)
|
import pyb
from midi.midiin import MidiIn
def midi_printer(msg):
print(tuple(msg))
def loop(midiin):
while True:
midiin.poll()
pyb.udelay(50)
uart = pyb.UART(2, 31250)
midiin = MidiIn(uart, callback=midi_printer)
loop(midiin)
|
Fix wrong module names and lower poll interval to 50 ms
|
Fix wrong module names and lower poll interval to 50 ms
|
Python
|
mit
|
SpotlightKid/micropython-stm-lib
|
caa4dcdcf7e936f352eea22513433d8f8deca2ab
|
sahara/tests/unit/utils/test_hashabledict.py
|
sahara/tests/unit/utils/test_hashabledict.py
|
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import testtools
from sahara.utils import hashabledict as h
class HashableDictTest(testtools.TestCase):
def test_is_hashable(self):
hd = h.HashableDict()
hd['one'] = 'oneValue'
self.assertTrue(isinstance(hd, collections.Hashable))
|
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import testtools
from sahara.utils import hashabledict as h
class HashableDictTest(testtools.TestCase):
def test_is_hashable_collection(self):
dct = h.HashableDict(one='oneValue')
self.assertIsInstance(dct, collections.Hashable)
def test_hash_consistency(self):
dct1 = h.HashableDict(one='oneValue')
dct2 = h.HashableDict(one='oneValue')
self.assertEqual(hash(dct1), hash(dct2))
|
Improve unit test for HashableDict
|
Improve unit test for HashableDict
We have HashableDict introduced to network info storing, but hash
function of this implementation was never tested in unit tests.
Change-Id: Id48c9172ca63e19b397dc131d85ed631874142cd
|
Python
|
apache-2.0
|
openstack/sahara,esikachev/sahara-backup,ekasitk/sahara,henaras/sahara,egafford/sahara,tellesnobrega/sahara,ekasitk/sahara,egafford/sahara,henaras/sahara,zhangjunli177/sahara,zhangjunli177/sahara,esikachev/sahara-backup,esikachev/sahara-backup,openstack/sahara,henaras/sahara,ekasitk/sahara,crobby/sahara,crobby/sahara,crobby/sahara,tellesnobrega/sahara,zhangjunli177/sahara
|
1a4e634c001e1734ae0a2a881624ab10073865cd
|
virtool/app_init.py
|
virtool/app_init.py
|
import coloredlogs
import logging.handlers
from argparse import ArgumentParser
def get_args():
parser = ArgumentParser()
parser.add_argument(
"-H", "--host",
dest="host",
help="hostname to listen on"
)
parser.add_argument(
"-p", "--port",
dest="port",
help="port to listen on"
)
parser.add_argument(
"-v", "--verbose",
dest="verbose",
action="store_true",
default=False,
help="log debug messages"
)
parser.add_argument(
action="store_true",
default=False,
)
parser.add_argument(
"--force-version",
dest="force_version",
help="make the server think it is the passed FORCE_VERSION or v1.8.5 if none provided",
nargs="?",
const="v1.8.5"
)
return parser.parse_args()
def configure(verbose=False):
logging_level = logging.INFO if verbose else logging.DEBUG
logging.captureWarnings(True)
log_format = "%(asctime)-20s %(module)-11s %(levelname)-8s %(message)s"
coloredlogs.install(
level=logging_level,
fmt=log_format
)
logger = logging.getLogger("virtool")
handler = logging.handlers.RotatingFileHandler("virtool.log", maxBytes=1000000, backupCount=5)
handler.setFormatter(logging.Formatter(log_format))
logger.addHandler(handler)
return logger
|
import coloredlogs
import logging.handlers
from argparse import ArgumentParser
def get_args():
parser = ArgumentParser()
parser.add_argument(
"-H", "--host",
dest="host",
help="hostname to listen on"
)
parser.add_argument(
"-p", "--port",
dest="port",
help="port to listen on"
)
parser.add_argument(
"-v", "--verbose",
dest="verbose",
action="store_true",
default=False,
help="log debug messages"
)
parser.add_argument(
"--dev",
dest="dev",
action="store_true",
default=False,
help="run in dev mode"
)
parser.add_argument(
"--force-version",
dest="force_version",
help="make the server think it is the passed FORCE_VERSION or v1.8.5 if none provided",
nargs="?",
const="v1.8.5"
)
return parser.parse_args()
def configure(verbose=False):
logging_level = logging.INFO if verbose else logging.DEBUG
logging.captureWarnings(True)
log_format = "%(asctime)-20s %(module)-11s %(levelname)-8s %(message)s"
coloredlogs.install(
level=logging_level,
fmt=log_format
)
logger = logging.getLogger("virtool")
handler = logging.handlers.RotatingFileHandler("virtool.log", maxBytes=1000000, backupCount=5)
handler.setFormatter(logging.Formatter(log_format))
logger.addHandler(handler)
return logger
|
Add --dev command line arg
|
Add --dev command line arg
|
Python
|
mit
|
igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool
|
821f1ef9f7eae6d146248b5a212812fee4996cfd
|
django_website/settings/docs.py
|
django_website/settings/docs.py
|
from django_website.settings.www import *
PREPEND_WWW = False
APPEND_SLASH = True
TEMPLATE_CONTEXT_PROCESSORS += ["django.core.context_processors.request"]
ROOT_URLCONF = 'django_website.urls.docs'
CACHE_MIDDLEWARE_KEY_PREFIX = 'djangodocs'
_has_ddt = 'debug_toolbar' in INSTALLED_APPS
INSTALLED_APPS = [
'django_website.docs',
'haystack',
]
if _has_ddt:
INSTALLED_APPS.append('debug_toolbar')
# Where to store the build Sphinx docs.
if PRODUCTION:
DOCS_BUILD_ROOT = BASE.ancestor(2).child('docbuilds')
else:
DOCS_BUILD_ROOT = '/tmp/djangodocs'
# Haystack settings
HAYSTACK_SITECONF = 'django_website.docs.search_sites'
if PRODUCTION:
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://127.0.0.1:8983/solr'
else:
HAYSTACK_SEARCH_ENGINE = 'whoosh'
HAYSTACK_WHOOSH_PATH = '/tmp/djangodocs.index'
|
from django_website.settings.www import *
PREPEND_WWW = False
APPEND_SLASH = True
TEMPLATE_CONTEXT_PROCESSORS += ["django.core.context_processors.request"]
ROOT_URLCONF = 'django_website.urls.docs'
CACHE_MIDDLEWARE_KEY_PREFIX = 'djangodocs'
_has_ddt = 'debug_toolbar' in INSTALLED_APPS
INSTALLED_APPS = [
'django_website.docs',
'haystack',
]
if _has_ddt:
INSTALLED_APPS.append('debug_toolbar')
# Where to store the build Sphinx docs.
if PRODUCTION:
DOCS_BUILD_ROOT = BASE.ancestor(2).child('docbuilds')
else:
DOCS_BUILD_ROOT = '/tmp/djangodocs'
# Haystack settings
HAYSTACK_SITECONF = 'django_website.docs.search_sites'
HAYSTACK_SEARCH_ENGINE = 'whoosh'
HAYSTACK_WHOOSH_PATH = '/tmp/djangodocs.index'
|
Use Whoosh in prod for now.
|
Use Whoosh in prod for now.
|
Python
|
bsd-3-clause
|
khkaminska/djangoproject.com,relekang/djangoproject.com,rmoorman/djangoproject.com,rmoorman/djangoproject.com,khkaminska/djangoproject.com,hassanabidpk/djangoproject.com,alawnchen/djangoproject.com,khkaminska/djangoproject.com,relekang/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,vxvinh1511/djangoproject.com,rmoorman/djangoproject.com,alawnchen/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,vxvinh1511/djangoproject.com,nanuxbe/django,hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,django/djangoproject.com,vxvinh1511/djangoproject.com,django/djangoproject.com,alawnchen/djangoproject.com,gnarf/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,nanuxbe/django,nanuxbe/django,gnarf/djangoproject.com
|
cdc6390ec88a14b339cb336fcc0d77e747aae99a
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
Revert back to a generator - it's actually slight faster
|
Revert back to a generator - it's actually slight faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
b32900269b3f8c701702e74734ffe248c521fa73
|
dooblr/transformers/__init__.py
|
dooblr/transformers/__init__.py
|
from influxdbclient import InfluxDBClient, InfluxDBClientError, DooblrInfluxDBError
__all__ = ["InfluxDBClient", "InfluxDBClientError", "DooblrInfluxDBError"]
|
from dooblr.transformers.influxdbclient import InfluxDBClient, InfluxDBClientError, DooblrInfluxDBError
__all__ = ["InfluxDBClient", "InfluxDBClientError", "DooblrInfluxDBError"]
|
Fix py3 'relative' import error.
|
Fix py3 'relative' import error.
|
Python
|
isc
|
makerslocal/dooblr
|
288127c575c7672e3a41d7ada360d56a4853f279
|
scripts/examples/14-WiFi-Shield/fw_update.py
|
scripts/examples/14-WiFi-Shield/fw_update.py
|
# WINC Firmware Update Script.
#
# This script updates the ATWINC1500 WiFi module firmware.
# Copy the firmware image to uSD card before running this script.
# NOTE: Firmware version 19.5.2 does NOT support ATWINC1500-MR210PA.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
# Start the firmware update process.
# For ATWINC1500-MR210PA/B
#wlan.fw_update("/winc_19_4_4.bin")
# For ATWINC1500-MR210PB only.
wlan.fw_update("/winc_19_5_2.bin")
|
# WINC Firmware Update Script.
#
# This script updates the ATWINC1500 WiFi module firmware.
# Copy the firmware image to uSD card before running this script.
# NOTE: Older fimware versions are no longer supported by the host driver.
# NOTE: The latest firmware (19.6.1) only works on ATWINC1500-MR210PB.
import network
# Init wlan module in Download mode.
wlan = network.WINC(mode=network.WINC.MODE_FIRMWARE)
# For ATWINC1500-MR210PB only.
wlan.fw_update("/winc_19_6_1.bin")
|
Update WiFi firmware update script.
|
Update WiFi firmware update script.
|
Python
|
mit
|
openmv/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv
|
8f80099062a03fcf6be783f3e5260882f704ec22
|
scss/tests/test_files.py
|
scss/tests/test_files.py
|
from __future__ import absolute_import
import glob
import os.path
import pytest
from scss import Scss
HERE = os.path.join(os.path.split(__file__)[0], 'files')
@pytest.mark.parametrize(
('scss_fn', 'css_fn'), [
(scss_fn, os.path.splitext(scss_fn)[0] + '.css')
for scss_fn in glob.glob(os.path.join(HERE, '*/*.scss'))
]
)
def test_pair(scss_fn, css_fn):
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
compiler = Scss(scss_opts=dict(compress=0))
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert actual == expected
|
from __future__ import absolute_import
import glob
import os.path
import pytest
from scss import Scss
HERE = os.path.join(os.path.split(__file__)[0], 'files')
@pytest.mark.parametrize(
('scss_fn', 'css_fn'), [
(scss_fn, os.path.splitext(scss_fn)[0] + '.css')
for scss_fn in glob.glob(os.path.join(HERE, '*/*.scss'))
]
)
def test_pair(scss_fn, css_fn):
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
compiler = Scss(scss_opts=dict(compress=0))
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
Swap the test-file assertion, to make output more sensible.
|
Swap the test-file assertion, to make output more sensible.
|
Python
|
mit
|
cpfair/pyScss,cpfair/pyScss,Kronuz/pyScss,hashamali/pyScss,hashamali/pyScss,Kronuz/pyScss,Kronuz/pyScss,hashamali/pyScss,Kronuz/pyScss,cpfair/pyScss
|
d9cdde787439d022e8c0504666cd4809a8243fe6
|
oscar/management/commands/oscar_send_alerts.py
|
oscar/management/commands/oscar_send_alerts.py
|
import logging
from django.utils.translation import ugettext_lazy as _
from django.core.management.base import BaseCommand
from oscar.apps.customer.alerts import utils
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Check stock records of products for availability and send out alerts
to customers that have registered for an alert.
"""
help = _("Check check for products that are back in "
"stock and send out alerts")
def handle(self, **options):
"""
Check all products with active product alerts for
availability and send out email alerts when a product is
available to buy.
"""
utils.send_alerts()
|
import logging
from django.utils.translation import ugettext_lazy as _
from django.core.management.base import BaseCommand
from oscar.apps.customer.alerts import utils
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Check stock records of products for availability and send out alerts
to customers that have registered for an alert.
"""
help = _("Check for products that are back in "
"stock and send out alerts")
def handle(self, **options):
"""
Check all products with active product alerts for
availability and send out email alerts when a product is
available to buy.
"""
utils.send_alerts()
|
Correct typo in help string
|
Correct typo in help string
|
Python
|
bsd-3-clause
|
ka7eh/django-oscar,pdonadeo/django-oscar,itbabu/django-oscar,anentropic/django-oscar,sasha0/django-oscar,michaelkuty/django-oscar,amirrpp/django-oscar,amirrpp/django-oscar,Idematica/django-oscar,jinnykoo/wuyisj,okfish/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,DrOctogon/unwash_ecom,manevant/django-oscar,marcoantoniooliveira/labweb,josesanch/django-oscar,jlmadurga/django-oscar,ka7eh/django-oscar,nfletton/django-oscar,binarydud/django-oscar,jinnykoo/wuyisj.com,ahmetdaglarbas/e-commerce,WillisXChen/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,john-parton/django-oscar,bschuon/django-oscar,django-oscar/django-oscar,Bogh/django-oscar,jinnykoo/christmas,sasha0/django-oscar,Jannes123/django-oscar,makielab/django-oscar,amirrpp/django-oscar,vovanbo/django-oscar,michaelkuty/django-oscar,binarydud/django-oscar,spartonia/django-oscar,eddiep1101/django-oscar,nickpack/django-oscar,ademuk/django-oscar,faratro/django-oscar,elliotthill/django-oscar,jlmadurga/django-oscar,machtfit/django-oscar,QLGu/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,ahmetdaglarbas/e-commerce,ahmetdaglarbas/e-commerce,faratro/django-oscar,WadeYuChen/django-oscar,saadatqadri/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,jinnykoo/wuyisj,saadatqadri/django-oscar,josesanch/django-oscar,mexeniz/django-oscar,adamend/django-oscar,okfish/django-oscar,Jannes123/django-oscar,jinnykoo/wuyisj.com,itbabu/django-oscar,kapari/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,bschuon/django-oscar,makielab/django-oscar,QLGu/django-oscar,sonofatailor/django-oscar,monikasulik/django-oscar,john-parton/django-oscar,makielab/django-oscar,QLGu/django-oscar,jinnykoo/wuyisj,manevant/django-oscar,MatthewWilkes/django-oscar,adamend/django-oscar,nickpack/django-oscar,elliotthill/django-oscar,MatthewWilkes/django-oscar,manevant/django-oscar,marcoantoniooliveira/labweb,marcoantoniooliveira/labweb,QLGu/django-oscar,nfletton/django-oscar,vovanbo/django-oscar,mexeniz/django-oscar,nfletton/django-oscar,binarydud/django-oscar,okfish/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,machtfit/django-oscar,elliotthill/django-oscar,django-oscar/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,bnprk/django-oscar,vovanbo/django-oscar,thechampanurag/django-oscar,DrOctogon/unwash_ecom,machtfit/django-oscar,adamend/django-oscar,bnprk/django-oscar,WillisXChen/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,anentropic/django-oscar,ademuk/django-oscar,spartonia/django-oscar,nickpack/django-oscar,Jannes123/django-oscar,vovanbo/django-oscar,kapari/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,lijoantony/django-oscar,ka7eh/django-oscar,Bogh/django-oscar,itbabu/django-oscar,rocopartners/django-oscar,saadatqadri/django-oscar,rocopartners/django-oscar,bschuon/django-oscar,jinnykoo/wuyisj.com,kapt/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,jmt4/django-oscar,sasha0/django-oscar,ademuk/django-oscar,Bogh/django-oscar,pdonadeo/django-oscar,saadatqadri/django-oscar,spartonia/django-oscar,WadeYuChen/django-oscar,kapt/django-oscar,monikasulik/django-oscar,solarissmoke/django-oscar,lijoantony/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,kapt/django-oscar,spartonia/django-oscar,django-oscar/django-oscar,lijoantony/django-oscar,kapari/django-oscar,eddiep1101/django-oscar,jmt4/django-oscar,pasqualguerrero/django-oscar,mexeniz/django-oscar,rocopartners/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,dongguangming/django-oscar,thechampanurag/django-oscar,jlmadurga/django-oscar,anentropic/django-oscar,rocopartners/django-oscar,monikasulik/django-oscar,Jannes123/django-oscar,josesanch/django-oscar,Idematica/django-oscar,makielab/django-oscar,ahmetdaglarbas/e-commerce,WadeYuChen/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,ka7eh/django-oscar,sasha0/django-oscar,marcoantoniooliveira/labweb,anentropic/django-oscar,kapari/django-oscar,jinnykoo/christmas,thechampanurag/django-oscar,WillisXChen/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,faratro/django-oscar,john-parton/django-oscar,manevant/django-oscar,michaelkuty/django-oscar,faratro/django-oscar,sonofatailor/django-oscar,taedori81/django-oscar,DrOctogon/unwash_ecom,pasqualguerrero/django-oscar,bnprk/django-oscar,binarydud/django-oscar,eddiep1101/django-oscar,lijoantony/django-oscar,pdonadeo/django-oscar,sonofatailor/django-oscar,okfish/django-oscar,jinnykoo/christmas,WadeYuChen/django-oscar,Idematica/django-oscar,amirrpp/django-oscar,dongguangming/django-oscar,pdonadeo/django-oscar,eddiep1101/django-oscar
|
7b3267b2bae436e0580e2a229a64bd8d6a04bc1f
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
settings.POLICY_FILES.update({
'share': 'manila_policy.json',
})
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
Define the default policy file
|
Define the default policy file
This change ensures that the default policy file for Manila API access
is defined by default, so that operators can deploy their own policy
more easily.
Change-Id: Ie890766ea2a274791393304cdfe532e024171195
|
Python
|
apache-2.0
|
openstack/manila-ui,openstack/manila-ui,openstack/manila-ui
|
fd90fc7ce0c8a8070966e4a8273c69b8c13955d3
|
masters/master.tryserver.webrtc/master_site_config.py
|
masters/master.tryserver.webrtc/master_site_config.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCTryServer(Master.Master4):
project_name = 'WebRTC Try Server'
master_port = 8070
slave_port = 8170
master_port_alt = 8270
try_job_port = 8370
from_address = '[email protected]'
reply_to = '[email protected]'
svn_url = 'svn://svn-mirror.golo.chromium.org/chrome-try/try-webrtc'
base_app_url = 'https://webrtc-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = None
code_review_site = 'https://webrtc-codereview.appspot.com'
buildbot_url = 'http://build.chromium.org/p/tryserver.webrtc/'
|
Make trybots use HEAD instead of LKGR
|
WebRTC: Make trybots use HEAD instead of LKGR
It's about time we make this change, which turned out
to be very simple.
Review URL: https://codereview.chromium.org/776233003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@293261 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
d611341f95bc95fa3c3e42d7e830be1c2baea9b1
|
moksha/api/streams/datastream.py
|
moksha/api/streams/datastream.py
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
now = True
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=self.now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
Enable a 'now' property for the PollingDataStream
|
Enable a 'now' property for the PollingDataStream
|
Python
|
apache-2.0
|
ralphbean/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,lmacken/moksha
|
d88a6afddcc9ac90b1fb2327d4df4ece29f1c031
|
docs/plans/wordpress/mysql_boot.py
|
docs/plans/wordpress/mysql_boot.py
|
#!/usr/bin/env python
import sys
import os
import simplejson as json
f = open("bootconf.json", "r")
vals_dict = json.load(f)
f.close()
os.putenv('DEBIAN_FRONTEND', 'noninteractive')
os.putenv('TERM', 'dumb')
password=vals_dict['dbpassword']
dbname=vals_dict['dbname']
commands = []
commands.append('sudo -E apt-get -y -q install mysql-server-5.1')
commands.append('sudo -E mysqladmin -u root password %s' % (password))
commands.append('sudo -E mysqladmin --password=%s create %s' % (password, dbname))
commands.append("sudo -E mysql --password=%s -e \"GRANT Select, Insert, Update ON *.* TO 'root'@'%%' IDENTIFIED BY '%s';\"" % (password, password))
commands.append("sudo -E sed -i 's/bind-address.*/bind-address = 0.0.0.0/' /etc/mysql/my.cnf")
commands.append("sudo -E restart mysql")
for cmd in commands:
print cmd
rc = os.system(cmd)
if rc != 0:
print "ERROR! %d" % (rc)
sys.exit(rc)
print "SUCCESS"
sys.exit(0)
|
#!/usr/bin/env python
import sys
import os
import simplejson as json
f = open("bootconf.json", "r")
vals_dict = json.load(f)
f.close()
os.putenv('DEBIAN_FRONTEND', 'noninteractive')
os.putenv('TERM', 'dumb')
password=vals_dict['dbpassword']
dbname=vals_dict['dbname']
commands = []
commands.append('sudo -E apt-get -y -q install mysql-server-5.1')
commands.append('sudo -E mysqladmin -u root password %s' % (password))
commands.append('sudo -E mysqladmin --password=%s create %s' % (password, dbname))
commands.append("sudo -E mysql --password=%s -e \"GRANT Select, Insert, Update, Create, Delete ON *.* TO 'root'@'%%' IDENTIFIED BY '%s';\"" % (password, password))
commands.append("sudo -E sed -i 's/bind-address.*/bind-address = 0.0.0.0/' /etc/mysql/my.cnf")
commands.append("sudo -E restart mysql")
for cmd in commands:
print cmd
rc = os.system(cmd)
if rc != 0:
print "ERROR! %d" % (rc)
sys.exit(rc)
print "SUCCESS"
sys.exit(0)
|
Add create and delete permissions to mysql user in wordpress example
|
Add create and delete permissions to mysql user in wordpress example
|
Python
|
apache-2.0
|
buzztroll/cloudinit.d,nimbusproject/cloudinit.d,buzztroll/cloudinit.d,nimbusproject/cloudinit.d
|
68b499ea6b73232b3b8a860b3c8b808a1736b733
|
myfedora/controllers/template.py
|
myfedora/controllers/template.py
|
from ${package}.lib.base import *
class TemplateController(BaseController):
def view(self, url):
"""By default, the final controller tried to fulfill the request
when no other routes match. It may be used to display a template
when all else fails, e.g.::
def view(self, url):
return render('/%s' % url)
Or if you're using Mako and want to explicitly send a 404 (Not
Found) response code when the requested template doesn't exist::
import mako.exceptions
def view(self, url):
try:
return render('/%s' % url)
except mako.exceptions.TopLevelLookupException:
abort(404)
By default this controller aborts the request with a 404 (Not
Found)
"""
abort(404)
|
from myfedora.lib.base import *
class TemplateController(BaseController):
def view(self, url):
"""By default, the final controller tried to fulfill the request
when no other routes match. It may be used to display a template
when all else fails, e.g.::
def view(self, url):
return render('/%s' % url)
Or if you're using Mako and want to explicitly send a 404 (Not
Found) response code when the requested template doesn't exist::
import mako.exceptions
def view(self, url):
try:
return render('/%s' % url)
except mako.exceptions.TopLevelLookupException:
abort(404)
By default this controller aborts the request with a 404 (Not
Found)
"""
abort(404)
|
Fix a busted import statement in our TemplateController
|
Fix a busted import statement in our TemplateController
|
Python
|
agpl-3.0
|
Fale/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages,Fale/fedora-packages,Fale/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages
|
3b5c26db02a3b895af8c6da9ef9e5b79274a6857
|
spectator/core/migrations/0003_set_creator_slug.py
|
spectator/core/migrations/0003_set_creator_slug.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-01 09:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spectator_core', '0002_creator_slug'),
]
operations = [
migrations.AddField(
model_name='creator',
name='slug',
field=models.SlugField(blank=True, max_length=10),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-01 09:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spectator_core', '0002_creator_slug'),
]
operations = [
migrations.AlterField(
model_name='creator',
name='slug',
field=models.SlugField(blank=True, max_length=10),
),
]
|
Fix error in Creator migration
|
Fix error in Creator migration
Due to manually tweaking old migrations.
Tests in django-hines were complaining about the migration trying
to add Creator.slug a second time.
|
Python
|
mit
|
philgyford/django-spectator,philgyford/django-spectator,philgyford/django-spectator
|
926ddeb63f0366a59f14adbab5421ccb7f78f144
|
exercises/book-store/example.py
|
exercises/book-store/example.py
|
BOOK_PRICE = 8
def _group_price(size):
discounts = [0, .05, .1, .2, .25]
if not (0 < size <= 5):
raise ValueError('size must be in 1..' + len(discounts))
return 8 * size * (1 - discounts[size - 1])
def calculate_total(books, price_so_far=0.):
if not books:
return price_so_far
groups = list(set(books))
min_price = float('inf')
for i in range(len(groups)):
remaining_books = books[:]
for v in groups[:i + 1]:
remaining_books.remove(v)
price = calculate_total(remaining_books,
price_so_far + _group_price(i + 1))
min_price = min(min_price, price)
return min_price
|
BOOK_PRICE = 8
def _group_price(size):
discounts = [0, .05, .1, .2, .25]
if not (0 < size <= 5):
raise ValueError('size must be in 1..' + len(discounts))
return BOOK_PRICE * size * (1 - discounts[size - 1])
def calculate_total(books, price_so_far=0.):
if not books:
return price_so_far
groups = list(set(books))
min_price = float('inf')
for i in range(len(groups)):
remaining_books = books[:]
for v in groups[:i + 1]:
remaining_books.remove(v)
price = calculate_total(remaining_books,
price_so_far + _group_price(i + 1))
min_price = min(min_price, price)
return min_price
|
Use book price constant in calculation
|
book-store: Use book price constant in calculation
|
Python
|
mit
|
N-Parsons/exercism-python,pheanex/xpython,jmluy/xpython,behrtam/xpython,exercism/xpython,smalley/python,exercism/xpython,exercism/python,N-Parsons/exercism-python,smalley/python,pheanex/xpython,jmluy/xpython,exercism/python,behrtam/xpython
|
a220a62e4444e75974ad28915e7216a276f60c9c
|
test_valid_object_file.py
|
test_valid_object_file.py
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
# increase timeout so that the Travis builds succeed
name_resolve.NAME_RESOLVE_TIMEOUT.set(30)
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
Increase timeout for satrapy name lookups
|
Increase timeout for satrapy name lookups
|
Python
|
bsd-2-clause
|
mwcraig/feder-object-list
|
5cd3b53f677fd6ab6e77bee5b7d42cf2ac85e47f
|
feincms/apps.py
|
feincms/apps.py
|
# flake8: noqa
from feincms.content.application.models import *
|
def __getattr__(key):
# Work around Django 3.2's autoloading of *.apps modules (AppConfig
# autodiscovery)
if key in {
"ApplicationContent",
"app_reverse",
"app_reverse_lazy",
"permalink",
"UnpackTemplateResponse",
"standalone",
"unpack",
}:
from feincms.content.application import models
return getattr(models, key)
raise AttributeError("Unknown attribute '%s'" % key)
|
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
|
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
|
Python
|
bsd-3-clause
|
mjl/feincms,feincms/feincms,mjl/feincms,feincms/feincms,feincms/feincms,mjl/feincms
|
f29630aa08bfc86953f40f0b3166a0b9a074122c
|
csft/__info__.py
|
csft/__info__.py
|
# -*- coding:utf-8 -*-
"""
Information about this package.
"""
__author__ = 'Yan QiDong'
__version__ = '0.2.2'
__email__ = '[email protected]'
__url__ = 'https://github.com/yanqd0/csft'
__copyright__ = 'Copyright (C) 2017 ' + __author__
__license__ = 'MIT License'
|
# -*- coding:utf-8 -*-
"""
Information about this package.
"""
__author__ = 'Yan QiDong'
__version__ = '0.2.1'
__email__ = '[email protected]'
__url__ = 'https://github.com/yanqd0/csft'
__copyright__ = 'Copyright (C) 2017 ' + __author__
__license__ = 'MIT License'
|
Revert "Update version to 0.2.2"
|
Revert "Update version to 0.2.2"
This reverts commit 028a8722df2f6df6ca308430c672e651a57bd771.
|
Python
|
mit
|
yanqd0/csft
|
65e4aba86730525a75e915fe61eb15b681817cc3
|
app/commander.py
|
app/commander.py
|
import rethinkdb as r
class Commander:
def process_message(self, message):
return "I got your message"
|
import re
import rethinkdb as r
class Commander:
def process_message(self, message):
return self.parse_message(message)
def parse_message(self, message):
stripped_message = message.strip()
commander_match = re.match(r'commander\s*(.*)',
stripped_message,
flags=re.IGNORECASE)
if commander_match:
# parse message as incident commander message
task_match = re.match(r'add task\s*(.*)',
commander_match.groups()[0],
flags=re.I)
if task_match:
return self.add_task(task_match.groups()[0])
return 'no match for this command'
def add_task(self, task):
# add task to task list
print(task)
return 'Added task to list!'
|
Add parsing for adding message
|
Add parsing for adding message
|
Python
|
mit
|
henryfjordan/incident-commander
|
a28826a0b57742d3cb2ac57c0a17b37f2afff302
|
homedisplay/control_milight/management/commands/listen_433.py
|
homedisplay/control_milight/management/commands/listen_433.py
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
|
from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
import logging
logger = logging.getLogger("%s.%s" % ("homecontroller", __name__))
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
ITEM_MAP = settings.ARDUINO_433_ITEM_MAP
sent_event_map = {}
while True:
line = s.readline()
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
continue
logger.info("Processing trigger %s (%s)", item_name, id)
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
logger.warn("Unknown ID: %s", id)
|
Use logging. Remove hardcoded settings
|
Use logging. Remove hardcoded settings
|
Python
|
bsd-3-clause
|
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
|
65f7c62ec30ca7e3d811e146b00cc75bec783129
|
fbmsgbot/models/template.py
|
fbmsgbot/models/template.py
|
from attachment import Button
import json
class Template(object):
"""
Facebook Messenger message
model for structured messages
"""
button_type = 'button'
generic_type = 'generic'
receipt_type = 'receipt'
def __init__(self, type, **kwargs):
self.type = type
self.kwargs = kwargs
def to_json(self):
payload = {}
if self.type == self.button_type:
assert all([isinstance(button, Button)
for button in self.kwargs['buttons']]), "Missing type button"
buttons = [json.loads(b.to_json()) for b in self.kwargs['buttons']]
payload = {
'template_type' : 'button',
'text' : self.kwargs['title'],
'buttons' : buttons
}
elif self.type == self.generic_type:
# elements = kwargs.get('elements')
# TODO: Check types and check if elements exist in kwargs
elements = [element.to_json() for element in self.kwargs['elements']]
payload = {
'template_type' : 'generic',
'elements': elements
}
elif self.type == self.receipt_type:
raise NotImplementedError
return payload
|
from attachment import Button
import json
class Template(object):
"""
Facebook Messenger message
model for structured messages
"""
button_type = 'button'
generic_type = 'generic'
receipt_type = 'receipt'
def __init__(self, type, **kwargs):
self.type = type
self.kwargs = kwargs
def to_json(self):
payload = {}
if self.type == self.button_type:
assert all([isinstance(button, Button)
for button in self.kwargs['buttons']]), "Missing type button"
buttons = [b.to_json() for b in self.kwargs['buttons']]
payload = {
'template_type' : 'button',
'text' : self.kwargs['title'],
'buttons' : buttons
}
elif self.type == self.generic_type:
# elements = kwargs.get('elements')
# TODO: Check types and check if elements exist in kwargs
elements = [element.to_json() for element in self.kwargs['elements']]
payload = {
'template_type' : 'generic',
'elements': elements
}
elif self.type == self.receipt_type:
raise NotImplementedError
return payload
|
Remove conversion to json string
|
Remove conversion to json string
|
Python
|
mit
|
ben-cunningham/python-messenger-bot,ben-cunningham/pybot
|
c2fe4483ba70f0ca37b4713a51baf0804a68accd
|
lms/djangoapps/course_wiki/plugins/markdownedx/wiki_plugin.py
|
lms/djangoapps/course_wiki/plugins/markdownedx/wiki_plugin.py
|
# -*- coding: utf-8 -*-
from wiki.core.plugins.base import BasePlugin
from wiki.core.plugins import registry as plugin_registry
from course_wiki.plugins.markdownedx import mdx_mathjax, mdx_video
class ExtendMarkdownPlugin(BasePlugin):
"""
This plugin simply loads all of the markdown extensions we use in edX.
"""
markdown_extensions = [
mdx_mathjax.MathJaxExtension(configs={}),
mdx_video.VideoExtension(configs={})]
plugin_registry.register(ExtendMarkdownPlugin)
|
# -*- coding: utf-8 -*-
from wiki.core.plugins.base import BasePlugin
from wiki.core.plugins import registry as plugin_registry
from course_wiki.plugins.markdownedx import mdx_mathjax, mdx_video
class ExtendMarkdownPlugin(BasePlugin):
"""
This plugin simply loads all of the markdown extensions we use in edX.
"""
markdown_extensions = [
mdx_mathjax.MathJaxExtension(configs={}),
mdx_video.VideoExtension(configs={}),
]
plugin_registry.register(ExtendMarkdownPlugin)
|
Fix PEP8: E126 continuation line over-indented
|
Fix PEP8: E126 continuation line over-indented
for hanging indent
|
Python
|
agpl-3.0
|
IndonesiaX/edx-platform,mbareta/edx-platform-ft,proversity-org/edx-platform,IONISx/edx-platform,Edraak/edx-platform,doganov/edx-platform,shabab12/edx-platform,lduarte1991/edx-platform,deepsrijit1105/edx-platform,pomegranited/edx-platform,prarthitm/edxplatform,fintech-circle/edx-platform,prarthitm/edxplatform,waheedahmed/edx-platform,xingyepei/edx-platform,jbzdak/edx-platform,louyihua/edx-platform,TeachAtTUM/edx-platform,stvstnfrd/edx-platform,nttks/edx-platform,cognitiveclass/edx-platform,jjmiranda/edx-platform,Endika/edx-platform,antoviaque/edx-platform,JCBarahona/edX,ampax/edx-platform,zubair-arbi/edx-platform,wwj718/edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,ahmadiga/min_edx,synergeticsedx/deployment-wipro,doganov/edx-platform,waheedahmed/edx-platform,itsjeyd/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,miptliot/edx-platform,inares/edx-platform,MakeHer/edx-platform,JCBarahona/edX,edx-solutions/edx-platform,bigdatauniversity/edx-platform,teltek/edx-platform,fintech-circle/edx-platform,amir-qayyum-khan/edx-platform,hamzehd/edx-platform,IONISx/edx-platform,caesar2164/edx-platform,Livit/Livit.Learn.EdX,cpennington/edx-platform,defance/edx-platform,stvstnfrd/edx-platform,amir-qayyum-khan/edx-platform,tanmaykm/edx-platform,eduNEXT/edunext-platform,ahmedaljazzar/edx-platform,UOMx/edx-platform,iivic/BoiseStateX,CourseTalk/edx-platform,ovnicraft/edx-platform,kmoocdev2/edx-platform,arbrandes/edx-platform,cpennington/edx-platform,edx-solutions/edx-platform,defance/edx-platform,franosincic/edx-platform,arbrandes/edx-platform,IONISx/edx-platform,arbrandes/edx-platform,halvertoluke/edx-platform,IONISx/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,Edraak/edraak-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,simbs/edx-platform,solashirai/edx-platform,Edraak/circleci-edx-platform,marcore/edx-platform,Stanford-Online/edx-platform,Endika/edx-platform,tanmaykm/edx-platform,hamzehd/edx-platform,pomegranited/edx-platform,procangroup/edx-platform,msegado/edx-platform,zubair-arbi/edx-platform,procangroup/edx-platform,deepsrijit1105/edx-platform,nttks/edx-platform,RPI-OPENEDX/edx-platform,appsembler/edx-platform,shurihell/testasia,kursitet/edx-platform,edx-solutions/edx-platform,ahmedaljazzar/edx-platform,zubair-arbi/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,ahmedaljazzar/edx-platform,devs1991/test_edx_docmode,wwj718/edx-platform,jzoldak/edx-platform,cognitiveclass/edx-platform,antoviaque/edx-platform,naresh21/synergetics-edx-platform,edx/edx-platform,gsehub/edx-platform,MakeHer/edx-platform,alexthered/kienhoc-platform,jbzdak/edx-platform,Livit/Livit.Learn.EdX,ahmadiga/min_edx,Edraak/circleci-edx-platform,caesar2164/edx-platform,pabloborrego93/edx-platform,defance/edx-platform,IndonesiaX/edx-platform,cognitiveclass/edx-platform,waheedahmed/edx-platform,wwj718/edx-platform,synergeticsedx/deployment-wipro,stvstnfrd/edx-platform,Endika/edx-platform,alu042/edx-platform,Edraak/edraak-platform,ZLLab-Mooc/edx-platform,CourseTalk/edx-platform,IndonesiaX/edx-platform,longmen21/edx-platform,amir-qayyum-khan/edx-platform,appsembler/edx-platform,romain-li/edx-platform,chrisndodge/edx-platform,lduarte1991/edx-platform,jbzdak/edx-platform,cecep-edu/edx-platform,wwj718/edx-platform,naresh21/synergetics-edx-platform,EDUlib/edx-platform,Lektorium-LLC/edx-platform,ampax/edx-platform,jzoldak/edx-platform,Ayub-Khan/edx-platform,shurihell/testasia,philanthropy-u/edx-platform,antoviaque/edx-platform,alu042/edx-platform,nttks/edx-platform,philanthropy-u/edx-platform,ZLLab-Mooc/edx-platform,BehavioralInsightsTeam/edx-platform,solashirai/edx-platform,franosincic/edx-platform,caesar2164/edx-platform,CredoReference/edx-platform,10clouds/edx-platform,eduNEXT/edx-platform,RPI-OPENEDX/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,itsjeyd/edx-platform,a-parhom/edx-platform,raccoongang/edx-platform,nttks/edx-platform,jzoldak/edx-platform,mbareta/edx-platform-ft,mcgachey/edx-platform,JCBarahona/edX,pomegranited/edx-platform,marcore/edx-platform,a-parhom/edx-platform,JioEducation/edx-platform,shurihell/testasia,ZLLab-Mooc/edx-platform,bigdatauniversity/edx-platform,teltek/edx-platform,inares/edx-platform,edx/edx-platform,lduarte1991/edx-platform,mcgachey/edx-platform,chrisndodge/edx-platform,synergeticsedx/deployment-wipro,pomegranited/edx-platform,JCBarahona/edX,RPI-OPENEDX/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,alexthered/kienhoc-platform,CourseTalk/edx-platform,teltek/edx-platform,Stanford-Online/edx-platform,mbareta/edx-platform-ft,EDUlib/edx-platform,eduNEXT/edunext-platform,prarthitm/edxplatform,longmen21/edx-platform,xingyepei/edx-platform,romain-li/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,simbs/edx-platform,BehavioralInsightsTeam/edx-platform,cognitiveclass/edx-platform,jolyonb/edx-platform,pepeportela/edx-platform,proversity-org/edx-platform,mbareta/edx-platform-ft,proversity-org/edx-platform,IndonesiaX/edx-platform,iivic/BoiseStateX,cpennington/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,xingyepei/edx-platform,hamzehd/edx-platform,zhenzhai/edx-platform,ESOedX/edx-platform,miptliot/edx-platform,mitocw/edx-platform,eduNEXT/edx-platform,Edraak/circleci-edx-platform,RPI-OPENEDX/edx-platform,jbzdak/edx-platform,JCBarahona/edX,jjmiranda/edx-platform,kursitet/edx-platform,fintech-circle/edx-platform,ampax/edx-platform,edx-solutions/edx-platform,alu042/edx-platform,Edraak/circleci-edx-platform,raccoongang/edx-platform,TeachAtTUM/edx-platform,itsjeyd/edx-platform,ampax/edx-platform,JioEducation/edx-platform,jolyonb/edx-platform,UOMx/edx-platform,ESOedX/edx-platform,hastexo/edx-platform,iivic/BoiseStateX,Stanford-Online/edx-platform,mcgachey/edx-platform,gsehub/edx-platform,proversity-org/edx-platform,angelapper/edx-platform,CredoReference/edx-platform,CredoReference/edx-platform,zubair-arbi/edx-platform,solashirai/edx-platform,romain-li/edx-platform,cecep-edu/edx-platform,Ayub-Khan/edx-platform,franosincic/edx-platform,pepeportela/edx-platform,10clouds/edx-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,tanmaykm/edx-platform,appsembler/edx-platform,ZLLab-Mooc/edx-platform,eduNEXT/edunext-platform,gsehub/edx-platform,Edraak/edx-platform,solashirai/edx-platform,inares/edx-platform,angelapper/edx-platform,msegado/edx-platform,RPI-OPENEDX/edx-platform,JioEducation/edx-platform,tanmaykm/edx-platform,kursitet/edx-platform,nttks/edx-platform,pabloborrego93/edx-platform,simbs/edx-platform,longmen21/edx-platform,MakeHer/edx-platform,gymnasium/edx-platform,MakeHer/edx-platform,ovnicraft/edx-platform,a-parhom/edx-platform,shabab12/edx-platform,xingyepei/edx-platform,EDUlib/edx-platform,kmoocdev2/edx-platform,jzoldak/edx-platform,defance/edx-platform,franosincic/edx-platform,philanthropy-u/edx-platform,longmen21/edx-platform,prarthitm/edxplatform,eduNEXT/edx-platform,gsehub/edx-platform,itsjeyd/edx-platform,angelapper/edx-platform,gymnasium/edx-platform,analyseuc3m/ANALYSE-v1,zhenzhai/edx-platform,analyseuc3m/ANALYSE-v1,edx/edx-platform,simbs/edx-platform,devs1991/test_edx_docmode,kmoocdev2/edx-platform,alexthered/kienhoc-platform,Stanford-Online/edx-platform,teltek/edx-platform,msegado/edx-platform,Edraak/edraak-platform,Ayub-Khan/edx-platform,alexthered/kienhoc-platform,pepeportela/edx-platform,bigdatauniversity/edx-platform,mitocw/edx-platform,romain-li/edx-platform,cognitiveclass/edx-platform,waheedahmed/edx-platform,BehavioralInsightsTeam/edx-platform,Edraak/edx-platform,synergeticsedx/deployment-wipro,cecep-edu/edx-platform,deepsrijit1105/edx-platform,Edraak/edx-platform,devs1991/test_edx_docmode,louyihua/edx-platform,Ayub-Khan/edx-platform,procangroup/edx-platform,jolyonb/edx-platform,shurihell/testasia,10clouds/edx-platform,pepeportela/edx-platform,jolyonb/edx-platform,eduNEXT/edx-platform,alexthered/kienhoc-platform,IONISx/edx-platform,TeachAtTUM/edx-platform,antoviaque/edx-platform,gymnasium/edx-platform,cpennington/edx-platform,hamzehd/edx-platform,simbs/edx-platform,MakeHer/edx-platform,jjmiranda/edx-platform,doganov/edx-platform,naresh21/synergetics-edx-platform,ahmedaljazzar/edx-platform,10clouds/edx-platform,chrisndodge/edx-platform,naresh21/synergetics-edx-platform,Livit/Livit.Learn.EdX,cecep-edu/edx-platform,zubair-arbi/edx-platform,deepsrijit1105/edx-platform,ovnicraft/edx-platform,mitocw/edx-platform,ovnicraft/edx-platform,marcore/edx-platform,gymnasium/edx-platform,Edraak/circleci-edx-platform,iivic/BoiseStateX,louyihua/edx-platform,Edraak/edraak-platform,xingyepei/edx-platform,ahmadiga/min_edx,inares/edx-platform,marcore/edx-platform,angelapper/edx-platform,alu042/edx-platform,zhenzhai/edx-platform,procangroup/edx-platform,Endika/edx-platform,TeachAtTUM/edx-platform,jjmiranda/edx-platform,mcgachey/edx-platform,jbzdak/edx-platform,CourseTalk/edx-platform,miptliot/edx-platform,wwj718/edx-platform,ESOedX/edx-platform,raccoongang/edx-platform,pabloborrego93/edx-platform,UOMx/edx-platform,iivic/BoiseStateX,mcgachey/edx-platform,raccoongang/edx-platform,kursitet/edx-platform,eduNEXT/edunext-platform,ahmadiga/min_edx,shabab12/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,caesar2164/edx-platform,Livit/Livit.Learn.EdX,romain-li/edx-platform,shurihell/testasia,halvertoluke/edx-platform,BehavioralInsightsTeam/edx-platform,halvertoluke/edx-platform,devs1991/test_edx_docmode,JioEducation/edx-platform,pomegranited/edx-platform,msegado/edx-platform,inares/edx-platform,UOMx/edx-platform,hastexo/edx-platform,amir-qayyum-khan/edx-platform,msegado/edx-platform,ESOedX/edx-platform,kursitet/edx-platform,arbrandes/edx-platform,franosincic/edx-platform,ovnicraft/edx-platform,IndonesiaX/edx-platform,pabloborrego93/edx-platform,hastexo/edx-platform,mitocw/edx-platform,doganov/edx-platform,louyihua/edx-platform,bigdatauniversity/edx-platform,doganov/edx-platform,philanthropy-u/edx-platform,zhenzhai/edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,ahmadiga/min_edx,miptliot/edx-platform,Edraak/edx-platform,edx/edx-platform,analyseuc3m/ANALYSE-v1,CredoReference/edx-platform,analyseuc3m/ANALYSE-v1,devs1991/test_edx_docmode,shabab12/edx-platform,Lektorium-LLC/edx-platform,fintech-circle/edx-platform,EDUlib/edx-platform
|
089d7a26f527e7a38ed00348f8b9c644d0b5134b
|
tests/test_metric_expression.py
|
tests/test_metric_expression.py
|
from datetime import datetime
import json
import unittest
from pypercube.cube import Cube
from pypercube.cube import Query
from pypercube.metric import Metric
from pypercube.expression import EventExpression
from pypercube.expression import Sum
from tests import MockResponse
from tests import mock_get
class TestEventExpressions(unittest.TestCase):
def setUp(self):
self.c = Cube('unittest')
def test_no_matching_metrics(self):
mock_response = MockResponse(ok=True, status_code='200',
content="[]", json=[])
Query.get = mock_get(mock_response)
event = EventExpression('test')
metric = Sum(event)
response = self.c.get_metric(metric, limit=10)
self.assertEqual(len(response), 0)
def test_single_matching_metric(self):
timestamp = datetime.utcnow()
expected_content = '[{"time":"' + timestamp.isoformat() + '", '\
'"value":100}]'
mock_response = MockResponse(ok=True, status_code='200',
content=expected_content, json=json.loads(expected_content))
Query.get = mock_get(mock_response)
event = EventExpression('test')
metric = Sum(event)
response = self.c.get_metric(metric, limit=1)
self.assertEqual(len(response), 1)
self.assertTrue(isinstance(response[0], Metric))
self.assertEqual(response[0].time, timestamp)
self.assertEqual(response[0].value, 100)
|
from datetime import datetime
import json
import unittest
from pypercube.cube import Cube
from pypercube.cube import Query
from pypercube.metric import Metric
from pypercube.expression import EventExpression
from pypercube.expression import Sum
from tests import MockResponse
from tests import mock_get
class TestMetricExpressions(unittest.TestCase):
def setUp(self):
self.c = Cube('unittest')
def test_no_matching_metrics(self):
mock_response = MockResponse(ok=True, status_code='200',
content="[]", json=[])
Query.get = mock_get(mock_response)
event = EventExpression('test')
metric = Sum(event)
response = self.c.get_metric(metric, limit=10)
self.assertEqual(len(response), 0)
def test_single_matching_metric(self):
timestamp = datetime.utcnow()
expected_content = '[{"time":"' + timestamp.isoformat() + '", '\
'"value":100}]'
mock_response = MockResponse(ok=True, status_code='200',
content=expected_content, json=json.loads(expected_content))
Query.get = mock_get(mock_response)
event = EventExpression('test')
metric = Sum(event)
response = self.c.get_metric(metric, limit=1)
self.assertEqual(len(response), 1)
self.assertTrue(isinstance(response[0], Metric))
self.assertEqual(response[0].time, timestamp)
self.assertEqual(response[0].value, 100)
|
Fix metric expression test name.
|
Fix metric expression test name.
|
Python
|
bsd-3-clause
|
sbuss/pypercube
|
4e704ab26f4ed326dd4ca1c04e1355b619109d09
|
tests/test_wfgenerator.py
|
tests/test_wfgenerator.py
|
import pytest
from nlppln import WorkflowGenerator
class TestWFGenerator(object):
@pytest.fixture
def wf(self):
return WorkflowGenerator()
def test_steps_in_library(self, wf):
assert len(wf.steps_library) > 0
|
import pytest
from nlppln import WorkflowGenerator
class TestWFGenerator(object):
@pytest.fixture
def wf(self):
return WorkflowGenerator()
def test_steps_in_library(self, wf):
assert len(wf.steps_library.steps) > 0
|
Fix reference to new StepsLibrary object
|
Fix reference to new StepsLibrary object
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
351e88dd95db81418cc6d2deb4a943e2659292bc
|
wsgi.py
|
wsgi.py
|
import os
import sys
import site
VIRTUALENV="venv"
# Get site root from this file's location:
SITE_ROOT=os.path.abspath(os.path.dirname(__file__))
# Add virtualenv path to site package root:
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages"))
site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages"))
# Add site package root to start of pythonpath:
sys.path.insert(0, SITE_ROOT)
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
import os
# celery should now be available (on the virtualenv path)
import djcelery
djcelery.setup_loader()
# Point Django to settings file:
os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings'
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Remove virtualenv setup from WSGI entrypoint
|
Remove virtualenv setup from WSGI entrypoint
Handle it in front end server instead.
|
Python
|
agpl-3.0
|
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
|
da5ca6baf75b2230e3e8a62066bebaa96a16bf3d
|
test/server.py
|
test/server.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Server used for tests
"""
import sys
import os
# ensure sys knows about pyqode.core in the test env
sys.path.insert(0, os.getcwd())
from pyqode.core import backend
if __name__ == '__main__':
print('Server started')
print(sys.path)
print(os.getcwd())
backend.CodeCompletionWorker.providers.append(
backend.DocumentWordsProvider())
backend.serve_forever()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Server used for tests
"""
import sys
import os
# ensure sys knows about pyqode.core in the test env
sys.path.insert(0, os.getcwd())
sys.path.insert(0, os.path.abspath(".."))
from pyqode.core import backend
if __name__ == '__main__':
print('Server started')
print(sys.path)
print(os.getcwd())
backend.CodeCompletionWorker.providers.append(
backend.DocumentWordsProvider())
backend.serve_forever()
|
Fix test suite on travis (restore previous path config)
|
Fix test suite on travis (restore previous path config)
|
Python
|
mit
|
pyQode/pyqode.core,zwadar/pyqode.core,pyQode/pyqode.core
|
9a81d58bfb1088c8c6286c65150cd13c54c0b4c5
|
wagtail/wagtailredirects/middleware.py
|
wagtail/wagtailredirects/middleware.py
|
from django import http
from wagtail.wagtailredirects import models
# Originally pinched from: https://github.com/django/django/blob/master/django/contrib/redirects/middleware.py
class RedirectMiddleware(object):
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
except:
pass
return response
|
from django import http
from wagtail.wagtailredirects import models
# Originally pinched from: https://github.com/django/django/blob/master/django/contrib/redirects/middleware.py
class RedirectMiddleware(object):
def process_response(self, request, response):
# No need to check for a redirect for non-404 responses.
if response.status_code != 404:
return response
# Get the path
path = models.Redirect.normalise_path(request.get_full_path())
# Find redirect
try:
redirect = models.Redirect.get_for_site(request.site).get(old_path=path)
except models.Redirect.DoesNotExist:
# No redirect found, return the 400 page
return response
if redirect.is_permanent:
return http.HttpResponsePermanentRedirect(redirect.link)
else:
return http.HttpResponseRedirect(redirect.link)
|
Refactor out a bare except: statement
|
Refactor out a bare except: statement
It now catches `Redirect.DoesNotExist`, returning the normal 404 page if
no redirect is found. Any other exception should not be caught here.
|
Python
|
bsd-3-clause
|
rjsproxy/wagtail,jnns/wagtail,chrxr/wagtail,Klaudit/wagtail,iansprice/wagtail,kaedroho/wagtail,wagtail/wagtail,mixxorz/wagtail,kurtrwall/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,chrxr/wagtail,mayapurmedia/wagtail,JoshBarr/wagtail,Klaudit/wagtail,torchbox/wagtail,nrsimha/wagtail,nimasmi/wagtail,iansprice/wagtail,hamsterbacke23/wagtail,FlipperPA/wagtail,Toshakins/wagtail,hanpama/wagtail,Toshakins/wagtail,takeflight/wagtail,takeshineshiro/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,nutztherookie/wagtail,hamsterbacke23/wagtail,gogobook/wagtail,rjsproxy/wagtail,jnns/wagtail,kurtw/wagtail,gasman/wagtail,hanpama/wagtail,nealtodd/wagtail,torchbox/wagtail,Pennebaker/wagtail,hanpama/wagtail,kurtw/wagtail,Pennebaker/wagtail,inonit/wagtail,takeshineshiro/wagtail,takeflight/wagtail,kaedroho/wagtail,rjsproxy/wagtail,nilnvoid/wagtail,timorieber/wagtail,JoshBarr/wagtail,quru/wagtail,Pennebaker/wagtail,serzans/wagtail,nealtodd/wagtail,hamsterbacke23/wagtail,thenewguy/wagtail,gasman/wagtail,davecranwell/wagtail,thenewguy/wagtail,davecranwell/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,mikedingjan/wagtail,Tivix/wagtail,tangentlabs/wagtail,nrsimha/wagtail,rsalmaso/wagtail,takeflight/wagtail,takeflight/wagtail,nealtodd/wagtail,timorieber/wagtail,kurtw/wagtail,zerolab/wagtail,JoshBarr/wagtail,FlipperPA/wagtail,nrsimha/wagtail,nilnvoid/wagtail,nutztherookie/wagtail,gogobook/wagtail,mayapurmedia/wagtail,Klaudit/wagtail,thenewguy/wagtail,wagtail/wagtail,nimasmi/wagtail,chrxr/wagtail,rsalmaso/wagtail,rjsproxy/wagtail,davecranwell/wagtail,Toshakins/wagtail,takeshineshiro/wagtail,quru/wagtail,kurtrwall/wagtail,nimasmi/wagtail,JoshBarr/wagtail,serzans/wagtail,thenewguy/wagtail,iansprice/wagtail,nutztherookie/wagtail,hanpama/wagtail,gogobook/wagtail,tangentlabs/wagtail,kaedroho/wagtail,Pennebaker/wagtail,jnns/wagtail,mikedingjan/wagtail,Klaudit/wagtail,iansprice/wagtail,kurtw/wagtail,timorieber/wagtail,nealtodd/wagtail,inonit/wagtail,Tivix/wagtail,nilnvoid/wagtail,Tivix/wagtail,quru/wagtail,rsalmaso/wagtail,wagtail/wagtail,gasman/wagtail,jnns/wagtail,FlipperPA/wagtail,davecranwell/wagtail,takeshineshiro/wagtail,zerolab/wagtail,quru/wagtail,inonit/wagtail,wagtail/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,gasman/wagtail,kaedroho/wagtail,rsalmaso/wagtail,gasman/wagtail,nrsimha/wagtail,serzans/wagtail,nimasmi/wagtail,inonit/wagtail,rsalmaso/wagtail,wagtail/wagtail,mixxorz/wagtail,timorieber/wagtail,Tivix/wagtail,serzans/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,tangentlabs/wagtail,torchbox/wagtail,zerolab/wagtail,mixxorz/wagtail,thenewguy/wagtail,zerolab/wagtail,Toshakins/wagtail,chrxr/wagtail,torchbox/wagtail,mixxorz/wagtail,gogobook/wagtail,zerolab/wagtail,mixxorz/wagtail,mayapurmedia/wagtail,nilnvoid/wagtail
|
60efbb9b6b70036b72f3c756139524c4ca7698d2
|
carepoint/models/cph/fdb_gcn_seq.py
|
carepoint/models/cph/fdb_gcn_seq.py
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from carepoint import Carepoint
from sqlalchemy import (Column,
Integer,
String,
Boolean,
)
class FdbGcnSeq(Carepoint.BASE):
__tablename__ = 'fdrgcnseq'
__dbname__ = 'cph'
gcn_seqno = Column(Integer, primary_key=True)
hic3 = Column(String)
hicl_seqno = Column(Integer)
gcdf = Column(String)
gcrt = Column(String)
str = Column(String)
gtc = Column(Integer)
tc = Column(Integer)
dcc = Column(Integer)
gcnseq_gi = Column(Integer)
gender = Column(Integer)
hic3_seqn = Column(Integer)
str60 = Column(String)
update_yn = Column(Boolean)
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from carepoint import Carepoint
from sqlalchemy import (Column,
Integer,
String,
Boolean,
ForeignKey,
)
class FdbGcnSeq(Carepoint.BASE):
__tablename__ = 'fdrgcnseq'
__dbname__ = 'cph'
gcn_seqno = Column(Integer, primary_key=True)
hic3 = Column(String)
hicl_seqno = Column(Integer)
gcdf = Column(
String,
ForeignKey('fdrdosed.gcdf'),
)
gcrt = Column(
String,
ForeignKey('fdrrouted.gcrt'),
)
str = Column(String)
gtc = Column(Integer)
tc = Column(Integer)
dcc = Column(Integer)
gcnseq_gi = Column(Integer)
gender = Column(Integer)
hic3_seqn = Column(Integer)
str60 = Column(String)
update_yn = Column(Boolean)
|
Add foreign keys for form and route in Fdb Gcn Seq in carepoint cph
|
Add foreign keys for form and route in Fdb Gcn Seq in carepoint cph
|
Python
|
mit
|
laslabs/Python-Carepoint
|
628d777e3751ec8e38f1b98f558799b28cda1569
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
src/services/TemperatureMonitor/TemperatureMonitor.py
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS), observers=sys.argv[1:])
tempMonitor.run()
|
import sys
from src.TemperatureMonitor import TemperatureMonitor
from src.temperature import TemperatureSensor
import argparse
parser = argparse.ArgumentParser(description='Broadcast temperatures to URLs')
parser.add_argument('observers', metavar='N', type=str, nargs='+',
help='the observers', default=())
parser.add_argument("-i", "--interval", type=int, help="the period between testing the temperature", default=60)
parser.add_argument("-s", "--smoothing", type=int, help="the number of samples to average when broadcasting a result", default=60)
args = parser.parse_args()
SENSOR_ADDRESS = 0x48
tempMonitor = TemperatureMonitor(TemperatureSensor(SENSOR_ADDRESS),
interval=args.interval,
smoothing=args.smoothing,
observers=args.observers)
tempMonitor.run()
|
Allow Control of Interval and Observers
|
Allow Control of Interval and Observers
|
Python
|
mit
|
IAPark/PITherm
|
f8c9cb7d353680f48146d0b37e01ac6761ad7904
|
example/bayesian-dark-knowledge/random_num_generator_bug.py
|
example/bayesian-dark-knowledge/random_num_generator_bug.py
|
import mxnet as mx
import mxnet.ndarray as nd
for i in range(1000):
noise = mx.random.normal(0,10,(i,i),ctx=mx.gpu())
|
import mxnet as mx
mx.random.normal(0,10,(3,3), ctx=mx.gpu()).asnumpy()
|
Update Bug for Normal Genrator
|
Update Bug for Normal Genrator
|
Python
|
apache-2.0
|
sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet
|
b77956a993f7f703626dbc9fc85003d6840b24fe
|
partner_compassion/models/partner_bank_compassion.py
|
partner_compassion/models/partner_bank_compassion.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Steve Ferry
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, _
# pylint: disable=C8107
class ResPartnerBank(models.Model):
""" This class upgrade the partners.bank to match Compassion needs.
"""
_inherit = 'res.partner.bank'
@api.model
def create(self, data):
"""Override function to notify creation in a message
"""
result = super(ResPartnerBank, self).create(data)
part = result.partner_id
part.message_post(_("<b>Account number: </b>" + result.acc_number),
_("New account created"), 'comment')
return result
@api.multi
def unlink(self):
"""Override function to notify delte in a message
"""
for account in self:
part = account.partner_id
part.message_post(_("<b>Account number: </b>" +
account.acc_number),
_("Account deleted"), 'comment')
result = super(ResPartnerBank, self).unlink()
return result
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Steve Ferry
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, _
# pylint: disable=C8107
class ResPartnerBank(models.Model):
""" This class upgrade the partners.bank to match Compassion needs.
"""
_inherit = 'res.partner.bank'
@api.model
def create(self, data):
"""Override function to notify creation in a message
"""
result = super(ResPartnerBank, self).create(data)
part = result.partner_id
if part:
part.message_post(_("<b>Account number: </b>" + result.acc_number),
_("New account created"), 'comment')
return result
@api.multi
def unlink(self):
"""Override function to notify delte in a message
"""
for account in self:
part = account.partner_id
part.message_post(_("<b>Account number: </b>" +
account.acc_number),
_("Account deleted"), 'comment')
result = super(ResPartnerBank, self).unlink()
return result
|
FIX only post message if a partner is existent
|
FIX only post message if a partner is existent
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland
|
031d31c65b66dafe15470aeefe6b2a3240bb4969
|
pysis/__init__.py
|
pysis/__init__.py
|
# -*- coding: utf-8 -*-
import os
ISIS_ROOT = os.environ.get('ISISROOT')
if ISIS_ROOT is None:
print 'Warning! ISISROOT is not defined. Bitch.'
(ISIS_VERSION, ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = 5 * (None,)
else:
with open(filename) as _f:
ISIS_VERSION = _f.readline().strip()
(ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = map(int, version.split('.'))
def require_isis_version(major, minor=None, patch=None, build=None):
err_msg = 'Version %s.%s.%s.%s of isis required (%s found).'
err = Exception(err_msg % (major, minor, patch, build, ISIS_VERSION))
if major != ISIS_VERSION_MAJOR:
raise err
if minor is not None and minor != ISIS_VERSION_MINOR:
raise err
if patch is not None and patch != ISIS_VERSION_PATCH:
raise err
if build is not None and build != ISIS_VERSION_BUILD:
raise err
|
# -*- coding: utf-8 -*-
import os, sys
ISIS_ROOT = os.environ.get('ISISROOT')
if ISIS_ROOT is None:
sys.stderr.write('Warning! ISISROOT is not defined. Bitch.\n')
(ISIS_VERSION, ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = 5 * (None,)
else:
with open(filename) as _f:
ISIS_VERSION = _f.readline().strip()
(ISIS_VERSION_MAJOR, ISIS_VERSION_MINOR,
ISIS_VERSION_PATCH, ISIS_VERSION_BUILD) = map(int, version.split('.'))
def require_isis_version(major, minor=None, patch=None, build=None):
err_msg = 'Version %s.%s.%s.%s of isis required (%s found).'
err = Exception(err_msg % (major, minor, patch, build, ISIS_VERSION))
if major != ISIS_VERSION_MAJOR:
raise err
if minor is not None and minor != ISIS_VERSION_MINOR:
raise err
if patch is not None and patch != ISIS_VERSION_PATCH:
raise err
if build is not None and build != ISIS_VERSION_BUILD:
raise err
|
Write warning to std err instead.
|
Write warning to std err instead.
|
Python
|
bsd-3-clause
|
wtolson/pysis,wtolson/pysis,michaelaye/Pysis,michaelaye/Pysis
|
040324578680a26f3816aef6f05a731be54a377d
|
pyroSAR/tests/test_dev_config.py
|
pyroSAR/tests/test_dev_config.py
|
import pytest
from pyroSAR._dev_config import Storage, LOOKUP, URL, STORAGE
class TestStorage:
def test_insert(self):
storage = Storage(a=1, b=2)
assert storage.a == 1
assert storage.b == 2
class TestLookup:
def test_suffix(self):
assert LOOKUP.snap.suffix[0]['Apply-Orbit-File'] == 'Orb'
assert LOOKUP.snap.suffix[0]['Terrain-Correction'] == 'TC'
def test_attributes(self):
assert LOOKUP.attributes['sensor'] == 'TEXT'
assert LOOKUP.attributes['vh'] == 'INTEGER'
class TestSTORAGE:
def test_STORAGE_URL(self):
assert STORAGE.URL.dem.ace == URL.dem.ace
assert STORAGE.URL.orbit.doris == URL.orbit.doris
assert STORAGE.URL.auxcal.ers == URL.auxcal.ers
def test_STORAGE_LOOKUP(self):
assert LOOKUP.snap.suffix[0]['Apply-Orbit-File'] == STORAGE.LOOKUP.snap.suffix[0]['Apply-Orbit-File']
assert LOOKUP.snap.suffix[0]['Terrain-Correction'] == STORAGE.LOOKUP.snap.suffix[0]['Terrain-Correction'] == 'TC'
assert LOOKUP.attributes['sensor'] == STORAGE.LOOKUP.attributes['sensor']
assert LOOKUP.attributes['vh'] == STORAGE.LOOKUP.attributes['vh']
|
import pytest
from pyroSAR._dev_config import Storage, LOOKUP, URL, STORAGE
class TestStorage:
def test_insert(self):
storage = Storage(a=1, b=2)
assert storage.a == 1
assert storage.b == 2
class TestLookup:
def test_suffix(self):
assert LOOKUP.snap.suffix['Apply-Orbit-File'] == 'Orb'
assert LOOKUP.snap.suffix['Terrain-Correction'] == 'TC'
def test_attributes(self):
assert LOOKUP.attributes['sensor'] == 'TEXT'
assert LOOKUP.attributes['vh'] == 'INTEGER'
class TestSTORAGE:
def test_STORAGE_URL(self):
assert STORAGE.URL.dem.ace == URL.dem.ace
assert STORAGE.URL.orbit.doris == URL.orbit.doris
assert STORAGE.URL.auxcal.ers == URL.auxcal.ers
def test_STORAGE_LOOKUP(self):
assert LOOKUP.snap.suffix['Apply-Orbit-File'] == STORAGE.LOOKUP.snap.suffix['Apply-Orbit-File']
assert LOOKUP.snap.suffix['Terrain-Correction'] == STORAGE.LOOKUP.snap.suffix['Terrain-Correction'] == 'TC'
assert LOOKUP.attributes['sensor'] == STORAGE.LOOKUP.attributes['sensor']
assert LOOKUP.attributes['vh'] == STORAGE.LOOKUP.attributes['vh']
|
Update due to changes in LOOKUP.
|
Update due to changes in LOOKUP.
|
Python
|
mit
|
johntruckenbrodt/pyroSAR,johntruckenbrodt/pyroSAR
|
63afb46b7a39881c3a3655af645d5414bdd730ea
|
edumed/forum.py
|
edumed/forum.py
|
from pybb.permissions import DefaultPermissionHandler
class ForumPermissionHandler(DefaultPermissionHandler):
def may_post_as_admin(self, user):
""" return True if `user` may post as admin """
return False
|
from pybb.permissions import DefaultPermissionHandler
class ForumPermissionHandler(DefaultPermissionHandler):
def may_post_as_admin(self, user):
""" return True if `user` may post as admin """
return False
def may_create_topic(self, user, forum):
""" return True if `user` is allowed to create a new topic in `forum` """
return user.is_authenticated()
def may_create_post(self, user, topic):
""" return True if `user` is allowed to create a new post in `topic` """
if topic.forum.hidden and (not user.is_staff):
# if topic is hidden, only staff may post
return False
if topic.closed and (not user.is_staff):
# if topic is closed, only staff may post
return False
return user.is_authenticated()
|
Allow for authenticated non super users to create posts and topics
|
Allow for authenticated non super users to create posts and topics
|
Python
|
agpl-3.0
|
fnp/edumed,fnp/edumed,fnp/edumed
|
ff63299cde0fe34fe3bfdac16593e1a0a989bec4
|
Hydv2/ScreenTools.py
|
Hydv2/ScreenTools.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'Olivier Larrieu'
from gtk import gdk
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
width = gdk.screen_width()
height = gdk.screen_height()
return {'width': width, 'height': height}
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'Olivier Larrieu'
class ScreenProperties(object):
"""
Usefull to get basic screen informations
"""
@classmethod
def screen_dimension(cls):
"""
Return a dic with the screen height and screen width
"""
from Xlib import display
display = display.Display()
root = display.screen().root
desktop = root.get_geometry()
return {'width': desktop.width, 'height': desktop.height}
|
Use Xlib instead of gtk to get screen width and screen height This limit dependances
|
Use Xlib instead of gtk to get screen width and screen height
This limit dependances
|
Python
|
artistic-2.0
|
OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL,OlivierLarrieu/HYDV2_EFL
|
7ce51c694e44e8503acd86de0f90dbc4078f4b82
|
user_deletion/managers.py
|
user_deletion/managers.py
|
from dateutil.relativedelta import relativedelta
from django.apps import apps
from django.utils import timezone
user_deletion_config = apps.get_app_config('user_deletion')
class UserDeletionManagerMixin:
def users_to_notify(self):
"""Finds all users who have been inactive and not yet notified."""
inactive_boundary = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_NOTIFICATION,
)
return self.filter(last_login__lte=inactive_boundary, notified=False)
def users_to_delete(self):
"""Finds all users who have been inactive and were notified."""
one_year = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_DELETION,
)
return self.filter(last_login__lte=one_year, notified=True)
|
from dateutil.relativedelta import relativedelta
from django.apps import apps
from django.utils import timezone
user_deletion_config = apps.get_app_config('user_deletion')
class UserDeletionManagerMixin:
def users_to_notify(self):
"""Finds all users who have been inactive and not yet notified."""
threshold = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_NOTIFICATION,
)
return self.filter(last_login__lte=threshold, notified=False)
def users_to_delete(self):
"""Finds all users who have been inactive and were notified."""
threshold = timezone.now() - relativedelta(
months=user_deletion_config.MONTH_DELETION,
)
return self.filter(last_login__lte=threshold, notified=True)
|
Use threshold for time boundary in manager
|
Use threshold for time boundary in manager
|
Python
|
bsd-2-clause
|
incuna/django-user-deletion
|
58f8f4881a9e97206ddf49ea6cfb7f48dd34bfb3
|
example/urls.py
|
example/urls.py
|
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r"^$", TemplateView.as_view(template_name="homepage.html")),
url(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
from django.urls import path, re_path
from django.views.generic import TemplateView
urlpatterns = [
path('', TemplateView.as_view(template_name="homepage.html")),
re_path(r"^remote.html$", TemplateView.as_view(template_name="remote.html"), name="remote.html"),
]
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Upgrade code to Python 3.6+, Django 2.2 and remove deprecations
|
Python
|
bsd-3-clause
|
bashu/django-fancybox,bashu/django-fancybox
|
9912974a283912acd31fa4ee85de2fb44c2cf862
|
nn/model.py
|
nn/model.py
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Tensor: # scalar loss
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
import abc
import tensorflow as tf
class Model(metaclass=abc.ABCMeta):
@abc.astractmethod
def __init__(self, **hyperparameters_and_initial_parameters):
return NotImplemented
@abc.astractmethod
def train(self, *input_tensors) -> tf.Operation: # training operation
return NotImplemented
@abc.astractmethod
def test(self, *input_tensors) -> tf.Tensor: # labels
return NotImplemented
|
Fix type annotation for Model.train()
|
Fix type annotation for Model.train()
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
33775cd9e740ac70e9213c37825077516e683e55
|
pyatv/support/device_info.py
|
pyatv/support/device_info.py
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
"""Lookup methods for device data."""
import re
from pyatv.const import DeviceModel
_MODEL_LIST = {
"AppleTV2,1": DeviceModel.Gen2,
"AppleTV3,1": DeviceModel.Gen3,
"AppleTV3,2": DeviceModel.Gen3,
"AppleTV5,3": DeviceModel.Gen4,
"AppleTV6,2": DeviceModel.Gen4K,
}
# Incomplete list here!
_VERSION_LIST = {
"17J586": "13.0",
"17K82": "13.2",
"17K449": "13.3",
"17K795": "13.3.1",
"17L256": "13.4",
}
def lookup_model(identifier):
"""Lookup device model from identifier."""
return _MODEL_LIST.get(identifier, DeviceModel.Unknown)
def lookup_version(build):
"""Lookup OS version from build."""
if not build:
return None
version = _VERSION_LIST.get(build)
if version:
return version
match = re.match(r"^(\d+)[A-Z]", build)
if match:
base = int(match.groups()[0])
# 17A123 corresponds to tvOS 13.x, 16A123 to tvOS 12.x and so on
return str(base - 4) + ".x"
return None
|
Add tvOS 13.4 build number
|
mrp: Add tvOS 13.4 build number
|
Python
|
mit
|
postlund/pyatv,postlund/pyatv
|
694a85c71c315ccdb3e2f2946f86ce95936ee684
|
sahara_dashboard/api/__init__.py
|
sahara_dashboard/api/__init__.py
|
from sahara_dashboard.api import sahara
__all__ = [
"sahara"
]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara_dashboard.api import sahara
__all__ = [
"sahara"
]
|
Add licensing info in source file.
|
Add licensing info in source file.
[H102 H103] Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I4f9ead44b5efa3616086f5a62a2e0e68854baf44
|
Python
|
apache-2.0
|
openstack/sahara-dashboard,openstack/sahara-dashboard,openstack/sahara-dashboard,openstack/sahara-dashboard
|
3016872091618c78f60e17338f5581856a17f7af
|
endpoints/tests/test_utils.py
|
endpoints/tests/test_utils.py
|
from utils.testcase import EndpointTestCase
from rest_framework import status
from rest_framework.test import APIClient
from django.utils.translation import ugettext_lazy as _
import sure
class TestUtils(EndpointTestCase):
def test_fail_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer ' + 'wrongToken')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token.')})
def test_bad_formatted_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer token1 token2')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. Token string should not contain spaces.')})
client.credentials(HTTP_AUTHORIZATION='token')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
|
from utils.testcase import EndpointTestCase
from rest_framework import status
from rest_framework.test import APIClient
from django.utils.translation import ugettext_lazy as _
import sure
class TestUtils(EndpointTestCase):
def test_fail_authentication(self):
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Bearer ' + 'wrongToken')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token.')})
def test_bad_formatted_authentication(self):
client = APIClient()
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
client.credentials(HTTP_AUTHORIZATION='Bearer token1 token2')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. Token string should not contain spaces.')})
client.credentials(HTTP_AUTHORIZATION='token')
response = client.get('/room')
response.status_code.should.eql(status.HTTP_401_UNAUTHORIZED)
response.data.should.eql({'detail': _('Invalid token header. No credentials provided.')})
|
Add test for no HTTP_AUTHORIZATION header at all
|
Add test for no HTTP_AUTHORIZATION header at all
|
Python
|
mit
|
Amoki/Amoki-Music,Amoki/Amoki-Music,Amoki/Amoki-Music
|
5e2943b8e17ee753ddfafd1420c9e8155c496aba
|
example/tests/test_parsers.py
|
example/tests/test_parsers.py
|
import json
from django.test import TestCase
from io import BytesIO
from rest_framework_json_api.parsers import JSONParser
class TestJSONParser(TestCase):
def setUp(self):
class MockRequest(object):
def __init__(self):
self.method = 'GET'
request = MockRequest()
self.parser_context = {'request': request, 'kwargs': {}, 'view': 'BlogViewSet'}
data = {
'data': {
'id': 123,
'type': 'Blog'
},
'meta': {
'random_key': 'random_value'
}
}
self.string = json.dumps(data)
def test_parse_include_metadata(self):
parser = JSONParser()
stream = BytesIO(self.string.encode('utf-8'))
data = parser.parse(stream, None, self.parser_context)
self.assertEqual(data['_meta'], {'random_key': 'random_value'})
|
import json
from io import BytesIO
from django.test import TestCase
from rest_framework.exceptions import ParseError
from rest_framework_json_api.parsers import JSONParser
class TestJSONParser(TestCase):
def setUp(self):
class MockRequest(object):
def __init__(self):
self.method = 'GET'
request = MockRequest()
self.parser_context = {'request': request, 'kwargs': {}, 'view': 'BlogViewSet'}
data = {
'data': {
'id': 123,
'type': 'Blog'
},
'meta': {
'random_key': 'random_value'
}
}
self.string = json.dumps(data)
def test_parse_include_metadata(self):
parser = JSONParser()
stream = BytesIO(self.string.encode('utf-8'))
data = parser.parse(stream, None, self.parser_context)
self.assertEqual(data['_meta'], {'random_key': 'random_value'})
def test_parse_include_metadata(self):
parser = JSONParser()
string = json.dumps([])
stream = BytesIO(string.encode('utf-8'))
with self.assertRaises(ParseError):
parser.parse(stream, None, self.parser_context)
|
Test case for parsing invalid data.
|
Test case for parsing invalid data.
|
Python
|
bsd-2-clause
|
django-json-api/rest_framework_ember,Instawork/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api
|
4ee53117aa32b1ead7c979a49d7cb5709e18d8cc
|
dbaas/workflow/steps/util/deploy/check_dns.py
|
dbaas/workflow/steps/util/deploy/check_dns.py
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from util import check_nslookup
from util import get_credentials_for
from dbaas_dnsapi.models import DatabaseInfraDNSList
from dbaas_credentials.models import CredentialType
from ..base import BaseStep
from ....exceptions.error_codes import DBAAS_0005
LOG = logging.getLogger(__name__)
class CheckDns(BaseStep):
def __unicode__(self):
return "Waiting dns propagation..."
def do(self, workflow_dict):
try:
if not 'databaseinfra' in workflow_dict:
return False
dns_credentials = get_credentials_for(environment=workflow_dict['environment'],
credential_type=CredentialType.DNSAPI)
dns_list = DatabaseInfraDNSList.objects.filter(databaseinfra=workflow_dict['databaseinfra'].id)
for dns in dns_list:
LOG.info("Checking dns %s on %s" % (dns.dns, dns_credentials.project))
#check_nslookup(dns.dns, dns_credentials.project)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0005)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from util import check_nslookup
from util import get_credentials_for
from dbaas_dnsapi.models import DatabaseInfraDNSList
from dbaas_credentials.models import CredentialType
from ..base import BaseStep
from ....exceptions.error_codes import DBAAS_0005
LOG = logging.getLogger(__name__)
class CheckDns(BaseStep):
def __unicode__(self):
return "Waiting dns propagation..."
def do(self, workflow_dict):
try:
if not 'databaseinfra' in workflow_dict:
return False
dns_credentials = get_credentials_for(environment=workflow_dict['environment'],
credential_type=CredentialType.DNSAPI)
dns_list = DatabaseInfraDNSList.objects.filter(databaseinfra=workflow_dict['databaseinfra'].id)
for dns in dns_list:
LOG.info("Checking dns %s on %s" % (dns.dns, dns_credentials.project))
check_nslookup(dns.dns, dns_credentials.project)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0005)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
Remove comment from check nslookup
|
Remove comment from check nslookup
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
d2b4ec50442a00df85ef525cc82aca971b72eb86
|
erpnext/patches/v11_0/rename_field_max_days_allowed.py
|
erpnext/patches/v11_0/rename_field_max_days_allowed.py
|
import frappe
from frappe.model.utils.rename_field import rename_field
def execute():
frappe.reload_doc("hr", "doctype", "leave_type")
frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed")
|
import frappe
def execute():
frappe.db.sql("""
UPDATE `tabLeave Type`
SET max_days_allowed = '0'
WHERE trim(coalesce(max_days_allowed, '')) = ''
""")
frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
|
Set null values to '0' before changing column type
|
[fix] Set null values to '0' before changing column type
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
d3a2e344caa34f763f7e46710db5b9ddefe73c55
|
doc/mkapidoc.py
|
doc/mkapidoc.py
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AbstractMethod',
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AbstractMethod',
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.StreamAnalyzer',
'--exclude Exscript.protocols.OsGuesser',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
Hide StreamAnalyzer and OsGuesser from the API docs.
|
Hide StreamAnalyzer and OsGuesser from the API docs.
|
Python
|
mit
|
maximumG/exscript,knipknap/exscript,knipknap/exscript,maximumG/exscript
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.