commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
435e8fc4d9ad8c071a96e37e483fcbc194a94fc6
|
tests/integration/files/file/base/_modules/runtests_decorators.py
|
tests/integration/files/file/base/_modules/runtests_decorators.py
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import time
# Import Salt libs
import salt.utils.decorators
def _fallbackfunc():
return False, 'fallback'
def working_function():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(True)
def booldependsTrue():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(False)
def booldependsFalse():
return True
@salt.utils.decorators.depends('time')
def depends():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123')
def missing_depends():
return True
@salt.utils.decorators.depends('time', fallback_function=_fallbackfunc)
def depends_will_not_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc)
def missing_depends_will_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import time
# Import Salt libs
import salt.utils.decorators
def _fallbackfunc():
return False, 'fallback'
def working_function():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(True)
def booldependsTrue():
'''
CLI Example:
.. code-block:: bash
'''
return True
@salt.utils.decorators.depends(False)
def booldependsFalse():
return True
@salt.utils.decorators.depends('time')
def depends():
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123')
def missing_depends():
return True
@salt.utils.decorators.depends('time', fallback_function=_fallbackfunc)
def depends_will_not_fallback():
'''
CLI Example:
.. code-block:: bash
'''
ret = {'ret': True,
'time': time.time()}
return ret
@salt.utils.decorators.depends('time123', fallback_function=_fallbackfunc)
def missing_depends_will_fallback():
ret = {'ret': True,
'time': time.time()}
return ret
|
Fix tests: add module function docstring
|
Fix tests: add module function docstring
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
75a70e31791c523da6bf6b0ce4409a77f2784ed5
|
byceps/services/user/transfer/models.py
|
byceps/services/user/transfer/models.py
|
"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from dataclasses import dataclass
from typing import Optional
from ....typing import UserID
@dataclass(frozen=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
Change user transfer model from `attrs` to `dataclass`
|
Change user transfer model from `attrs` to `dataclass`
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
60a4da0ea090e95ad566743b5ceba874d051d8d9
|
pronto/serializers/obo.py
|
pronto/serializers/obo.py
|
import io
from typing import BinaryIO, ClassVar
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
format = "obo"
def dump(self, file):
writer = io.TextIOWrapper(file)
try:
# dump the header
if self.ont.metadata:
header = self._to_header_frame(self.ont.metadata)
file.write(str(header).encode("utf-8"))
if self.ont._terms or self.ont._typedefs:
file.write(b"\n")
# dump terms
if self.ont._terms:
for i, (id, data) in enumerate(self.ont._terms.items()):
frame = self._to_term_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._terms) - 1 or self.ont._relationships:
file.write(b"\n")
# dump typedefs
if self.ont._relationships:
for i, (id, data) in enumerate(self.ont._relationships.items()):
frame = self._to_typedef_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._relationships) - 1:
file.write(b"\n")
finally:
writer.detach()
|
import io
from typing import BinaryIO, ClassVar
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
format = "obo"
def dump(self, file):
writer = io.TextIOWrapper(file)
try:
# dump the header
if self.ont.metadata:
header = self._to_header_frame(self.ont.metadata)
file.write(str(header).encode("utf-8"))
if self.ont._terms or self.ont._relationships:
file.write(b"\n")
# dump terms
if self.ont._terms:
for i, (id, data) in enumerate(self.ont._terms.items()):
frame = self._to_term_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._terms) - 1 or self.ont._relationships:
file.write(b"\n")
# dump typedefs
if self.ont._relationships:
for i, (id, data) in enumerate(self.ont._relationships.items()):
frame = self._to_typedef_frame(data)
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._relationships) - 1:
file.write(b"\n")
finally:
writer.detach()
|
Fix bug in OboSerializer` causing `Ontology.dump` to crash
|
Fix bug in OboSerializer` causing `Ontology.dump` to crash
|
Python
|
mit
|
althonos/pronto
|
e51d35545d038b5cb7035cc74f39e4a5c2b0756a
|
thinglang/execution/classes.py
|
thinglang/execution/classes.py
|
from thinglang.lexer.symbols.base import LexicalIdentifier
class ThingInstance(object):
def __init__(self, cls):
self.cls = cls
self.methods = {
x.name: x for x in self.cls.children
}
self.members = {}
def __contains__(self, item):
return item in self.members or item in self.methods
def __getitem__(self, item):
return self.members.get(item) or self.methods.get(item)
def __str__(self):
return f'Thing<{self.cls}>(members={self.members}, methods={self.methods})'
|
from thinglang.lexer.symbols.base import LexicalIdentifier
class ThingInstance(object):
def __init__(self, cls):
self.cls = cls
self.methods = {
x.name: x for x in self.cls.children
}
self.members = {}
def __contains__(self, item):
return item in self.members or item in self.methods
def __getitem__(self, item):
return self.members.get(item) if item in self.members else self.methods[item]
def __str__(self):
return f'Thing<{self.cls}>(members={self.members}, methods={self.methods})'
|
Fix bug in ThingInstace __setitem__
|
Fix bug in ThingInstace __setitem__
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
428938d5775d973822a0f72aa2ce54ae9e14d429
|
main.py
|
main.py
|
#!/usr/bin/env python3
"""TODO:
* more flexible sorting options
* use -o to specify output file
* check more explicitly for errors in JSON files
"""
import json, sys
if len(sys.argv) > 1:
inFn = sys.argv[1]
with open(inFn, 'r') as f:
try:
defs = json.load(f)
except:
sys.exit('{} has a syntax error'.format(inFn))
sort = sorted(defs, key=str.lower)
print('# My Dictionary')
print('## Definitions')
curLetter = None
for k in sort:
l = k[0].upper()
if curLetter != l:
curLetter = l
print('### {}'.format(curLetter))
word = k[0].upper() + k[1:]
print('* *{}* - {}'.format(word, defs[k]))
|
#!/usr/bin/env python3
"""TODO:
* more flexible sorting options
* use -o to specify output file
* check more explicitly for errors in JSON files
"""
import json, sys
if len(sys.argv) > 1:
inFn = sys.argv[1]
with open(inFn, 'r') as f:
try:
defs = json.load(f)
except:
sys.exit('{} has a syntax error'.format(inFn))
sort = sorted(defs, key=str.lower)
print('# My Dictionary')
print('\n## Definitions')
curLetter = None
for k in sort:
l = k[0].upper()
if curLetter != l:
curLetter = l
print('\n### {}'.format(curLetter))
word = k[0].upper() + k[1:]
print('* *{}* - {}'.format(word, defs[k]))
|
Add newlines in output for formatting purposes
|
Add newlines in output for formatting purposes
|
Python
|
mit
|
JoshuaBrockschmidt/dictbuilder
|
d51bc62e18a3589ad840bbc26952bac14acaf264
|
main.py
|
main.py
|
#!/usr/bin/env python
import os
import jinja2
import webapp2
JINJA_ENVIRONMENT = jinja2.Environment(
loader = jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'html')),
extensions = ['jinja2.ext.autoescape']
)
class MainHandler(webapp2.RequestHandler):
def get(self):
values = { 'value': 'hello stack-instant' }
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render(values))
app = webapp2.WSGIApplication([
('/', MainHandler),
], debug = True)
|
#!/usr/bin/env python
import os
import jinja2
import webapp2
JINJA_ENVIRONMENT = jinja2.Environment(
loader = jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'html')),
extensions = ['jinja2.ext.autoescape']
)
class MainHandler(webapp2.RequestHandler):
def get(self):
self.response.write(JINJA_ENVIRONMENT.get_template('index.html').render())
app = webapp2.WSGIApplication([
('/', MainHandler),
], debug = True)
|
Remove template variables (not needed)
|
Remove template variables (not needed)
|
Python
|
bsd-3-clause
|
siddhantgoel/so-instant,siddhantgoel/so-instant
|
324f05e1cbffdad2da209a7ee515f1d9a32cf93b
|
main.py
|
main.py
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
# If user didn't provided id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
pprint(get_all())
else:
post_id = sys.argv[1]
if validate_id(post_id):
pprint(get_by_id(int(post_id)))
else:
print('Quitting')
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
print('Loading data')
# If user didn't provided id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
pprint(get_all())
else:
post_id = sys.argv[1]
if validate_id(post_id):
pprint(get_by_id(int(post_id)))
else:
print('Quitting')
|
Add message about starting loading data
|
Add message about starting loading data
|
Python
|
mit
|
sevazhidkov/rest-wrapper
|
42804d3182b9b7489583250856e31a8daaef5fa3
|
protolint/__init__.py
|
protolint/__init__.py
|
# -*- coding: utf-8 -*-
"""
protolint
~~~~~~~~~
"""
from . import cli
from . import linter
from . import output
__version__ = (1, 0, 0)
|
# -*- coding: utf-8 -*-
"""
protolint
~~~~~~~~~
"""
__version__ = (1, 0, 0)
from . import cli
from . import linter
from . import output
|
Fix CLI module during build
|
Fix CLI module during build
|
Python
|
mit
|
sgammon/codeclimate-protobuf,sgammon/codeclimate-protobuf
|
746d3e5aba7b4fb9bfd6c258c80b6a4565de4844
|
py/oldfart/handler.py
|
py/oldfart/handler.py
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean' anyone?)
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.send_error(500, 'Could not generate resource')
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
import http.server
import os
import oldfart.make
__all__ = ['make_http_request_handler_class']
def _send_head(self):
# FIXME: We die here if the directory doesn't exist ('make clean' anyone?)
path = self.translate_path(self.path)
target = os.path.relpath(path, self.maker.project_dir)
if not os.path.isdir(path):
retval, output = self.maker.make(target)
if retval == oldfart.make.FAILURE:
self.send_error(500, 'Could not generate resource')
return None
return super(self.__class__, self).send_head()
def make_http_request_handler_class(name, maker):
cls = type(name, (http.server.SimpleHTTPRequestHandler,), {
'maker': maker,
'send_head': _send_head
})
return cls
|
Stop handling after responding with 500
|
Bugfix: Stop handling after responding with 500
|
Python
|
bsd-3-clause
|
mjhanninen/oldfart,mjhanninen/oldfart,mjhanninen/oldfart
|
baa088e1e6cc503b9f0bcfbacf62327a6527550b
|
kmeldb/mounts.py
|
kmeldb/mounts.py
|
import os
def get_fat_mounts():
fat_mounts = []
mounts = os.popen('mount')
for line in mounts.readlines():
device, ign1, mount_point, ign2, filesystem, options = line.split()
if 'fat' in filesystem:
fat_mounts.append((mount_point, filesystem, device))
return fat_mounts
def main():
mounts = get_fat_mounts()
for mount in mounts:
print(mount)
if __name__ == '__main__':
main()
|
import os
try:
import psutil
except ImportError:
print('Falling back to parsing mounts output')
HAVE_PSUTIL = False
else:
print('Using psutil')
HAVE_PSUTIL = True
def get_fat_mounts():
# global HAVE_PSUTIL
# HAVE_PSUTIL = False
fat_mounts = []
if HAVE_PSUTIL:
partitions = psutil.disk_partitions()
for part in partitions:
if 'fat' in part.fstype:
fat_mounts.append((part.mountpoint, part.fstype, part.device))
else:
mounts = os.popen('mount')
for line in mounts.readlines():
device, ign1, mount_point, ign2, filesystem, options = line.split()
if 'fat' in filesystem:
fat_mounts.append((mount_point, filesystem, device))
return fat_mounts
def main():
mounts = get_fat_mounts()
for mount in mounts:
print(mount)
if __name__ == '__main__':
main()
|
Use psutil.disk_partitions to get FAT formatted partitions.
|
Use psutil.disk_partitions to get FAT formatted partitions.
|
Python
|
apache-2.0
|
chrrrisw/kmel_db,chrrrisw/kmel_db
|
8d9acd9447400608ddc2ab46948c7d05430b3e0b
|
reviewboard/hostingsvcs/tests/__init__.py
|
reviewboard/hostingsvcs/tests/__init__.py
|
from __future__ import unicode_literals
from reviewboard.hostingsvcs.tests.testcases import ServiceTests
__all__ = [
# Backwards-compatibility for third-party modules that used this import.
'ServiceTests',
]
|
Add a backwards-compatibility import for ServiceTests.
|
Add a backwards-compatibility import for ServiceTests.
We had a class, ServiceTests, which was a base testcase class for
hosting service unit tests. This was moved recently, which broke
extensions that provided hosting service unit tests based on this.
This restores the import for now. We may want to create something more
formal down the road.
Testing Done:
Review Board unit tests pass.
Power Pack unit tests pass.
Reviewed at https://reviews.reviewboard.org/r/7944/
|
Python
|
mit
|
davidt/reviewboard,chipx86/reviewboard,chipx86/reviewboard,davidt/reviewboard,davidt/reviewboard,chipx86/reviewboard,brennie/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,brennie/reviewboard,brennie/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,brennie/reviewboard,reviewboard/reviewboard,davidt/reviewboard,sgallagher/reviewboard,chipx86/reviewboard
|
|
3da48961d140d7d3909760603675785955856afc
|
recipes/search_indexes.py
|
recipes/search_indexes.py
|
import datetime
from haystack import indexes
from recipes.models import Recipe
class RecipeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
tags = indexes.MultiValueField()
popularity = indexes.DecimalField(model_attr='popularity')
# TODO: filter by added_by, is_public -- do in a custom search class
def get_model(self):
return Recipe
def index_queryset(self, using=None):
return self.get_model().objects.filter(date_added__lte=datetime.datetime.now())
def prepare(self, object):
self.prepared_data = super(RecipeIndex, self).prepare(object)
self.prepared_data['tags'] = [rtag.tag for rtag in object.tags.all()]
return self.prepared_data
|
import datetime
from django.utils import timezone
from haystack import indexes
from recipes.models import Recipe
class RecipeIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
tags = indexes.MultiValueField()
popularity = indexes.DecimalField(model_attr='popularity')
# TODO: filter by added_by, is_public -- do in a custom search class
def get_model(self):
return Recipe
def index_queryset(self, using=None):
return self.get_model().objects.filter(date_added__lte=timezone.now())
def prepare(self, object):
self.prepared_data = super(RecipeIndex, self).prepare(object)
self.prepared_data['tags'] = [rtag.tag for rtag in object.tags.all()]
return self.prepared_data
|
Fix for 'naive datetime' complaint when running searches
|
Fix for 'naive datetime' complaint when running searches
|
Python
|
agpl-3.0
|
kamni/nodonuts,kamni/nodonuts,kamni/nodonuts,kamni/nodonuts
|
d89e43c649aba78ac9722ca39f9e0c67be0cc897
|
precision/accounts/models.py
|
precision/accounts/models.py
|
from django.db import models
# Create your models here.
|
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.utils.translation import ugettext_lazy as _
class SchoolAdministrator(AbstractUser):
pass
|
Add an simple abstract user model for school administrators which will be used later
|
Add an simple abstract user model for school administrators which will be used later
|
Python
|
mit
|
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
|
4a29b5169524205bfa50a89379f4439d0de40296
|
refabric/context_managers.py
|
refabric/context_managers.py
|
from contextlib import contextmanager
from fabric.context_managers import settings, hide
from fabric.state import env
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
|
from contextlib import contextmanager
from fabric.context_managers import settings, hide
from fabric.state import env
from refabric.state import apply_role_definitions
@contextmanager
def sudo(user=None):
with settings(sudo_user=user or env.sudo_user or env.user, use_sudo=True):
yield
@contextmanager
def role(name):
with settings(roles=[name]):
yield
apply_role_definitions(None)
silent = lambda *h: settings(hide('commands', *h), warn_only=True)
hide_prefix = lambda: settings(output_prefix=False)
abort_on_error = lambda: settings(warn_only=False)
|
Add role context manager setting role and definitions
|
Add role context manager setting role and definitions
|
Python
|
mit
|
5monkeys/refabric
|
5748b1a7dc4a5be3b2b9da9959eabe586347078a
|
tensorflow_federated/python/program/value_reference.py
|
tensorflow_federated/python/program/value_reference.py
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the abstract interface for classes that reference values."""
import abc
from typing import Any
from tensorflow_federated.python.core.impl.types import typed_object
class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface for classes that reference values.
This interfaces provides the capability to maniplutate values without
requiring them to be materialized as Python objects.
"""
@abc.abstractmethod
def get_value(self) -> Any:
pass
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines abstract interfaces representing references to values.
These abstract interfaces provide the capability to handle values without
requiring them to be materialized as Python objects. Instances of these
abstract interfaces represent values of type `tff.TensorType` and can be placed
on the server, elements of structures that are placed on the server, or
unplaced.
"""
import abc
from typing import Union
import numpy as np
from tensorflow_federated.python.core.impl.types import typed_object
class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta):
"""An abstract interface representing references to server placed values."""
@abc.abstractmethod
def get_value(self) -> Union[np.generic, np.ndarray]:
"""Returns the referenced value as a numpy scalar or array."""
raise NotImplementedError
|
Update the Value Reference API to be more precise about the types of values being referenced.
|
Update the Value Reference API to be more precise about the types of values being referenced.
PiperOrigin-RevId: 404647934
|
Python
|
apache-2.0
|
tensorflow/federated,tensorflow/federated,tensorflow/federated
|
622b81296b292035b970891cd259eaac113d20c1
|
apps/accounts/conf.py
|
apps/accounts/conf.py
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
Change internal name of UNCCD role back to previous correct value
|
Change internal name of UNCCD role back to previous correct value
|
Python
|
apache-2.0
|
CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat
|
6e3151cd9e4c5309959c93b2ed683bb74d88a640
|
backend/breach/tests/test_sniffer.py
|
backend/breach/tests/test_sniffer.py
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint, '147.102.239.229', 'dionyziz.com', 'wlan0', '8080')
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start()
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read()
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete()
self.assertTrue(requests.post.called)
|
Migrate Sniffer unit test to new API
|
Migrate Sniffer unit test to new API
|
Python
|
mit
|
esarafianou/rupture,esarafianou/rupture,dimriou/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dionyziz/rupture,dionyziz/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimkarakostas/rupture
|
15e713f76f1fbfef26d9a7d3d3c95fac2d8f213e
|
casepro/settings_production_momza.py
|
casepro/settings_production_momza.py
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "msisdn_registrant", "field_name": "Cell Number"},
{"field": "language", "field_name": "Language Preference"},
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
Remove cell number and language from pod
|
Remove cell number and language from pod
We started using the identity store and the hub to fetch this information,
but unfortunately the field names are different depending on which service
the info is coming from.
These 2 fields are already displayed in the CasePro interface so it makes
sense to not use the pod at all for them.
|
Python
|
bsd-3-clause
|
praekelt/casepro,praekelt/casepro,praekelt/casepro
|
a6e0d8bab8e886688527372a4de267d274df7c51
|
conftest.py
|
conftest.py
|
#!/usr/bin/env python
# -*- encoding: utf-8
import collections
import os
import subprocess
import unittest
Result = collections.namedtuple('Result', 'rc stdout stderr')
ROOT = subprocess.check_output([
'git', 'rev-parse', '--show-toplevel']).decode('ascii').strip()
BINARY = os.path.join(ROOT, 'target', 'debug', 'safari')
subprocess.check_call(['cargo', 'build', '--release'], cwd=ROOT)
class BaseTest(unittest.TestCase):
def run_safari_rs(self, *args):
proc = subprocess.Popen([BINARY] + list(args),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
return Result(
rc=proc.returncode,
stdout=stdout.decode('ascii'),
stderr=stderr.decode('ascii')
)
|
#!/usr/bin/env python
# -*- encoding: utf-8
import collections
import os
import subprocess
import unittest
Result = collections.namedtuple('Result', 'rc stdout stderr')
ROOT = subprocess.check_output([
'git', 'rev-parse', '--show-toplevel']).decode('ascii').strip()
BINARY = os.path.join(ROOT, 'target', 'release', 'safari')
subprocess.check_call(['cargo', 'build', '--release'], cwd=ROOT)
class BaseTest(unittest.TestCase):
def run_safari_rs(self, *args):
proc = subprocess.Popen([BINARY] + list(args),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
return Result(
rc=proc.returncode,
stdout=stdout.decode('ascii'),
stderr=stderr.decode('ascii')
)
|
Make sure we test the right binary
|
Make sure we test the right binary
|
Python
|
mit
|
alexwlchan/safari.rs,alexwlchan/safari.rs
|
a0c23b5c27c4209cc22e138c72173842664fa98a
|
tests/query_test/test_decimal_queries.py
|
tests/query_test/test_decimal_queries.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Targeted tests for decimal type.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestDecimalQueries(ImpalaTestSuite):
BATCH_SIZES = [0, 1]
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestDecimalQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES))
# TODO: add parquet when that is supported.
# On CDH4, hive does not support decimal so we can't run these tests against
# the other file formats. Enable them on C5.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text')
def test_queries(self, vector):
new_vector = copy(vector)
new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size')
self.run_test_case('QueryTest/decimal', new_vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Targeted tests for decimal type.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestDecimalQueries(ImpalaTestSuite):
BATCH_SIZES = [0, 1]
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestDecimalQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES))
# TODO: add parquet when that is supported.
# On CDH4, hive does not support decimal so we can't run these tests against
# the other file formats. Enable them on C5.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and
v.get_value('table_format').compression_codec == 'none')
def test_queries(self, vector):
new_vector = copy(vector)
new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size')
self.run_test_case('QueryTest/decimal', new_vector)
|
Update decimal tests to only run on text/none.
|
Update decimal tests to only run on text/none.
Change-Id: I9a35f9e1687171fc3f06c17516bca2ea4b9af9e1
Reviewed-on: http://gerrit.ent.cloudera.com:8080/2217
Tested-by: jenkins
Reviewed-by: Ishaan Joshi <[email protected]>
|
Python
|
apache-2.0
|
AtScaleInc/Impala,placrosse/ImpalaToGo,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,rampage644/impala-cut,placrosse/ImpalaToGo,rampage644/impala-cut,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,ImpalaToGo/ImpalaToGo,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,placrosse/ImpalaToGo,rampage644/impala-cut,andybab/Impala,andybab/Impala,AtScaleInc/Impala,andybab/Impala,placrosse/ImpalaToGo,AtScaleInc/Impala,andybab/Impala,AtScaleInc/Impala,rampage644/impala-cut,rampage644/impala-cut,AtScaleInc/Impala,andybab/Impala,placrosse/ImpalaToGo,rampage644/impala-cut,andybab/Impala,gistic/PublicSpatialImpala,ImpalaToGo/ImpalaToGo,AtScaleInc/Impala,placrosse/ImpalaToGo,gistic/PublicSpatialImpala
|
7bf391e772cbece78b521f1e357ced4bef6908f4
|
bin/upload_version.py
|
bin/upload_version.py
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
Set automatic releases as 'prerelease'.
|
Set automatic releases as 'prerelease'.
|
Python
|
bsd-2-clause
|
pubnative/redash,pubnative/redash,imsally/redash,hudl/redash,denisov-vlad/redash,M32Media/redash,chriszs/redash,easytaxibr/redash,M32Media/redash,jmvasquez/redashtest,crowdworks/redash,pubnative/redash,ninneko/redash,hudl/redash,denisov-vlad/redash,amino-data/redash,EverlyWell/redash,imsally/redash,vishesh92/redash,44px/redash,EverlyWell/redash,guaguadev/redash,stefanseifert/redash,chriszs/redash,getredash/redash,jmvasquez/redashtest,useabode/redash,alexanderlz/redash,getredash/redash,ninneko/redash,rockwotj/redash,getredash/redash,guaguadev/redash,amino-data/redash,vishesh92/redash,easytaxibr/redash,alexanderlz/redash,hudl/redash,M32Media/redash,stefanseifert/redash,jmvasquez/redashtest,guaguadev/redash,vishesh92/redash,akariv/redash,crowdworks/redash,useabode/redash,chriszs/redash,useabode/redash,getredash/redash,stefanseifert/redash,guaguadev/redash,guaguadev/redash,imsally/redash,44px/redash,ninneko/redash,stefanseifert/redash,akariv/redash,EverlyWell/redash,crowdworks/redash,useabode/redash,EverlyWell/redash,crowdworks/redash,pubnative/redash,akariv/redash,44px/redash,ninneko/redash,rockwotj/redash,moritz9/redash,akariv/redash,denisov-vlad/redash,rockwotj/redash,amino-data/redash,amino-data/redash,alexanderlz/redash,easytaxibr/redash,getredash/redash,moritz9/redash,moritz9/redash,imsally/redash,ninneko/redash,rockwotj/redash,moritz9/redash,chriszs/redash,easytaxibr/redash,alexanderlz/redash,M32Media/redash,easytaxibr/redash,denisov-vlad/redash,pubnative/redash,akariv/redash,hudl/redash,44px/redash,jmvasquez/redashtest,stefanseifert/redash,vishesh92/redash,denisov-vlad/redash,jmvasquez/redashtest
|
694ea053fe87e4811acf0dde47826fec3eb1c9f7
|
source/run.py
|
source/run.py
|
import asyncio
import time
from autoreiv import AutoReiv
def main():
while True:
bot = AutoReiv()
bot.load()
try:
bot.run(bot.config.get('login'), bot.config.get('password'))
except Exception as e:
print('* Crashed with error: {}'.format(e))
finally:
print('* Disconnected.')
asyncio.set_event_loop(asyncio.new_event_loop())
print('* Waiting 10 seconds before reconnecting (press ^C to stop)...')
try:
time.sleep(10)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
import asyncio
import time
from autoreiv import AutoReiv
def main():
while True:
bot = AutoReiv()
bot.load()
try:
bot.run(bot.config.get('login'), bot.config.get('password'))
except Exception as e:
print('* Crashed with error: {}'.format(e))
finally:
if not bot.is_closed:
bot.close()
print('* Disconnected.')
asyncio.set_event_loop(asyncio.new_event_loop())
print('* Waiting 10 seconds before reconnecting (press ^C to stop)...')
try:
time.sleep(10)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
|
Make sure the event loop gets closed on disconnect
|
Make sure the event loop gets closed on disconnect
|
Python
|
mit
|
diath/AutoReiv
|
3149aaa319620c2e39434fea081968cf7040ef6d
|
common/djangoapps/enrollment/urls.py
|
common/djangoapps/enrollment/urls.py
|
"""
URLs for the Enrollment API
"""
from django.conf import settings
from django.conf.urls import patterns, url
from .views import (
EnrollmentView,
EnrollmentListView,
EnrollmentCourseDetailView
)
urlpatterns = patterns(
'enrollment.views',
url(
r'^enrollment/{username},{course_key}$'.format(
username=settings.USERNAME_PATTERN, course_key=settings.COURSE_ID_PATTERN
),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(
r'^enrollment/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(r'^enrollment$', EnrollmentListView.as_view(), name='courseenrollments'),
url(
r'^course/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentCourseDetailView.as_view(),
name='courseenrollmentdetails'
),
)
|
"""
URLs for the Enrollment API
"""
from django.conf import settings
from django.conf.urls import patterns, url
from .views import (
EnrollmentView,
EnrollmentListView,
EnrollmentCourseDetailView
)
urlpatterns = patterns(
'enrollment.views',
url(
r'^enrollment/{username},{course_key}/$'.format(
username=settings.USERNAME_PATTERN, course_key=settings.COURSE_ID_PATTERN
),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(
r'^enrollment/{course_key}/$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(r'^enrollment$', EnrollmentListView.as_view(), name='courseenrollments'),
url(
r'^course/{course_key}/$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentCourseDetailView.as_view(),
name='courseenrollmentdetails'
),
)
|
Add options trailing slashes to the Enrollment API.
|
Add options trailing slashes to the Enrollment API.
This allows the edX REST API Client to perform a sucessful GET against
this API, since Slumber (which our client is based off of) appends the
trailing slash by default.
|
Python
|
agpl-3.0
|
gymnasium/edx-platform,mbareta/edx-platform-ft,MakeHer/edx-platform,romain-li/edx-platform,BehavioralInsightsTeam/edx-platform,CourseTalk/edx-platform,Edraak/edraak-platform,hastexo/edx-platform,doganov/edx-platform,MakeHer/edx-platform,teltek/edx-platform,longmen21/edx-platform,Endika/edx-platform,EDUlib/edx-platform,fintech-circle/edx-platform,caesar2164/edx-platform,jjmiranda/edx-platform,JioEducation/edx-platform,itsjeyd/edx-platform,10clouds/edx-platform,ovnicraft/edx-platform,marcore/edx-platform,analyseuc3m/ANALYSE-v1,longmen21/edx-platform,amir-qayyum-khan/edx-platform,wwj718/edx-platform,alu042/edx-platform,JioEducation/edx-platform,gymnasium/edx-platform,naresh21/synergetics-edx-platform,msegado/edx-platform,tanmaykm/edx-platform,edx/edx-platform,romain-li/edx-platform,EDUlib/edx-platform,gsehub/edx-platform,CourseTalk/edx-platform,longmen21/edx-platform,10clouds/edx-platform,solashirai/edx-platform,CredoReference/edx-platform,defance/edx-platform,pabloborrego93/edx-platform,TeachAtTUM/edx-platform,eduNEXT/edx-platform,proversity-org/edx-platform,TeachAtTUM/edx-platform,tanmaykm/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,chrisndodge/edx-platform,miptliot/edx-platform,gsehub/edx-platform,antoviaque/edx-platform,eduNEXT/edx-platform,hastexo/edx-platform,a-parhom/edx-platform,jolyonb/edx-platform,franosincic/edx-platform,chrisndodge/edx-platform,Livit/Livit.Learn.EdX,ahmedaljazzar/edx-platform,itsjeyd/edx-platform,mitocw/edx-platform,a-parhom/edx-platform,fintech-circle/edx-platform,fintech-circle/edx-platform,eduNEXT/edx-platform,ovnicraft/edx-platform,lduarte1991/edx-platform,angelapper/edx-platform,ahmedaljazzar/edx-platform,analyseuc3m/ANALYSE-v1,solashirai/edx-platform,ahmedaljazzar/edx-platform,caesar2164/edx-platform,wwj718/edx-platform,ESOedX/edx-platform,franosincic/edx-platform,cecep-edu/edx-platform,miptliot/edx-platform,edx-solutions/edx-platform,raccoongang/edx-platform,jolyonb/edx-platform,kmoocdev2/edx-platform,teltek/edx-platf
orm,RPI-OPENEDX/edx-platform,eduNEXT/edunext-platform,analyseuc3m/ANALYSE-v1,louyihua/edx-platform,Endika/edx-platform,waheedahmed/edx-platform,antoviaque/edx-platform,longmen21/edx-platform,MakeHer/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,philanthropy-u/edx-platform,CourseTalk/edx-platform,defance/edx-platform,ESOedX/edx-platform,miptliot/edx-platform,cpennington/edx-platform,antoviaque/edx-platform,cpennington/edx-platform,IndonesiaX/edx-platform,louyihua/edx-platform,alu042/edx-platform,cecep-edu/edx-platform,ESOedX/edx-platform,procangroup/edx-platform,itsjeyd/edx-platform,chrisndodge/edx-platform,TeachAtTUM/edx-platform,eduNEXT/edunext-platform,defance/edx-platform,deepsrijit1105/edx-platform,UOMx/edx-platform,devs1991/test_edx_docmode,prarthitm/edxplatform,kmoocdev2/edx-platform,raccoongang/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,ahmedaljazzar/edx-platform,solashirai/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform,mitocw/edx-platform,Stanford-Online/edx-platform,RPI-OPENEDX/edx-platform,ovnicraft/edx-platform,Edraak/edraak-platform,miptliot/edx-platform,franosincic/edx-platform,Lektorium-LLC/edx-platform,waheedahmed/edx-platform,ampax/edx-platform,synergeticsedx/deployment-wipro,mbareta/edx-platform-ft,lduarte1991/edx-platform,stvstnfrd/edx-platform,solashirai/edx-platform,Livit/Livit.Learn.EdX,procangroup/edx-platform,jzoldak/edx-platform,RPI-OPENEDX/edx-platform,synergeticsedx/deployment-wipro,gymnasium/edx-platform,jzoldak/edx-platform,edx/edx-platform,pepeportela/edx-platform,Endika/edx-platform,jjmiranda/edx-platform,Ayub-Khan/edx-platform,procangroup/edx-platform,RPI-OPENEDX/edx-platform,pabloborrego93/edx-platform,stvstnfrd/edx-platform,Ayub-Khan/edx-platform,Edraak/edraak-platform,eduNEXT/edunext-platform,defance/edx-platform,IndonesiaX/edx-platform,cpennington/edx-platform,prarthitm/edxplatform,CredoReference/edx-platform,angelapper/edx-platform,pabloborrego93/edx-platform,a-parhom/edx-platform,
alu042/edx-platform,UOMx/edx-platform,BehavioralInsightsTeam/edx-platform,louyihua/edx-platform,devs1991/test_edx_docmode,naresh21/synergetics-edx-platform,deepsrijit1105/edx-platform,angelapper/edx-platform,ampax/edx-platform,amir-qayyum-khan/edx-platform,teltek/edx-platform,philanthropy-u/edx-platform,pepeportela/edx-platform,a-parhom/edx-platform,prarthitm/edxplatform,jolyonb/edx-platform,devs1991/test_edx_docmode,hastexo/edx-platform,IndonesiaX/edx-platform,UOMx/edx-platform,kmoocdev2/edx-platform,raccoongang/edx-platform,raccoongang/edx-platform,wwj718/edx-platform,franosincic/edx-platform,doganov/edx-platform,devs1991/test_edx_docmode,fintech-circle/edx-platform,eduNEXT/edx-platform,jjmiranda/edx-platform,marcore/edx-platform,pepeportela/edx-platform,doganov/edx-platform,JioEducation/edx-platform,gymnasium/edx-platform,msegado/edx-platform,wwj718/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,CredoReference/edx-platform,synergeticsedx/deployment-wipro,deepsrijit1105/edx-platform,shabab12/edx-platform,appsembler/edx-platform,mitocw/edx-platform,teltek/edx-platform,edx-solutions/edx-platform,romain-li/edx-platform,cpennington/edx-platform,EDUlib/edx-platform,hastexo/edx-platform,Ayub-Khan/edx-platform,arbrandes/edx-platform,msegado/edx-platform,Lektorium-LLC/edx-platform,waheedahmed/edx-platform,wwj718/edx-platform,marcore/edx-platform,proversity-org/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,mbareta/edx-platform-ft,mbareta/edx-platform-ft,lduarte1991/edx-platform,Ayub-Khan/edx-platform,cecep-edu/edx-platform,Stanford-Online/edx-platform,shabab12/edx-platform,EDUlib/edx-platform,waheedahmed/edx-platform,doganov/edx-platform,edx/edx-platform,Endika/edx-platform,naresh21/synergetics-edx-platform,IndonesiaX/edx-platform,Stanford-Online/edx-platform,IndonesiaX/edx-platform,lduarte1991/edx-platform,JioEducation/edx-platform,synergeticsedx/deployment-wipro,proversity-org/edx-platform,gsehub/edx-platform,jzoldak/edx-platform,devs1991/te
st_edx_docmode,proversity-org/edx-platform,mitocw/edx-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,alu042/edx-platform,shabab12/edx-platform,ovnicraft/edx-platform,pepeportela/edx-platform,longmen21/edx-platform,romain-li/edx-platform,10clouds/edx-platform,franosincic/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,TeachAtTUM/edx-platform,ampax/edx-platform,eduNEXT/edunext-platform,caesar2164/edx-platform,edx-solutions/edx-platform,tanmaykm/edx-platform,jzoldak/edx-platform,Stanford-Online/edx-platform,MakeHer/edx-platform,procangroup/edx-platform,msegado/edx-platform,marcore/edx-platform,jjmiranda/edx-platform,arbrandes/edx-platform,shabab12/edx-platform,angelapper/edx-platform,romain-li/edx-platform,msegado/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,prarthitm/edxplatform,RPI-OPENEDX/edx-platform,edx/edx-platform,Ayub-Khan/edx-platform,BehavioralInsightsTeam/edx-platform,tanmaykm/edx-platform,Livit/Livit.Learn.EdX,kmoocdev2/edx-platform,UOMx/edx-platform,jolyonb/edx-platform,Edraak/edraak-platform,doganov/edx-platform,caesar2164/edx-platform,10clouds/edx-platform,cecep-edu/edx-platform,philanthropy-u/edx-platform,edx-solutions/edx-platform,CourseTalk/edx-platform,itsjeyd/edx-platform,Lektorium-LLC/edx-platform,amir-qayyum-khan/edx-platform,antoviaque/edx-platform,cecep-edu/edx-platform,BehavioralInsightsTeam/edx-platform,ovnicraft/edx-platform,Livit/Livit.Learn.EdX,philanthropy-u/edx-platform,louyihua/edx-platform,amir-qayyum-khan/edx-platform,Lektorium-LLC/edx-platform,MakeHer/edx-platform,deepsrijit1105/edx-platform,analyseuc3m/ANALYSE-v1,devs1991/test_edx_docmode
|
509a2e925df6211ab53dd95f28e8ffa230ae9522
|
laufpartner_server/settings.sample.py
|
laufpartner_server/settings.sample.py
|
"""
Django settings for laufpartner_server project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from laufpartner_server.settings_global import *
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1b^azltf1pvzs$^p+2xlg=rot9!b%8(aj4%d4_e(xu@%!uf89u'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': os.path.join(BASE_DIR, 'database.sqlite3'),
}
}
|
"""
Django settings for laufpartner_server project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from laufpartner_server.settings_global import *
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1b^azltf1pvzs$^p+2xlg=rot9!b%8(aj4%d4_e(xu@%!uf89u'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': os.path.join(BASE_DIR, 'database.sqlite3'),
}
}
# Additional apps, e.g. for development
INSTALLED_APPS += (
#'django_extensions',
)
|
Add notice on how to add custom apps for development
|
Add notice on how to add custom apps for development
|
Python
|
agpl-3.0
|
serendi-app/serendi-server
|
27efc29b76ff0a65cd5ff12360701ca61231f53f
|
examples/thread_pool.py
|
examples/thread_pool.py
|
from diesel import quickstart, sleep
from diesel.util.pool import ThreadPool
from diesel.protocols.http import HttpClient, HttpHeaders
import random
def handle_it(i):
print 'S', i
sleep(random.random())
print 'E', i
def c():
for x in xrange(0, 20):
yield x
make_it = c().next
threads = ThreadPool(10, handle_it, make_it)
quickstart(threads)
|
from diesel import quickstart, sleep, quickstop
from diesel.util.pool import ThreadPool
import random
def handle_it(i):
print 'S', i
sleep(random.random())
print 'E', i
def c():
for x in xrange(0, 20):
yield x
make_it = c().next
def stop_it():
quickstop()
threads = ThreadPool(10, handle_it, make_it, stop_it)
quickstart(threads)
|
Clean it up with a finalizer.
|
Clean it up with a finalizer.
|
Python
|
bsd-3-clause
|
dieseldev/diesel
|
e5a872bd128e6b3ea3cc82df4094d41843148bce
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Add AuthenticationMiddleware to tests (for 1.7)
|
Add AuthenticationMiddleware to tests (for 1.7)
|
Python
|
mit
|
treyhunner/django-email-log,treyhunner/django-email-log
|
5a4e8ec1179b2ae3b37190ea45fb0d72ce4d7a90
|
canopen/sync.py
|
canopen/sync.py
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self):
"""Send out a SYNC message once."""
self.network.send_message(self.cob_id, [])
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self, count=None):
"""Send out a SYNC message once.
:param int count:
Counter to add in message.
"""
data = [count] if count is not None else []
self.network.send_message(self.cob_id, data)
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
Allow specifying counter in SYNC message
|
Allow specifying counter in SYNC message
Addresses #63
|
Python
|
mit
|
christiansandberg/canopen,christiansandberg/canopen
|
91bf74104c0eee2ca3d8d4fdd293390daf173166
|
checker/main.py
|
checker/main.py
|
#!/usr/bin/env python
import os
import sys
import subprocess
import getopt
class Chdir:
def __init__(self, newPath):
self.savedPath = os.getcwd()
os.chdir(newPath)
class Checker:
def __init__(self, path):
self.path = path
def get_jobs(self):
Chdir(self.path)
jobs = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
i = os.path.join(dirname, filename)
if i != "./__init__.py":
jobs.append(self.path + i[2:])
self.run_jobs(jobs)
def run_jobs(self, jobs):
for job in jobs:
subprocess.call(job)
if __name__ == '__main__':
opts, path = getopt.getopt(sys.argv[1], "h")
for opt, arg in opts:
if opt == '-h':
print './main.py /full/path/to/jobs'
sys.exit()
check = Checker(path)
check.get_jobs()
|
#!/usr/bin/env python
import os
import sys
import subprocess
import getopt
class Checker:
def __init__(self, path):
if not os.path.isdir(path):
sys.exit(1);
self.path = os.path.realpath(path)
self.jobs = self.getExecutableFiles(self.path)
def getExecutableFiles(self,path):
files = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
filename_path = os.path.join(dirname, filename)
if os.access(filename_path,os.X_OK):
files.append(filename_path)
return files;
def run(self):
for job in self.jobs:
subprocess.call(job)
if __name__ == '__main__':
opts, path = getopt.getopt(sys.argv[1], "h")
for opt, arg in opts:
if opt == '-h':
print './main.py /full/path/to/jobs'
sys.exit()
check = Checker(path)
check.run()
|
Streamline the filesystem looping code.
|
Streamline the filesystem looping code.
|
Python
|
mit
|
bsuweb/checker
|
a32f7cab9ce32c1c2169b55b1e37957a093e47f8
|
collect_district_court_case_details.py
|
collect_district_court_case_details.py
|
import datetime
import pymongo
import os
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': '702', \
'date_collected': {'$exists': False} \
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
print 'Finished'
|
import datetime
import pymongo
import os
import sys
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': sys.argv[1],
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
print 'Finished'
|
Set FIPS code from command line
|
Set FIPS code from command line
|
Python
|
mit
|
bschoenfeld/va-court-scraper,bschoenfeld/va-court-scraper
|
e0e2b4fc60a945e9680c171109fd1cbb6f21e304
|
celery/run_carrizo.py
|
celery/run_carrizo.py
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
import dem
import tasks
from celery import *
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
scarplet.save_results(carrizo, res, base_dir='results/')
res.forget()
|
Add test script for Carrizo data
|
Add test script for Carrizo data
|
Python
|
mit
|
stgl/scarplet,rmsare/scarplet
|
d2b4810d74364394e7e7ecf8f8c5b1011a250f77
|
notescli/commands.py
|
notescli/commands.py
|
import config
import cliparser
import indexer
import io
import os
def command_ls(index):
with index.index.searcher() as searcher:
results = searcher.documents()
print "Indexed files:"
for result in results:
print result["filename"]
def command_view(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print open(result_file).read()
def command_add(config, filename):
full_path = os.path.join(config.notes_path, filename)
io.edit_file(full_path)
print "Added", full_path
def command_edit(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
io.edit_file(result_file)
def command_rm(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print "Are you sure you want to delete %s? (y/n)" % result_file
choice = io.get_choice()
if choice == "y":
os.remove(result_file)
def command_reindex(config):
indexer.reindex(config)
|
import config
import cliparser
import indexer
import io
import os
def command_ls(index):
print "Indexed files:"
for filename in index.list_files():
print filename
def command_view(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print open(result_file).read()
def command_add(config, filename):
full_path = os.path.join(config.notes_path, filename)
io.edit_file(full_path)
print "Added", full_path
def command_edit(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
io.edit_file(result_file)
def command_rm(index, query):
result_file = indexer.find_result(index.index, query)
if result_file is None:
print "No results found"
else:
print "Are you sure you want to delete %s? (y/n)" % result_file
choice = io.get_choice()
if choice == "y":
os.remove(result_file)
def command_reindex(config):
indexer.reindex(config)
|
Replace implementation of ls by using the indexer
|
Replace implementation of ls by using the indexer
|
Python
|
mit
|
phss/notes-cli
|
a5b4a657a1717e2fb9e4c53f93b5232dd58a1c68
|
shop_richcatalog/views.py
|
shop_richcatalog/views.py
|
from shop.views import ShopListView, ShopDetailView
from shop_richcatalog.models import Catalog
from shop.models import Product
class CatalogListView(ShopListView):
'''
TODO.
'''
model = Catalog
#generic_template = "shop_richcatalog/catalog_list.html"
class CatalogDetailView(ShopDetailView):
'''
TODO.
'''
model = Catalog
#generic_template = "shop_richcatalog/catalog_detail.html"
def get_context_data(self, **kwargs):
'''
TODO.
'''
# get context data from superclass
ctx = super(CatalogDetailView, self).get_context_data(**kwargs)
# update the context with active products in this catalog
product_list = self.object.products.filter(active=True)
if product_list:
ctx.update({"product_list": product_list})
# return the context
return ctx
|
from shop.views import ShopListView, ShopDetailView
from shop_richcatalog.models import Catalog
from shop.models import Product
class CatalogListView(ShopListView):
'''
Display all catalogs in a tree.
'''
model = Catalog
class CatalogDetailView(ShopDetailView):
'''
Display detailed catalog information.
'''
model = Catalog
def get_context_data(self, **kwargs):
'''
Get catalog context data.
'''
# get context data from superclass
ctx = super(CatalogDetailView, self).get_context_data(**kwargs)
# update the context with active products in this catalog
product_list = self.object.products.filter(active=True)
if product_list:
ctx.update({"product_list": product_list})
# return the context
return ctx
|
Document the view classes and make fix their spacing for pep8.
|
Document the view classes and make fix their spacing for pep8.
|
Python
|
bsd-3-clause
|
nimbis/django-shop-richcatalog,nimbis/django-shop-richcatalog
|
9639eb34f53444387621ed0a27ef9b273b38df79
|
slackclient/_slackrequest.py
|
slackclient/_slackrequest.py
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
# Only do this for requests that are UPLOADING files; downloading files
# use the 'file' argument to point to a File ID.
upload_requests = ['files.upload']
files = None
if request in upload_requests:
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
Fix bug preventing API calls requiring a file ID
|
Fix bug preventing API calls requiring a file ID
For example, an API call to files.info takes a file ID argument named
"file", which was stripped out by this call. Currently, there is only
one request type that accepts file data (files.upload). Every other use
of 'file' is an ID that aught to be contained in the request.
|
Python
|
mit
|
slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient
|
34e36b77095d42f2d9e6a3634d86d09fffcc3411
|
molo/core/content_import/api/forms.py
|
molo/core/content_import/api/forms.py
|
from django import forms
# TODO: make the form return the valid JSON response
class MainImportForm(forms.Form):
url = forms.CharField(
max_length=100,
)
def __init__(self, **kwargs):
self.importer = kwargs.pop("importer")
super(MainImportForm, self).__init__(**kwargs)
def save(self):
# return valid API response
return self.importer.get_content_from_url("http://localhost:8000/api/v1/pages/")
|
from django import forms
# TODO: make the form return the valid JSON response
class MainImportForm(forms.Form):
url = forms.CharField(
max_length=100,
)
def __init__(self, **kwargs):
self.importer = kwargs.pop("importer")
super(MainImportForm, self).__init__(**kwargs)
class ArticleImportForm(forms.Form):
url = forms.CharField(
max_length=100,
required=False
)
def __init__(self, *args, **kwargs):
# generate fields dynamically for each article found in the response
self.importer = kwargs.pop("importer")
super(ArticleImportForm, self).__init__(*args, **kwargs)
if self.importer and self.importer.articles():
for i, article in enumerate(self.importer.articles()):
self.fields["%s" %i] = forms.BooleanField(
label=article["title"]
)
self.fields["%s" % i].required = False
def save(self):
if not self.importer.articles():
self.importer.get_content_from_url("http://localhost:8000/api/v1/pages/")
else:
# there is content, and some fields could have been selected,
# get the IDs of the articles so they can be saved
selected_choices = [int(k) for k,v in self.cleaned_data.items() if v]
# save articles
# for id in selected_choices:
# article = self.importer.save_articles(selected_choices)
return self.importer
|
Add form to allow admin user to specify URL for import
|
Add form to allow admin user to specify URL for import
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
2395d08c672250b5df273eb36415c8200dd7f801
|
tests/tests_twobody/test_mean_elements.py
|
tests/tests_twobody/test_mean_elements.py
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "PlanetNine"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
import pytest
from poliastro.bodies import Sun
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = Sun
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
Add test for error check
|
Add test for error check
|
Python
|
mit
|
poliastro/poliastro
|
599811e2a80b6f123d3beeb8906c0b82e975da86
|
maintenancemode/views/defaults.py
|
maintenancemode/views/defaults.py
|
from django.template import Context, loader
from maintenancemode import http
def temporary_unavailable(request, template_name='503.html'):
"""
Default 503 handler, which looks for the requested URL in the redirects
table, redirects if found, and displays 404 page if not redirected.
Templates: `503.html`
Context:
request_path
The path of the requested URL (e.g., '/app/pages/bad_page/')
"""
t = loader.get_template(template_name) # You need to create a 503.html template.
return http.HttpResponseTemporaryUnavailable(t.render(Context({})))
|
from django.template import RequestContext, loader
from maintenancemode import http
def temporary_unavailable(request, template_name='503.html'):
"""
Default 503 handler, which looks for the requested URL in the redirects
table, redirects if found, and displays 404 page if not redirected.
Templates: `503.html`
Context:
request_path
The path of the requested URL (e.g., '/app/pages/bad_page/')
"""
t = loader.get_template(template_name) # You need to create a 503.html template.
context = RequestContext(request, {'request_path': request.path})
return http.HttpResponseTemporaryUnavailable(t.render(context))
|
Use RequestContext instead of just Context.
|
Use RequestContext instead of just Context.
|
Python
|
bsd-3-clause
|
aarsan/django-maintenancemode,21strun/django-maintenancemode,shanx/django-maintenancemode,21strun/django-maintenancemode,shanx/django-maintenancemode,aarsan/django-maintenancemode
|
0983715cd2ee4eb3ac411e1ff24fa2e49df54eb5
|
src/manage.py
|
src/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Browsers doesn't use content negotiation using ETags with HTTP 1.0 servers
# Force Django to use HTTP 1.1 when using the runserver command
from wsgiref import simple_server
simple_server.ServerHandler.http_version = "1.1"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Allow to tests ETags when using the runserver command
|
Allow to tests ETags when using the runserver command
|
Python
|
agpl-3.0
|
jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud
|
8b82e15e3c5d8eb98dc9bf32d1d4b9e5d55be2c7
|
bitHopper/Website/__init__.py
|
bitHopper/Website/__init__.py
|
import logging, json
import bitHopper.Tracking
import bitHopper.util
import bitHopper.Network
import flask
app = flask.Flask(__name__, template_folder='./templates',
static_folder = './static')
app.Debug = False
@app.teardown_request
def teardown_request_wrap(exception):
"""
Prints tracebacks and handles bugs
"""
if exception:
logging.error(traceback.format_exc())
return json.dumps({"result":None, 'error':{'message':'Invalid request'}, 'id':1})
@app.route("/", methods=['POST'])
@app.route("/mine", methods=['POST','GET'])
def mine():
"""
Function that does basic handling of work requests
"""
try:
rpc_request = json.loads(request.data)
except ValueError, e:
return bitHopper.util.error_rpc()
#Check for valid rpc_request
if not bitHopper.util.validate_rpc(rpc_request):
return bitHopper.util.error_rpc()
#If getworks just feed them data
if rpc_request['params'] == []:
#TODO, pass in headers
content, headers = bitHopper.Network.get_work()
#Otherwise submit the work unit
else:
content, headers = bitHopper.Network.submit_work(rpc_request)
return content
|
import logging, json
import bitHopper.Tracking
import bitHopper.util
import bitHopper.Network
import flask
app = flask.Flask(__name__, template_folder='bitHopper/templates',
static_folder = 'bitHopper/static')
app.Debug = False
@app.teardown_request
def teardown_request_wrap(exception):
"""
Prints tracebacks and handles bugs
"""
if exception:
logging.error(traceback.format_exc())
return json.dumps({"result":None, 'error':{'message':'Invalid request'}, 'id':1})
@app.route("/", methods=['POST'])
@app.route("/mine", methods=['POST','GET'])
def mine():
"""
Function that does basic handling of work requests
"""
try:
rpc_request = json.loads(request.data)
except ValueError, e:
return bitHopper.util.error_rpc()
#Check for valid rpc_request
if not bitHopper.util.validate_rpc(rpc_request):
return bitHopper.util.error_rpc()
#If getworks just feed them data
if rpc_request['params'] == []:
#TODO, pass in headers
content, headers = bitHopper.Network.get_work()
#Otherwise submit the work unit
else:
content, headers = bitHopper.Network.submit_work(rpc_request)
return content
|
Make the template and static folders be inside bitHopper
|
Make the template and static folders be inside bitHopper
|
Python
|
mit
|
c00w/bitHopper,c00w/bitHopper
|
efebbe998ac67810f6e0f86b685ab18f1ccf2bda
|
nio_cli/commands/config.py
|
nio_cli/commands/config.py
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(requests.get(
self._base_url.format(self._resource),
json=config,
auth=self._auth).json())
except Exception as e:
print(e)
|
from .base import Base
import requests
class Config(Base):
""" Get basic nio info """
def __init__(self, options, *args, **kwargs):
super().__init__(options, *args, **kwargs)
self._resource = 'services' if self.options['services'] else 'blocks'
self._resource_name = \
self.options['<service-name>'] if self.options['services'] else \
self.options['<block-name>'] if self.options['blocks'] else \
""
def run(self):
response = requests.get(
self._base_url.format(
'{}/{}'.format(self._resource, self._resource_name)),
auth=self._auth)
try:
config = response.json()
print(config)
except Exception as e:
print(e)
|
Remove additional http get request
|
Remove additional http get request
|
Python
|
apache-2.0
|
nioinnovation/nio-cli,neutralio/nio-cli
|
2a1a5073e069b1fbf5b7803417b59339ec72d026
|
netdisco/discoverables/belkin_wemo.py
|
netdisco/discoverables/belkin_wemo.py
|
""" Discovers Belkin Wemo devices. """
from . import SSDPDiscoverable
class Discoverable(SSDPDiscoverable):
""" Adds support for discovering Belkin WeMo platform devices. """
def info_from_entry(self, entry):
""" Returns most important info from a uPnP entry. """
device = entry.description['device']
return (device['friendlyName'], device['modelName'],
entry.values['location'])
def get_entries(self):
""" Returns all Belkin Wemo entries. """
return self.find_by_device_description(
{'manufacturer': 'Belkin International Inc.'})
|
""" Discovers Belkin Wemo devices. """
from . import SSDPDiscoverable
class Discoverable(SSDPDiscoverable):
""" Adds support for discovering Belkin WeMo platform devices. """
def info_from_entry(self, entry):
""" Returns most important info from a uPnP entry. """
device = entry.description['device']
return (device['friendlyName'], device['modelName'],
entry.values['location'], device['macAddress'])
def get_entries(self):
""" Returns all Belkin Wemo entries. """
return self.find_by_device_description(
{'manufacturer': 'Belkin International Inc.'})
|
Add MAC address to wemo discovery attributes
|
Add MAC address to wemo discovery attributes
|
Python
|
mit
|
sfam/netdisco,brburns/netdisco,balloob/netdisco
|
8900aa1b47449bd6ad204725c3a98f75e17eb3ba
|
python/array_manipulation.py
|
python/array_manipulation.py
|
#!/bin/python3
import math
import os
import random
import re
import sys
def arrayManipulation(n, queries):
# An array used to capture the difference of an element
# compared to the previous element.
# Therefore the value of diffs[n] after all array manipulations is
# the cumulative sum of values from diffs[0] to diffs[n - 1]
diffs = [0] * n
for a, b, k in queries:
# Adds "k" to all subsequent elements in the array
diffs[a - 1] += k
# Ignore if b is out of range
if (b < n):
# Subtracts "k" from all subsequent elements in the array
diffs[b] -= k
sumSoFar = 0
maxSoFar = 0
for diff in diffs:
sumSoFar += diff
if sumSoFar > maxSoFar:
maxSoFar = sumSoFar
return maxSoFar
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nm = input().split()
n = int(nm[0])
m = int(nm[1])
queries = []
for _ in range(m):
queries.append(list(map(int, input().rstrip().split())))
result = arrayManipulation(n, queries)
fptr.write(str(result) + '\n')
fptr.close()
|
#!/bin/python3
import math
import os
import random
import re
import sys
def arrayManipulation(n, queries):
diffs = getArrayOfDiffs(n, queries)
return maxFromDiffs(diffs)
def maxFromDiffs(diffs):
sumSoFar = 0
maxSoFar = 0
for diff in diffs:
sumSoFar += diff
if sumSoFar > maxSoFar:
maxSoFar = sumSoFar
return maxSoFar
def getArrayOfDiffs(n, queries):
# An array used to capture the difference of an element
# compared to the previous element.
# Therefore the value of diffs[n] after all array manipulations is
# the cumulative sum of values from diffs[0] to diffs[n - 1]
diffs = [0] * n
for a, b, k in queries:
# Adds "k" to all subsequent elements in the array
diffs[a - 1] += k
# Ignore if b is out of range
if (b < n):
# Subtracts "k" from all subsequent elements in the array
diffs[b] -= k
return diffs
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nm = input().split()
n = int(nm[0])
m = int(nm[1])
queries = []
for _ in range(m):
queries.append(list(map(int, input().rstrip().split())))
result = arrayManipulation(n, queries)
fptr.write(str(result) + '\n')
fptr.close()
|
Refactor into getArrayOfDiffs and maxFromDiffs
|
Refactor into getArrayOfDiffs and maxFromDiffs
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
e8584f4193658399ea9bb2317915caff78fce88b
|
corehq/apps/commtrack/management/commands/update_supply_point_locations.py
|
corehq/apps/commtrack/management/commands/update_supply_point_locations.py
|
from django.core.management.base import BaseCommand
from casexml.apps.case.models import CommCareCase
from corehq.util.couch import iter_update, DocUpdate
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
def add_location(case):
if not case['location_id']:
return None
if case['owner_id'] != case['location_id']:
case['owner_id'] = case['location_id']
return DocUpdate(case)
iter_update(
CommCareCase.get_db(),
add_location,
self.get_case_ids(),
verbose=True
)
def get_case_ids(self):
return (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
reduce=False,
include_docs=False,
).all())
|
from xml.etree import ElementTree
from django.core.management.base import BaseCommand
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.models import CommCareCase
from dimagi.utils.chunked import chunked
from dimagi.utils.couch.database import iter_docs
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import submit_case_blocks
def needs_update(case):
return (case.get('location_id', None) and
case['owner_id'] != case['location_id'])
def case_block(case):
return ElementTree.tostring(CaseBlock(
create=False,
case_id=case['_id'],
owner_id=case['location_id'],
).as_xml())
def get_cases(domain):
supply_point_ids = (case['id'] for case in CommCareCase.get_db().view(
'commtrack/supply_point_by_loc',
startkey=[domain],
endkey=[domain, {}],
reduce=False,
include_docs=False,
).all())
return iter_docs(CommCareCase.get_db(), supply_point_ids)
def update_supply_points(domain):
case_blocks = (case_block(c) for c in get_cases(domain) if needs_update(c))
if case_blocks:
for chunk in chunked(case_blocks, 100):
submit_case_blocks(chunk, domain)
print "updated {} cases on domain {}".format(len(chunk), domain)
class Command(BaseCommand):
help = ("Make sure all supply point cases have their owner_id set "
"to the location_id")
def handle(self, *args, **options):
all_domains = Domain.get_all_names()
total = len(all_domains)
finished = 0
for domain in all_domains:
update_supply_points(domain)
finished += 1
if finished % 100 == 0:
print "Processed {} of {} domains".format(finished, total)
|
Use CaseBlocks to update case owner_ids
|
Use CaseBlocks to update case owner_ids
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
553731a0ea12a8303076dc3d83bfbba91e6bc3e8
|
scripts/merge_duplicate_users.py
|
scripts/merge_duplicate_users.py
|
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
|
from django.db.models.functions import Lower
from django.db.models import Count
from bluebottle.members.models import Member
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.activities.models import Activity, Contributor
from bluebottle.initiatives.models import Initiative
for client in Client.objects.all():
with LocalTenant(client):
duplicate = Member.objects.annotate(
lower=Lower('email')
).values('lower').annotate(count=Count('lower')).filter(count__gt=1)
for result in duplicate:
first = Member.objects.filter(email__iexact=result['lower']).order_by('date_joined').first()
for duplicate in Member.objects.filter(
email__iexact=result['lower']
).exclude(pk=first.pk).order_by('date_joined'):
for activity in Activity.objects.filter(owner=duplicate):
activity.owner = first
activity.execute_triggers(send_messages=False)
activity.save()
for contributor in Contributor.objects.filter(user=duplicate):
contributor.user = first
contributor.execute_triggers(send_messages=False)
contributor.save()
for initiative in Initiative.objects.filter(owner=duplicate):
initiative.owner = first
initiative.execute_triggers(send_messages=False)
initiative.save()
duplicate.anonymize()
duplicate.email = 'merged-{}@example.com'.format(first.pk)
duplicate.save()
|
Make sure we remember to which the user was merged
|
Make sure we remember to which the user was merged
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
2ca3f28b4423fc8ecd19591a039b7a5c814ab25b
|
webserver/codemanagement/validators.py
|
webserver/codemanagement/validators.py
|
from django.core.validators import RegexValidator
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_validator = RegexValidator(regex="^[A-Za-z][\w\-\.]+[A-Za-z]$",
message="Must be letters and numbers" +
" separated by dashes, dots, or underscores")
|
from django.core.validators import RegexValidator
from django.core.exceptions import ValidationError
from dulwich.repo import check_ref_format
import re
sha1_validator = RegexValidator(regex="^[a-f0-9]{40}$",
message="Must be valid sha1 sum")
tag_regex = re.compile(r'^[A-Za-z][\w\-\.]+[A-Za-z]$')
def tag_validator(value):
if not tag_regex.match(value):
msg = "Must be letters and numbers separated "
msg += "by dashes, dots, or underscores"
raise ValidationError(msg)
if not check_ref_format('refs/tags/' + value):
msg = "Invalid tag. Tags must adhere to ref formats defined here: "
msg += "https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html"
raise ValidationError(msg)
|
Make dulwich check the tag.
|
Make dulwich check the tag.
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
1e32cddfa5c9999f02c896e13666004260f8047a
|
examples/guv_simple_http_response.py
|
examples/guv_simple_http_response.py
|
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
from pympler import tracker
tr = tracker.SummaryTracker()
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
response_times = []
def get_avg_time():
global response_times
times = response_times[-1000:]
avg = sum(times) / len(times)
if len(response_times) > 5000:
response_times = times
return avg
def handle(sock, addr):
# client connected
start_time = time.perf_counter()
sock.sendall(create_example())
sock.close()
total_time = time.perf_counter() - start_time
response_times.append(total_time)
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
tr.print_diff()
log.debug('Bye!')
|
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
try:
from pympler import tracker
tr = tracker.SummaryTracker()
except ImportError:
tr = None
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
def handle(sock, addr):
# client connected
sock.sendall(create_example())
sock.close()
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
if tr:
tr.print_diff()
log.debug('Bye!')
|
Use pympler only if available
|
Use pympler only if available
|
Python
|
mit
|
veegee/guv,veegee/guv
|
290bf5b5e577673a15e9a71033a5df2704ccff7a
|
opencademy/model/openacademy_session.py
|
opencademy/model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id.name", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
# vim:expandtab:smart indent: tabstop=4:softtabstop=4:shifwidth=4;
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
LihanHA/opencademy-project
|
e4cd2982d488b18af4046eec39a213faa2afa857
|
common/djangoapps/dark_lang/models.py
|
common/djangoapps/dark_lang/models.py
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
"""
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.lower().strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
|
Store released dark_lang codes as all lower-case
|
Store released dark_lang codes as all lower-case
|
Python
|
agpl-3.0
|
louyihua/edx-platform,JCBarahona/edX,JCBarahona/edX,ahmadio/edx-platform,chudaol/edx-platform,Edraak/edraak-platform,appsembler/edx-platform,Semi-global/edx-platform,halvertoluke/edx-platform,B-MOOC/edx-platform,tanmaykm/edx-platform,longmen21/edx-platform,zofuthan/edx-platform,vasyarv/edx-platform,mbareta/edx-platform-ft,polimediaupv/edx-platform,pabloborrego93/edx-platform,prarthitm/edxplatform,tanmaykm/edx-platform,ampax/edx-platform,cpennington/edx-platform,Semi-global/edx-platform,Semi-global/edx-platform,inares/edx-platform,ampax/edx-platform,TeachAtTUM/edx-platform,zofuthan/edx-platform,jzoldak/edx-platform,cpennington/edx-platform,playm2mboy/edx-platform,ahmadiga/min_edx,martynovp/edx-platform,shurihell/testasia,antoviaque/edx-platform,mjirayu/sit_academy,shashank971/edx-platform,openfun/edx-platform,kxliugang/edx-platform,jolyonb/edx-platform,ak2703/edx-platform,eduNEXT/edx-platform,leansoft/edx-platform,stvstnfrd/edx-platform,nagyistoce/edx-platform,polimediaupv/edx-platform,ovnicraft/edx-platform,arbrandes/edx-platform,J861449197/edx-platform,ZLLab-Mooc/edx-platform,edx/edx-platform,etzhou/edx-platform,ahmedaljazzar/edx-platform,antoviaque/edx-platform,appliedx/edx-platform,xingyepei/edx-platform,zerobatu/edx-platform,gymnasium/edx-platform,devs1991/test_edx_docmode,doganov/edx-platform,shubhdev/edx-platform,jonathan-beard/edx-platform,pepeportela/edx-platform,philanthropy-u/edx-platform,rismalrv/edx-platform,romain-li/edx-platform,marcore/edx-platform,arifsetiawan/edx-platform,Shrhawk/edx-platform,hastexo/edx-platform,CourseTalk/edx-platform,atsolakid/edx-platform,wwj718/edx-platform,ferabra/edx-platform,romain-li/edx-platform,shubhdev/edxOnBaadal,simbs/edx-platform,rismalrv/edx-platform,IONISx/edx-platform,fintech-circle/edx-platform,Edraak/edx-platform,jzoldak/edx-platform,devs1991/test_edx_docmode,hamzehd/edx-platform,eduNEXT/edx-platform,rismalrv/edx-platform,mjirayu/sit_academy,rismalrv/edx-platform,iivic/BoiseStateX,xuxiao19910803/edx-platform,vasy
arv/edx-platform,mushtaqak/edx-platform,ubc/edx-platform,nagyistoce/edx-platform,pepeportela/edx-platform,martynovp/edx-platform,SivilTaram/edx-platform,Shrhawk/edx-platform,simbs/edx-platform,ampax/edx-platform,edx/edx-platform,angelapper/edx-platform,jamiefolsom/edx-platform,jbzdak/edx-platform,BehavioralInsightsTeam/edx-platform,alu042/edx-platform,edry/edx-platform,vikas1885/test1,mitocw/edx-platform,kamalx/edx-platform,adoosii/edx-platform,appliedx/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,hastexo/edx-platform,defance/edx-platform,teltek/edx-platform,mahendra-r/edx-platform,prarthitm/edxplatform,TeachAtTUM/edx-platform,iivic/BoiseStateX,miptliot/edx-platform,atsolakid/edx-platform,4eek/edx-platform,doismellburning/edx-platform,martynovp/edx-platform,leansoft/edx-platform,jamiefolsom/edx-platform,xinjiguaike/edx-platform,BehavioralInsightsTeam/edx-platform,tiagochiavericosta/edx-platform,zerobatu/edx-platform,Kalyzee/edx-platform,gsehub/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,jolyonb/edx-platform,nanolearningllc/edx-platform-cypress,adoosii/edx-platform,nanolearningllc/edx-platform-cypress-2,edx/edx-platform,jonathan-beard/edx-platform,jazztpt/edx-platform,martynovp/edx-platform,solashirai/edx-platform,nanolearningllc/edx-platform-cypress-2,jazkarta/edx-platform,ahmadiga/min_edx,proversity-org/edx-platform,pomegranited/edx-platform,louyihua/edx-platform,pomegranited/edx-platform,shabab12/edx-platform,jbassen/edx-platform,Ayub-Khan/edx-platform,zofuthan/edx-platform,xingyepei/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,devs1991/test_edx_docmode,openfun/edx-platform,Stanford-Online/edx-platform,doismellburning/edx-platform,SravanthiSinha/edx-platform,caesar2164/edx-platform,deepsrijit1105/edx-platform,RPI-OPENEDX/edx-platform,solashirai/edx-platform,tanmaykm/edx-platform,IONISx/edx-platform,Edraak/circleci-edx-platform,ferabra/edx-platform,vikas1885/test1,utecuy/edx-platform,doismellburning/edx-platform,mcgachey/edx-
platform,itsjeyd/edx-platform,JCBarahona/edX,bitifirefly/edx-platform,doismellburning/edx-platform,CredoReference/edx-platform,benpatterson/edx-platform,motion2015/edx-platform,knehez/edx-platform,alexthered/kienhoc-platform,chauhanhardik/populo,don-github/edx-platform,zadgroup/edx-platform,Ayub-Khan/edx-platform,MakeHer/edx-platform,10clouds/edx-platform,CredoReference/edx-platform,naresh21/synergetics-edx-platform,jazkarta/edx-platform,Softmotions/edx-platform,Shrhawk/edx-platform,kxliugang/edx-platform,alexthered/kienhoc-platform,Lektorium-LLC/edx-platform,J861449197/edx-platform,SivilTaram/edx-platform,doganov/edx-platform,bigdatauniversity/edx-platform,waheedahmed/edx-platform,zadgroup/edx-platform,vasyarv/edx-platform,kamalx/edx-platform,AkA84/edx-platform,appsembler/edx-platform,4eek/edx-platform,mitocw/edx-platform,prarthitm/edxplatform,martynovp/edx-platform,proversity-org/edx-platform,xuxiao19910803/edx-platform,angelapper/edx-platform,bigdatauniversity/edx-platform,ahmadio/edx-platform,ak2703/edx-platform,wwj718/edx-platform,BehavioralInsightsTeam/edx-platform,arifsetiawan/edx-platform,xinjiguaike/edx-platform,jbassen/edx-platform,SivilTaram/edx-platform,MakeHer/edx-platform,tiagochiavericosta/edx-platform,inares/edx-platform,Livit/Livit.Learn.EdX,nttks/edx-platform,naresh21/synergetics-edx-platform,don-github/edx-platform,ovnicraft/edx-platform,defance/edx-platform,AkA84/edx-platform,msegado/edx-platform,ampax/edx-platform,proversity-org/edx-platform,openfun/edx-platform,mjirayu/sit_academy,atsolakid/edx-platform,chauhanhardik/populo_2,shubhdev/edxOnBaadal,adoosii/edx-platform,kmoocdev2/edx-platform,jamesblunt/edx-platform,zubair-arbi/edx-platform,devs1991/test_edx_docmode,nanolearningllc/edx-platform-cypress,zerobatu/edx-platform,Kalyzee/edx-platform,arbrandes/edx-platform,fly19890211/edx-platform,chudaol/edx-platform,fintech-circle/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,analyseuc3m/ANALYSE-v1,pepeportela/edx-platform,tiagochiave
ricosta/edx-platform,teltek/edx-platform,Livit/Livit.Learn.EdX,marcore/edx-platform,RPI-OPENEDX/edx-platform,zadgroup/edx-platform,vasyarv/edx-platform,cognitiveclass/edx-platform,louyihua/edx-platform,raccoongang/edx-platform,benpatterson/edx-platform,Edraak/edx-platform,fintech-circle/edx-platform,Semi-global/edx-platform,xingyepei/edx-platform,mcgachey/edx-platform,TeachAtTUM/edx-platform,doganov/edx-platform,Shrhawk/edx-platform,devs1991/test_edx_docmode,kmoocdev2/edx-platform,jbassen/edx-platform,wwj718/edx-platform,solashirai/edx-platform,pabloborrego93/edx-platform,jazztpt/edx-platform,amir-qayyum-khan/edx-platform,mitocw/edx-platform,vikas1885/test1,jazztpt/edx-platform,romain-li/edx-platform,kxliugang/edx-platform,franosincic/edx-platform,ahmadiga/min_edx,JioEducation/edx-platform,IndonesiaX/edx-platform,jjmiranda/edx-platform,pomegranited/edx-platform,cognitiveclass/edx-platform,motion2015/edx-platform,B-MOOC/edx-platform,raccoongang/edx-platform,simbs/edx-platform,fintech-circle/edx-platform,Edraak/circleci-edx-platform,JCBarahona/edX,jamesblunt/edx-platform,stvstnfrd/edx-platform,cognitiveclass/edx-platform,fly19890211/edx-platform,msegado/edx-platform,nanolearningllc/edx-platform-cypress,franosincic/edx-platform,shurihell/testasia,fly19890211/edx-platform,MakeHer/edx-platform,jbzdak/edx-platform,chand3040/cloud_that,ak2703/edx-platform,Softmotions/edx-platform,xuxiao19910803/edx,arifsetiawan/edx-platform,shurihell/testasia,10clouds/edx-platform,jbzdak/edx-platform,halvertoluke/edx-platform,fly19890211/edx-platform,tiagochiavericosta/edx-platform,eduNEXT/edunext-platform,iivic/BoiseStateX,ahmedaljazzar/edx-platform,halvertoluke/edx-platform,nttks/edx-platform,ESOedX/edx-platform,ZLLab-Mooc/edx-platform,kxliugang/edx-platform,mushtaqak/edx-platform,nanolearningllc/edx-platform-cypress-2,nikolas/edx-platform,IONISx/edx-platform,nagyistoce/edx-platform,eduNEXT/edunext-platform,nanolearningllc/edx-platform-cypress,ahmadio/edx-platform,shubhdev/edx-platform,U
OMx/edx-platform,procangroup/edx-platform,ovnicraft/edx-platform,synergeticsedx/deployment-wipro,shabab12/edx-platform,cecep-edu/edx-platform,alu042/edx-platform,zofuthan/edx-platform,franosincic/edx-platform,stvstnfrd/edx-platform,polimediaupv/edx-platform,gsehub/edx-platform,SravanthiSinha/edx-platform,teltek/edx-platform,adoosii/edx-platform,Endika/edx-platform,appliedx/edx-platform,Semi-global/edx-platform,Softmotions/edx-platform,shashank971/edx-platform,philanthropy-u/edx-platform,angelapper/edx-platform,shabab12/edx-platform,zubair-arbi/edx-platform,Edraak/edx-platform,jamiefolsom/edx-platform,shashank971/edx-platform,knehez/edx-platform,bigdatauniversity/edx-platform,CourseTalk/edx-platform,inares/edx-platform,zerobatu/edx-platform,xuxiao19910803/edx-platform,mitocw/edx-platform,xuxiao19910803/edx,devs1991/test_edx_docmode,bitifirefly/edx-platform,xingyepei/edx-platform,jbassen/edx-platform,Edraak/circleci-edx-platform,edry/edx-platform,devs1991/test_edx_docmode,halvertoluke/edx-platform,playm2mboy/edx-platform,ahmadiga/min_edx,shashank971/edx-platform,benpatterson/edx-platform,utecuy/edx-platform,angelapper/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,xinjiguaike/edx-platform,mbareta/edx-platform-ft,EDUlib/edx-platform,antoviaque/edx-platform,louyihua/edx-platform,appliedx/edx-platform,Stanford-Online/edx-platform,synergeticsedx/deployment-wipro,hamzehd/edx-platform,utecuy/edx-platform,alu042/edx-platform,tanmaykm/edx-platform,jazkarta/edx-platform,synergeticsedx/deployment-wipro,kursitet/edx-platform,jazkarta/edx-platform,Kalyzee/edx-platform,motion2015/edx-platform,Ayub-Khan/edx-platform,arifsetiawan/edx-platform,shurihell/testasia,jazkarta/edx-platform,kursitet/edx-platform,playm2mboy/edx-platform,chauhanhardik/populo,a-parhom/edx-platform,doganov/edx-platform,procangroup/edx-platform,franosincic/edx-platform,cpennington/edx-platform,cpennington/edx-platform,solashirai/edx-platform,waheedahmed/edx-platform,eduNEXT/edunext-platform,mbareta
/edx-platform-ft,4eek/edx-platform,10clouds/edx-platform,eduNEXT/edx-platform,appsembler/edx-platform,edry/edx-platform,zerobatu/edx-platform,ahmadiga/min_edx,jjmiranda/edx-platform,chauhanhardik/populo_2,motion2015/edx-platform,caesar2164/edx-platform,polimediaupv/edx-platform,deepsrijit1105/edx-platform,chand3040/cloud_that,xuxiao19910803/edx,nikolas/edx-platform,mcgachey/edx-platform,waheedahmed/edx-platform,4eek/edx-platform,zubair-arbi/edx-platform,nttks/edx-platform,edry/edx-platform,nttks/edx-platform,simbs/edx-platform,kamalx/edx-platform,Softmotions/edx-platform,chrisndodge/edx-platform,synergeticsedx/deployment-wipro,4eek/edx-platform,utecuy/edx-platform,lduarte1991/edx-platform,devs1991/test_edx_docmode,Endika/edx-platform,Stanford-Online/edx-platform,shashank971/edx-platform,eduNEXT/edx-platform,halvertoluke/edx-platform,romain-li/edx-platform,UOMx/edx-platform,ESOedX/edx-platform,xinjiguaike/edx-platform,pomegranited/edx-platform,cecep-edu/edx-platform,jolyonb/edx-platform,amir-qayyum-khan/edx-platform,EDUlib/edx-platform,itsjeyd/edx-platform,don-github/edx-platform,chudaol/edx-platform,knehez/edx-platform,pabloborrego93/edx-platform,IONISx/edx-platform,gsehub/edx-platform,zofuthan/edx-platform,caesar2164/edx-platform,SivilTaram/edx-platform,jzoldak/edx-platform,jamesblunt/edx-platform,wwj718/edx-platform,adoosii/edx-platform,shubhdev/edx-platform,proversity-org/edx-platform,BehavioralInsightsTeam/edx-platform,atsolakid/edx-platform,mushtaqak/edx-platform,don-github/edx-platform,eduNEXT/edunext-platform,openfun/edx-platform,SravanthiSinha/edx-platform,leansoft/edx-platform,defance/edx-platform,nikolas/edx-platform,atsolakid/edx-platform,mushtaqak/edx-platform,Edraak/circleci-edx-platform,pabloborrego93/edx-platform,kursitet/edx-platform,edx-solutions/edx-platform,ZLLab-Mooc/edx-platform,jamesblunt/edx-platform,chudaol/edx-platform,xuxiao19910803/edx-platform,naresh21/synergetics-edx-platform,ahmedaljazzar/edx-platform,jonathan-beard/edx-platform,solashi
rai/edx-platform,zubair-arbi/edx-platform,jbzdak/edx-platform,miptliot/edx-platform,vasyarv/edx-platform,benpatterson/edx-platform,Edraak/edraak-platform,etzhou/edx-platform,MakeHer/edx-platform,lduarte1991/edx-platform,waheedahmed/edx-platform,doismellburning/edx-platform,IndonesiaX/edx-platform,franosincic/edx-platform,kmoocdev2/edx-platform,gymnasium/edx-platform,jzoldak/edx-platform,rismalrv/edx-platform,mjirayu/sit_academy,zhenzhai/edx-platform,ubc/edx-platform,mjirayu/sit_academy,edx-solutions/edx-platform,hamzehd/edx-platform,leansoft/edx-platform,knehez/edx-platform,playm2mboy/edx-platform,appsembler/edx-platform,Lektorium-LLC/edx-platform,bitifirefly/edx-platform,hamzehd/edx-platform,xuxiao19910803/edx-platform,Endika/edx-platform,amir-qayyum-khan/edx-platform,waheedahmed/edx-platform,longmen21/edx-platform,alexthered/kienhoc-platform,B-MOOC/edx-platform,kamalx/edx-platform,polimediaupv/edx-platform,bigdatauniversity/edx-platform,B-MOOC/edx-platform,msegado/edx-platform,don-github/edx-platform,caesar2164/edx-platform,jamesblunt/edx-platform,kursitet/edx-platform,ak2703/edx-platform,IndonesiaX/edx-platform,Endika/edx-platform,RPI-OPENEDX/edx-platform,zhenzhai/edx-platform,ESOedX/edx-platform,ahmadio/edx-platform,nagyistoce/edx-platform,procangroup/edx-platform,mushtaqak/edx-platform,a-parhom/edx-platform,jonathan-beard/edx-platform,cognitiveclass/edx-platform,defance/edx-platform,shabab12/edx-platform,alexthered/kienhoc-platform,kamalx/edx-platform,marcore/edx-platform,Edraak/circleci-edx-platform,jazztpt/edx-platform,zadgroup/edx-platform,chudaol/edx-platform,openfun/edx-platform,procangroup/edx-platform,AkA84/edx-platform,J861449197/edx-platform,RPI-OPENEDX/edx-platform,ferabra/edx-platform,JCBarahona/edX,ak2703/edx-platform,ZLLab-Mooc/edx-platform,deepsrijit1105/edx-platform,analyseuc3m/ANALYSE-v1,chrisndodge/edx-platform,SivilTaram/edx-platform,jjmiranda/edx-platform,shurihell/testasia,raccoongang/edx-platform,mahendra-r/edx-platform,antoviaque/edx-platf
orm,chand3040/cloud_that,mahendra-r/edx-platform,jbassen/edx-platform,inares/edx-platform,shubhdev/edx-platform,zubair-arbi/edx-platform,J861449197/edx-platform,gymnasium/edx-platform,bitifirefly/edx-platform,iivic/BoiseStateX,kmoocdev2/edx-platform,naresh21/synergetics-edx-platform,edx/edx-platform,cecep-edu/edx-platform,arbrandes/edx-platform,shubhdev/edxOnBaadal,fly19890211/edx-platform,doganov/edx-platform,analyseuc3m/ANALYSE-v1,vikas1885/test1,EDUlib/edx-platform,TeachAtTUM/edx-platform,Lektorium-LLC/edx-platform,IONISx/edx-platform,tiagochiavericosta/edx-platform,kursitet/edx-platform,SravanthiSinha/edx-platform,AkA84/edx-platform,chrisndodge/edx-platform,bitifirefly/edx-platform,wwj718/edx-platform,jonathan-beard/edx-platform,cognitiveclass/edx-platform,utecuy/edx-platform,raccoongang/edx-platform,shubhdev/edxOnBaadal,vikas1885/test1,ZLLab-Mooc/edx-platform,RPI-OPENEDX/edx-platform,nikolas/edx-platform,etzhou/edx-platform,zadgroup/edx-platform,itsjeyd/edx-platform,lduarte1991/edx-platform,CredoReference/edx-platform,chand3040/cloud_that,pomegranited/edx-platform,itsjeyd/edx-platform,ubc/edx-platform,cecep-edu/edx-platform,Edraak/edraak-platform,teltek/edx-platform,motion2015/edx-platform,Edraak/edx-platform,miptliot/edx-platform,CourseTalk/edx-platform,deepsrijit1105/edx-platform,mbareta/edx-platform-ft,B-MOOC/edx-platform,xuxiao19910803/edx,chand3040/cloud_that,ovnicraft/edx-platform,analyseuc3m/ANALYSE-v1,etzhou/edx-platform,J861449197/edx-platform,CourseTalk/edx-platform,mcgachey/edx-platform,arbrandes/edx-platform,xinjiguaike/edx-platform,appliedx/edx-platform,MakeHer/edx-platform,jolyonb/edx-platform,leansoft/edx-platform,Ayub-Khan/edx-platform,Kalyzee/edx-platform,ahmadio/edx-platform,alu042/edx-platform,longmen21/edx-platform,xuxiao19910803/edx,ovnicraft/edx-platform,UOMx/edx-platform,marcore/edx-platform,cecep-edu/edx-platform,gsehub/edx-platform,jjmiranda/edx-platform,shubhdev/edxOnBaadal,chrisndodge/edx-platform,IndonesiaX/edx-platform,bigdatauniver
sity/edx-platform,a-parhom/edx-platform,ferabra/edx-platform,playm2mboy/edx-platform,chauhanhardik/populo_2,philanthropy-u/edx-platform,edx-solutions/edx-platform,mahendra-r/edx-platform,Livit/Livit.Learn.EdX,romain-li/edx-platform,JioEducation/edx-platform,EDUlib/edx-platform,simbs/edx-platform,jbzdak/edx-platform,hamzehd/edx-platform,Stanford-Online/edx-platform,etzhou/edx-platform,zhenzhai/edx-platform,shubhdev/edx-platform,knehez/edx-platform,jamiefolsom/edx-platform,ubc/edx-platform,AkA84/edx-platform,longmen21/edx-platform,Edraak/edraak-platform,Livit/Livit.Learn.EdX,benpatterson/edx-platform,edx-solutions/edx-platform,xingyepei/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,mahendra-r/edx-platform,Ayub-Khan/edx-platform,longmen21/edx-platform,CredoReference/edx-platform,philanthropy-u/edx-platform,gymnasium/edx-platform,nikolas/edx-platform,SravanthiSinha/edx-platform,10clouds/edx-platform,chauhanhardik/populo_2,chauhanhardik/populo,chauhanhardik/populo,ferabra/edx-platform,IndonesiaX/edx-platform,JioEducation/edx-platform,iivic/BoiseStateX,chauhanhardik/populo_2,pepeportela/edx-platform,nanolearningllc/edx-platform-cypress-2,nanolearningllc/edx-platform-cypress,chauhanhardik/populo,nanolearningllc/edx-platform-cypress-2,jamiefolsom/edx-platform,UOMx/edx-platform,lduarte1991/edx-platform,msegado/edx-platform,jazztpt/edx-platform,Edraak/edx-platform,nagyistoce/edx-platform,prarthitm/edxplatform,kxliugang/edx-platform,ubc/edx-platform,inares/edx-platform,hastexo/edx-platform,ESOedX/edx-platform,JioEducation/edx-platform,mcgachey/edx-platform,Shrhawk/edx-platform,edry/edx-platform,arifsetiawan/edx-platform,Kalyzee/edx-platform,amir-qayyum-khan/edx-platform,nttks/edx-platform
|
42b69fdb0d9267c339200185feddefb430aea6ae
|
geartracker/admin.py
|
geartracker/admin.py
|
from django.contrib import admin
from geartracker.models import *
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("make", "model", "size")}
list_display = ('__unicode__', 'type', 'metric_weight', 'acquired')
list_filter = ('archived', 'category', 'type', 'make', 'tags')
search_fields = ('make', 'model')
filter_horizontal = ('related', 'tags')
admin.site.register(Item, ItemAdmin)
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('__unicode__', 'number_items')
admin.site.register(Category, CategoryAdmin)
class TypeAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('category', 'name', 'number_items')
list_filter = ('category',)
admin.site.register(Type, TypeAdmin)
class ListItemRelationshipInline(admin.TabularInline):
model = ListItem
extra = 1
class ListAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (ListItemRelationshipInline,)
list_display = ('name', 'total_metric_weight', 'start_date', 'end_date',
'public')
list_filter = ('public',)
admin.site.register(List, ListAdmin)
|
from django.contrib import admin
from geartracker.models import *
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("make", "model", "size")}
list_display = ('__unicode__', 'type', 'metric_weight', 'acquired')
list_filter = ('archived', 'category', 'type', 'make')
search_fields = ('make', 'model')
filter_horizontal = ('related',)
admin.site.register(Item, ItemAdmin)
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('__unicode__', 'number_items')
admin.site.register(Category, CategoryAdmin)
class TypeAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('category', 'name', 'number_items')
list_filter = ('category',)
admin.site.register(Type, TypeAdmin)
class ListItemRelationshipInline(admin.TabularInline):
model = ListItem
extra = 1
class ListAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (ListItemRelationshipInline,)
list_display = ('name', 'total_metric_weight', 'start_date', 'end_date',
'public')
list_filter = ('public',)
admin.site.register(List, ListAdmin)
|
Remove tags from list_filter and filter_horizontal
|
Remove tags from list_filter and filter_horizontal
|
Python
|
bsd-3-clause
|
pigmonkey/django-geartracker
|
2a6399a74110b6a9e0d48349c68775986c13a579
|
pyservice/context.py
|
pyservice/context.py
|
"""
RequestContext stores state relevant to the current request, as well as
keeping track of the plugin execution order and providing a simple method
`advance` for calling the next plugin in the chain.
"""
import collections
class Container(collections.defaultdict):
DEFAULT_FACTORY = lambda: None
def __init__(self):
super().__init__(self, Container.DEFAULT_FACTORY)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self[name] = value
class Context(object):
def __init__(self, service, operation):
self.service = service
self.operation = operation
def execute(self):
self.service.continue_execution(self)
|
"""
RequestContext stores state relevant to the current request, as well as
keeping track of the plugin execution order and providing a simple method
`advance` for calling the next plugin in the chain.
"""
import ujson
import collections
class Container(collections.defaultdict):
DEFAULT_FACTORY = lambda: None
def __init__(self):
super().__init__(self, Container.DEFAULT_FACTORY)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self[name] = value
class Context(object):
def __init__(self, service, operation, processor):
self.service = service
self.operation = operation
self.processor = processor
def process_request(self):
self.processor.continue_execution()
class Processor(object):
def __init__(self, service, operation, request_body):
self.service = service
self.operation = operation
self.context = Context(service, operation, self)
self.request = Container()
self.request_body = request_body
self.response = Container()
self.response_body = None
self.plugins = service.get_plugins(operation)
self.index = -1
self.state = "request" # request -> operation -> function
def execute(self):
self.context.process_request()
def continue_execution(self):
self.index += 1
plugins = self.plugins[self.state]
n = len(plugins)
if self.index > n:
# Terminal point so that service.invoke
# can safely call context.process_request()
return
elif self.index == n:
if self.state == "request":
self.index = -1
self.state = "operation"
self._deserialize_request()
self.continue_execution()
self._serialize_response()
elif self.state == "operation":
self.service.invoke(self.operation, self.request,
self.response, self.context)
# index < n
else:
if self.state == "request":
plugins[self.index](self.context)
elif self.state == "operation":
plugins[self.index](self.request, self.response, self.context)
def _deserialize_request(self):
self.request.update(ujson.loads(self.request_body))
def _serialize_response(self):
self.response_body = ujson.dumps(self.response)
|
Create class for request process recursion
|
Create class for request process recursion
|
Python
|
mit
|
numberoverzero/pyservice
|
cd342448675f3174bf74118de0447c1b0f169f3e
|
python/volumeBars.py
|
python/volumeBars.py
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi
barHeights = numpy.empty([16])
for i in range(16):
barHeights[i] = i * pi / 16
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.sin(barHeights)
barHeights += pi / 16
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y < 2:
nextFrame.SetPixel(x, y, 255, 0, 0)
elif y < 6:
nextFrame.SetPixel(x, y, 200, 200, 0)
else:
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import numpy
import math
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
barWidth = width / 16
pi = numpy.pi
barHeights = numpy.empty([16])
for i in range(16):
barHeights[i] = i * pi / 16
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
heights = numpy.empty([16])
for i in range(len(barHeights)):
heights[i] = (math.sin(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x) + math.cos(randint(-3, 3) * x)) / 3
barHeights += pi / 16
for x in range(width):
barHeight = int(heights[int(x / barWidth)] * height)
for y in range(height):
if height - y <= barHeight:
if y < 2:
nextFrame.SetPixel(x, y, 255, 0, 0)
elif y < 6:
nextFrame.SetPixel(x, y, 200, 200, 0)
else:
nextFrame.SetPixel(x, y, 0, 200, 0)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(0.2)
|
Create a more random function
|
Create a more random function
|
Python
|
mit
|
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
|
6c6021cd1a206a91432da096400358e5eb0255fe
|
nasa_data.py
|
nasa_data.py
|
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
image_url = apod_data["url"]
if image_url.endswith(".gif"):
raise TypeError
image_data = requests.get(image_url, stream=True)
except (requests.HTTPError or TypeError):
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
# check if website is accessible
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY")
apod_data.raise_for_status()
apod_data = apod_data.json()
# check if image is accessible
image_url = apod_data["url"]
image_data = requests.get(image_url, stream=True)
image_data.raise_for_status()
except requests.HTTPError:
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
# Twitter limitation: .gif must be smaller than 3MB
if image_url.endswith(".gif") and os.path.getsize(os.path.join("APODs", os.path.basename(image_url))) >= 3145728:
return
else:
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
Update 0.7.0 - specified try-block to check the status - changed except block - allowed .gif format but only up to 3MP (Twitter limitation)
|
Update 0.7.0
- specified try-block to check the status
- changed except block
- allowed .gif format but only up to 3MP (Twitter limitation)
|
Python
|
mit
|
FXelix/space_facts_bot
|
b702569c800953eb3476c927fbc1085e67c88dbd
|
ghettoq/messaging.py
|
ghettoq/messaging.py
|
from Queue import Empty
from itertools import cycle
class Queue(object):
def __init__(self, backend, name):
self.name = name
self.backend = backend
def put(self, payload):
self.backend.put(self.name, payload)
def get(self):
payload = self.backend.get(self.name)
if payload is not None:
return payload
raise Empty
class QueueSet(object):
def __init__(self, backend, queues):
self.backend = backend
self.queues = map(self.backend.Queue, queues)
self.cycle = cycle(self.queues)
def get(self):
while True:
try:
return self.cycle.next().get()
except QueueEmpty:
pass
|
from Queue import Empty
from itertools import cycle
class Queue(object):
def __init__(self, backend, name):
self.name = name
self.backend = backend
def put(self, payload):
self.backend.put(self.name, payload)
def get(self):
payload = self.backend.get(self.name)
if payload is not None:
return payload
raise Empty
class QueueSet(object):
def __init__(self, backend, queues):
self.backend = backend
self.queue_names = queues
self.queues = map(self.backend.Queue, self.queue_names)
self.cycle = cycle(self.queues)
self.all = frozenset(self.queue_names)
def get(self):
tried = set()
while True:
queue = self.cycle.next()
try:
return queue.get()
except QueueEmpty:
tried.add(queue)
if tried == self.all:
raise
|
Raise QueueEmpty when all queues has been tried.
|
QueueSet: Raise QueueEmpty when all queues has been tried.
|
Python
|
bsd-3-clause
|
ask/ghettoq
|
e4275c4f1a408dd9f8095bef4ed650ccc54401e9
|
packages/mono-llvm-2-10.py
|
packages/mono-llvm-2-10.py
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
979aada6964a5c8ef1f5c787ce84d72420626901
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
"""Add Build.priority
Revision ID: 36cbde703cc0
Revises: fe743605e1a
Create Date: 2014-10-06 10:10:14.729720
"""
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = 'fe743605e1a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
"""Add Build.priority
Revision ID: 36cbde703cc0
Revises: fe743605e1a
Create Date: 2014-10-06 10:10:14.729720
"""
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
Update build priority down revision
|
Update build priority down revision
2c6662281b66
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes
|
3fdd43166cd9280960afad5cc13f1cd34e0f944d
|
scripts/populate-noteworthy-projects.py
|
scripts/populate-noteworthy-projects.py
|
"""
This will update node links on NOTEWORTHY_LINKS_NODE.
"""
import sys
import json
import urllib2
import logging
from modularodm import Q
from website.app import init_app
from website import models
from framework.auth.core import Auth
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.settings.defaults import NOTEWORTHY_LINKS_NODE
logger = logging.getLogger(__name__)
def get_popular_nodes():
# TODO Change discover url to production url in production
discover_url = 'http://127.0.0.1:5000/api/v1/explore/activity/popular/raw/'
response = urllib2.urlopen(discover_url)
data = json.load(response)
return data
def main(dry_run=True):
init_app(routes=False)
with TokuTransaction():
noteworthy_node = models.Node.find(Q('_id', 'eq', NOTEWORTHY_LINKS_NODE))[0]
logger.warn('Repopulating {} with latest noteworthy nodes.'.format(noteworthy_node._id))
# popular_nodes = get_popular_nodes()['popular_node_ids'] # TODO uncomment this
popular_nodes = ["njadc", "qgtvw", "bg9ha", "w4g8v", "bpuh9"] # TODO delete this
user = noteworthy_node.creator
auth = Auth(user)
for i in xrange(len(noteworthy_node.nodes)-1, -1, -1):
pointer = noteworthy_node.nodes[i]
noteworthy_node.rm_pointer(pointer, auth)
logger.info('Removed node link to {}'.format(pointer.node._id))
for n_id in popular_nodes:
n = models.Node.find(Q('_id', 'eq', n_id))[0]
noteworthy_node.add_pointer(n, auth, save=True)
logger.info('Added node link to {}'.format(n))
if not dry_run:
try:
noteworthy_node.save()
logger.info('Noteworthy nodes updated.')
except:
logger.error('Could not migrate noteworthy nodes due to error')
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry_run=dry_run)
|
Add script for taking most popular nodes and adding them as node links to NOTEWORTHY_LINKS_NODE.
|
Add script for taking most popular nodes and adding them as node links to NOTEWORTHY_LINKS_NODE.
|
Python
|
apache-2.0
|
RomanZWang/osf.io,Johnetordoff/osf.io,chennan47/osf.io,aaxelb/osf.io,acshi/osf.io,mfraezz/osf.io,chennan47/osf.io,hmoco/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,wearpants/osf.io,samchrisinger/osf.io,amyshi188/osf.io,RomanZWang/osf.io,aaxelb/osf.io,caneruguz/osf.io,sloria/osf.io,saradbowman/osf.io,adlius/osf.io,baylee-d/osf.io,adlius/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,abought/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,rdhyee/osf.io,mfraezz/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,adlius/osf.io,alexschiller/osf.io,mluke93/osf.io,leb2dg/osf.io,sloria/osf.io,cwisecarver/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,wearpants/osf.io,hmoco/osf.io,aaxelb/osf.io,Nesiehr/osf.io,kwierman/osf.io,mfraezz/osf.io,jnayak1/osf.io,zamattiac/osf.io,felliott/osf.io,DanielSBrown/osf.io,mluo613/osf.io,Johnetordoff/osf.io,acshi/osf.io,emetsger/osf.io,felliott/osf.io,mluo613/osf.io,felliott/osf.io,icereval/osf.io,icereval/osf.io,SSJohns/osf.io,baylee-d/osf.io,zamattiac/osf.io,cwisecarver/osf.io,icereval/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,zamattiac/osf.io,crcresearch/osf.io,mluke93/osf.io,crcresearch/osf.io,kwierman/osf.io,mattclark/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,abought/osf.io,kwierman/osf.io,caseyrollins/osf.io,caneruguz/osf.io,amyshi188/osf.io,caneruguz/osf.io,caseyrollins/osf.io,alexschiller/osf.io,binoculars/osf.io,hmoco/osf.io,cwisecarver/osf.io,acshi/osf.io,mluo613/osf.io,mluke93/osf.io,zachjanicki/osf.io,SSJohns/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,saradbowman/osf.io,emetsger/osf.io,doublebits/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,chrisseto/osf.io,adlius/osf.io,TomBaxter/osf.io,
acshi/osf.io,mluo613/osf.io,samchrisinger/osf.io,abought/osf.io,jnayak1/osf.io,wearpants/osf.io,wearpants/osf.io,kch8qx/osf.io,Nesiehr/osf.io,binoculars/osf.io,jnayak1/osf.io,brianjgeiger/osf.io,erinspace/osf.io,DanielSBrown/osf.io,mattclark/osf.io,doublebits/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,abought/osf.io,mattclark/osf.io,rdhyee/osf.io,caseyrollins/osf.io,rdhyee/osf.io,felliott/osf.io,emetsger/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,erinspace/osf.io,baylee-d/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,cslzchen/osf.io,chrisseto/osf.io,TomHeatwole/osf.io,hmoco/osf.io,caneruguz/osf.io,doublebits/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,mluke93/osf.io,sloria/osf.io,kwierman/osf.io,Nesiehr/osf.io,binoculars/osf.io,chrisseto/osf.io,crcresearch/osf.io,amyshi188/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,doublebits/osf.io,laurenrevere/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,alexschiller/osf.io,emetsger/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,acshi/osf.io,Nesiehr/osf.io,leb2dg/osf.io,cslzchen/osf.io,zamattiac/osf.io,erinspace/osf.io,jnayak1/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,kch8qx/osf.io,RomanZWang/osf.io,amyshi188/osf.io,TomHeatwole/osf.io,kch8qx/osf.io
|
|
c0e00f3caf12ad95bc753e65fc3721623c552aa0
|
diceware.py
|
diceware.py
|
from random import randint
def sysrand(sides=6, rolls=5):
return ''.join(map(str, [randint(1, sides) for i in range(rolls)]))
def randorg(sides=6, rolls=5):
raise NotImplemented
def generate(suggestions=1, words=6, apikey=''):
with open('diceware.wordlist.asc.txt', 'r') as f:
wordlist = dict([map(str.strip, line.split()) for line in f if line.strip() != ''])
for i in range(suggestions):
password = []
getkey = randorg if apikey else sysrand
while len(password) < words:
key = None
while key not in wordlist:
key = getkey()
password.append(wordlist[key])
yield ' '.join(password)
|
from __future__ import print_function
from httplib import HTTPSConnection
from random import randint
from uuid import uuid4
import json, sys
def sysrand(suggestions, words, rolls=5, sides=6, **kwargs):
print('sysrand', file=sys.stderr)
for i in range(suggestions):
yield [''.join(map(str, [randint(1, sides) for i in range(rolls)])) for j in range(words)]
def randorg(suggestions, words, rolls=5, sides=6, apiKey=''):
conn = HTTPSConnection('api.random.org')
body = json.dumps({
'jsonrpc': '2.0',
'id': str(uuid4()),
'method': 'generateIntegers',
'params': {
'apiKey': apiKey,
'n' : rolls * words * suggestions,
'min': 1,
'max': sides
}
})
headers = {
'Content-Type': 'raw'
}
conn.request('POST', '/json-rpc/1/invoke', body, headers)
resp = conn.getresponse()
data = json.loads(resp.read())
conn.close()
digits = map(str, data['result']['random']['data'])
for i in range(suggestions):
start = i * words * rolls
yield [''.join(digits[start + (j * rolls):start + ((j + 1) * rolls)]) for j in range(words)]
def generate(suggestions=1, words=6, apikey=''):
with open('diceware.wordlist.asc.txt', 'r') as f:
wordlist = dict([map(str.strip, line.split()) for line in f if line.strip() != ''])
getkey = randorg if apikey else sysrand
for keys in getkey(suggestions, words, apiKey=apikey):
yield ' '.join([wordlist[k] for k in keys])
|
Add random.org support for generating keys
|
Add random.org support for generating keys
* Replace tabs with spaces (d'oh!)
* Implement random key functions as generators so that we can retrieve
numbers necessary for all suggestions in one request from random.org
without burning through requests
* Send a request to the random.org API and parse the response into an
array of keys
|
Python
|
mit
|
darthmall/Alfred-Diceware-Workflow
|
62d5c5b2bf33a228938924a44e229f2f2cb4e02c
|
registrasion/urls.py
|
registrasion/urls.py
|
from django.conf.urls import url, include, patterns
urlpatterns = patterns(
"registrasion.views",
url(r"^category/([0-9]+)$", "product_category", name="product_category"),
url(r"^checkout$", "checkout", name="checkout"),
url(r"^invoice/([0-9]+)$", "invoice", name="invoice"),
url(r"^invoice/([0-9]+)/pay$", "pay_invoice", name="pay_invoice"),
url(r"^profile$", "edit_profile", name="profile"),
url(r"^register$", "guided_registration", name="guided_registration"),
url(r"^register/([0-9]+)$", "guided_registration",
name="guided_registration"),
# Required by django-nested-admin.
url(r'^nested_admin/', include('nested_admin.urls')),
)
|
from django.conf.urls import url, patterns
urlpatterns = patterns(
"registrasion.views",
url(r"^category/([0-9]+)$", "product_category", name="product_category"),
url(r"^checkout$", "checkout", name="checkout"),
url(r"^invoice/([0-9]+)$", "invoice", name="invoice"),
url(r"^invoice/([0-9]+)/pay$", "pay_invoice", name="pay_invoice"),
url(r"^profile$", "edit_profile", name="profile"),
url(r"^register$", "guided_registration", name="guided_registration"),
url(r"^register/([0-9]+)$", "guided_registration",
name="guided_registration"),
)
|
Revert "Registrasion URLs now include django-nested-admin"
|
Revert "Registrasion URLs now include django-nested-admin"
This reverts commit 58eed33c429c1035801e840b41aa7104c02b9b5a.
|
Python
|
apache-2.0
|
chrisjrn/registrasion,chrisjrn/registrasion
|
e7afd7ccda4bb86769386891719d4bc4f7418509
|
plugins/PointCloudAlignment/__init__.py
|
plugins/PointCloudAlignment/__init__.py
|
from . import PointCloudAlignTool
from . import PointCloudAlignView
from UM.Application import Application
def getMetaData():
return {
'type': 'tool',
'plugin':
{
"name": "PointCloudAlignment",
'author': 'Jaime van Kessel',
'version': '1.0',
'description': ''
},
'view':
{
'name': 'PointCloudAlignmentView',
'visible': False
},
'tool':
{
'name': 'PointCloudAlignmentTool'
}
}
def register(app):
#TODO: Once multiple plugin types are supported, this needs to be removed.
view = PointCloudAlignView.PointCloudAlignView()
view.setPluginId("PointCloudAlign")
Application.getInstance().getController().addView(view)
return PointCloudAlignTool.PointCloudAlignTool()
|
from . import PointCloudAlignTool
from . import PointCloudAlignView
from UM.Application import Application
def getMetaData():
return {
'type': 'tool',
'plugin':
{
"name": "PointCloudAlignment",
'author': 'Jaime van Kessel',
'version': '1.0',
'description': ''
},
'view':
{
'name': 'PointCloudAlignmentView',
'visible': False
},
'tool':
{
'name': 'PointCloudAlignmentTool',
},
'cura': {
'tool': {
'visible': False
}
}
}
def register(app):
#TODO: Once multiple plugin types are supported, this needs to be removed.
view = PointCloudAlignView.PointCloudAlignView()
view.setPluginId("PointCloudAlignment")
Application.getInstance().getController().addView(view)
return PointCloudAlignTool.PointCloudAlignTool()
|
Hide PointCloudAlignment things from Cura
|
Hide PointCloudAlignment things from Cura
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
b089522f108c9071013e0cc00813e29bc415595c
|
logbot/irc_client.py
|
logbot/irc_client.py
|
import irc.client
import sys
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.logger)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def logger(self, connection, event):
sys.stdout.write(event.arguments[0])
sys.stdout.flush()
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
import irc.client
import sys
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.logger)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def logger(self, connection, event):
sys.stdout.write("{0}: {1}\n".format(event.source.nick, event.arguments[0]))
sys.stdout.flush()
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
Add nick to the log
|
Add nick to the log
|
Python
|
mit
|
mlopes/LogBot
|
ced2be321f347f3e28e79e5cfac4e4a83f6b6819
|
fireplace/cards/blackrock/collectible.py
|
fireplace/cards/blackrock/collectible.py
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Solemn Vigil
class BRM_001:
action = [Draw(CONTROLLER) * 2]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
# Imp Gang Boss
class BRM_006:
events = [
Damage(SELF).on(Summon(CONTROLLER, "BRM_006t"))
]
##
# Spells
# Solemn Vigil
class BRM_001:
action = [Draw(CONTROLLER) * 2]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
# Demonwrath
class BRM_005:
action = [Hit(ALL_MINIONS - DEMON, 2)]
|
Implement Demonwrath and Imp Gang Boss
|
Implement Demonwrath and Imp Gang Boss
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,Meerkov/fireplace,smallnamespace/fireplace,butozerca/fireplace,NightKev/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,Meerkov/fireplace,beheh/fireplace,liujimj/fireplace,liujimj/fireplace,oftc-ftw/fireplace,amw2104/fireplace,Ragowit/fireplace,jleclanche/fireplace,oftc-ftw/fireplace
|
d5b439577d6e609ccb736b5d66c19911a95fc460
|
icekit/articles/page_type_plugins.py
|
icekit/articles/page_type_plugins.py
|
from django.conf.urls import patterns, url
from django.http import Http404
from django.template.response import TemplateResponse
from fluent_pages.extensions import page_type_pool
from icekit.page_types.layout_page.admin import LayoutPageAdmin
from icekit.plugins import ICEkitFluentContentsPagePlugin
class ListingPagePlugin(ICEkitFluentContentsPagePlugin):
render_template = 'icekit/layouts/listing.html'
model_admin = LayoutPageAdmin
def get_view_response(self, request, page, view_func, view_args, view_kwargs):
"""
Render the custom view that was exposed by the extra plugin URL patterns.
This gives the ability to add extra middleware logic.
"""
return view_func(request, page, *view_args, **view_kwargs)
def _detail_view(request, parent, slug):
try:
page = parent.get_visible_items().get(slug=slug)
except:
raise Http404
# If the article defines its own response, use that.
if hasattr(page, 'get_response'):
return page.get_response(request, parent=parent)
raise AttributeError("Make sure to define `get_response()` in your item's model, or set `detail_view' on your Listing Page plugin.")
detail_view = _detail_view
urls = patterns('',
url(
'^(?P<slug>[-\w]+)/$',
detail_view,
),
)
|
from django.conf.urls import patterns, url
from django.http import Http404
from django.template.response import TemplateResponse
from fluent_pages.extensions import page_type_pool
from icekit.page_types.layout_page.admin import LayoutPageAdmin
from icekit.plugins import ICEkitFluentContentsPagePlugin
class ListingPagePlugin(ICEkitFluentContentsPagePlugin):
render_template = 'icekit/layouts/listing.html'
model_admin = LayoutPageAdmin
# TODO Awful hack to make request available to listing page class as
# `_request` class attribute. There must be a better way...
def get_response(self, request, page, **kwargs):
page._plugin_request = request
return super(ListingPagePlugin, self).get_response(
request, page, **kwargs)
def get_view_response(self, request, page, view_func, view_args, view_kwargs):
"""
Render the custom view that was exposed by the extra plugin URL patterns.
This gives the ability to add extra middleware logic.
"""
return view_func(request, page, *view_args, **view_kwargs)
def _detail_view(request, parent, slug):
try:
page = parent.get_visible_items().get(slug=slug)
except:
raise Http404
# If the article defines its own response, use that.
if hasattr(page, 'get_response'):
return page.get_response(request, parent=parent)
raise AttributeError("Make sure to define `get_response()` in your item's model, or set `detail_view' on your Listing Page plugin.")
detail_view = _detail_view
urls = patterns('',
url(
'^(?P<slug>[-\w]+)/$',
detail_view,
),
)
|
Make current request available to `ListingPage`s item methods
|
Make current request available to `ListingPage`s item methods
This is an awful hack. Hopefully we can find a better way.
See ICEKit ticket #154 in Assembla
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
2959fa0a9f69cbfb7611bbc12488089921d26ab8
|
IPython/frontend/html/notebook/__init__.py
|
IPython/frontend/html/notebook/__init__.py
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
else:
if tornado.version_info < (2,1,0):
raise ImportError(msg+", but you have %s"%tornado.version)
del msg
|
"""The IPython HTML Notebook"""
# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
import tornado
except ImportError:
raise ImportError(msg)
try:
version_info = tornado.version_info
except AttributeError:
raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
raise ImportError(msg + ", but you have %s" % tornado.version)
del msg
|
Fix for tornado check for tornado < 1.1.0
|
Fix for tornado check for tornado < 1.1.0
Tornado < 1.1.0 does not have the ``version_info`` variable to check.
Debian squeeze has tornado 1.0.1.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
fb256b042a485aefa2d9e45b39daa551a3f779ff
|
examples/open_file_dialog.py
|
examples/open_file_dialog.py
|
import webview
import threading
"""
This example demonstrates creating an open file dialog.
"""
def open_file_dialog():
import time
time.sleep(5)
print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True))
if __name__ == '__main__':
t = threading.Thread(target=open_file_dialog)
t.start()
webview.create_window("Open file dialog example", "http://www.flowrl.com")
|
import webview
import threading
"""
This example demonstrates creating an open file dialog.
"""
def open_file_dialog():
import time
time.sleep(5)
file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)')
print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types))
if __name__ == '__main__':
t = threading.Thread(target=open_file_dialog)
t.start()
webview.create_window("Open file dialog example", "http://www.flowrl.com")
|
Modify example to include file_types param
|
[All] Modify example to include file_types param
|
Python
|
bsd-3-clause
|
r0x0r/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,r0x0r/pywebview
|
d1e5f55681eda2b2b358013ad5dca3a58619c914
|
pycom/objects.py
|
pycom/objects.py
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
|
# encoding: utf-8
### Attribute Wrapper
class AttrWrapper(object):
attrs = []
def __setattr__(self, name, value):
if name not in self.attrs:
raise AttributeError("'%s' is not supported" % name)
object.__setattr__(self, name, value)
def __repr__(self):
attrs = []
template = "%s=%s"
for name in self.attrs:
try:
attrs.append(template % (name, getattr(self, name)))
except AttributeError:
pass
return "%s(%s)" % (self.__class__.__name__, ", ".join(attrs))
def val(obj, name, default=None):
if hasattr(obj, name):
return obj.name
elif name in obj:
return obj[name]
elif isinstance(obj, (list, tuple)) and isinstance(name, int):
try:
return obj[name]
except Exception:
return default
else:
return default
|
Add the function to get the value of object, dict, list, or tuple.
|
Add the function to get the value of object, dict, list, or tuple.
|
Python
|
mit
|
xgfone/pycom,xgfone/xutils
|
482b8f7738da51c394969e526b37093d3c52d663
|
pyconkr/tests.py
|
pyconkr/tests.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.http import HttpResponse
from django.test import Client
from django.core.urlresolvers import reverse_lazy, reverse
from django.contrib.auth import get_user_model
from pyconkr.helper import render_io_error
User = get_user_model()
class HelperFunctionTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_render_io_error(self):
a = render_io_error("test reason")
self.assertEqual(a.status_code, 406, "render io error status code must be 406")
class PaymentTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user('testname', '[email protected]', 'testpassword')
self.client.login(username='testname', password='testpassword')
def tearDown(self):
pass
def test_view_registration_payment(self):
url = reverse('registration_payment')
response = self.client.post(url, {'test': 1})
self.assertEqual(response['content-type'], 'application/javascript', 'error raise and must be ajax' )
print response.content
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.http import HttpResponse
from django.test import Client
from django.core.urlresolvers import reverse_lazy, reverse
from django.contrib.auth import get_user_model
from pyconkr.helper import render_io_error
User = get_user_model()
class HelperFunctionTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_render_io_error(self):
a = render_io_error("test reason")
self.assertEqual(a.status_code, 406, "render io error status code must be 406")
class PaymentTestCase(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user('testname', '[email protected]', 'testpassword')
self.client.login(username='testname', password='testpassword')
def tearDown(self):
pass
def test_view_registration_payment(self):
url = reverse('registration_payment')
response = self.client.post(url, {'test': 1})
self.assertEqual(response['content-type'], 'application/json', 'Result has to be JSON')
class ProfileTest(TestCase):
def test_profile_is_created_when_user_save(self):
user = User.objects.create_user('test', '[email protected]', 'password')
self.assertNotEqual(user.profile, None)
|
Add profile model signal test case
|
Add profile model signal test case
|
Python
|
mit
|
pythonkr/pyconapac-2016,pythonkr/pyconapac-2016,pythonkr/pyconapac-2016
|
bb20e4e0f8527f2a24a4164022028793336ab17c
|
bazaar/utils.py
|
bazaar/utils.py
|
from __future__ import unicode_literals
from djmoney_rates.utils import convert_money
import moneyed
from .settings import bazaar_settings
def convert_money_to_default_currency(money):
"""
Convert money amount to the system default currency. If money has no 'currency' attribute
does nothing
"""
if hasattr(money, "currency"):
default_currency = moneyed.CURRENCIES[bazaar_settings.DEFAULT_CURRENCY]
if money.currency != default_currency:
amount = convert_money(money.amount, money.currency.code, default_currency.code)
money = moneyed.Money(amount, default_currency)
return money
|
Add helper function to convert money instances to default currency
|
Add helper function to convert money instances to default currency
|
Python
|
bsd-2-clause
|
evonove/django-bazaar,evonove/django-bazaar,meghabhoj/NEWBAZAAR,meghabhoj/NEWBAZAAR,evonove/django-bazaar,meghabhoj/NEWBAZAAR
|
|
ab99a515995e121944e0e7b355e8980984a2fd98
|
util.py
|
util.py
|
__author__ = 'zifnab'
import string
from passlib.hash import sha512_crypt
import database
from flask_login import login_user
def random_string(size=10, chars=string.ascii_letters + string.digits):
import random
return ''.join(random.choice(chars) for x in range(size))
def create_user(**kwargs):
username = kwargs.get('username')
password = kwargs.get('password')
email = kwargs.get('email')
hash = sha512_crypt.encrypt(password)
user = database.User(username=username,
hash=hash,
email=email)
if database.User.objects().count() == 0:
user.admin = True
user.save()
login_user(user)
def authenticate_user(username, password):
user = database.User.objects(username__iexact=username).first()
if user is None:
return None
if (sha512_crypt.verify(password, user.hash)):
return user
else:
return None
def lookup_user(username):
user = database.User.objects(username__iexact=username).first()
return user
|
__author__ = 'zifnab'
import string
from passlib.hash import sha512_crypt
from random import SystemRandom
import database
from flask_login import login_user
_random = SystemRandom()
def random_string(size=10, chars=string.ascii_letters + string.digits):
return ''.join(_random.choice(chars) for x in range(size))
def create_user(**kwargs):
username = kwargs.get('username')
password = kwargs.get('password')
email = kwargs.get('email')
hash = sha512_crypt.encrypt(password)
user = database.User(username=username,
hash=hash,
email=email)
if database.User.objects().count() == 0:
user.admin = True
user.save()
login_user(user)
def authenticate_user(username, password):
user = database.User.objects(username__iexact=username).first()
if user is None:
return None
if (sha512_crypt.verify(password, user.hash)):
return user
else:
return None
def lookup_user(username):
user = database.User.objects(username__iexact=username).first()
return user
|
Use a cryptographically secure PRNG in random_string().
|
Use a cryptographically secure PRNG in random_string().
By default python uses a non-CS PRNG, so with some analysis, "random_string"s could be predicted.
|
Python
|
mit
|
zifnab06/zifb.in,zifnab06/zifb.in
|
d283c4c94d9ba510460c2530d602fe0c1eb14be5
|
server/proxy_util.py
|
server/proxy_util.py
|
#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
start = datetime.datetime.now()
data = self._get(self.har)
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
return json.loads(content)
|
#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
start = datetime.datetime.now()
data = self._get(self.har)
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
|
Add a comment on HAR encoding.
|
Add a comment on HAR encoding.
|
Python
|
apache-2.0
|
kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa
|
2aa45922f7d018398e028c2aed964cf2ec00038a
|
bika/lims/browser/widgets/recordswidget.py
|
bika/lims/browser/widgets/recordswidget.py
|
from AccessControl import ClassSecurityInfo
from Products.ATExtensions.widget import RecordsWidget as ATRecordsWidget
from Products.Archetypes.Registry import registerWidget
class RecordsWidget(ATRecordsWidget):
security = ClassSecurityInfo()
_properties = ATRecordsWidget._properties.copy()
_properties.update({
'macro': "bika_widgets/recordswidget",
'helper_js': ("bika_widgets/recordswidget.js",),
'helper_css': ("bika_widgets/recordswidget.css",),
'allowDelete': True,
})
registerWidget(RecordsWidget,
title = 'RecordsWidget',
description = (''),
)
|
from AccessControl import ClassSecurityInfo
from Products.ATExtensions.widget import RecordsWidget as ATRecordsWidget
from Products.Archetypes.Registry import registerWidget
class RecordsWidget(ATRecordsWidget):
security = ClassSecurityInfo()
_properties = ATRecordsWidget._properties.copy()
_properties.update({
'macro': "bika_widgets/recordswidget",
'helper_js': ("bika_widgets/recordswidget.js",),
'helper_css': ("bika_widgets/recordswidget.css",),
'allowDelete': True,
})
def process_form(self, instance, field, form, empty_marker=None,
emptyReturnsMarker=False):
"""
Basic impl for form processing in a widget plus allowing empty
values to be saved
"""
value = form.get(field.getName(), empty_marker)
print value
if not value:
return value, {}
if value is empty_marker:
return empty_marker
if emptyReturnsMarker and value == '':
return empty_marker
return value, {}
registerWidget(RecordsWidget,
title = 'RecordsWidget',
description = (''),
)
|
Allow empty values in Records Widget
|
Allow empty values in Records Widget
|
Python
|
agpl-3.0
|
anneline/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS
|
c088a28c9f7020cb64c25eb0e83dfdcd286015d3
|
app/assets.py
|
app/assets.py
|
from flask.ext.assets import Bundle
app_css = Bundle(
'app.scss',
'map.scss',
filters='scss',
output='styles/app.css'
)
app_js = Bundle(
'app.js',
'descriptor.js',
'map.js',
'resources.js',
filters='jsmin',
output='scripts/app.js'
)
vendor_css = Bundle(
'vendor/semantic.min.css',
output='styles/vendor.css'
)
vendor_js = Bundle(
'vendor/jquery.min.js',
'vendor/async.js',
'vendor/address-autocomplete.js',
'vendor/papaparse.min.js',
'vendor/semantic.min.js',
'vendor/tablesort.min.js',
filters='jsmin',
output='scripts/vendor.js'
)
|
from flask.ext.assets import Bundle
app_css = Bundle(
'*.scss',
filters='scss',
output='styles/app.css'
)
app_js = Bundle(
'app.js',
'descriptor.js',
'map.js',
'resources.js',
filters='jsmin',
output='scripts/app.js'
)
vendor_css = Bundle(
'vendor/semantic.min.css',
output='styles/vendor.css'
)
vendor_js = Bundle(
'vendor/jquery.min.js',
'vendor/async.js',
'vendor/address-autocomplete.js',
'vendor/papaparse.min.js',
'vendor/semantic.min.js',
'vendor/tablesort.min.js',
filters='jsmin',
output='scripts/vendor.js'
)
|
Generalize scss bundle to track all scss files
|
Generalize scss bundle to track all scss files
|
Python
|
mit
|
hack4impact/maps4all,hack4impact/asylum-connect-catalog,hack4impact/maps4all-jlc-sp2,hack4impact/maps4all,hack4impact/asylum-connect-catalog,hack4impact/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog,hack4impact/maps4all,hack4impact/maps4all-jlc-sp2,hack4impact/maps4all-jlc-sp2,hack4impact/asylum-connect-catalog,hack4impact/maps4all-jlc-sp2,AsylumConnect/asylum-connect-catalog,hack4impact/maps4all,AsylumConnect/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog
|
ddbc9624aacf9e15897bdfb46fc2016888db114b
|
git/pmstats2/get-pm-stats.py
|
git/pmstats2/get-pm-stats.py
|
#!/usr/bin/env python
# get-pmstats.py
# Henry J Schmale
# November 25, 2017
#
# Calculates the additions and deletions per day within a git repository
# by parsing out the git log. It opens the log itself.
# Produces output as a CSV
import subprocess
from datetime import datetime
changes_by_date = {}
git_log = subprocess.Popen(
'git log --numstat --pretty="%at"',
stdout=subprocess.PIPE,
shell=True)
date = None
day_changes = [0, 0]
for line in git_log.stdout:
args = line.rstrip().split()
if len(args) == 1:
old_date = date
date = datetime.fromtimestamp(int(args[0]))
if day_changes != [0, 0] and date.date() != old_date.date():
changes_by_date[str(date.date())] = day_changes
day_changes = [0, 0]
elif len(args) >= 3:
day_changes = [sum(x) for x in zip(day_changes, map(int, args[0:2]))]
print('date,ins,del')
for key,vals in changes_by_date.items():
print(','.join(map(str, [key, vals[0], vals[1]])))
|
#!/usr/bin/env python
# get-pmstats.py
# Henry J Schmale
# November 25, 2017
#
# Calculates the additions and deletions per day within a git repository
# by parsing out the git log. It opens the log itself.
# Produces output as a CSV
import subprocess
from datetime import datetime
def chomp_int(val):
try:
return int(val)
except ValueError:
return 0
changes_by_date = {}
git_log = subprocess.Popen(
'git log --numstat --pretty="%at"',
stdout=subprocess.PIPE,
shell=True)
date = None
day_changes = [0, 0]
for line in git_log.stdout:
args = line.rstrip().split()
if len(args) == 1:
old_date = date
date = datetime.fromtimestamp(int(args[0]))
if day_changes != [0, 0] and date.date() != old_date.date():
changes_by_date[str(date.date())] = day_changes
day_changes = [0, 0]
elif len(args) >= 3:
day_changes = [sum(x) for x in zip(day_changes, map(chomp_int, args[0:2]))]
print('date,ins,del')
for key,vals in changes_by_date.items():
print(','.join(map(str, [key, vals[0], vals[1]])))
|
Fix script for repos with binaries
|
Fix script for repos with binaries
|
Python
|
mit
|
HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts,HSchmale16/UsefulScripts
|
c1a2a1052d215f9971c7bb1e580fd88ab0b395f8
|
background_hang_reporter_job/tracked.py
|
background_hang_reporter_job/tracked.py
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(frame is not None and "devtools/" in frame
for lib, frame in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs]
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for lib, frame in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs]
|
Fix frame string check in devtools tracking
|
Fix frame string check in devtools tracking
|
Python
|
mit
|
squarewave/background-hang-reporter-job,squarewave/background-hang-reporter-job
|
a8596fd4a76460bd3e15509825d3cb3f82a3f8c4
|
test/integration/ggrc/converters/test_import_delete.py
|
test/integration/ggrc/converters/test_import_delete.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data = self.import_file(filename)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
|
Optimize basic delete import tests
|
Optimize basic delete import tests
The dry-run check is now automatically performed on each import and we
do not need to duplicate the work in the delete test.
|
Python
|
apache-2.0
|
selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core
|
9d10d279a1f7de2a5572229d68a7065fb9353ab9
|
linkedin_scraper/parsers/employment.py
|
linkedin_scraper/parsers/employment.py
|
from typing import Tuple
from linkedin_scraper.parsers.base import BaseParser
class EmploymentParser(BaseParser):
def __init__(self):
self.professions_list = self.get_lines_from_datafile(
'professions_list.txt')
def parse(self, item: str) -> Tuple[str, str]:
"""
Parse LinkedIn employment string into position and company.
:param item: employment string
:return: position, company
"""
if ' at ' in item:
# Simplest case, standard LinkedIn format <position> at <company>
return tuple(item.split(' at ', maxsplit=1))
words = item.split()
for index, word in enumerate(reversed(item.split())):
normalized_word = word.strip(',.-').lower()
if normalized_word in self.professions_list:
founded_profession_index = len(words) - index
break
else:
# We don't know which is which so return whole string as a position
return item, ''
# We found profession name in employment string, everything
# after it is company name
return (' '.join(words[:founded_profession_index]).rstrip(',.- '),
' '.join(words[founded_profession_index:]).lstrip(',.- '))
|
from typing import Tuple
from linkedin_scraper.parsers.base import BaseParser
class EmploymentParser(BaseParser):
    """Parses LinkedIn employment strings into (position, company) pairs."""

    def __init__(self):
        # Known profession names (one per line) used to locate the boundary
        # between position and company when ' at ' is absent.
        self.professions_list = self.get_lines_from_datafile(
            'professions_list.txt')

    def parse(self, item: str) -> Tuple[str, str]:
        """
        Parse LinkedIn employment string into position and company.

        :param item: employment string
        :return: position, company
        """
        if ' at ' in item:
            # Simplest case, standard LinkedIn format <position> at <company>
            return tuple(item.split(' at ', maxsplit=1))

        words = item.split()
        # Walk backwards so the profession word nearest the end wins.
        for index, word in enumerate(reversed(words)):
            normalized_word = word.strip(',.-').lower()
            if normalized_word in self.professions_list:
                founded_profession_index = len(words) - index
                break
        else:
            # We don't know which is which so return whole string as a position
            return item, ''

        # We found profession name in employment string, everything
        # after it is company name
        return (' '.join(words[:founded_profession_index]).rstrip(',.- '),
                ' '.join(words[founded_profession_index:]).lstrip(',.- '))
|
Remove duplicated split call from EmploymentParser.
|
Remove duplicated split call from EmploymentParser.
|
Python
|
mit
|
nihn/linkedin-scraper,nihn/linkedin-scraper
|
a7afe12e241ee8f6ca8b85850ff43b777220ec62
|
cdf/__init__.py
|
cdf/__init__.py
|
import django
from django.conf import settings

# Minimal standalone Django configuration: an in-memory SQLite database is
# enough to let form classes be imported and introspected outside a project.
settings.configure(
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:'
        }
    },
)
django.setup()
|
Add basic django config to be able to manipulate form classes
|
Add basic django config to be able to manipulate form classes
|
Python
|
mit
|
ana-balica/classy-django-forms,ana-balica/classy-django-forms,ana-balica/classy-django-forms
|
|
650a4733aa6e15b80e2adeec34fc479a3b2885e3
|
src/cmdline/config.py
|
src/cmdline/config.py
|
import os
import sys

try:
    import pkg_resources
except ImportError:
    # setuptools is unavailable; no package-relative config dirs.
    pkg_locations = ()
else:
    try:
        d = pkg_resources.get_distribution('metermaid')
    except pkg_resources.DistributionNotFound:
        # BUGFIX: get_distribution() raises DistributionNotFound (not
        # ImportError) when 'metermaid' is not installed; previously that
        # exception propagated and broke importing this module.
        pkg_locations = ()
    else:
        pkg_locations = (
            os.path.join(d.location, 'config'),
            os.path.join(os.path.dirname(d.location), 'config'),
        )


def get_config_paths(filename=None):
    """Yield candidate config paths, optionally joined with *filename*.

    Candidates, in order: package 'config' dirs (when the metermaid
    distribution is installed), <sys.prefix>/config, /etc/<script>, and
    ~/.<script>, where <script> is the basename of sys.argv[0].
    """
    script_name = os.path.basename(sys.argv[0])
    for dirpath in pkg_locations + (
            os.path.join(sys.prefix, 'config'),
            '/etc/{}'.format(script_name),
            os.path.expanduser('~/.{}'.format(script_name)),
    ):
        full_path = dirpath
        if filename:
            full_path = os.path.join(full_path, filename)
        yield full_path
|
import os
import sys

try:
    import pkg_resources
except ImportError:
    # setuptools is unavailable; no package-relative config dirs.
    pkg_locations = ()
else:
    try:
        d = pkg_resources.get_distribution('metermaid')
    except pkg_resources.DistributionNotFound:
        # BUGFIX: get_distribution() raises DistributionNotFound (not
        # ImportError) when 'metermaid' is not installed; previously that
        # exception propagated and broke importing this module.
        pkg_locations = ()
    else:
        pkg_locations = (
            os.path.join(d.location, 'config'),
            os.path.join(os.path.dirname(d.location), 'config'),
        )


def get_config_paths(filename=None):
    """Yield candidate config paths, optionally joined with *filename*.

    Candidates, in order: package 'config' dirs (when the metermaid
    distribution is installed), <sys.prefix>/config,
    <sys.prefix>/etc/<script>, and ~/.<script>, where <script> is the
    basename of sys.argv[0].
    """
    script_name = os.path.basename(sys.argv[0])
    for dirpath in pkg_locations + (
            os.path.join(sys.prefix, 'config'),
            os.path.join(sys.prefix, 'etc', script_name),
            os.path.expanduser('~/.{}'.format(script_name)),
    ):
        full_path = dirpath
        if filename:
            full_path = os.path.join(full_path, filename)
        yield full_path
|
Use etc relative to sys.prefix
|
Use etc relative to sys.prefix
|
Python
|
apache-2.0
|
rca/cmdline
|
6384e6a23f73eddf1099e01ed0d8c067141651a5
|
tcelery/__init__.py
|
tcelery/__init__.py
|
from __future__ import absolute_import
import celery
from tornado import ioloop
from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult
VERSION = (0, 4, 0)
# Development version string, e.g. '0.4.0-dev'.
__version__ = '.'.join(map(str, VERSION)) + '-dev'


def setup_nonblocking_producer(celery_app=None, io_loop=None,
                               on_ready=None, result_cls=AsyncResult,
                               limit=1):
    """Install a Tornado-friendly, non-blocking Celery task producer.

    :param celery_app: Celery app to patch; defaults to the current app.
    :param io_loop: Tornado IOLoop; defaults to the singleton instance.
    :param on_ready: callback invoked once the broker connection is ready.
    :param result_cls: result class used for asynchronous results.
    :param limit: size of the broker connection pool.
    """
    celery_app = celery_app or celery.current_app
    io_loop = io_loop or ioloop.IOLoop.instance()
    NonBlockingTaskProducer.app = celery_app
    NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
    NonBlockingTaskProducer.result_cls = result_cls
    # Only AMQP brokers are patched with the non-blocking producer.
    if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
        celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer

    def connect():
        broker_url = celery_app.connection().as_uri(include_password=True)
        options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
        NonBlockingTaskProducer.conn_pool.connect(broker_url,
                                                  options=options,
                                                  callback=on_ready)
    # Defer the broker connection until the IOLoop is running.
    io_loop.add_callback(connect)
|
from __future__ import absolute_import
import celery
from tornado import ioloop
from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult
VERSION = (0, 3, 4)
# Release version string, e.g. '0.3.4'.
__version__ = '.'.join(map(str, VERSION))


def setup_nonblocking_producer(celery_app=None, io_loop=None,
                               on_ready=None, result_cls=AsyncResult,
                               limit=1):
    """Install a Tornado-friendly, non-blocking Celery task producer.

    :param celery_app: Celery app to patch; defaults to the current app.
    :param io_loop: Tornado IOLoop; defaults to the singleton instance.
    :param on_ready: callback invoked once the broker connection is ready.
    :param result_cls: result class used for asynchronous results.
    :param limit: size of the broker connection pool.
    """
    celery_app = celery_app or celery.current_app
    io_loop = io_loop or ioloop.IOLoop.instance()
    NonBlockingTaskProducer.app = celery_app
    NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
    NonBlockingTaskProducer.result_cls = result_cls
    # Only AMQP brokers are patched with the non-blocking producer.
    if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
        celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer

    def connect():
        broker_url = celery_app.connection().as_uri(include_password=True)
        options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
        NonBlockingTaskProducer.conn_pool.connect(broker_url,
                                                  options=options,
                                                  callback=on_ready)
    # Defer the broker connection until the IOLoop is running.
    io_loop.add_callback(connect)
|
Set release version to 0.3.4
|
Set release version to 0.3.4
|
Python
|
bsd-3-clause
|
shnjp/tornado-celery,qudos-com/tornado-celery,mher/tornado-celery,sangwonl/tornado-celery
|
2a7a65afc84556396822933f95aa080a56824aaa
|
wsgi.py
|
wsgi.py
|
# Activate virtualenv
import settings

# Path to the virtualenv's activate_this.py, taken from settings.
activate_this = settings.VENV
# NOTE(review): execfile is Python 2 only — confirm the target interpreter.
execfile(activate_this, dict(__file__=activate_this))

from webhaak import app as application

if __name__ == "__main__":
    # application is ran standalone
    application.run()
|
# Activate virtualenv
import settings

# Path to the virtualenv's activate_this.py, taken from settings.
activate_this = settings.VENV
# NOTE(review): execfile is Python 2 only — confirm the target interpreter.
execfile(activate_this, dict(__file__=activate_this))

from webhaak import app as application

if __name__ == "__main__":
    # application is ran standalone; Flask debug mode follows settings.DEBUG.
    application.run(debug=settings.DEBUG)
|
Use the DEBUG setting for enabling/disabling Flask debug
|
Use the DEBUG setting for enabling/disabling Flask debug
|
Python
|
apache-2.0
|
aquatix/webhaak,aquatix/webhaak
|
eb25c6900b307792821f7db6bcfa92cc62a80298
|
lims/pricebook/views.py
|
lims/pricebook/views.py
|
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from lims.permissions.permissions import IsInAdminGroupOrRO
from lims.shared.mixins import AuditTrailViewMixin
from .models import PriceBook
from .serializers import PriceBookSerializer
from lims.pricebook.management.commands.getpricebooks import get_pricebooks
class PriceBookViewSet(AuditTrailViewMixin, viewsets.ModelViewSet):
    """CRUD endpoints for price books; write access restricted to admins."""
    queryset = PriceBook.objects.all()
    serializer_class = PriceBookSerializer
    permission_classes = (IsInAdminGroupOrRO,)
    filter_fields = ('name', 'identifier',)

    @list_route()
    def updateall(self, request):
        """Re-fetch all price books from the CRM."""
        get_pricebooks()
        return Response({'message': 'Pricebooks updated'})
|
from django.conf import settings
from simple_salesforce import Salesforce
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from lims.permissions.permissions import IsInAdminGroupOrRO
from lims.shared.mixins import AuditTrailViewMixin
from .models import PriceBook
from .serializers import PriceBookSerializer
from lims.pricebook.management.commands.getpricebooks import get_pricebooks
class PriceBookViewSet(AuditTrailViewMixin, viewsets.ModelViewSet):
    """CRUD endpoints for price books; write access restricted to admins."""
    queryset = PriceBook.objects.all()
    serializer_class = PriceBookSerializer
    permission_classes = (IsInAdminGroupOrRO,)
    filter_fields = ('name', 'identifier',)

    def perform_create(self, serializer):
        # After saving a new price book, re-sync all books from the CRM.
        serializer.save()
        get_pricebooks()

    @list_route(methods=['POST'])
    def updateall(self, request):
        """Force a re-fetch of every price book from the CRM."""
        get_pricebooks()
        return Response({'message': 'Pricebooks updated'})

    @list_route()
    def on_crm(self, request):
        """
        List of all pricebooks available on the CRM
        """
        sf = Salesforce(instance_url=settings.SALESFORCE_URL,
                        username=settings.SALESFORCE_USERNAME,
                        password=settings.SALESFORCE_PASSWORD,
                        security_token=settings.SALESFORCE_TOKEN)
        pricebooks = sf.query("SELECT id,name FROM Pricebook2")
        return Response(pricebooks['records'])
|
Add list crm pricebooks endpoint and update pricebook fetching
|
Add list crm pricebooks endpoint and update pricebook fetching
|
Python
|
mit
|
GETLIMS/LIMS-Backend,GETLIMS/LIMS-Backend
|
07dc719807a6d890fa33338746caca61704de0a1
|
src/genbank-gff-to-nquads.py
|
src/genbank-gff-to-nquads.py
|
#!/usr/bin/env python
import jargparse
#################
### CONSTANTS ###
#################
# GFF metadata lines start with '#'; the accession line carries this key.
metadataPrefix = '#'
accessionKey = '#!genome-build-accession NCBI_Assembly:'
locusTagAttributeKey = 'locus_tag'


#################
### FUNCTIONS ###
#################
def parseRecord(record, locusTags):
    """Append the locus_tag value(s) of a 'gene' GFF record to locusTags."""
    fields = record.split()
    featureType = fields[2]
    attributeText = fields[8]

    if featureType != 'gene':
        return

    for attribute in attributeText.split(';'):
        key, value = attribute.split('=')
        if key == locusTagAttributeKey:
            locusTags.append(value)
# --- Command line ---
parser = jargparse.ArgParser('Convert Genbank GFF into an n-quad file')
parser.add_argument('gffPath', help='path to the GFF')
parser.add_argument('outPath', help='path to output the n-quads')
args = parser.parse_args()

# Scan the GFF: metadata lines may carry the assembly accession; every
# other line is a feature record that may contribute a locus tag.
accessionIdentifier = 'NONE FOUND'
locusTags = []
with open(args.gffPath) as f:
    for line in f:
        line = line.strip()
        if line.startswith(metadataPrefix):
            if line.startswith(accessionKey):
                accessionIdentifier = line[len(accessionKey):]
        else:
            parseRecord(line, locusTags)

# Emit one n-quad-style triple per collected locus tag.
with open(args.outPath, 'w') as f:
    for locusTag in locusTags:
        f.write('<%s> <locus> "%s" .\n' % (accessionIdentifier, locusTag))
|
#!/usr/bin/env python
import jargparse
#################
### CONSTANTS ###
#################
metadataPrefix = '#'
accessionKey = '#!genome-build-accession NCBI_Assembly:'
#################
### FUNCTIONS ###
#################
def parseRecord(record, locusTags):
    """Append the locus_tag value(s) of a 'gene' GFF record to locusTags."""
    locusTagAttributeKey = 'locus_tag'

    fields = record.split()
    featureType = fields[2]
    attributeText = fields[8]

    if featureType != 'gene':
        return

    for attribute in attributeText.split(';'):
        key, value = attribute.split('=')
        if key == locusTagAttributeKey:
            locusTags.append(value)
# --- Command line ---
parser = jargparse.ArgParser('Convert Genbank GFF into an n-quad file')
parser.add_argument('gffPath', help='path to the GFF')
parser.add_argument('outPath', help='path to output the n-quads')
args = parser.parse_args()

# Scan the GFF: metadata lines may carry the assembly accession; every
# other line is a feature record that may contribute a locus tag.
accessionIdentifier = 'NONE FOUND'
locusTags = []
with open(args.gffPath) as f:
    for line in f:
        line = line.strip()
        if line.startswith(metadataPrefix):
            if line.startswith(accessionKey):
                accessionIdentifier = line[len(accessionKey):]
        else:
            parseRecord(line, locusTags)

# Emit one n-quad-style triple per collected locus tag.
with open(args.outPath, 'w') as f:
    for locusTag in locusTags:
        f.write('<%s> <locus> "%s" .\n' % (accessionIdentifier, locusTag))
|
Move locus tag attribute key name into the function that uses it
|
Move locus tag attribute key name into the function that uses it
|
Python
|
apache-2.0
|
justinccdev/biolta
|
af85d44d9a6f7cf65fe504816bcf4a10ba603d51
|
pdfdocument/utils.py
|
pdfdocument/utils.py
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, **kwargs):
    """Return a (PDFDocument, HttpResponse) pair ready for PDF generation.

    :param filename: base name; unsafe characters replaced, '.pdf' appended.
    :param as_attachment: download (attachment) vs inline Content-Disposition.
    :param kwargs: forwarded to PDFDocument.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
        'attachment' if as_attachment else 'inline',
        FILENAME_RE.sub('-', filename),
    )
    return PDFDocument(response, **kwargs), response
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
                 **kwargs):
    """Return a (pdfdocument, HttpResponse) pair ready for PDF generation.

    :param filename: base name; unsafe characters replaced, '.pdf' appended.
    :param as_attachment: download (attachment) vs inline Content-Disposition.
    :param pdfdocument: document class to instantiate (default PDFDocument).
    :param kwargs: forwarded to the document class.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
        'attachment' if as_attachment else 'inline',
        FILENAME_RE.sub('-', filename),
    )
    return pdfdocument(response, **kwargs), response
|
Make the PDFDocument class used in pdf_response configurable
|
Make the PDFDocument class used in pdf_response configurable
|
Python
|
bsd-3-clause
|
matthiask/pdfdocument,dongguangming/pdfdocument
|
130009c1d995cc11454f37fbfe18d2c5e7e36fde
|
stock_request_ux/models/stock_move.py
|
stock_request_ux/models/stock_move.py
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields
class StockMove(models.Model):
    _inherit = 'stock.move'

    # Convenience link to the stock request order that originated this move.
    request_order_id = fields.Many2one(
        related='stock_request_ids.order_id',
        readonly=True,
    )

    def _split(self, qty, restrict_partner_id=False):
        """ When we are on a move created by a stock_request and we create a
        backorder, we create a new allocation linked to this new move and
        update quantities
        """
        new_move_id = super(StockMove, self)._split(
            qty, restrict_partner_id=restrict_partner_id)
        # Re-distribute requested quantities: copy part of each allocation
        # onto the new (backorder) move until `qty` is fully covered.
        remaining_to_allocate = qty
        for allocation in self.allocation_ids:
            if not remaining_to_allocate:
                break
            to_allocate = min(
                remaining_to_allocate, allocation.requested_product_uom_qty)
            remaining_to_allocate -= to_allocate
            allocation.copy({
                'stock_move_id': new_move_id,
                'requested_product_uom_qty': to_allocate,
            })
            allocation.requested_product_uom_qty -= to_allocate
        return new_move_id
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields
class StockMove(models.Model):
    _inherit = 'stock.move'

    # Convenience link to the stock request order that originated this move.
    request_order_id = fields.Many2one(
        related='stock_request_ids.order_id',
        readonly=True,
    )

    def _split(self, qty, restrict_partner_id=False):
        """ When we are on a move created by a stock_request and we create a
        backorder, we create a new allocation linked to this new move and
        update quantities
        """
        new_move_id = super(StockMove, self)._split(
            qty, restrict_partner_id=restrict_partner_id)
        # Re-distribute requested quantities: copy part of each allocation
        # onto the new (backorder) move until `qty` is fully covered.
        remaining_to_allocate = qty
        for allocation in self.allocation_ids:
            if not remaining_to_allocate:
                break
            to_allocate = min(
                remaining_to_allocate, allocation.requested_product_uom_qty)
            remaining_to_allocate -= to_allocate
            allocation.copy({
                'stock_move_id': new_move_id,
                'requested_product_uom_qty': to_allocate,
            })
            allocation.requested_product_uom_qty -= to_allocate
        return new_move_id

    # TODO remove in v12 if this part are not implemented
    def copy_data(self, default=None):
        """ Allocations are already copied during the _split() above; copying
        them again here duplicated allocations on some routes. We only want
        to duplicate allocations on partial deliveries (via the split).
        """
        if 'allocation_ids' in default:
            default.pop('allocation_ids')
        return super(StockMove, self).copy_data(default)
|
Fix a problem with the allocations
|
[FIX] stock_request_ux: Fix a problem with the allocations
For this change https://github.com/OCA/stock-logistics-warehouse/commit/4464be475999c8ada492c56a1c30ca2b0eaa264e
If you confirm a route with 3 steps, 3 pickings are created, each related to the request through an allocation; qty_done and qty_in_progress are then summed over all of those moves, duplicating the values, which is wrong behavior.
This module modifies the "Split" method to fix that behavior whether or not a backorder is created.
|
Python
|
agpl-3.0
|
ingadhoc/stock
|
d4e8839ac02935b86c1634848476a9a8512c376d
|
delivery_transsmart/models/res_partner.py
|
delivery_transsmart/models/res_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Delivery Transsmart Ingegration
# © 2016 - 1200 Web Development <http://1200wd.com/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
class ProductProduct(models.Model):
    _inherit = 'product.product'

    # Transsmart service level associated with this product.
    service_level_id = fields.Many2one(
        'delivery.service.level',
        string='Service Level')
    # Transsmart service level time associated with this product.
    service_level_time_id = fields.Many2one(
        'delivery.service.level.time',
        string='Service Level Time')
class ResPartner(models.Model):
    _inherit = 'res.partner'

    # Identifiers used to match this partner on the Transsmart side.
    transsmart_code = fields.Char(
        size=128,
        string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Delivery Transsmart Ingegration
# © 2016 - 1200 Web Development <http://1200wd.com/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
class ResPartner(models.Model):
    _inherit = 'res.partner'

    # Identifiers used to match this partner on the Transsmart side.
    transsmart_code = fields.Char(
        size=128,
        string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
|
Remove double product field definitions
|
[DEL] Remove double product field definitions
|
Python
|
agpl-3.0
|
1200wd/1200wd_addons,1200wd/1200wd_addons
|
b8d73fb12fa91a6f0aa33ed985dd5521843e05b8
|
src/zeit/content/dynamicfolder/browser/tests/test_folder.py
|
src/zeit/content/dynamicfolder/browser/tests/test_folder.py
|
import zeit.cms.interfaces
import zeit.cms.testing
import zeit.content.dynamicfolder.testing
class EditDynamicFolder(zeit.cms.testing.BrowserTestCase):
    # Browser-level test against the vivi skin for dynamic folders.
    layer = zeit.content.dynamicfolder.testing.DYNAMIC_LAYER

    def test_check_out_and_edit_folder(self):
        """Checkout → edit config file → checkin must persist the setting."""
        b = self.browser
        b.open('http://localhost/++skin++vivi/repository/dynamicfolder')
        b.getLink('Checkout').click()
        b.getControl(
            'Configuration file').value = 'http://xml.zeit.de/testcontent'
        b.getControl('Apply').click()
        self.assertEllipsis('...Updated on...', b.contents)
        b.getLink('Checkin').click()
        # Checkin redirects back into the repository view.
        self.assertIn('repository', b.url)
        with zeit.cms.testing.site(self.getRootFolder()):
            folder = zeit.cms.interfaces.ICMSContent(
                'http://xml.zeit.de/dynamicfolder')
            self.assertEqual(
                'http://xml.zeit.de/testcontent', folder.config_file.uniqueId)
|
import zeit.cms.interfaces
import zeit.cms.testing
import zeit.content.dynamicfolder.testing
class EditDynamicFolder(zeit.cms.testing.BrowserTestCase):
    # Browser-level test against the vivi skin for dynamic folders.
    layer = zeit.content.dynamicfolder.testing.DYNAMIC_LAYER

    def test_check_out_and_edit_folder(self):
        """Checkout → edit config file → checkin must persist the setting."""
        b = self.browser
        b.open('http://localhost/++skin++vivi/repository/dynamicfolder')
        b.getLink('Checkout').click()
        b.getControl(
            'Configuration file').value = 'http://xml.zeit.de/testcontent'
        b.getControl('Apply').click()
        self.assertEllipsis('...Updated on...', b.contents)
        b.getLink('Checkin').click()
        # Checkin redirects back into the repository view.
        self.assertIn('repository', b.url)
        folder = zeit.cms.interfaces.ICMSContent(
            'http://xml.zeit.de/dynamicfolder')
        self.assertEqual(
            'http://xml.zeit.de/testcontent', folder.config_file.uniqueId)
|
Remove superfluous test setup after zeit.cms got smarter
|
MAINT: Remove superfluous test setup after zeit.cms got smarter
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.dynamicfolder
|
865ed33da572f2f364dcd89774eac60738bb446c
|
UI/engine.py
|
UI/engine.py
|
# -*- coding: utf-8 -*-
import logging
import storj
from .utilities.account_manager import AccountManager
class StorjEngine:
    # Shared logger for all engine instances.
    __logger = logging.getLogger('%s.StorjEngine' % __name__)

    def __init__(self):
        """Create a Storj client from credentials held by AccountManager."""
        self.account_manager = AccountManager()
        self.password = None

        if self.account_manager.if_logged_in():
            self.password = self.account_manager.get_user_password()
            self.email = self.account_manager.get_user_email()

            # initialize Storj
            # NOTE(review): do_hashing=False — presumably the stored password
            # is already hashed; confirm against AccountManager.
            self.storj_client = storj.Client(
                email=self.email,
                password=self.password,
                do_hashing=False)

            self.__logger.debug('Login from credentials xml file')

            self.__logger.debug('testlogin, StorjEngine')
|
# -*- coding: utf-8 -*-
import logging
import storj
from .utilities.account_manager import AccountManager
class StorjEngine:
    # Shared logger for all engine instances.
    __logger = logging.getLogger('%s.StorjEngine' % __name__)

    def __init__(self):
        """Create a Storj client from credentials held by AccountManager."""
        self.account_manager = AccountManager()
        self.password = None

        if self.account_manager.if_logged_in():
            self.password = self.account_manager.get_user_password()
            self.email = self.account_manager.get_user_email()

            # initialize Storj
            # Hard-coded 15 s network timeout for API requests.
            # NOTE(review): do_hashing=False — presumably the stored password
            # is already hashed; confirm against AccountManager.
            self.storj_client = storj.Client(
                email=self.email,
                password=self.password,
                do_hashing=False, timeout=15)

            self.__logger.debug('Login from credentials xml file')

            self.__logger.debug('testlogin, StorjEngine')
|
Add hardcoded timeout 15 seconds
|
Add hardcoded timeout 15 seconds
|
Python
|
mit
|
lakewik/storj-gui-client
|
1a1a45fe5175d002c239610be487607dbb7cdde1
|
thinc/neural/_classes/feed_forward.py
|
thinc/neural/_classes/feed_forward.py
|
from .model import Model
from ... import describe
def _run_child_hooks(model, X, y):
    # Run each child layer's data-dependent initialization hooks, feeding the
    # data forward so the next layer's hooks see realistically shaped input.
    for layer in model._layers:
        for hook in layer.on_data_hooks:
            hook(layer, X, y)
        # Only the first 1000 rows are propagated — presumably to keep
        # initialization cheap; TODO confirm.
        X = layer(X[:1000])
@describe.on_data(_run_child_hooks)
class FeedForward(Model):
    '''A feed-forward network, that chains multiple Model instances together.'''

    def __init__(self, layers, **kwargs):
        # Child layers, applied in order.
        self._layers = layers
        Model.__init__(self, **kwargs)

    @property
    def input_shape(self):
        # Shape expected by the first layer.
        return self._layers[0].input_shape

    @property
    def output_shape(self):
        # Shape produced by the last layer.
        return self._layers[-1].output_shape

    def begin_update(self, X, drop=0.):
        """Run the forward pass; return (output, continue_update) where
        continue_update backpropagates a gradient through all layers."""
        callbacks = []
        for layer in self._layers:
            X, inc_layer_grad = layer.begin_update(X, drop=drop)
            callbacks.append(inc_layer_grad)

        def continue_update(gradient, sgd=None):
            # Walk the layers in reverse; stop early when a layer yields no
            # gradient or registered no callback.
            for callback in reversed(callbacks):
                if gradient is None or callback == None:
                    break
                gradient = callback(gradient, sgd)
            return gradient
        return X, continue_update
|
from .model import Model
from ... import describe
def _run_child_hooks(model, X, y):
    # Run each child layer's data-dependent initialization hooks, feeding the
    # data forward so the next layer's hooks see realistically shaped input.
    for layer in model._layers:
        for hook in layer.on_data_hooks:
            hook(layer, X, y)
        X = layer(X)
        # Make a C-contiguous copy for downstream layers when the output is
        # array-like.
        if hasattr(X, 'shape'):
            X = model.ops.xp.ascontiguousarray(X)
@describe.on_data(_run_child_hooks)
class FeedForward(Model):
    '''A feed-forward network, that chains multiple Model instances together.'''

    def __init__(self, layers, **kwargs):
        # Child layers, applied in order.
        self._layers = layers
        Model.__init__(self, **kwargs)

    @property
    def input_shape(self):
        # Shape expected by the first layer.
        return self._layers[0].input_shape

    @property
    def output_shape(self):
        # Shape produced by the last layer.
        return self._layers[-1].output_shape

    def begin_update(self, X, drop=0.):
        """Run the forward pass; return (output, continue_update) where
        continue_update backpropagates a gradient through all layers."""
        callbacks = []
        for layer in self._layers:
            X, inc_layer_grad = layer.begin_update(X, drop=drop)
            callbacks.append(inc_layer_grad)

        def continue_update(gradient, sgd=None):
            # Walk the layers in reverse; stop early when a layer yields no
            # gradient or registered no callback.
            for callback in reversed(callbacks):
                if gradient is None or callback == None:
                    break
                gradient = callback(gradient, sgd)
            return gradient
        return X, continue_update
|
Make copy of X in feed-forward
|
Make copy of X in feed-forward
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
c862a4c40f17040017e9bb6f67f5b9fa293c23e5
|
mcb_interface/driver.py
|
mcb_interface/driver.py
|
#!/usr/bin/env python3
from romi import Romi
romi = Romi()
from time import sleep
# from math import pi

# Simple telemetry loop: command a constant forward velocity and print
# battery/odometry readings at roughly 100 Hz.
# NOTE(review): the shebang says python3 but the prints are Python 2
# statements — confirm the intended interpreter.
while True:
    battery_millivolts = romi.read_battery_millivolts()
    v_x, v_theta, x, y, theta = romi.read_odometry()
    romi.velocity_command(0.1, 0)
    print "Battery Voltage: ", battery_millivolts[0], " Volts."
    print "Vx: ", v_x, " m/s"
    print "Vtheta: ", v_theta, "rad/s"
    sleep(0.01)
|
#!/usr/bin/env python3
from romi import Romi
romi = Romi()
from time import sleep
# from math import pi

# Simple telemetry loop: command a constant forward velocity and print
# battery/odometry readings at roughly 100 Hz.
# NOTE(review): the shebang says python3 but the prints are Python 2
# statements — confirm the intended interpreter.
while True:
    battery_millivolts = romi.read_battery_millivolts()
    v_x, v_theta, x, y, theta = romi.read_odometry()
    romi.velocity_command(1.0, 0)
    print "Battery Voltage: ", battery_millivolts[0], " Volts."
    print "Vx: ", v_x, " m/s"
    print "Vtheta: ", v_theta, "rad/s"
    print "X: ", x, " Y: ", y, " Theta: ", theta
    sleep(0.01)
|
Add more output printing, increase velocity command.
|
Add more output printing, increase velocity command.
|
Python
|
mit
|
waddletown/sw
|
5f888f5ee388efa046bc9e0de0622e5c8b66d712
|
src/viewsapp/views.py
|
src/viewsapp/views.py
|
from django.shortcuts import (
get_object_or_404, render)
from django.views.decorators.http import \
require_http_methods
from .models import ExampleModel
@require_http_methods(['GET', 'HEAD'])
def model_detail(request, *args, **kwargs):
    """Render the detail page for the ExampleModel matching kwargs['slug'].

    Returns 404 when no matching object exists; only GET/HEAD are allowed.
    """
    request_slug = kwargs.get('slug')
    example_obj = get_object_or_404(
        ExampleModel, slug=request_slug)
    return render(
        request,
        'viewsapp/detail.html',
        {'object': example_obj})
|
from django.shortcuts import (
get_object_or_404, render)
from django.views.decorators.http import \
require_safe
from .models import ExampleModel
@require_safe
def model_detail(request, *args, **kwargs):
    """Render the detail page for the ExampleModel matching kwargs['slug'].

    Returns 404 when no matching object exists; require_safe restricts the
    view to GET/HEAD.
    """
    request_slug = kwargs.get('slug')
    example_obj = get_object_or_404(
        ExampleModel, slug=request_slug)
    return render(
        request,
        'viewsapp/detail.html',
        {'object': example_obj})
|
Switch HTTP restriction decorator to require_safe.
|
Switch HTTP restriction decorator to require_safe.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
|
b2df5972bcc9f3367c3832719d1590410317bbba
|
swift/obj/dedupe/fp_index.py
|
swift/obj/dedupe/fp_index.py
|
__author__ = 'mjwtom'
import sqlite3
import unittest
class fp_index:
    """SQLite-backed fingerprint index mapping text keys to text values."""

    def __init__(self, name):
        # Normalize the stored name so it always carries a '.db' suffix.
        self.name = name if name.endswith('.db') else name + '.db'
        # NOTE: the connection uses the caller-supplied name as given,
        # matching the original behaviour (it may differ from self.name).
        self.conn = sqlite3.connect(name)
        self.c = self.conn.cursor()
        self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')

    def insert(self, key, value):
        """Store one key/value pair and commit immediately."""
        self.c.execute('INSERT INTO fp_index VALUES (?, ?)', (key, value))
        self.conn.commit()

    def lookup(self, key):
        """Return the first matching (value,) row, or None when absent."""
        self.c.execute('SELECT value FROM fp_index WHERE key=?', (key,))
        return self.c.fetchone()
def testinsert():
    # Smoke test: insert 100 numeric string pairs into a fixed database file.
    fp = fp_index('/home/mjwtom/mydb.db')
    for i in range(0, 100):
        str = i.__str__()
        fp.insert(str, str)


def testselect():
    # Smoke test: read back the rows written by testinsert.
    # NOTE(review): Python 2 print statement below.
    fp = fp_index('/home/mjwtom/mydb.db')
    for i in range(0, 100):
        str = i.__str__()
        c = fp.lookup(str)
        for row in c:
            print row


if __name__ == '__main__':
    unittest.main()
|
__author__ = 'mjwtom'
import sqlite3
import unittest
class Fp_Index(object):
    """SQLite-backed fingerprint index mapping text keys to text values."""

    def __init__(self, name):
        # Normalize the stored name so it always carries a '.db' suffix.
        self.name = name if name.endswith('.db') else name + '.db'
        # NOTE: the connection uses the caller-supplied name as given,
        # matching the original behaviour (it may differ from self.name).
        self.conn = sqlite3.connect(name)
        self.c = self.conn.cursor()
        self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')

    def insert(self, key, value):
        """Store one key/value pair and commit immediately."""
        self.c.execute('INSERT INTO fp_index VALUES (?, ?)', (key, value))
        self.conn.commit()

    def lookup(self, key):
        """Return the first matching (value,) row, or None when absent."""
        self.c.execute('SELECT value FROM fp_index WHERE key=?', (key,))
        return self.c.fetchone()
'''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
'''
|
Use database to detect the duplication. But the md5 value does not match. Need to add some code here
|
Use database to detect the duplication. But the md5 value does not match. Need to add some code here
|
Python
|
apache-2.0
|
mjwtom/swift,mjwtom/swift
|
bdee8b95429a6ac96cb0577e7eddbd25b764ebfc
|
mirrit/web/models.py
|
mirrit/web/models.py
|
from humbledb import Mongo, Document
class User(Document):
    # Schema defaults (humbledb Document attributes).
    username = ''
    password = ''
    email = ''

    config_database = 'mirrit'
    config_collection = 'users'

    @property
    def id(self):
        """String form of the Mongo ObjectId."""
        return unicode(self._id)

    @property
    def user_id(self):
        """Alias of `id`."""
        return unicode(self._id)

    @staticmethod
    def get_by_login(cls, username, password):
        # NOTE(review): declared @staticmethod yet takes `cls` — presumably
        # intended as @classmethod; confirm call sites.
        with Mongo:
            return cls.find({'username': username,
                             'password': password})

    def persist(self):
        """Insert or update this document inside a Mongo context."""
        with Mongo:
            if self._id:
                super(User, self).__self_class__.update({'_id': self._id}, self, w=1)
            else:
                super(User, self).__self_class__.insert(self, w=1)
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
    """Subclass property to make classmethod properties possible"""
    def __get__(self, cls, owner):
        # Delegate to the wrapped classmethod, bound to the owner class.
        return self.fget.__get__(None, owner)()
class User(Document):
    # Schema defaults (humbledb Document attributes).
    username = ''
    password = ''
    email = ''

    config_database = 'mirrit'
    config_collection = 'users'

    @property
    def id(self):
        """String form of the Mongo ObjectId."""
        return unicode(self._id)

    @property
    def user_id(self):
        """Alias of `id`."""
        return unicode(self._id)

    @staticmethod
    def get_by_login(cls, username, password):
        # NOTE(review): declared @staticmethod yet takes `cls` — presumably
        # intended as @classmethod; confirm call sites.
        with Mongo:
            return cls.find({'username': username,
                             'password': password})

    def persist(self):
        """Insert or update this document inside a Mongo context."""
        with Mongo:
            if self._id:
                super(User, self).__self_class__.update(
                    {'_id': self._id}, self, w=1)
            else:
                super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
    """Minimal ORM-style `.query.get(id)` shim for User lookups."""
    def get(self, id):
        # Look up a single user by ObjectId inside a Mongo context.
        with Mongo:
            return User.find({'_id': ObjectId(id)})

# Expose the shim as User.query so callers can write User.query.get(id).
wrapper = Wrapper()
User.query = wrapper
|
Fix stupid pseudo-django model crap in signup
|
Fix stupid pseudo-django model crap in signup
|
Python
|
bsd-3-clause
|
1stvamp/mirrit
|
f76bba08c1a8cfd3c821f641adb2b10e3cfa47b9
|
tests/test_base_os.py
|
tests/test_base_os.py
|
from .fixtures import elasticsearch
def test_base_os(host):
    """The image must be based on CentOS 7."""
    assert host.system_info.distribution == 'centos'
    assert host.system_info.release == '7'


def test_java_home_env_var(host):
    """$JAVA_HOME must point at a runnable java binary."""
    java_path_cmdline = '$JAVA_HOME/bin/java -version'
    assert host.run(java_path_cmdline).exit_status == 0
|
from .fixtures import elasticsearch
def test_base_os(host):
    """The image must be based on CentOS 7."""
    assert host.system_info.distribution == 'centos'
    assert host.system_info.release == '7'


def test_java_home_env_var(host):
    """$JAVA_HOME must point at a runnable java binary."""
    java_path_cmdline = '$JAVA_HOME/bin/java -version'
    assert host.run(java_path_cmdline).exit_status == 0


def test_no_core_files_exist_in_root(host):
    """No core-dump files may be left at / by the image build."""
    # `ls` exits non-zero when the glob matches nothing, i.e. no /core* files.
    core_file_check_cmdline = 'ls -l /core*'
    assert host.run(core_file_check_cmdline).exit_status != 0
|
Add acceptance test to ensure image doesn't contain core files in /
|
Add acceptance test to ensure image doesn't contain core files in /
In some occasions, depending on the build platform (noticed with aufs with old docker-ce versions) may create a /corefile.<pid>.
Fail a build if the produced image contains any /core* files.
Relates #97
|
Python
|
apache-2.0
|
jarpy/elasticsearch-docker,jarpy/elasticsearch-docker
|
5fc80b347191761d848f6bf736358ec1ec351f33
|
fbmsgbot/bot.py
|
fbmsgbot/bot.py
|
from http_client import HttpClient
class Bot():
    """
    @brief Facebook messenger bot

    Thin wrapper around HttpClient for the Messenger Send API and
    thread settings. NOTE(review): Python 2 print statements below.
    """

    def __init__(self, token):
        self.api_token = token
        self.client = HttpClient(token)

    def send_message(self, message, completion):
        """POST a message to the Send API."""
        def _completion(response, error):
            print error
            if error is None:
                # TODO: Is there anything the bot needs to do?
                # maybe retry if it fails...?
                pass
            else:
                # NOTE(review): completion fires only when error is NOT None —
                # this looks inverted; confirm intended semantics.
                print response
                completion(response)

        self.client.submit_request(
            '/me/messages',
            'POST',
            message.to_json(),
            _completion)

    def set_welcome(self, message, completion):
        """POST the welcome message to the thread settings endpoint."""
        def _completion(response, error):
            print error
            if error is None:
                # TODO: Is there anything the bot needs to do?
                # maybe retry if it fails...?
                pass
            else:
                # NOTE(review): same inverted-looking completion logic as
                # send_message.
                print response
                completion(response)

        self.client.submit_request(
            '/me/thread_settings',
            'POST',
            message.to_json(),
            _completion)
|
from http_client import HttpClient
class Bot():
"""
@brief Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient(token)
def send_message(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/messages',
'POST',
message.to_json(),
_completion)
def set_welcome(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/thread_settings',
'POST',
message.to_json(),
_completion)
|
Remove print statments and fix completion logic
|
Remove print statments and fix completion logic
|
Python
|
mit
|
ben-cunningham/pybot,ben-cunningham/python-messenger-bot
|
a32831dbf6b46b33691a76e43012e9fbbbc80e17
|
superlists/lists/tests.py
|
superlists/lists/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
class SmokeTest(TestCase):
def test_bad_maths(self):
self.assertEqual(1 + 1, 3)
|
Add app for lists, with deliberately failing unit test
|
Add app for lists, with deliberately failing unit test
|
Python
|
mit
|
jrwiegand/tdd-project,jrwiegand/tdd-project,jrwiegand/tdd-project
|
41cf41f501b715902cf180b5a2f62ce16a816f30
|
oscar/core/prices.py
|
oscar/core/prices.py
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
def __repr__(self):
if self.is_tax_known:
return "%s(currency=%r, excl_tax=%r, incl_tax=%r, tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax,
self.incl_tax, self.tax)
return "%s(currency=%r, excl_tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax)
|
Define __repr__ for the core Price class
|
Define __repr__ for the core Price class
|
Python
|
bsd-3-clause
|
saadatqadri/django-oscar,WillisXChen/django-oscar,adamend/django-oscar,sasha0/django-oscar,faratro/django-oscar,bnprk/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj.com,WillisXChen/django-oscar,WadeYuChen/django-oscar,taedori81/django-oscar,taedori81/django-oscar,bschuon/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,pasqualguerrero/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,bschuon/django-oscar,QLGu/django-oscar,sonofatailor/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,binarydud/django-oscar,marcoantoniooliveira/labweb,nfletton/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,josesanch/django-oscar,nfletton/django-oscar,Bogh/django-oscar,Bogh/django-oscar,john-parton/django-oscar,sasha0/django-oscar,binarydud/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,QLGu/django-oscar,josesanch/django-oscar,dongguangming/django-oscar,solarissmoke/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,eddiep1101/django-oscar,okfish/django-oscar,faratro/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,vovanbo/django-oscar,dongguangming/django-oscar,john-parton/django-oscar,thechampanurag/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,bnprk/django-oscar,ka7eh/django-oscar,machtfit/django-oscar,manevant/django-oscar,saadatqadri/django-oscar,jinnykoo/wuyisj.com,mexeniz/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,monikasulik/django-oscar,faratro/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,kapt/django
-oscar,binarydud/django-oscar,Bogh/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,nickpack/django-oscar,MatthewWilkes/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,kapari/django-oscar,nickpack/django-oscar,Bogh/django-oscar,taedori81/django-oscar,jinnykoo/christmas,okfish/django-oscar,manevant/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,itbabu/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,jlmadurga/django-oscar,john-parton/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,nfletton/django-oscar,vovanbo/django-oscar,Jannes123/django-oscar,spartonia/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,mexeniz/django-oscar,rocopartners/django-oscar,manevant/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,eddiep1101/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj,itbabu/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,bschuon/django-oscar,jinnykoo/christmas,adamend/django-oscar,marcoantoniooliveira/labweb,amirrpp/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,DrOctogon/unwash_ecom,amirrpp/django-oscar,kapt/django-oscar,ahmetdaglarbas/e-commerce,marcoantoniooliveira/labweb,ademuk/django-oscar,jlmadurga/django-oscar,manevant/django-oscar,pasqualguerrero/django-oscar,pasqualguerrero/django-oscar,anentropic/django-oscar,jmt4/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,thechampanurag/django-oscar,ahmetdaglarbas/e-commerce,okfish/django-oscar,pdonadeo/django-oscar,sasha0/django-oscar,spartonia/django-oscar,adamend/django-oscar,lijoantony/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-
oscar,solarissmoke/django-oscar,faratro/django-oscar,mexeniz/django-oscar,spartonia/django-oscar,sasha0/django-oscar,josesanch/django-oscar,machtfit/django-oscar,vovanbo/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,django-oscar/django-oscar,nickpack/django-oscar,adamend/django-oscar,rocopartners/django-oscar,pasqualguerrero/django-oscar,ademuk/django-oscar,QLGu/django-oscar,amirrpp/django-oscar,ahmetdaglarbas/e-commerce,bnprk/django-oscar,Jannes123/django-oscar
|
a08dea560931c42d04c8bee8c56c1cb548730f21
|
synesthesia/preprocess.py
|
synesthesia/preprocess.py
|
""" Contains functions for preprocessing audio signals. """
import numpy as np
import pandas as pd
def down_mix(x):
""" Performs down mixing on the audio signal. Reduces
multi-channel audio signals into one channel. It
reduces this by taking the mean across all channels
into one.
:param x: the audio signal of shape N x C, where N
is the number of samples, and C is the number of
channels
:return: an audio signal of shape N x 1, where N
is the number of samples. """
return np.mean(x, axis=1)
def down_sample(x, sample_rate, k=2):
""" Performs down sampling on the audio signal. It takes
ever kth sample of the signal and returns the resulting
audio signal and the resulting sample rate.
:param x: the audio signal of shape N x C, where N
is the number of samples, and C is the number of
channels
:param k: the number of every k samples to return
:return: a tuple of sample rate and the audio signal
down-sampled to include ever kth sample. """
if len(x.shape[0]) < 2:
return sample_rate / k, x[::k]
return sample_rate / k, x[:, ::k]
def normalize(x):
""" Normalizes the amplitude of the audio signal. It
results in dividing the audio signal by the absolute
value of the maximum of the audio signal
:param x: the audio signal of shape N x C, where N
is the number of samples, and C is the number of
channels
:return: a normalized audio signal of shape N x C, where
N is the number of samples, and C is the number of
channels """
if len(x.shape[0]) < 2:
return x.astype(float) / np.max(np.abs(x))
return x.astype(float) / np.max(np.abs(x), axis=0)
|
Add some functions for down sampling and normalization
|
Add some functions for down sampling and normalization
|
Python
|
mit
|
mcraig2/synesthesia
|
|
836e946e5c6bfb6b097622193a4239c7eba1ca9a
|
thinglang/parser/blocks/handle_block.py
|
thinglang/parser/blocks/handle_block.py
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodeJump, OpcodePopLocal
from thinglang.lexer.blocks.exceptions import LexicalHandle
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class HandleBlock(BaseNode):
"""
An exception handling block
"""
def __init__(self, exception_type: Identifier, exception_name: Identifier=None):
super(HandleBlock, self).__init__([exception_type, exception_name])
self.exception_type, self.exception_name = exception_type, exception_name
def compile(self, context: CompilationBuffer):
assert self.parent is None, 'Handle blocks may not be part of the AST after finalization'
buffer = context.optional()
if self.exception_name is not None:
buffer.append(OpcodePopLocal.from_reference(context.resolve(self.exception_name)), self.source_ref)
super(HandleBlock, self).compile(buffer)
buffer.append(OpcodeJump(context.next_index, absolute=True), self.source_ref)
return context.epilogue(buffer)
@staticmethod
@ParserRule.mark
def parse_handle_block_with_value(_: LexicalHandle, exception_type: Identifier, exception_name: Identifier):
return HandleBlock(exception_type, exception_name)
@staticmethod
@ParserRule.mark
def parse_handle_block(_: LexicalHandle, exception_type: Identifier):
return HandleBlock(exception_type)
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodeJump, OpcodePopLocal, OpcodePop
from thinglang.lexer.blocks.exceptions import LexicalHandle
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class HandleBlock(BaseNode):
"""
An exception handling block
"""
def __init__(self, exception_type: Identifier, exception_name: Identifier=None):
super(HandleBlock, self).__init__([exception_type, exception_name])
self.exception_type, self.exception_name = exception_type, exception_name
def compile(self, context: CompilationBuffer):
assert self.parent is None, 'Handle blocks may not be part of the AST after finalization'
buffer = context.optional()
if self.exception_name is not None:
buffer.append(OpcodePopLocal.from_reference(context.resolve(self.exception_name)), self.source_ref)
else:
buffer.append(OpcodePop(), self.source_ref)
super(HandleBlock, self).compile(buffer)
buffer.append(OpcodeJump(context.next_index, absolute=True), self.source_ref)
return context.epilogue(buffer)
@staticmethod
@ParserRule.mark
def parse_handle_block_with_value(_: LexicalHandle, exception_type: Identifier, exception_name: Identifier):
return HandleBlock(exception_type, exception_name)
@staticmethod
@ParserRule.mark
def parse_handle_block(_: LexicalHandle, exception_type: Identifier):
return HandleBlock(exception_type)
|
Add missing void pop in uncaptured exception blocks
|
Add missing void pop in uncaptured exception blocks
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
44110a305b5a23609c5f6366da9d746244807dbb
|
power/__init__.py
|
power/__init__.py
|
# coding=utf-8
"""
Provides crossplatform checking of current power source, battery warning level and battery time remaining estimate.
Allows you to add observer for power notifications if platform supports it.
Usage:
from power import PowerManagement, PowerManagementObserver # Automatically imports platform-specific implementation
class Observer(PowerManagementObserver):
def on_power_sources_change(self, power_management):
print("Power sources did change.")
def on_time_remaining_change(self, power_management):
print("Time remaining did change.")
# class Observer(object):
# ...
# PowerManagementObserver.register(Observer)
"""
from sys import platform
from power.common import *
from power.version import VERSION
__version__ = VERSION
try:
if platform.startswith('darwin'):
from power.darwin import PowerManagement
elif platform.startswith('freebsd'):
from power.freebsd import PowerManagement
elif platform.startswith('win32'):
from power.win32 import PowerManagement
elif platform.startswith('linux'):
from power.linux import PowerManagement
else:
raise RuntimeError("{platform} is not supported.".format(platform=platform))
except RuntimeError as e:
import warnings
warnings.warn("Unable to load PowerManagement for {platform}. No-op PowerManagement class is used: {error}".format(error=str(e), platform=platform))
from power.common import PowerManagementNoop as PowerManagement
|
# coding=utf-8
"""
Provides crossplatform checking of current power source, battery warning level and battery time remaining estimate.
Allows you to add observer for power notifications if platform supports it.
Usage:
from power import PowerManagement, PowerManagementObserver # Automatically imports platform-specific implementation
class Observer(PowerManagementObserver):
def on_power_sources_change(self, power_management):
print("Power sources did change.")
def on_time_remaining_change(self, power_management):
print("Time remaining did change.")
# class Observer(object):
# ...
# PowerManagementObserver.register(Observer)
"""
from sys import platform
from power.common import *
from power.version import VERSION
__version__ = VERSION
try:
if platform.startswith('darwin'):
from power.darwin import PowerManagement
elif platform.startswith('freebsd'):
from power.freebsd import PowerManagement
elif platform.startswith('win32'):
from power.win32 import PowerManagement
elif platform.startswith('linux'):
from power.linux import PowerManagement
else:
raise RuntimeError("{platform} is not supported.".format(platform=platform))
except (RuntimeError, ImportError) as e:
import warnings
warnings.warn("Unable to load PowerManagement for {platform}. No-op PowerManagement class is used: {error}".format(error=str(e), platform=platform))
from power.common import PowerManagementNoop as PowerManagement
|
Use PowerManagementNoop on import errors
|
Use PowerManagementNoop on import errors
Platform implementation can fail to import its dependencies.
|
Python
|
mit
|
Kentzo/Power
|
0f7ba6290696e1ce75e42327fdfc4f9eae8614c3
|
pdfdocument/utils.py
|
pdfdocument/utils.py
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response), response
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, **kwargs):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response, **kwargs), response
|
Allow passing initialization kwargs to PDFDocument through pdf_response
|
Allow passing initialization kwargs to PDFDocument through pdf_response
|
Python
|
bsd-3-clause
|
matthiask/pdfdocument,dongguangming/pdfdocument
|
edfd2edc5496cb412477b7409f43aa53acf7dea9
|
tests/test_loadproblem.py
|
tests/test_loadproblem.py
|
# -*- coding: utf-8 -*-
import unittest
import os
from mathdeck import loadproblem
class TestMathdeckLoadProblem(unittest.TestCase):
def test_loadproblem_has_answers_attribute(self):
file_name = 'has_answers_attribute.py'
problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem')
problem = loadproblem.load_file_as_module(problem_dir,file_name)
self.assertTrue(hasattr(problem,'answers'))
def test_loadproblem_has_no_answers_attribute(self):
file_name = 'has_no_answers_attribute.py'
problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem')
self.assertRaises(Exception, loadproblem. \
load_file_as_module(problem_dir,file_name))
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
import os
from mathdeck import loadproblem
class TestMathdeckLoadProblem(unittest.TestCase):
def test_loadproblem_has_answers_attribute(self):
file_name = 'has_answers_attribute.py'
file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem', file_name)
problem = loadproblem.load_file_as_module(file)
self.assertTrue(hasattr(problem,'answers'))
def test_loadproblem_has_no_answers_attribute(self):
file_name = 'has_no_answers_attribute.py'
file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem', file_name)
self.assertRaises(Exception, loadproblem.load_file_as_module(file))
if __name__ == '__main__':
unittest.main()
|
Fix parameter values for load function
|
Fix parameter values for load function
|
Python
|
apache-2.0
|
patrickspencer/mathdeck,patrickspencer/mathdeck
|
503f92796b9368a78f39c41fb6bb596f32728b8d
|
herana/views.py
|
herana/views.py
|
import json
from django.shortcuts import render
from django.views.generic import View
from models import Institute, ProjectDetail
from forms import SelectInstituteForm, SelectOrgLevelForm
def home(request):
return render(request, 'index.html')
class ResultsView(View):
template_name = 'results.html'
def get(self, request, *args, **kwargs):
projects = ProjectDetail.objects.filter(
record_status=2,
is_rejected=False,
is_deleted=False)
institutes = {proj.institute for proj in projects}
data = {}
data['projects'] = [p.as_dict() for p in projects]
data['institutes'] = [i.as_dict() for i in institutes]
if request.user.is_proj_leader or request.user.is_institute_admin:
data['user_institute'] = request.user.get_user_institute().as_dict()
context = {
"data": json.dumps(data),
}
return render(
request,
self.template_name,
context=context)
|
import json
from django.shortcuts import render
from django.views.generic import View
from models import Institute, ProjectDetail
from forms import SelectInstituteForm, SelectOrgLevelForm
def home(request):
return render(request, 'index.html')
class ResultsView(View):
template_name = 'results.html'
def get(self, request, *args, **kwargs):
projects = ProjectDetail.objects.filter(
record_status=2,
is_rejected=False,
is_deleted=False)
institutes = {proj.institute for proj in projects}
data = {}
data['projects'] = [p.as_dict() for p in projects]
data['institutes'] = [i.as_dict() for i in institutes]
if request.user.is_authenticated():
if request.user.is_proj_leader or request.user.is_institute_admin:
data['user_institute'] = request.user.get_user_institute().as_dict()
context = {
"data": json.dumps(data),
}
return render(
request,
self.template_name,
context=context)
|
Check if user in logged in
|
Check if user in logged in
|
Python
|
mit
|
Code4SA/herana,Code4SA/herana,Code4SA/herana,Code4SA/herana
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.